Commit 6b4fb32132aea88d6502a06aba1292e96e49d1f3

Authored by root
0 parents

add scripts

Showing 63 changed files with 4811 additions and 0 deletions

Too many changes to show.

To preserve performance, only 63 of 489 files are displayed.

  1 +<#
  2 +.Synopsis
  3 +Activate a Python virtual environment for the current PowerShell session.
  4 +
  5 +.Description
  6 +Pushes the python executable for a virtual environment to the front of the
  7 +$Env:PATH environment variable and sets the prompt to signify that you are
  8 +in a Python virtual environment. Makes use of the command line switches as
  9 +well as the `pyvenv.cfg` file values present in the virtual environment.
  10 +
  11 +.Parameter VenvDir
  12 +Path to the directory that contains the virtual environment to activate. The
  13 +default value for this is the parent of the directory that the Activate.ps1
  14 +script is located within.
  15 +
  16 +.Parameter Prompt
  17 +The prompt prefix to display when this virtual environment is activated. By
  18 +default, this prompt is the name of the virtual environment folder (VenvDir)
surrounded by parentheses and followed by a single space (i.e. '(.venv) ').
  20 +
  21 +.Example
  22 +Activate.ps1
  23 +Activates the Python virtual environment that contains the Activate.ps1 script.
  24 +
  25 +.Example
  26 +Activate.ps1 -Verbose
  27 +Activates the Python virtual environment that contains the Activate.ps1 script,
  28 +and shows extra information about the activation as it executes.
  29 +
  30 +.Example
  31 +Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
  32 +Activates the Python virtual environment located in the specified location.
  33 +
  34 +.Example
  35 +Activate.ps1 -Prompt "MyPython"
  36 +Activates the Python virtual environment that contains the Activate.ps1 script,
  37 +and prefixes the current prompt with the specified string (surrounded in
  38 +parentheses) while the virtual environment is active.
  39 +
  40 +.Notes
  41 +On Windows, it may be required to enable this Activate.ps1 script by setting the
  42 +execution policy for the user. You can do this by issuing the following PowerShell
  43 +command:
  44 +
  45 +PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
  46 +
  47 +For more information on Execution Policies:
  48 +https://go.microsoft.com/fwlink/?LinkID=135170
  49 +
  50 +#>
# Script parameters. Both are optional; defaults are derived at runtime
# (VenvDir from this script's parent directory, Prompt from pyvenv.cfg or
# the VenvDir folder name) by the activation code below.
Param(
    [Parameter(Mandatory = $false)]
    [String]
    # Path of the virtual environment to activate.
    $VenvDir,
    [Parameter(Mandatory = $false)]
    [String]
    # Prompt prefix shown while the environment is active.
    $Prompt
)
  59 +
  60 +<# Function declarations --------------------------------------------------- #>
  61 +
  62 +<#
  63 +.Synopsis
  64 +Remove all shell session elements added by the Activate script, including the
  65 +addition of the virtual environment's Python executable from the beginning of
  66 +the PATH variable.
  67 +
  68 +.Parameter NonDestructive
  69 +If present, do not remove this function from the global namespace for the
  70 +session.
  71 +
  72 +#>
function global:deactivate ([switch]$NonDestructive) {
    # Revert to original values

    # The prior prompt: restore the saved prompt function, then drop the backup.
    if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
        Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
        Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
    }

    # The prior PYTHONHOME:
    if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
        Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
        Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
    }

    # The prior PATH:
    if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
        Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
        Remove-Item -Path Env:_OLD_VIRTUAL_PATH
    }

    # Just remove the VIRTUAL_ENV altogether:
    if (Test-Path -Path Env:VIRTUAL_ENV) {
        Remove-Item -Path env:VIRTUAL_ENV
    }

    # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
    # (-Force is required because the activation code creates it ReadOnly)
    if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
        Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
    }

    # Leave deactivate function in the global namespace if requested:
    if (-not $NonDestructive) {
        Remove-Item -Path function:deactivate
    }
}
  109 +
  110 +<#
  111 +.Description
  112 +Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
  113 +given folder, and returns them in a map.
  114 +
  115 +For each line in the pyvenv.cfg file, if that line can be parsed into exactly
  116 +two strings separated by `=` (with any amount of whitespace surrounding the =)
  117 +then it is considered a `key = value` line. The left hand string is the key,
  118 +the right hand is the value.
  119 +
  120 +If the value starts with a `'` or a `"` then the first and last character is
  121 +stripped from the value before being captured.
  122 +
  123 +.Parameter ConfigDir
  124 +Path to the directory that contains the `pyvenv.cfg` file.
  125 +#>
function Get-PyVenvConfig(
    [String]
    $ConfigDir
) {
    Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"

    # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
    $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue

    # An empty map will be returned if no config file is found.
    $pyvenvConfig = @{ }

    if ($pyvenvConfigPath) {

        Write-Verbose "File exists, parse `key = value` lines"
        $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath

        $pyvenvConfigContent | ForEach-Object {
            # Split on the FIRST '=' only (limit 2), trimming surrounding whitespace.
            $keyval = $PSItem -split "\s*=\s*", 2
            if ($keyval[0] -and $keyval[1]) {
                $val = $keyval[1]

                # Remove extraneous quotations around a string value.
                # NOTE(review): only the first character is checked, yet both the
                # first AND last characters are stripped -- assumes the quotes are
                # balanced; an unbalanced leading quote would lose a real character.
                if ("'""".Contains($val.Substring(0, 1))) {
                    $val = $val.Substring(1, $val.Length - 2)
                }

                $pyvenvConfig[$keyval[0]] = $val
                Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
            }
        }
    }
    return $pyvenvConfig
}
  160 +
  161 +
<# Begin Activate script --------------------------------------------------- #>

# Determine the containing directory of this script
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
$VenvExecDir = Get-Item -Path $VenvExecPath

Write-Verbose "Activation script is located in path: '$VenvExecPath'"
# BUGFIX: the next two verbose messages were missing their closing quote.
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)'"
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)'"

# Set values required in priority: CmdLine, ConfigFile, Default
# First, get the location of the virtual environment, it might not be
# VenvExecDir if specified on the command line.
if ($VenvDir) {
    Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
}
else {
    Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
    $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
    Write-Verbose "VenvDir=$VenvDir"
}

# Next, read the `pyvenv.cfg` file to determine any required value such
# as `prompt`.
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir

# Next, set the prompt from the command line, or the config file, or
# just use the name of the virtual environment folder.
if ($Prompt) {
    Write-Verbose "Prompt specified as argument, using '$Prompt'"
}
else {
    Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
    if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
        Write-Verbose "  Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
        $Prompt = $pyvenvCfg['prompt'];
    }
    else {
        # Typo fixed ("virutal" -> "virtual") and $venvDir casing normalized.
        Write-Verbose "  Setting prompt based on parent's directory's name. (This is the directory name passed to the venv module when creating the virtual environment)"
        Write-Verbose "  Got leaf-name of $VenvDir='$(Split-Path -Path $VenvDir -Leaf)'"
        $Prompt = Split-Path -Path $VenvDir -Leaf
    }
}

Write-Verbose "Prompt = '$Prompt'"
Write-Verbose "VenvDir='$VenvDir'"

# Deactivate any currently active virtual environment, but leave the
# deactivate function in place.
deactivate -nondestructive

# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
# that there is an activated venv.
$env:VIRTUAL_ENV = $VenvDir

if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {

    Write-Verbose "Setting prompt to '$Prompt'"

    # Set the prompt to include the env name
    # Make sure _OLD_VIRTUAL_PROMPT is global
    function global:_OLD_VIRTUAL_PROMPT { "" }
    Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
    New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt

    function global:prompt {
        Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
        _OLD_VIRTUAL_PROMPT
    }
}

# Clear PYTHONHOME
if (Test-Path -Path Env:PYTHONHOME) {
    Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
    Remove-Item -Path Env:PYTHONHOME
}

# Add the venv to the PATH
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
... ...
  1 +# This file must be used with "source bin/activate" *from bash*
  2 +# you cannot run it directly
  3 +
# Undo everything the activation script changed in the current shell.
# Usage: deactivate [nondestructive]
#   nondestructive - restore the environment but keep this function
#                    defined (used by activate itself to reset a
#                    previously active venv).
deactivate () {
    # reset old environment variables
    if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
        PATH="${_OLD_VIRTUAL_PATH:-}"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
        PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands. Without forgetting
    # past commands the $PATH changes we made may not be respected.
    # FIX: POSIX marks the `-o` binary primary of `test` obsolescent and
    # ambiguous; use two tests joined with || instead.
    if [ -n "${BASH:-}" ] || [ -n "${ZSH_VERSION:-}" ] ; then
        hash -r
    fi

    if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
        PS1="${_OLD_VIRTUAL_PS1:-}"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1:-}" = "nondestructive" ] ; then
        # Self destruct!
        unset -f deactivate
    fi
}
  36 +
# unset irrelevant variables
deactivate nondestructive

# Absolute path of this venv, hard-coded by the venv generator at creation
# time; the environment cannot be relocated without editing this value.
VIRTUAL_ENV="/home/ubuntu/dbocl_pip"
export VIRTUAL_ENV

# Save the caller's PATH, then put the venv's bin directory first.
_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
# could use `if (set -u; : $PYTHONHOME) ;` in bash
if [ -n "${PYTHONHOME:-}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1:-}"
    # NOTE(review): the generator baked the prompt literal in here, so this
    # first test is always true and the basename branches below are dead code.
    if [ "x(dbocl_pip) " != x ] ; then
        PS1="(dbocl_pip) ${PS1:-}"
    else
        if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then
            # special case for Aspen magic directories
            # see https://aspen.io/
            PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1"
        else
            PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1"
        fi
    fi
    export PS1
fi

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
    hash -r
fi
... ...
# This file must be used with "source bin/activate.csh" *from csh*.
# You cannot run it directly.
# Created by Davide Di Blasi <davidedb@gmail.com>.
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>

alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate'

# Unset irrelevant variables.
deactivate nondestructive

setenv VIRTUAL_ENV "/home/ubuntu/dbocl_pip"

set _OLD_VIRTUAL_PATH="$PATH"
setenv PATH "$VIRTUAL_ENV/bin:$PATH"


set _OLD_VIRTUAL_PROMPT="$prompt"

if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
    if ("dbocl_pip" != "") then
        set env_name = "dbocl_pip"
    else
        # BUGFIX: this was `basename "VIRTUAL_ENV"` (literal string, missing
        # the `$`), so the Aspen "__" special case could never match.
        if (`basename "$VIRTUAL_ENV"` == "__") then
            # special case for Aspen magic directories
            # see https://aspen.io/
            set env_name = `basename \`dirname "$VIRTUAL_ENV"\``
        else
            set env_name = `basename "$VIRTUAL_ENV"`
        endif
    endif
    set prompt = "[$env_name] $prompt"
    unset env_name
endif

alias pydoc python -m pydoc

rehash
... ...
  1 +# This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org)
  2 +# you cannot run it directly
  3 +
function deactivate -d "Exit virtualenv and return to normal shell environment"
    # reset old environment variables
    if test -n "$_OLD_VIRTUAL_PATH"
        set -gx PATH $_OLD_VIRTUAL_PATH
        set -e _OLD_VIRTUAL_PATH
    end
    if test -n "$_OLD_VIRTUAL_PYTHONHOME"
        set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
        set -e _OLD_VIRTUAL_PYTHONHOME
    end

    # Restore the original prompt: delete our override, copy the saved
    # _old_fish_prompt back to fish_prompt, then remove the saved copy.
    if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
        functions -e fish_prompt
        set -e _OLD_FISH_PROMPT_OVERRIDE
        functions -c _old_fish_prompt fish_prompt
        functions -e _old_fish_prompt
    end

    set -e VIRTUAL_ENV
    # With "nondestructive" keep this function defined (activate uses it
    # to clean up a previously active venv before activating this one).
    if test "$argv[1]" != "nondestructive"
        # Self destruct!
        functions -e deactivate
    end
end
  28 +
# unset irrelevant variables
deactivate nondestructive

# Absolute venv path, hard-coded by the venv generator at creation time.
set -gx VIRTUAL_ENV "/home/ubuntu/dbocl_pip"

# Save the caller's PATH, then put the venv's bin directory first.
set -gx _OLD_VIRTUAL_PATH $PATH
set -gx PATH "$VIRTUAL_ENV/bin" $PATH

# unset PYTHONHOME if set
if set -q PYTHONHOME
    set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
    set -e PYTHONHOME
end

if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
    # fish uses a function instead of an env var to generate the prompt.

    # save the current fish_prompt function as the function _old_fish_prompt
    functions -c fish_prompt _old_fish_prompt

    # with the original prompt function renamed, we can override with our own.
    function fish_prompt
        # Save the return status of the last command
        set -l old_status $status

        # Prompt override?
        # NOTE(review): the generator baked the prompt literal in here, so
        # this test is always true and the else branch below is dead code.
        if test -n "(dbocl_pip) "
            printf "%s%s" "(dbocl_pip) " (set_color normal)
        else
            # ...Otherwise, prepend env
            set -l _checkbase (basename "$VIRTUAL_ENV")
            if test $_checkbase = "__"
                # special case for Aspen magic directories
                # see https://aspen.io/
                printf "%s[%s]%s " (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal)
            else
                printf "%s(%s)%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal)
            end
        end

        # Restore the return status of the previous command.
        echo "exit $old_status" | .
        _old_fish_prompt
    end

    set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
end
... ...
#!/home/ubuntu/dbocl_pip/bin/python3.8
# -*- coding: utf-8 -*-
# Console-script wrapper generated at venv creation time.
# Strips a trailing "-script.pyw" or ".exe" from argv[0] (Windows launcher
# naming convention) before delegating to the entry point's main().
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
... ...
#!/home/ubuntu/dbocl_pip/bin/python3.8
# -*- coding: utf-8 -*-
# Console-script wrapper generated at venv creation time (versioned
# easy_install alias). Strips a trailing "-script.pyw"/".exe" from argv[0]
# before delegating to the entry point's main().
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
... ...
#!/home/ubuntu/dbocl_pip/bin/python3.8
# -*- coding: utf-8 -*-
# Console-script wrapper generated at venv creation time for the pip CLI.
# Strips a trailing "-script.pyw"/".exe" from argv[0] before delegating
# to pip's internal main().
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
... ...
#!/home/ubuntu/dbocl_pip/bin/python3.8
# -*- coding: utf-8 -*-
# Console-script wrapper generated at venv creation time (pip3 alias).
# Strips a trailing "-script.pyw"/".exe" from argv[0] before delegating
# to pip's internal main().
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
... ...
#!/home/ubuntu/dbocl_pip/bin/python3.8
# -*- coding: utf-8 -*-
# Console-script wrapper generated at venv creation time (pip3.8 alias).
# Strips a trailing "-script.pyw"/".exe" from argv[0] before delegating
# to pip's internal main().
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
... ...
  1 +python3.8
\ No newline at end of file
... ...
  1 +python3.8
\ No newline at end of file
... ...
  1 +/usr/bin/python3.8
\ No newline at end of file
... ...
  1 +Copyright 2021 Vincent Nambatac
  2 +
  3 +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
  4 +
  5 +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
  6 +
  7 +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
... ...
  1 +from dbocl_pkg import cdep
\ No newline at end of file
... ...
  1 +#!/usr/bin/env python
  2 +#
  3 +# AWS cloud deployment tool for autoscaled applications.
  4 +# Usage:
  5 +# $ ./cdep.py -h
  6 +#
  7 +# Requires:
  8 +# boto3 >= 1.6.6
  9 +# interruptingcow = 0.8
  10 +# pytz
  11 +#
  12 +
  13 +
  14 +
  15 +import boto3
  16 +import socket
  17 +import time
  18 +import subprocess
  19 +import re
  20 +import os
  21 +import json
  22 +import logging
  23 +import sys
  24 +import glob
  25 +import argparse
  26 +import logging as log
  27 +import pytz
  28 +import itertools
  29 +
  30 +from shutil import copy
  31 +from interruptingcow import timeout
  32 +from datetime import datetime
  33 +from botocore.exceptions import ClientError
  34 +
# Root logger for the whole tool; INFO by default.
logger = logging.getLogger()
logger.setLevel(logging.INFO)

# define boto3 clients
# NOTE(review): built at import time against the default session/region,
# while AWSDefault below builds its own session -- confirm this
# module-level client is actually still used anywhere.
asg_client = boto3.client('autoscaling')


CDEP_VERSION = '1.2.2'

# Scratch directory for deploy mark files and other temporaries.
TMP_DIR = '/tmp'

DEFAULT_REGION = 'us-west-2'
DEFAULT_VER = '$Default'  # presumably the EC2 launch-template '$Default' version alias -- confirm
DEFAULT_ASG_WAIT = 300  # seconds to wait on auto-scaling-group operations

STATE_PENDING = 0

# Lower bounds enforced on auto scaling group min/max sizes.
ASG_MIN_MINSIZE = 2
ASG_MIN_MAXSIZE = 3

# Placeholder keywords substituted into the ~/.ssh/config.template file
# by SSHInfo.generate_ssh_config().
SSH_KEYWORD_HOST = 'TARGET_IP'
SSH_KEYWORD_PORT = 'TARGET_PORT'
SSH_KEYWORD_COMMENT = 'TARGET_COMMENT'
  58 +
  59 +
class Cleanable(object):
    """Interface for objects that leave artifacts behind at script end.

    Subclasses override the two hooks below so that a Cleaner can collect
    file paths to delete and functions to run during final cleanup.
    """

    def _get_removable_files(self):
        """Return a file pathname (str) or a list of file pathnames."""
        return []

    def _get_cleanup_funs(self):
        """Return a cleanup spec dict, or a list of such dicts.

        Each spec has the shape::

            {
                'fun': <function>,
                'args': <list>,
                'kwargs': <dict>,
            }
        """
        return []
  83 +
  84 +
class Cleaner(object):
    """
    Just a class that gathers filename paths and deletes them.
    And also executes functions, supposedly as part of cleanup.

    Cleanable objects register their artifacts via mark(); cleanup()
    then removes every collected file (best-effort) and invokes every
    collected function spec.
    """
    def __init__(self):
        # File pathnames scheduled for deletion.
        self.files = []
        # Function specs: {'fun': callable, 'args': list, 'kwargs': dict}.
        self.funs = []


    def mark(self, cleanable):
        """
        Tag all deletable files from a Cleanable object.
        Also get the cleanup functions to be executed later.

        Raises RuntimeError if the argument is not a Cleanable instance.
        """
        if not isinstance(cleanable, Cleanable):
            log.error(
                'Cleaner.mark(cleanable): Argument is '
                'not an instance of Cleanable class.'
            )
            raise RuntimeError('Arg not an instance of Cleanable')

        v = cleanable._get_removable_files()
        if isinstance(v, list):
            self.files += v
        elif isinstance(v, str):
            self.files.append(v)

        v = cleanable._get_cleanup_funs()
        if isinstance(v, list):
            self.funs += v
        elif isinstance(v, dict):
            self.funs.append(v)


    def cleanup(self):
        """
        Delete collected files (missing files are ignored) and run every
        collected cleanup function.
        """
        for f in self.files:
            log.debug('(cleaner) Removing file %s', f)
            try:
                os.remove(f)
            except OSError:
                # Best-effort: a missing/undeletable file is not fatal here.
                pass

        for spec in self.funs:
            # ROBUSTNESS FIX: 'args'/'kwargs' are optional now; previously a
            # spec without them raised KeyError mid-cleanup.
            spec['fun'](*spec.get('args', ()), **spec.get('kwargs', {}))
  128 +
  129 +
class AppDeployer(Cleanable):
    """
    Enables custom deploy methods on nodes.

    When deploy() is given an ident, the deploy is idempotent: a mark file
    named '<mark_file_path>_<ident>' is written after a successful
    _do_deploy(), and later calls with the same ident are skipped.  Mark
    files are handed to the Cleaner via _get_removable_files().
    """
    def __init__(self, **kwargs):
        # Ident of the most recent deploy() call; None until deploy() runs.
        self.ident = None
        # Working directory handed to concrete deployers.
        self.app_project_dir = kwargs.get('app_project_dir', '.')
        # Base path of the idempotency mark file (ident is suffixed on use).
        self.mark_file_path = kwargs.get(
            'mark_file_path', TMP_DIR + '/cd-app-deployed'
        )
        log.debug(
            '[%s] app_project_dir: %s',
            self.__class__.__name__,
            self.app_project_dir
        )


    def deploy(self, ident=None):
        """
        Run the deploy.  With an ident, skip when the matching mark file
        exists, and write the mark file after a successful _do_deploy().
        Without an ident, always run _do_deploy().
        """
        self.ident = ident

        if self.ident:
            log.debug(
                '%s idempotent deploy with id: %s',
                self.__class__.__name__, self.ident
            )
            mark_file = self.mark_file_path + '_' + self.ident

            if os.path.isfile(mark_file):
                log.debug('Deploy mark file found: %s', mark_file)
                log.info(
                    'Skipping deploy. Existing deploy mark found for %s',
                    self.ident
                )
            else:
                log.debug('%s deploying now', self.__class__.__name__)
                if self._do_deploy():
                    # Record success so a re-run with the same ident is a no-op.
                    log.debug('Writing deploy mark file to: %s', mark_file)
                    open(mark_file, 'w').close()
        else:
            log.debug('%s non-idempotent deploy', self.__class__.__name__)
            self._do_deploy()


    def _do_deploy(self):
        """
        Please override this implementation in the subclass.
        Return True if successful.
        """
        return True


    def _get_removable_files(self):
        # Only the mark file for the most recent deploy is removable.
        if self.ident:
            return self.mark_file_path + '_' + self.ident
        return []
  186 +
class TestDeployer(AppDeployer):
    """
    A test deployer that does nothing
    beyond SSH-ing to the target host and running `uname -a` as a
    connectivity smoke test.
    """
    def __init__(self, **kwargs):
        # NOTE(review): self.ssh is stored but never used in this class --
        # presumably kept for interface parity with CapDeployer; confirm.
        self.ssh = kwargs.get('ssh')
        self.target_hostname = kwargs.get('target_hostname', 'test_target')
        super(TestDeployer, self).__init__(**kwargs)


    def _do_deploy(self):
        """Raise RuntimeError if `ssh <target> uname -a` fails; else True."""
        if subprocess.call(['ssh', self.target_hostname, 'uname -a']) != 0:
            raise RuntimeError('Unable to uname -a')
        return True
  201 +
  202 +
class CapDeployer(AppDeployer):
    """
    Customize deploy as Capistrano for Ruby-on-Rails.

    Runs `bundle exec cap <env> linked_files:upload_files` followed by
    `bundle exec cap <env> deploy`, inside a fresh ssh-agent that has the
    deploy key (from the SSHInfo passed as 'ssh') loaded.
    """
    def __init__(self, **kwargs):
        # SSHInfo instance supplying the deploy key path.
        self.ssh = kwargs.get('ssh')
        # Capistrano stage to deploy (e.g. 'production').
        self.cap_env = kwargs.get('cap_env', 'production')
        log.debug('[%s] env: %s', self.__class__.__name__, self.cap_env)

        super(CapDeployer, self).__init__(**kwargs)


    def _do_deploy(self):
        """
        Run the two-step Capistrano deploy.  Raises RuntimeError on any
        non-zero exit status; returns True on success.
        """
        with SSHAgentEnv([self.ssh.id_rsa_file_path]) as senv:
            com = ['bundle', 'exec', 'cap', self.cap_env]

            com_1 = com + ['linked_files:upload_files']
            log.debug('Working dir: %s', self.app_project_dir)
            log.debug('Executing command: %s', ' '.join(com_1))
            if subprocess.call(com_1, cwd=self.app_project_dir, env=senv) != 0:
                log.error(
                    '%s non-zero return value on: %s',
                    self.__class__.__name__, ' '.join(com_1)
                )
                raise RuntimeError('Unable to upload linked_files')

            com_2 = com + ['deploy']
            log.debug('Working dir: %s', self.app_project_dir)
            log.debug('Executing command: %s', ' '.join(com_2))
            if subprocess.call(com_2, cwd=self.app_project_dir, env=senv) != 0:
                log.error(
                    '%s non-zero return value on: %s',
                    self.__class__.__name__, ' '.join(com_2)
                )
                raise RuntimeError('Unable to deploy')
        return True
  239 +
  240 +
class SSHAgentEnv(object):
    """
    Takes care of deleting and renewing the SSH agent process.
    Use this when doing some SSH-agent-dependent RPC on a remote IP.

    Context manager: __enter__ returns an environment dict (a copy of
    os.environ plus the new agent's SSH_* variables) with the given keys
    ssh-add'ed; __exit__ kills the agent it started.

    NOTE(review): __enter__ runs `pkill ssh-agent`, which kills ALL of the
    user's agents, not only ones this script started -- confirm intended.
    """
    def __init__(self, keys):
        # Private key file paths to load into the new agent.
        self.keys = keys
        # Environment dict handed to callers; set in __enter__.
        self.environ = None


    def __enter__(self):
        self.environ = os.environ.copy()
        subprocess.call(['pkill', 'ssh-agent'])

        try:
            stro = subprocess.check_output(['ssh-agent', '-s'])
        except subprocess.CalledProcessError as e:
            log.exception('Error occurred while doing command: ssh-agent -s')
            raise RuntimeError(
                'ssh-agent -s error: {0}: {1}'.format(e.returncode, e.output)
            )

        # Parse `VAR=value;` pairs (SSH_AUTH_SOCK, SSH_AGENT_PID) from the
        # agent's sh-style output into the environment copy.
        # NOTE(review): under Python 3 check_output returns bytes, and
        # re.findall with a str pattern on bytes raises TypeError --
        # confirm this script is run under Python 2 or decode first.
        for name, val in re.findall(r'([A-Z_]+)=([^;]+);', stro):
            self.environ[name] = val
        for key in self.keys:
            subprocess.call(['ssh-add', key], env=self.environ)

        return self.environ


    def __exit__(self, type, value, traceback):
        # Kill the agent we started so agents do not accumulate.
        subprocess.call(['ssh-agent', '-k'], env=self.environ)
  273 +
  274 +
class SSHInfo(object):
    """
    Contains information about current SSH config setup.
    Also contains methods for configuring/reconfiguring user SSH client
    for use when new remote nodes are created.
    """
    def __init__(self, **kwargs):
        # Directory holding the user's SSH client files (default ~/.ssh).
        config_dir = kwargs.get('config_dir', os.path.expanduser('~/.ssh'))
        log.debug(
            '[%s] config_dir: %s',
            self.__class__.__name__, config_dir
        )

        # Some default SSH client values
        self.port = kwargs.get('port', 22)  # TCP port probed and written into config
        self.wait_timeout = kwargs.get('wait_timeout', 60)  # overall wait budget, seconds
        self.wait_interval = kwargs.get('wait_interval', 5)  # per-connect timeout, seconds
        self.wait_gap = kwargs.get('wait_gap', 3)  # sleep between attempts, seconds
        log.debug(
            '[%s] port: %i',
            self.__class__.__name__, self.port
        )
        log.debug(
            '[%s] wait_timeout: %i',
            self.__class__.__name__, self.wait_timeout
        )
        log.debug(
            '[%s] wait_interval: %i',
            self.__class__.__name__, self.wait_interval
        )
        log.debug(
            '[%s] wait_gap: %i',
            self.__class__.__name__, self.wait_gap
        )

        # Client SSH files location
        self.config_file_path = kwargs.get(
            'config_file_path', config_dir + '/config'
        )
        self.config_template_file_path = kwargs.get(
            'config_template_file_path', config_dir + '/config.template'
        )
        self.id_rsa_file_path = kwargs.get(
            'id_rsa_file_path', config_dir + '/id_rsa'
        )
        self.known_hosts_file_path = kwargs.get(
            'known_hosts_file_path', config_dir + '/known_hosts'
        )
        log.debug(
            '[%s] config_file_path: %s',
            self.__class__.__name__, self.config_file_path
        )
        log.debug(
            '[%s] config_template_file_path: %s',
            self.__class__.__name__, self.config_template_file_path
        )
        log.debug(
            '[%s] id_rsa_file_path: %s',
            self.__class__.__name__, self.id_rsa_file_path
        )
        log.debug(
            '[%s] known_hosts_file_path: %s',
            self.__class__.__name__, self.known_hosts_file_path
        )

        # Used for the SSH config template (def: ~/.ssh/config.template)
        self.host_keyword = kwargs.get('host_keyword', SSH_KEYWORD_HOST)
        self.port_keyword = kwargs.get('port_keyword', SSH_KEYWORD_PORT)
        self.comment_keyword = kwargs.get('comment_keyword', SSH_KEYWORD_COMMENT)
        log.debug(
            '[%s] host_keyword: %s',
            self.__class__.__name__, self.host_keyword
        )
        log.debug(
            '[%s] port_keyword: %s',
            self.__class__.__name__, self.port_keyword
        )
        log.debug(
            '[%s] comment_keyword: %s',
            self.__class__.__name__, self.comment_keyword
        )


    def wait_for_ssh(self, ip_address):
        """
        Wait for SSH port on IP address to be open and accessible.

        Re-raises SystemError when wait_timeout elapses without a
        successful TCP connect; each attempt is bounded by wait_interval
        and attempts are spaced wait_gap seconds apart.
        """
        try:
            with timeout(self.wait_timeout, exception=SystemError):
                while True:
                    try:
                        log.info('Trying SSH to %s', ip_address)
                        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                        with timeout(self.wait_interval, exception=RuntimeError):
                            s.connect((ip_address, self.port))
                        break;
                    except RuntimeError:
                        # Per-attempt timeout expired; retry after the gap.
                        log.debug(
                            '(Socket timeout: %i second/s)',
                            self.wait_interval
                        )
                        pass
                    except socket.error as e:
                        log.debug('(Socket error: %s)', e)
                        pass
                    finally:
                        # NOTE(review): if socket.socket() itself ever raised,
                        # 's' would be unbound here -- confirm acceptable.
                        s.close()
                    log.debug('Waiting for %i second/s', self.wait_gap)
                    time.sleep(self.wait_gap)

        except SystemError:
            log.exception('SSH timeout error: %i second/s', self.wait_timeout)
            raise


    def generate_ssh_config(self, ip_address, comment='node deploy'):
        """
        Add public RSA keys of IP address to current known hosts.
        Generate new .ssh/config file.

        The config file is produced by streaming the template through a
        sed pipeline that substitutes the comment, host and port keywords.
        """
        if os.path.exists(self.known_hosts_file_path):
            log.debug(
                'Removing %s from %s',
                ip_address, self.known_hosts_file_path
            )
            # Drop any stale key for this IP (addresses get recycled).
            subprocess.call([
                'ssh-keygen', '-R', ip_address,
                '-f', self.known_hosts_file_path
            ])
        with open(self.known_hosts_file_path, 'a') as f:
            log.debug(
                'Adding %s to %s',
                ip_address, self.known_hosts_file_path
            )
            subprocess.call(['ssh-keyscan', '-H', ip_address], stdout=f)

        log.debug(
            'Edit %s to pipe. Replace "%s" with "%s".',
            self.config_template_file_path,
            self.comment_keyword, comment
        )
        sedcomment = subprocess.Popen(
            [
                'sed',
                's/{0}/{1}/'.format(self.comment_keyword, comment),
                self.config_template_file_path
            ],
            stdout=subprocess.PIPE
        )
        log.debug(
            'Edit %s to pipe. Replace "%s" with "%s".',
            self.config_template_file_path,
            self.host_keyword, ip_address
        )
        sedhost = subprocess.Popen(
            ['sed', 's/{0}/{1}/'.format(self.host_keyword, ip_address)],
            stdin=sedcomment.stdout,
            stdout=subprocess.PIPE
        )
        log.debug(
            'Edit %s to %s. Replace "%s" with "%s".',
            self.config_template_file_path,
            self.config_file_path,
            self.port_keyword, self.port
        )
        with open(self.config_file_path, 'w') as f:
            subprocess.call(
                ['sed', 's/{0}/{1}/'.format(self.port_keyword, self.port)],
                stdin=sedhost.stdout,
                stdout=f
            )
        log.debug('Waiting on OS sed pipes')
        sedcomment.wait()
        sedhost.wait()
  449 +
  450 +
class AWSDefault(object):
    """
    Common base for the AWS-facing classes: reads shared settings from
    kwargs, opens a boto3 session, and exposes the ec2 resource plus the
    autoscaling and ec2 clients.
    """

    def __init__(self, **kwargs):
        # Shared deployment settings, with library defaults where sensible.
        self.ident = kwargs.get('Ident', '')
        self.profile_name = kwargs.get('ProfileName', None)
        self.region = kwargs.get('Region', DEFAULT_REGION)

        cls_name = self.__class__.__name__
        log.debug('[%s] profile_name: %s', cls_name, self.profile_name)
        log.debug('[%s] region: %s', cls_name, self.region)

        # One session shared by every client/resource handle below.
        self.sess = boto3.session.Session(
            profile_name=self.profile_name, region_name=self.region
        )
        self.ec2 = self.sess.resource('ec2')
        self.asg = self.sess.client('autoscaling')
        self.ecc = self.sess.client('ec2')


    def _suff_id(self, string):
        """
        Return `string` with '_<ident>' appended, or unchanged when the
        ident is an empty string.
        """
        return string + '_' + self.ident if self.ident else string
  482 +
  483 +
class AppNode(AWSDefault, Cleanable):
    """
    The application server node. Plus the AMI, snapshot,
    and Launch Templates involved.

    Each step writes a marker file under TMP_DIR so an interrupted deploy
    can be re-run idempotently (existing instance/AMI/LT-version is reused
    instead of being recreated).
    """
    def __init__(self, **kwargs):
        super(AppNode, self).__init__(**kwargs)
        log.debug('Now using %s ident: %s',
            self.__class__.__name__, self.ident
        )
        # Populated later by launch_ec2() / generate_ami().
        self.instance_id = None
        self.ami_id = None
        self.snapshot_id = None

        # Mandatory args
        # NOTE(review): not validated here; a missing 'Subnet' or
        # 'LaunchTemplateId' surfaces later as a boto3 ClientError.
        self.subnet = kwargs.get('Subnet')
        self.launch_template_id = kwargs.get('LaunchTemplateId')
        log.debug(
            '[%s] subnet: %s',
            self.__class__.__name__, self.subnet
        )
        log.debug(
            '[%s] launch_template_id: %s',
            self.__class__.__name__, self.launch_template_id
        )

        # Optional args (with defaults)
        self.launch_template_version = kwargs.get(
            'LaunchTemplateVersion', DEFAULT_VER
        )
        self.ami_name = kwargs.get('AmiName', 'code_deploy')
        self.ami_desc = kwargs.get(
            'AmiDesc', 'Generated by cloud_deploy script'
        )
        # Timeout/interval (seconds) for polling AMI availability.
        self.ami_wait_timeout = kwargs.get('AmiWaitTimeout', 300)
        self.ami_wait_interval = kwargs.get('AmiWaitInterval', 7)
        log.debug(
            '[%s] launch_template_version: %s',
            self.__class__.__name__, self.launch_template_version
        )
        log.debug(
            '[%s] ami_name: %s',
            self.__class__.__name__, self.ami_name
        )
        log.debug(
            '[%s] ami_desc: %s',
            self.__class__.__name__, self.ami_desc
        )
        log.debug(
            '[%s] ami_wait_timeout: %i',
            self.__class__.__name__, self.ami_wait_timeout
        )
        log.debug(
            '[%s] ami_wait_interval: %i',
            self.__class__.__name__, self.ami_wait_interval
        )

        # Marker-file paths (suffixed with the deploy ident) used for
        # idempotent resume of each deploy step.
        self.instance_id_file_path = kwargs.get(
            'InstanceIdFilePath',
            self._suff_id(TMP_DIR + '/cd-instance-id')
        )
        self.ami_id_file_path = kwargs.get(
            'AmiIdFilePath',
            self._suff_id(TMP_DIR + '/cd-ami-id')
        )
        self.target_lt_base_file_path = kwargs.get(
            'TargetLTBaseFilePath',
            self._suff_id(TMP_DIR + '/cd-target-lt')
        )


    def launch_ec2(self):
        """
        Launch an EC2 instance from the given Launch Template and
        return once the instance is at "running" state.
        If there's already an instance running, use that instead.

        Populates self.instance_id.
        """
        try:
            # Resume path: reuse the instance recorded by a previous run.
            with open(self.instance_id_file_path) as f:
                log.debug('EC2 id file found: %s', self.instance_id_file_path)
                log.info('Existing EC2 found')
                self.instance_id = f.read().strip()

        except IOError:
            # No marker file: launch a fresh instance and record its id.
            try:
                log.info('Launching new EC2 from template %s version %s',
                    self.launch_template_id,
                    self.launch_template_version
                )
                insts = self.ec2.create_instances(
                    MaxCount=1, MinCount=1, SubnetId=self.subnet,
                    LaunchTemplate={
                        'LaunchTemplateId': self.launch_template_id,
                        'Version': self.launch_template_version
                    }
                )
                self.instance_id = insts[0].id
                log.debug(
                    'Writing EC2 id to file: %s',
                    self.instance_id_file_path
                )
                with open(self.instance_id_file_path, 'w') as f:
                    f.write(self.instance_id)

            except ClientError:
                log.exception('Error launching EC2')
                raise

        log.info('EC2 instance id: %s. Waiting to run.', self.instance_id)
        # boto3 waiter blocks until the instance reaches "running".
        self.ec2.Instance(self.instance_id).wait_until_running()


    def generate_ami(self):
        """
        Create an AMI of self.instance_id, and tag both AMI and its snapshot.

        Populates self.ami_id and self.snapshot_id.
        """
        self._create_image()
        self._tag_ami_snaps()


    def _create_image(self):
        """
        Create an AMI from deployed node.
        return once the AMI has been fully built.
        If there's already an existing AMI, use that instead.

        Raises RuntimeError if no instance has been launched yet, or if
        the AMI does not become 'available' within self.ami_wait_timeout.
        """
        if self.instance_id is None:
            log.error('No EC2 instance_id found')
            raise RuntimeError('No EC2 instance found')

        try:
            # Resume path: reuse the AMI recorded by a previous run.
            with open(self.ami_id_file_path) as f:
                log.debug('AMI id file found: %s', self.ami_id_file_path)
                log.info('Existing AMI found')
                self.ami_id = f.read().strip()

        except IOError:
            try:
                log.info('Creating AMI from instance %s', self.instance_id)
                i = self.ec2.Instance(self.instance_id)
                image = i.create_image(
                    BlockDeviceMappings=[self._get_block_device_mapping()],
                    Description=self.ami_desc,
                    Name=self._suff_id(self.ami_name)
                )

                self.ami_id = image.id
                log.debug('Writing AMI id to file: %s', self.ami_id_file_path)
                with open(self.ami_id_file_path, 'w') as f:
                    f.write(self.ami_id)

            except ClientError:
                log.exception('Error creating AMI')
                raise

        log.info('Image id: %s. Waiting to exist.', self.ami_id)
        self.ec2.Image(self.ami_id).wait_until_exists()
        try:
            # Poll until the AMI is 'available'; `timeout` (presumably a
            # signal/ctx-manager helper imported above -- TODO confirm)
            # raises RuntimeError when ami_wait_timeout elapses.
            with timeout(self.ami_wait_timeout, exception=RuntimeError):
                while True:
                    log.info('Waiting for AMI to be available')
                    # Re-fetch the Image each iteration for a fresh state.
                    ami_state = self.ec2.Image(self.ami_id).state
                    if ami_state == 'available': break
                    log.debug('AMI %s state: %s', self.ami_id, ami_state)
                    log.debug('Waiting for %i second/s', self.ami_wait_interval)
                    time.sleep(self.ami_wait_interval)
        except RuntimeError:
            log.exception(
                'AMI completion timeout error: %i seconds/s',
                self.ami_wait_timeout
            )
            raise

        # Only the first block device mapping's snapshot is recorded;
        # additional volumes (if any) are ignored here.
        self.snapshot_id = (
            self.ec2.Image(self.ami_id)
            .block_device_mappings[0]['Ebs']['SnapshotId']
        )
        log.info('Obtained snapshot id: %s', self.snapshot_id)


    def _get_block_device_mapping(self):
        # Build a BlockDeviceMappings entry mirroring the first (root)
        # device of the deployed instance.
        i = self.ec2.Instance(self.instance_id)
        ebs = i.block_device_mappings[0]['Ebs']
        vol = self.ec2.Volume(ebs['VolumeId'])

        log.debug(
            '(BDMap) DeviceName: %s',
            i.block_device_mappings[0]['DeviceName']
        )
        log.debug(
            '(BDMap) Ebs.DeleteOnTermination: %s',
            ebs['DeleteOnTermination']
        )
        log.debug('(BDMap) Ebs.VolumeSize: %s', vol.size)
        log.debug('(BDMap) Ebs.VolumeType: %s', vol.volume_type)

        return {
            'DeviceName': i.block_device_mappings[0]['DeviceName'],
            'Ebs': {
                'DeleteOnTermination': ebs['DeleteOnTermination'],
                'VolumeSize': vol.size,
                'VolumeType': vol.volume_type
            }
        }


    def _tag_ami_snaps(self):
        """
        Tag both snapshot and AMI with Name tags.
        """
        name_val = self._suff_id(self.ami_name)
        tags = [{
            'Key': 'Name',
            'Value': name_val
        }]

        log.debug('Tagging AMI with: Name => %s', name_val)
        self.ec2.Image(self.ami_id).create_tags(Tags=tags)

        log.debug('Tagging snapshot with: Name => %s', name_val)
        self.ec2.Snapshot(self.snapshot_id).create_tags(Tags=tags)


    def create_new_lt_version(
        self, lt_id, lt_source_ver=DEFAULT_VER, lt_desc='deployed'
    ):
        """
        Create a new version for the target 'deployed' launch template
        basing off the given source version. The new version
        will now contain the new AMI of the previously deployed EC2 server.

        The new LT version will be made the $Default version.

        Idempotent per target LT: a marker file keyed on lt_id records the
        version already created, so re-runs only repeat the (safe)
        modify_launch_template call.
        """
        new_ver = None
        new_ver_file_path = self.target_lt_base_file_path + '_' + lt_id

        try:
            with open(new_ver_file_path) as f:
                log.debug('Version marker file found: %s', new_ver_file_path)
                log.info('New version already created for LT: %s', lt_id)
                new_ver = int(f.read().strip())

        except IOError:
            log.debug(
                'Inserting snapshot id %s into block device mapping',
                self.snapshot_id
            )
            # Base the new version's root device on the freshly created
            # snapshot so instances boot from the deployed image.
            bdmap = self._get_block_device_mapping()
            bdmap['Ebs']['SnapshotId'] = self.snapshot_id
            if len(self.ident) > 0:
                lt_desc += ' ' + self.ident

            try:
                log.info('Creating new version for LT: %s', lt_id)
                res = self.ecc.create_launch_template_version(
                    LaunchTemplateId=lt_id,
                    SourceVersion=lt_source_ver,
                    VersionDescription=lt_desc,
                    LaunchTemplateData={
                        'ImageId': self.ami_id,
                        'BlockDeviceMappings': [bdmap]
                    }
                )

                new_ver = res['LaunchTemplateVersion']['VersionNumber']
                log.debug(
                    'Writing new LT version to file: %s',
                    new_ver_file_path
                )
                with open(new_ver_file_path, 'w') as f:
                    f.write(str(new_ver))

            except ClientError:
                log.exception('Error creating new launch template version')
                raise

        log.info('New launch template version: %i', new_ver)

        try:
            log.info('Updating designated %s version', DEFAULT_VER)
            res = self.ecc.modify_launch_template(
                LaunchTemplateId=lt_id,
                DefaultVersion=str(new_ver)
            )

            # Sanity check: the service must report our version as default.
            if new_ver != res['LaunchTemplate']['DefaultVersionNumber']:
                raise RuntimeError('Client result mismatch')

        except ClientError:
            log.exception('Error modifying launch template')
            raise


    def get_ip_address(self, address_type='private'):
        # Return the instance's private/public IP, or None when no
        # instance was launched or address_type is unrecognized.
        if self.instance_id:
            i = self.ec2.Instance(self.instance_id)
            if address_type == 'private':
                return i.private_ip_address
            if address_type == 'public':
                return i.public_ip_address
        return None


    def _get_removable_files(self):
        # Marker files to delete on cleanup (Cleanable hook -- presumably;
        # the Cleanable base is not visible here).
        return [
            self.instance_id_file_path,
            self.ami_id_file_path,
        ] + glob.glob(self.target_lt_base_file_path + '_*')


    def _get_cleanup_funs(self):
        # Deferred cleanup actions; note the terminate callable (and the
        # instance_id it logs) are bound when this list is built.
        return [
            {
                'fun': log.info,
                'args': ['Terminating EC2 instance id: %s', self.instance_id],
                'kwargs': {},
            },
            {
                'fun': self.ec2.Instance(self.instance_id).terminate,
                'args': [],
                'kwargs': {},
            },
        ]
  811 +
  812 +
class ASGroup(AWSDefault, Cleanable):
    """
    The AWS Autoscaling Group. This class contains the
    methods to perform an automated rolling termination deploy.

    Progress is checkpointed into per-ASG marker files under TMP_DIR so an
    interrupted rolling deploy can resume without repeating completed steps.
    """
    def __init__(self, **kwargs):
        super(ASGroup, self).__init__(**kwargs)
        log.debug('Now using %s ident: %s',
            self.__class__.__name__, self.ident
        )

        # Optional args (with defaults)
        # Seconds between polls while waiting on ASG/instance state.
        self.wait_interval = kwargs.get('WaitInterval', 5)
        log.debug(
            '[%s] wait_interval: %i',
            self.__class__.__name__, self.wait_interval
        )

        # Marker-file path prefixes; '_<asg_name>' is appended per ASG.
        self.props_file_path = kwargs.get(
            'PropsFilePath',
            self._suff_id(TMP_DIR + '/cd-asg-props')
        )
        self.ins_file_path = kwargs.get(
            'InstancesFilePath',
            self._suff_id(TMP_DIR + '/cd-asg-instances')
        )
        self.mark_file_path = kwargs.get(
            'MarkFilePath',
            self._suff_id(TMP_DIR + '/cd-asg-deployed')
        )


    def get_asg(self, asg_name):
        """
        Get the details of an autoscaling group.

        Returns the single matching ASG description dict; raises
        RuntimeError unless exactly one result is found.
        """
        try:
            log.debug('Describe ASG details for %s', asg_name)
            res = self.asg.describe_auto_scaling_groups(
                AutoScalingGroupNames=[asg_name]
            )
        except ClientError:
            log.exception('Error describing ASG %s', asg_name)
            raise
        asgs = res['AutoScalingGroups']

        if len(asgs) != 1:
            log.error(
                'Number of results found for ASG %s: %i',
                asg_name, len(asgs)
            )
            raise RuntimeError('Either ASG not found or found too many')

        return asgs[0]


    def get_ips_by_asg(self, asg_name, address_type='private'):
        """
        Get the IP addresses of the instances currently under an ASG.

        address_type selects 'private' or 'public' addresses; any other
        value yields an empty list.
        """
        asg = self.get_asg(asg_name)
        ips = []

        for i in asg['Instances']:
            ins = self.ec2.Instance(i['InstanceId'])
            if address_type == 'private':
                ips.append(ins.private_ip_address)
            elif address_type == 'public':
                ips.append(ins.public_ip_address)
        return ips


    def rolling_termination_deploy(self, asg_name):
        """
        Perform a rolling termination deploy for the ASG.
        This process makes sure there is no downtime when
        it comes to "InService" instances.

        Old instances are detached one at a time (capacity not
        decremented, so the ASG launches replacements), then terminated
        in one final pass. ASG sizes temporarily raised for the deploy
        are restored at the end.

        NOTE(review): returns True only on the DesiredCapacity==0 early
        exit; all other paths return None implicitly -- confirm callers
        do not rely on the return value.
        """
        asg = self.get_asg(asg_name)

        if asg['DesiredCapacity'] == 0:
            log.info('ASG DesiredCapacity is 0. Rolling deploy not needed.')
            return True

        # Data-gathering
        minsize, maxsize, dessize, grace_prd = self._get_original_props(asg)
        defunct_ins = self._get_defunct_instances(asg)

        # In case we are resuming from a previous
        # interrupted operation
        all_ins = [i['InstanceId'] for i in asg['Instances']]
        waiting_ins = list(set(all_ins) - set(defunct_ins))
        if len(waiting_ins) > 0:
            log.info(
                'Pending instances before deployment:\n %s',
                '\n '.join(waiting_ins)
            )
            self._wait_for_instances(waiting_ins, grace_prd)

        # Adjust ASG sizes if they are too low
        if asg['MinSize'] < ASG_MIN_MINSIZE:
            if asg['MaxSize'] < ASG_MIN_MAXSIZE:
                self._increase_to_floor_size(asg_name, increase_max=True)
            else:
                self._increase_to_floor_size(asg_name)

            log.info('ASG size adjusted. Waiting for autoscaling events.')
            self._wait_for_instances(
                self._get_pending_instances(asg_name, grace_prd),
                grace_prd
            )

        # Actual rolling termination deployment.
        # But instead of termination, the instances are simply detached.
        mark_file = self.mark_file_path + '_' + asg_name
        try:
            # Resume path: mark file means this ASG was already deployed.
            with open(mark_file) as f:
                log.debug('ASG deploy mark file found: %s', mark_file)
                stamp = f.read().strip()
                log.info(
                    'Rolling deploy for ASG %s (%s) already completed on %s',
                    asg_name, self.ident, stamp
                )

        except IOError:
            for i in defunct_ins:
                is_attached = False
                is_detach_ok = False
                log.info('Removing old instance %s', i)

                try:
                    log.debug('Lookup instance %s', i)
                    res = self.asg.describe_auto_scaling_instances(
                        InstanceIds=[i]
                    )
                    if len(res['AutoScalingInstances']) == 1:
                        is_attached = True
                except ClientError as e:
                    # Best-effort: a lookup failure just skips the detach.
                    log.warning('Error on instance %s lookup: %s', i, e)

                if is_attached:
                    try:
                        log.debug('Detach instance %s', i)
                        # Capacity is NOT decremented, forcing the ASG to
                        # launch a replacement instance.
                        self.asg.detach_instances(
                            AutoScalingGroupName=asg_name,
                            InstanceIds=[i],
                            ShouldDecrementDesiredCapacity=False
                        )
                        is_detach_ok = True
                    except ClientError as e:
                        log.warning('Error detaching instance %s: %s', i, e)

                if is_detach_ok:
                    log.info('Waiting for replacement')
                    self._wait_for_instances(
                        self._get_pending_instances(asg_name, grace_prd),
                        grace_prd
                    )

            # Terminate all defunct instances at once
            for i in defunct_ins:
                try:
                    log.debug('Terminate instance %s', i)
                    self.ec2.Instance(i).terminate()
                except ClientError as e:
                    log.warning('Error terminating instance %s: %s', i, e)

            # ASG deploy idempotency based on asg's name and deploy ident
            stamp = datetime.now(pytz.utc).strftime('%Y-%b-%d %I:%M%p %Z')
            log.info('%s - ASG %s rolling deploy done', stamp, asg_name)
            log.debug('Marking ASG deploy as done: %s', mark_file)
            with open(mark_file, 'w') as f:
                f.write(stamp)

        # Get penultimate state of ASG data
        minsize_new = None
        try:
            log.debug('Describe ASG details for %s', asg_name)
            res = self.asg.describe_auto_scaling_groups(
                AutoScalingGroupNames=[asg_name]
            )
            minsize_new = res['AutoScalingGroups'][0]['MinSize']
        except ClientError:
            log.exception('Error describing ASG %s', asg_name)
            raise

        # Restore ASG sizes if they were modified
        if minsize != minsize_new:
            self._set_asg_size(asg_name, minsize, maxsize, dessize)


    def _get_defunct_instances(self, asg):
        """
        Get all currently healthy instances under an ASG.
        Presumably, they will be terminated later on as part
        of the rolling deploy.

        If instances have already been determined in this way
        through a previous operation, do not do it again.
        """
        asg_name = asg['AutoScalingGroupName']
        instances = asg['Instances']
        ins_file = self.ins_file_path + '_' + asg_name

        try:
            # Resume path: reuse the list captured by a previous run.
            with open(ins_file) as f:
                log.debug('ASG instances file found: %s', ins_file)
                log.info(
                    'Defunct ASG instances already obtained: %s', asg_name
                )
                defunct_ins = [s.strip() for s in f.readlines()]

        except IOError:
            defunct_ins = []
            for i in instances:
                # describe_auto_scaling_groups reports 'Healthy' in mixed
                # case (unlike describe_auto_scaling_instances' 'HEALTHY').
                if ( i['HealthStatus'] == 'Healthy' and
                        i['LifecycleState'] == 'InService' ):
                    defunct_ins.append(i['InstanceId'])

            log.debug('Writing defunct ids to file: %s', ins_file)
            with open(ins_file, 'w') as f:
                f.writelines([s + '\n' for s in defunct_ins])

        if len(defunct_ins) > 0:
            log.info(
                'Instances marked for removal:\n %s',
                '\n '.join(defunct_ins)
            )

        return defunct_ins


    def _get_original_props(self, asg):
        """
        Obtain the original MinSize, MaxSize, and DesiredCapacity of ASG.
        Also gets the ASG's HealthCheckGracePeriod.

        Sizes are cached in a props file so the values captured before
        any temporary resizing survive an interrupted run.
        """
        asg_name = asg['AutoScalingGroupName']
        props_file = self.props_file_path + '_' + asg_name

        try:
            # Resume path: read back previously saved sizes.
            with open(props_file) as f:
                log.debug('ASG properties file found: %s', props_file)
                log.info('ASG sizes already obtained: %s', asg_name)
                for prop in f.readlines():
                    x = prop.strip().split('=')
                    if x[0] == 'MinSize': minsize = int(x[1])
                    if x[0] == 'MaxSize': maxsize = int(x[1])
                    if x[0] == 'DesiredCapacity': dessize = int(x[1])

        except IOError:
            minsize = asg['MinSize']
            maxsize = asg['MaxSize']
            dessize = asg['DesiredCapacity']

            log.debug(
                'Writing original ASG properties to file: %s', props_file
            )
            props = [
                'MinSize={}'.format(minsize),
                'MaxSize={}'.format(maxsize),
                'DesiredCapacity={}'.format(dessize),
            ]
            with open(props_file, 'w') as f:
                f.writelines([prop + '\n' for prop in props])

        log.info(
            'ASG %s original sizes:\n' +
            ' MinSize: %i\n MaxSize: %i\n DesiredCapacity: %i',
            asg_name, minsize, maxsize, dessize
        )

        # Use the ASG's health-check grace period as the internal wait
        # timeout when it is set; otherwise fall back to the default.
        grace_prd = DEFAULT_ASG_WAIT
        if asg['HealthCheckGracePeriod'] > 0:
            grace_prd = asg['HealthCheckGracePeriod']
        log.info('Internal wait timeout: %i', grace_prd)

        return minsize, maxsize, dessize, grace_prd


    def _get_pending_instances(self, asg_name, timelimit):
        """
        Poll the ASG for any instances that are in a 'pending'
        run state, indicating that they have just been created.

        Return as soon as 1 or more pending instances are found in a poll.

        Throw an exception if timeout is reached and no
        pending instances are found.
        """
        iids = []
        try:
            # `timeout` raises RuntimeError once timelimit elapses.
            with timeout(timelimit, exception=RuntimeError):
                while len(iids) == 0:
                    log.info('Polling for new ASG instances')
                    try:
                        log.debug('Describe ASG details for %s', asg_name)
                        res = self.asg.describe_auto_scaling_groups(
                            AutoScalingGroupNames=[asg_name]
                        )

                        for i in res['AutoScalingGroups'][0]['Instances']:
                            ins = self.ec2.Instance(i['InstanceId'])
                            if ins.state['Code'] == STATE_PENDING:
                                iids.append(i['InstanceId'])

                    except ClientError as e:
                        # Transient API errors are tolerated; keep polling.
                        log.warning('Error describing ASG %s: %s', asg_name, e)

                    if len(iids) == 0:
                        log.debug('Waiting %i second/s', self.wait_interval)
                        time.sleep(self.wait_interval)

        except RuntimeError:
            log.exception(
                '[%s] Timeout waiting for new instances: %i second/s',
                self.__class__.__name__, timelimit
            )
            raise

        return iids


    def _wait_for_instances(self, iids, timelimit):
        """
        Wait for instances to be available and InService under an ASG.
        Time out after a certain period of time.

        NOTE(review): the caller's `iids` list is mutated (emptied) as
        instances come InService.
        """
        for iid in iids:
            log.info('Waiting for instance %s to be running', iid)
            i = self.ec2.Instance(iid)
            i.wait_until_running()

        try:
            with timeout(timelimit, exception=RuntimeError):
                while len(iids) > 0:
                    try:
                        log.debug('Describe ASG instances')
                        res = self.asg.describe_auto_scaling_instances(
                            InstanceIds=iids
                        )
                        for i in res['AutoScalingInstances']:
                            # This API reports 'HEALTHY' in upper case,
                            # unlike describe_auto_scaling_groups.
                            if ( i['HealthStatus'] == 'HEALTHY' and
                                    i['LifecycleState'] == 'InService' ):
                                iids.remove(i['InstanceId'])
                            else:
                                log.info(
                                    'Waiting for instance %s to be InService',
                                    i['InstanceId']
                                )

                    except ClientError as e:
                        log.warning(
                            'Error enumerating ASG instances: %s', e
                        )

                    if len(iids) > 0:
                        log.debug('Waiting %i second/s', self.wait_interval)
                        time.sleep(self.wait_interval)

        except RuntimeError:
            log.exception(
                '[%s] Timeout waiting for instances: %i second/s',
                self.__class__.__name__, timelimit
            )
            raise


    def _increase_to_floor_size(self, asg_name, increase_max=False):
        """
        Increase the MinSize and MaxSize to minimum values for rolling deploy.

        MaxSize is only raised when increase_max is True.
        """
        args = {
            'AutoScalingGroupName': asg_name,
            'MinSize': ASG_MIN_MINSIZE,
        }
        log.info(
            'Temporarily increasing ASG %s MinSize to %i',
            asg_name, ASG_MIN_MINSIZE
        )
        if increase_max:
            args['MaxSize'] = ASG_MIN_MAXSIZE
            log.info(
                'Temporarily increasing ASG %s MaxSize to %i',
                asg_name, ASG_MIN_MAXSIZE
            )
        try:
            self.asg.update_auto_scaling_group(**args)
        except ClientError:
            log.exception('Error updating ASG settings')
            raise


    def _set_asg_size(self, asg_name, minsize, maxsize, dessize):
        """
        Just set ASG's MinSize and MaxSize.

        DesiredCapacity is set as well; used to restore original sizes.
        """
        log.info(
            'Setting ASG %s MinSize (%i), MaxSize (%i), DesiredCapacity (%i)',
            asg_name, minsize, maxsize, dessize
        )
        try:
            self.asg.update_auto_scaling_group(
                AutoScalingGroupName=asg_name,
                MinSize=minsize,
                MaxSize=maxsize,
                DesiredCapacity=dessize
            )
        except ClientError:
            log.exception('Error updating ASG settings')
            raise


    def _get_removable_files(self):
        # Per-ASG marker files to delete on cleanup (Cleanable hook --
        # presumably; the Cleanable base is not visible here).
        return glob.glob(self.props_file_path + '_*') \
            + glob.glob(self.ins_file_path + '_*') \
            + glob.glob(self.mark_file_path + '_*')
  1230 +
  1231 +
  1232 +def main():
  1233 + pr = argparse.ArgumentParser(
  1234 + description=get_desc(), formatter_class=argparse.RawTextHelpFormatter
  1235 + )
  1236 +
  1237 + ga = pr.add_argument_group('Actions')
  1238 + ga.add_argument(
  1239 + '-V', '--version', action='version',
  1240 + version='Cloud deploy v{}'.format(CDEP_VERSION)
  1241 + )
  1242 + ga.add_argument(
  1243 + '--full-deploy', action='store_true',
  1244 + help='Execute app deployment process. This is the default action.'
  1245 + )
  1246 + ga.add_argument(
  1247 + '--rolling-termination', action='store_true',
  1248 + help='Skip app deploy. Execute only rolling termination on ASGs.'
  1249 + )
  1250 + ga.add_argument(
  1251 + '--get-asg-ips', action='store_true',
  1252 + help='Get current IP addresses by ASG and exit'
  1253 + )
  1254 +
  1255 + gm = pr.add_argument_group('Modifiers')
  1256 + gm.add_argument(
  1257 + '-i', '--ident', help='Deployment identifier'
  1258 + )
  1259 + gm.add_argument(
  1260 + '-v', '--verbose', action='store_true', help='Increase verbosity'
  1261 + )
  1262 +
  1263 + awso = pr.add_argument_group('AWS options')
  1264 + awso.add_argument(
  1265 + '-s', '--aws-subnet',
  1266 + help='VPC subnet id. Required for full deploy.'
  1267 + )
  1268 + awso.add_argument(
  1269 + '-l', '--aws-launch-template',
  1270 + help='Source LT id. Required for full deploy.'
  1271 + )
  1272 + awso.add_argument(
  1273 + '-p', '--aws-profile', help='Local IAM credentials (None)'
  1274 + )
  1275 + awso.add_argument(
  1276 + '-r', '--aws-region', help='(us-east-1)'
  1277 + )
  1278 + awso.add_argument(
  1279 + '--aws-launch-template-ver',
  1280 + type=int, help='Source LT version ($Default)'
  1281 + )
  1282 + awso.add_argument(
  1283 + '-L', '--aws-target-launch-template', nargs='*',
  1284 + help='Target LT ids (None)'
  1285 + )
  1286 + awso.add_argument(
  1287 + '--aws-target-launch-template-ver',
  1288 + type=int, help='Target LT base version ($Default)'
  1289 + )
  1290 + awso.add_argument(
  1291 + '-Y', '--aws-target-launch-template-desc',
  1292 + help='Target LT description (deployed)'
  1293 + )
  1294 + awso.add_argument(
  1295 + '-a', '--aws-autoscaling-group',
  1296 + nargs='*', help='Autoscaling group names (None)'
  1297 + )
  1298 + awso.add_argument(
  1299 + '-A', '--aws-ami-name', help='Generated AMI name (code_deploy)'
  1300 + )
  1301 + awso.add_argument(
  1302 + '-X', '--aws-ami-desc', help='Generated AMI description (<boilerplate>)'
  1303 + )
  1304 + awso.add_argument(
  1305 + '--aws-ami-wait-timeout',
  1306 + type=int, help='Timeout for waiting on AMI (300)'
  1307 + )
  1308 + awso.add_argument(
  1309 + '--aws-ami-wait-interval', type=int, help='Interval between checks (7)'
  1310 + )
  1311 + awso.add_argument(
  1312 + '--aws-asg-wait-interval', type=int, help='Interval between checks (5)'
  1313 + )
  1314 + awso.add_argument(
  1315 + '--aws-mark-ec2-file',
  1316 + help='Marker file ({}/cd-instance-id)'.format(TMP_DIR)
  1317 + )
  1318 + awso.add_argument(
  1319 + '--aws-mark-ami-file',
  1320 + help='Marker file ({}/cd-ami-id)'.format(TMP_DIR)
  1321 + )
  1322 + awso.add_argument(
  1323 + '--aws-mark-lt-file',
  1324 + help='Marker file ({}/cd-target-lt)'.format(TMP_DIR)
  1325 + )
  1326 + awso.add_argument(
  1327 + '--aws-mark-props-file',
  1328 + help='Marker file ({}/cd-asg-props)'.format(TMP_DIR)
  1329 + )
  1330 + awso.add_argument(
  1331 + '--aws-mark-instances-file',
  1332 + help='Marker file ({}/cd-asg-instances)'.format(TMP_DIR)
  1333 + )
  1334 + awso.add_argument(
  1335 + '--aws-mark-rolling-file',
  1336 + help='Marker file ({}/cd-asg-rolling)'.format(TMP_DIR)
  1337 + )
  1338 +
  1339 + appo = pr.add_argument_group('App deployment options')
  1340 + appo.add_argument(
  1341 + '-d', '--app-project-dir', help='Location of project code (<cwd>)'
  1342 + )
  1343 + appo.add_argument(
  1344 + '-e', '--app-cap-env', help='Capistrano env (production)'
  1345 + )
  1346 + appo.add_argument(
  1347 + '-H', '--app-halt-after',
  1348 + action='store_true', help='Halt script after application deploy'
  1349 + )
  1350 + appo.add_argument(
  1351 + '--app-mark-file',
  1352 + help='Marker file ({}/cd-app-deployed)'.format(TMP_DIR)
  1353 + )
  1354 +
  1355 + ssho = pr.add_argument_group('SSH options')
  1356 + ssho.add_argument(
  1357 + '--ssh-public', action='store_true', help='Target is a public IP'
  1358 + )
  1359 + ssho.add_argument(
  1360 + '--ssh-port', type=int, help='Target SSH port (22)'
  1361 + )
  1362 + ssho.add_argument(
  1363 + '--ssh-wait-timeout',
  1364 + type=int, help='Overall timeout for SSH connection (60)'
  1365 + )
  1366 + ssho.add_argument(
  1367 + '--ssh-wait-interval', type=int, help='Timeout for each SSH attempt (5)'
  1368 + )
  1369 + ssho.add_argument(
  1370 + '--ssh-wait-gap', type=int, help='Wait time between SSH attempts (3)'
  1371 + )
  1372 +
  1373 + sshco = pr.add_argument_group('SSH client config options')
  1374 + sshco.add_argument(
  1375 + '--ssh-config-dir', help='Client SSH dir (~/.ssh)'
  1376 + )
  1377 + sshco.add_argument(
  1378 + '--ssh-config-file', help='(<config_dir>/config)'
  1379 + )
  1380 + sshco.add_argument(
  1381 + '--ssh-config-template-file', help='(<config_dir>/config.template)'
  1382 + )
  1383 + sshco.add_argument(
  1384 + '--ssh-id-rsa-file', help='(<config_dir>/id_rsa)'
  1385 + )
  1386 + sshco.add_argument(
  1387 + '--ssh-known-hosts-file', help='(<config_dir>/known_hosts)'
  1388 + )
  1389 +
  1390 + sshto = pr.add_argument_group('SSH config template options')
  1391 + sshto.add_argument(
  1392 + '--ssh-host-keyword', help='Replaceable keyword for Host (TARGET_IP)'
  1393 + )
  1394 + sshto.add_argument(
  1395 + '--ssh-port-keyword', help='Replaceable keyword for Port (TARGET_PORT)'
  1396 + )
  1397 + sshto.add_argument(
  1398 + '--ssh-comment-keyword',
  1399 + help='Replaceable keyword as comments (TARGET_COMMENT)'
  1400 + )
  1401 +
  1402 + ar = pr.parse_args()
  1403 +
  1404 + # Output verbosity settings
  1405 + loglevel = log.INFO
  1406 + if ar.verbose:
  1407 + loglevel = log.DEBUG
  1408 + log.basicConfig(
  1409 + datefmt='%y-%m-%d %H:%M:%S',
  1410 + format='%(asctime)s %(levelname)s:%(message)s',
  1411 + level=loglevel,
  1412 + )
  1413 +
  1414 + cleaner = Cleaner()
  1415 +
  1416 + # Target autoscaling groups
  1417 + target_asgs = []
  1418 + if ar.aws_autoscaling_group:
  1419 + target_asgs = ar.aws_autoscaling_group
  1420 +
  1421 + # AWS settings for ASGroup
  1422 + asg_opts = options_asgroup(ar)
  1423 + asgroup = ASGroup(**asg_opts)
  1424 +
  1425 + # IP address type (relative to VPC)
  1426 + add_type = 'private'
  1427 + if ar.ssh_public:
  1428 + add_type = 'public'
  1429 +
  1430 + is_default_action = (not any([
  1431 + ar.rolling_termination,
  1432 + ar.get_asg_ips
  1433 + ])) or ar.full_deploy
  1434 +
  1435 + ### Action (default): Rolling Application Deploy ###
  1436 +
  1437 + if is_default_action:
  1438 + if not all([ar.aws_subnet, ar.aws_launch_template]):
  1439 + pr.error('Missing argument/s: aws-subnet, aws-launch-template')
  1440 +
  1441 + # SSH client settings
  1442 + si_opts = options_sshinfo(ar)
  1443 + ssh_info = SSHInfo(**si_opts)
  1444 +
  1445 + # Capistrano application deployer settings
  1446 + #ad_opts = options_capdeployer(ar)
  1447 + #ad_opts['ssh'] = ssh_info
  1448 + #cap = CapDeployer(**ad_opts)
  1449 +
  1450 + # Test deployer
  1451 + topts = {}
  1452 + topts['ssh'] = ssh_info
  1453 + td = TestDeployer(**topts)
  1454 +
  1455 + # AWS settings for AppNode
  1456 + aws_opts = options_appnode(ar)
  1457 + aws_opts['Subnet'] = ar.aws_subnet
  1458 + aws_opts['LaunchTemplateId'] = ar.aws_launch_template
  1459 + node = AppNode(**aws_opts)
  1460 +
  1461 + ident = 'none'
  1462 + if ar.ident:
  1463 + ident = ar.ident
  1464 + log.info('Now using deploy identifier: %s', ident)
  1465 +
  1466 + # Begin creating a new target EC2 node
  1467 + node.launch_ec2()
  1468 + log.info('New EC2 instance launched from Launch Template')
  1469 +
  1470 + # Wait for the new EC2 server, then begin
  1471 + # deploying the application (with Capistrano)
  1472 + node_ip = node.get_ip_address(address_type=add_type)
  1473 + if node_ip:
  1474 + ssh_info.wait_for_ssh(node_ip)
  1475 + ssh_info.generate_ssh_config(
  1476 + node_ip, comment='Deployment Tag: {}'.format(ident)
  1477 + )
  1478 +
  1479 + deploy_ident = ident + '_ip' + node_ip
  1480 + #cap.deploy(deploy_ident)
  1481 + td.deploy(deploy_ident)
  1482 + log.info('Application deployed to EC2 instance')
  1483 +
  1484 + # If directed, stop the script
  1485 + if ar.app_halt_after: non_graceful_halt()
  1486 +
  1487 + # Begin creating a new AMI out of the EC2 node
  1488 + node.generate_ami()
  1489 + log.info('AMI generated from EC2 instance')
  1490 +
  1491 + # Calculate target launch templates
  1492 + target_lts = []
  1493 + arg_lt_desc = ar.aws_target_launch_template_desc
  1494 +
  1495 + if ar.aws_target_launch_template:
  1496 + # If target LTs are provided, modify LTs.
  1497 + # Deploy may proceed depending on ASGs provided.
  1498 + for lt in ar.aws_target_launch_template:
  1499 + x = { 'lt_id': lt }
  1500 + if ar.aws_target_launch_template_ver:
  1501 + x['lt_source_ver'] = ar.aws_target_launch_template_ver
  1502 + if arg_lt_desc:
  1503 + x['lt_desc'] = arg_lt_desc
  1504 + target_lts.append(x)
  1505 +
  1506 + else:
  1507 + # If NO target LTs are provided, infer LTs
  1508 + # from ASGs if ASGs are provided.
  1509 + for asg_name in target_asgs:
  1510 + asg = asgroup.get_asg(asg_name)
  1511 + if asg['LaunchTemplate']:
  1512 + xid = asg['LaunchTemplate']['LaunchTemplateId']
  1513 + xver = asg['LaunchTemplate']['Version']
  1514 + x = {
  1515 + 'lt_id': xid,
  1516 + 'lt_source_ver': xver,
  1517 + }
  1518 + if arg_lt_desc:
  1519 + x['lt_desc'] = arg_lt_desc
  1520 + log.info(
  1521 + 'Obtained target LT %s ver %s from ASG %s',
  1522 + xid, xver, asg_name
  1523 + )
  1524 + target_lts.append(x)
  1525 +
  1526 + # Create new versions of the Launch Templates.
  1527 + # New versions will now use the generated AMI.
  1528 + for lt_args in target_lts:
  1529 + node.create_new_lt_version(**lt_args)
  1530 + log.info('Target Launch Template modified to use new AMI')
  1531 +
  1532 + if len(target_asgs) > 0:
  1533 + log.info('Proceeding to termination deploy of autoscaling groups')
  1534 +
  1535 + cleaner.mark(node)
  1536 + #cleaner.mark(cap)
  1537 + cleaner.mark(td)
  1538 +
  1539 + ### Action: Autoscaling Groups rolling termination ###
  1540 +
  1541 + if is_default_action or ar.rolling_termination:
  1542 + for asg_name in target_asgs:
  1543 + asgroup.rolling_termination_deploy(asg_name)
  1544 + log.info('Rolling deploy done for %s', asg_name)
  1545 +
  1546 + ### Action: Get IP addresses ###
  1547 +
  1548 + if ar.get_asg_ips:
  1549 + for asg_name in target_asgs:
  1550 + print(('[{}]'.format(asg_name)))
  1551 + for ip in asgroup.get_ips_by_asg(asg_name, address_type=add_type):
  1552 + print(ip)
  1553 +
  1554 + cleaner.mark(asgroup)
  1555 + cleaner.cleanup()
  1556 + sys.exit(0)
  1557 +
  1558 +
def non_graceful_halt():
    """Pause the deploy after the application-deploy phase, per user request.

    Triggered by the -H/--app-halt-after flag so the operator can SSH into
    the freshly deployed instance before the AMI/ASG steps run.  Exits with
    status 0 so callers do not interpret the pause as a failure.
    """
    log.info(
        '---------------\n'
        # NOTE: a space is required after the period — adjacent string
        # literals concatenate with no separator.
        'Script execution paused by user argument. '
        'You may SSH into the deploy target instance and '
        'perform commands, and/or continue the process later.'
        '\n\nTo resume execution, simply repeat the deploy command '
        'without the "-H" or "--app-halt-after" flag.\n'
        '---------------'
    )
    sys.exit(0)
  1570 +
  1571 +
def get_desc():
    """Return the long-form description/epilog text shown by --help."""
    # Each element is one logical paragraph/section; joining with '' yields
    # exactly the same string the original implicit concatenation produced.
    sections = (
        'DESCRIPTION',

        '\n\nPerform a rolling termination deployment on an AWS autoscaling '
        'group. Works best under the following conditions:',

        '\n\n- The autoscaling group (ASG) uses Launch Templates, instead of '
        'Launch Configurations.',
        '\n- The ASG uses the $Default version of its Launch Template.',
        '\n- At the start of deploy, when a fresh EC2 is created, the script '
        'will ask for a Launch Template, not an AMI directly.',

        '\n\nPROCESS',

        '\n\nGiven the source launch template ID, target launch template ID/s, '
        'target autoscaling group name/s, and a Ruby on Rails app project '
        'directory, this script will:',

        '\n\n1) Create a temporary EC2 instance from source launch template.',
        '\n2) Perform a Capistrano deploy on that instance. (Other app '
        'deployment methods will be added in the future.)',
        '\n3) Generate an AMI from that instance.',
        '\n4) Create a new version of the target launch template/s which '
        'now uses the new AMI ID obtained in (3).',
        '\n5) Do a rolling termination on the existing instances of the '
        'target autoscaling group/s, allowing them to be automatically '
        'replaced by the ASG as they are being removed one by one.',
        '\n6) Terminate the temporary instance created in (1).',

        '\n\nCLIENT SSH CONFIG TEMPLATING',

        '\n\nIn order to SSH to the new EC2 instance, this script utilizes '
        'a template for the client SSH config (by default, it is located at '
        '~/.ssh/config.template). This template contains a Host stub with '
        'a designated keyword in place of an IP address. When the new EC2 '
        'is created, the template is used to create a new client SSH config '
        "file (~/.ssh/config), with the keyword replaced by the instance's "
        "actual IP address. The project's Capistrano deploy configuration "
        'should be made to reflect this setup.',

        '\n\nEXAMPLE USAGE',

        '\n\n$ ./cdep.py -s subnet-123abc -l lt-abc123xyz '
        "-A myapp-ami -X 'Myapp server image' -Y 'Myapp LT' "
        '-a mygroup-asg -d /home/john/myproj -i v1.0.0 -H',

        '\n\nThe command above creates a new instance from launch template '
        'lt-abc123xyz, then does a Capistrano production deploy to it. After '
        'deployment, the script is halted so the user can SSH to the instance '
        'and perform additional operations, if desired.',

        '\n\nWhen ready, the command should be repeated without the -H flag. '
        'If so, the deploy continues by creating an AMI from the finalized '
        'instance. The target launch templates, inferred from the given '
        'autoscaling group mygroup-asg, is updated to '
        'use this new AMI. Finally, existing mygroup-asg instances are removed '
        'one by one for auto-replacement to trigger with new instances.',

        '\n\nFILES',

        '\n\n/tmp/cd-*'
        '\n~/.ssh/config.template'
        '\n~/.ssh/config',
    )
    return ''.join(sections)
  1637 +
  1638 +
def options_sshinfo(args):
    """Build SSHInfo keyword options from parsed CLI arguments.

    Only options the user actually supplied (truthy values) are included,
    so SSHInfo's own defaults apply to everything else.
    """
    # (argparse attribute, SSHInfo keyword)
    arg_to_kw = (
        ('ssh_port', 'port'),
        ('ssh_wait_timeout', 'wait_timeout'),
        ('ssh_wait_interval', 'wait_interval'),
        ('ssh_wait_gap', 'wait_gap'),
        ('ssh_host_keyword', 'host_keyword'),
        ('ssh_port_keyword', 'port_keyword'),
        ('ssh_comment_keyword', 'comment_keyword'),
        ('ssh_config_dir', 'config_dir'),
        ('ssh_config_file', 'config_file_path'),
        ('ssh_config_template_file', 'config_template_file_path'),
        ('ssh_id_rsa_file', 'id_rsa_file_path'),
        ('ssh_known_hosts_file', 'known_hosts_file_path'),
    )
    opts = {}
    for attr, kw in arg_to_kw:
        value = getattr(args, attr)
        if value:
            opts[kw] = value
    return opts
  1668 +
  1669 +
def options_capdeployer(args):
    """Build CapDeployer keyword options from parsed CLI arguments.

    Only options the user actually supplied (truthy values) are included,
    so CapDeployer's own defaults apply to everything else.
    """
    # (argparse attribute, CapDeployer keyword)
    arg_to_kw = (
        ('app_project_dir', 'app_project_dir'),
        ('app_cap_env', 'cap_env'),
        ('app_mark_file', 'mark_file_path'),
    )
    return {
        kw: getattr(args, attr)
        for attr, kw in arg_to_kw
        if getattr(args, attr)
    }
  1679 +
  1680 +
def options_appnode(args):
    """Build AppNode keyword options from parsed CLI arguments.

    Only options the user actually supplied (truthy values) are included,
    so AppNode's own defaults apply to everything else.
    """
    # (argparse attribute, AppNode keyword)
    arg_to_kw = (
        ('ident', 'Ident'),
        ('aws_profile', 'ProfileName'),
        ('aws_region', 'Region'),
        ('aws_launch_template_ver', 'LaunchTemplateVersion'),
        ('aws_ami_name', 'AmiName'),
        ('aws_ami_desc', 'AmiDesc'),
        ('aws_ami_wait_timeout', 'AmiWaitTimeout'),
        ('aws_ami_wait_interval', 'AmiWaitInterval'),
        ('aws_mark_ec2_file', 'InstanceIdFilePath'),
        ('aws_mark_ami_file', 'AmiIdFilePath'),
        ('aws_mark_lt_file', 'TargetLTBaseFilePath'),
    )
    opts = {}
    for attr, kw in arg_to_kw:
        value = getattr(args, attr)
        if value:
            opts[kw] = value
    return opts
  1706 +
  1707 +
def options_asgroup(args):
    """Build ASGroup keyword options from parsed CLI arguments.

    Only options the user actually supplied (truthy values) are included,
    so ASGroup's own defaults apply to everything else.
    """
    # (argparse attribute, ASGroup keyword)
    arg_to_kw = (
        ('ident', 'Ident'),
        ('aws_profile', 'ProfileName'),
        ('aws_region', 'Region'),
        ('aws_ami_wait_interval', 'WaitInterval'),
        ('aws_mark_props_file', 'PropsFilePath'),
        ('aws_mark_instances_file', 'InstancesFilePath'),
        ('aws_mark_rolling_file', 'MarkFilePath'),
    )
    return {
        kw: getattr(args, attr)
        for attr, kw in arg_to_kw
        if getattr(args, attr)
    }
  1725 +
  1726 +
# Script entry point: run the CLI driver only when executed directly,
# not when this module is imported.
if __name__ == '__main__':
    main()
... ...
[build-system]
# NOTE: these are build-time requirements; runtime dependencies (boto3, etc.)
# are also declared in setup.py's install_requires.
requires = [
    "setuptools>=54",
    "wheel",
    "boto3>=1.6.6",
    "interruptingcow",
    "pytz"
]

build-backend = "setuptools.build_meta"
\ No newline at end of file
... ...
  1 +
  2 +# Setting up
  3 +[metadata]
  4 +name = application-deployer
  5 +version = 0.0.1
  6 +author = leecher (ChromediaInc)
  7 +author_email = vincent.nambatac@chromedia.com
description = App deployer for AWS environments
  9 +long_description_content_type = text/markdown
  10 +# long_description=long_description,
  11 +
  12 +# keywords=['python', 'video', 'stream', 'video stream', 'camera stream', 'sockets'],
  13 +classifiers =
    Development Status :: 1 - Planning
  15 + Intended Audience :: Developers
  16 + Programming Language :: Python :: 3
  17 + Operating System :: Unix
  18 + Operating System :: MacOS :: MacOS X
  19 + Operating System :: Microsoft :: Windows
  20 + License :: OSI Approved :: MIT License
  21 +
  22 +[options]
  23 +packages = find:
  24 +python_requires = >=3.7
  25 +include_package_data = True
  26 +
  27 +
  28 +
... ...
"""Packaging script for the app deployer (see also setup.cfg/pyproject.toml)."""
from setuptools import setup, find_packages
import codecs
import os


VERSION = '0.0.1'
DESCRIPTION = 'dbocl env deployment method'
#LONG_DESCRIPTION = 'A package that allows to deploy application and build auto scaled instances and ami'

# Setting up
setup(
    name="app-deployer",
    version=VERSION,
    author="leecher (ChromediaInc)",
    author_email="info@chromedia.com",
    description=DESCRIPTION,
    long_description_content_type="text/markdown",
# long_description=long_description,
    packages=find_packages(),
    # 'awscli' is the actual PyPI distribution name; the previous value
    # 'aws-cli' does not exist on PyPI and made installation fail.
    install_requires=['boto3', 'interruptingcow', 'pytz', 'awscli'],
# keywords=['python', 'video', 'stream', 'video stream', 'camera stream', 'sockets'],
    classifiers=[
        "Development Status :: 1 - Planning",
        "Intended Audience :: Developers",
        "Programming Language :: Python :: 3",
        "Operating System :: Unix",
        "Operating System :: MacOS :: MacOS X",
        "Operating System :: Microsoft :: Windows",
    ]
)
  31 +
... ...
"""Run the EasyInstall command"""

# Console-script shim: delegates straight to setuptools' easy_install
# entry point when executed as a script.
# NOTE(review): easy_install is deprecated and removed in recent setuptools
# releases — prefer pip for installs; confirm this shim is still needed.
if __name__ == '__main__':
    from setuptools.command.easy_install import main
    main()
... ...
  1 +Copyright (c) 2008-2019 The pip developers (see AUTHORS.txt file)
  2 +
  3 +Permission is hereby granted, free of charge, to any person obtaining
  4 +a copy of this software and associated documentation files (the
  5 +"Software"), to deal in the Software without restriction, including
  6 +without limitation the rights to use, copy, modify, merge, publish,
  7 +distribute, sublicense, and/or sell copies of the Software, and to
  8 +permit persons to whom the Software is furnished to do so, subject to
  9 +the following conditions:
  10 +
  11 +The above copyright notice and this permission notice shall be
  12 +included in all copies or substantial portions of the Software.
  13 +
  14 +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
  15 +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
  16 +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
  17 +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
  18 +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
  19 +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
  20 +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
... ...
  1 +Metadata-Version: 2.1
  2 +Name: pip
  3 +Version: 20.0.2
  4 +Summary: The PyPA recommended tool for installing Python packages.
  5 +Home-page: https://pip.pypa.io/
  6 +Author: The pip developers
  7 +Author-email: pypa-dev@groups.google.com
  8 +License: MIT
  9 +Project-URL: Documentation, https://pip.pypa.io
  10 +Project-URL: Source, https://github.com/pypa/pip
  11 +Keywords: distutils easy_install egg setuptools wheel virtualenv
  12 +Platform: UNKNOWN
  13 +Classifier: Development Status :: 5 - Production/Stable
  14 +Classifier: Intended Audience :: Developers
  15 +Classifier: License :: OSI Approved :: MIT License
  16 +Classifier: Topic :: Software Development :: Build Tools
  17 +Classifier: Programming Language :: Python
  18 +Classifier: Programming Language :: Python :: 2
  19 +Classifier: Programming Language :: Python :: 2.7
  20 +Classifier: Programming Language :: Python :: 3
  21 +Classifier: Programming Language :: Python :: 3.5
  22 +Classifier: Programming Language :: Python :: 3.6
  23 +Classifier: Programming Language :: Python :: 3.7
  24 +Classifier: Programming Language :: Python :: 3.8
  25 +Classifier: Programming Language :: Python :: Implementation :: CPython
  26 +Classifier: Programming Language :: Python :: Implementation :: PyPy
  27 +Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*
  28 +
  29 +pip - The Python Package Installer
  30 +==================================
  31 +
  32 +.. image:: https://img.shields.io/pypi/v/pip.svg
  33 + :target: https://pypi.org/project/pip/
  34 +
  35 +.. image:: https://readthedocs.org/projects/pip/badge/?version=latest
  36 + :target: https://pip.pypa.io/en/latest
  37 +
  38 +pip is the `package installer`_ for Python. You can use pip to install packages from the `Python Package Index`_ and other indexes.
  39 +
  40 +Please take a look at our documentation for how to install and use pip:
  41 +
  42 +* `Installation`_
  43 +* `Usage`_
  44 +
  45 +Updates are released regularly, with a new version every 3 months. More details can be found in our documentation:
  46 +
  47 +* `Release notes`_
  48 +* `Release process`_
  49 +
  50 +If you find bugs, need help, or want to talk to the developers please use our mailing lists or chat rooms:
  51 +
  52 +* `Issue tracking`_
  53 +* `Discourse channel`_
  54 +* `User IRC`_
  55 +
  56 +If you want to get involved head over to GitHub to get the source code, look at our development documentation and feel free to jump on the developer mailing lists and chat rooms:
  57 +
  58 +* `GitHub page`_
  59 +* `Dev documentation`_
  60 +* `Dev mailing list`_
  61 +* `Dev IRC`_
  62 +
  63 +Code of Conduct
  64 +---------------
  65 +
  66 +Everyone interacting in the pip project's codebases, issue trackers, chat
  67 +rooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_.
  68 +
  69 +.. _package installer: https://packaging.python.org/guides/tool-recommendations/
  70 +.. _Python Package Index: https://pypi.org
  71 +.. _Installation: https://pip.pypa.io/en/stable/installing.html
  72 +.. _Usage: https://pip.pypa.io/en/stable/
  73 +.. _Release notes: https://pip.pypa.io/en/stable/news.html
  74 +.. _Release process: https://pip.pypa.io/en/latest/development/release-process/
  75 +.. _GitHub page: https://github.com/pypa/pip
  76 +.. _Dev documentation: https://pip.pypa.io/en/latest/development
  77 +.. _Issue tracking: https://github.com/pypa/pip/issues
  78 +.. _Discourse channel: https://discuss.python.org/c/packaging
  79 +.. _Dev mailing list: https://groups.google.com/forum/#!forum/pypa-dev
  80 +.. _User IRC: https://webchat.freenode.net/?channels=%23pypa
  81 +.. _Dev IRC: https://webchat.freenode.net/?channels=%23pypa-dev
  82 +.. _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/
  83 +
  84 +
... ...
  1 +../../../bin/pip,sha256=gpPWkLN6MR4Uh1-v9bSu9ZjnCcIy7Vgo8eo4Q2La7GM,241
  2 +../../../bin/pip3,sha256=gpPWkLN6MR4Uh1-v9bSu9ZjnCcIy7Vgo8eo4Q2La7GM,241
  3 +../../../bin/pip3.8,sha256=gpPWkLN6MR4Uh1-v9bSu9ZjnCcIy7Vgo8eo4Q2La7GM,241
  4 +pip-20.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
  5 +pip-20.0.2.dist-info/LICENSE.txt,sha256=W6Ifuwlk-TatfRU2LR7W1JMcyMj5_y1NkRkOEJvnRDE,1090
  6 +pip-20.0.2.dist-info/METADATA,sha256=MSgjT2JTt8usp4Hopp5AGEmc-7sKR2Jd7HTMJqCoRhw,3352
  7 +pip-20.0.2.dist-info/RECORD,,
  8 +pip-20.0.2.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
  9 +pip-20.0.2.dist-info/entry_points.txt,sha256=HtfDOwpUlr9s73jqLQ6wF9V0_0qvUXJwCBz7Vwx0Ue0,125
  10 +pip-20.0.2.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
  11 +pip/__init__.py,sha256=U1AM82iShMaw90K6Yq0Q2-AZ1EsOcqQLQRB-rxwFtII,455
  12 +pip/__main__.py,sha256=NM95x7KuQr-lwPoTjAC0d_QzLJsJjpmAoxZg0mP8s98,632
  13 +pip/__pycache__/__init__.cpython-38.pyc,,
  14 +pip/__pycache__/__main__.cpython-38.pyc,,
  15 +pip/_internal/__init__.py,sha256=j5fiII6yCeZjpW7_7wAVRMM4DwE-gyARGVU4yAADDeE,517
  16 +pip/_internal/__pycache__/__init__.cpython-38.pyc,,
  17 +pip/_internal/__pycache__/build_env.cpython-38.pyc,,
  18 +pip/_internal/__pycache__/cache.cpython-38.pyc,,
  19 +pip/_internal/__pycache__/configuration.cpython-38.pyc,,
  20 +pip/_internal/__pycache__/exceptions.cpython-38.pyc,,
  21 +pip/_internal/__pycache__/legacy_resolve.cpython-38.pyc,,
  22 +pip/_internal/__pycache__/locations.cpython-38.pyc,,
  23 +pip/_internal/__pycache__/main.cpython-38.pyc,,
  24 +pip/_internal/__pycache__/pep425tags.cpython-38.pyc,,
  25 +pip/_internal/__pycache__/pyproject.cpython-38.pyc,,
  26 +pip/_internal/__pycache__/self_outdated_check.cpython-38.pyc,,
  27 +pip/_internal/__pycache__/wheel_builder.cpython-38.pyc,,
  28 +pip/_internal/build_env.py,sha256=--aNgzIdYrCOclHMwoAdpclCpfdFE_jooRuCy5gczwg,7532
  29 +pip/_internal/cache.py,sha256=16GrnDRLBQNlfKWIuIF6Sa-EFS78kez_w1WEjT3ykTI,11605
  30 +pip/_internal/cli/__init__.py,sha256=FkHBgpxxb-_gd6r1FjnNhfMOzAUYyXoXKJ6abijfcFU,132
  31 +pip/_internal/cli/__pycache__/__init__.cpython-38.pyc,,
  32 +pip/_internal/cli/__pycache__/autocompletion.cpython-38.pyc,,
  33 +pip/_internal/cli/__pycache__/base_command.cpython-38.pyc,,
  34 +pip/_internal/cli/__pycache__/cmdoptions.cpython-38.pyc,,
  35 +pip/_internal/cli/__pycache__/command_context.cpython-38.pyc,,
  36 +pip/_internal/cli/__pycache__/main.cpython-38.pyc,,
  37 +pip/_internal/cli/__pycache__/main_parser.cpython-38.pyc,,
  38 +pip/_internal/cli/__pycache__/parser.cpython-38.pyc,,
  39 +pip/_internal/cli/__pycache__/req_command.cpython-38.pyc,,
  40 +pip/_internal/cli/__pycache__/status_codes.cpython-38.pyc,,
  41 +pip/_internal/cli/autocompletion.py,sha256=ekGNtcDI0p7rFVc-7s4T9Tbss4Jgb7vsB649XJIblRg,6547
  42 +pip/_internal/cli/base_command.py,sha256=v6yl5XNRqye8BT9ep8wvpMu6lylP_Hu6D95r_HqbpbQ,7948
  43 +pip/_internal/cli/cmdoptions.py,sha256=f1TVHuu_fR3lLlMo6b367H_GsWFv26tLI9cAS-kZfE0,28114
  44 +pip/_internal/cli/command_context.py,sha256=ygMVoTy2jpNilKT-6416gFSQpaBtrKRBbVbi2fy__EU,975
  45 +pip/_internal/cli/main.py,sha256=8iq3bHe5lxJTB2EvKOqZ38NS0MmoS79_S1kgj4QuH8A,2610
  46 +pip/_internal/cli/main_parser.py,sha256=W9OWeryh7ZkqELohaFh0Ko9sB98ZkSeDmnYbOZ1imBc,2819
  47 +pip/_internal/cli/parser.py,sha256=O9djTuYQuSfObiY-NU6p4MJCfWsRUnDpE2YGA_fwols,9487
  48 +pip/_internal/cli/req_command.py,sha256=pAUAglpTn0mUA6lRs7KN71yOm1KDabD0ySVTQTqWTSA,12463
  49 +pip/_internal/cli/status_codes.py,sha256=F6uDG6Gj7RNKQJUDnd87QKqI16Us-t-B0wPF_4QMpWc,156
  50 +pip/_internal/commands/__init__.py,sha256=uTSj58QlrSKeXqCUSdL-eAf_APzx5BHy1ABxb0j5ZNE,3714
  51 +pip/_internal/commands/__pycache__/__init__.cpython-38.pyc,,
  52 +pip/_internal/commands/__pycache__/check.cpython-38.pyc,,
  53 +pip/_internal/commands/__pycache__/completion.cpython-38.pyc,,
  54 +pip/_internal/commands/__pycache__/configuration.cpython-38.pyc,,
  55 +pip/_internal/commands/__pycache__/debug.cpython-38.pyc,,
  56 +pip/_internal/commands/__pycache__/download.cpython-38.pyc,,
  57 +pip/_internal/commands/__pycache__/freeze.cpython-38.pyc,,
  58 +pip/_internal/commands/__pycache__/hash.cpython-38.pyc,,
  59 +pip/_internal/commands/__pycache__/help.cpython-38.pyc,,
  60 +pip/_internal/commands/__pycache__/install.cpython-38.pyc,,
  61 +pip/_internal/commands/__pycache__/list.cpython-38.pyc,,
  62 +pip/_internal/commands/__pycache__/search.cpython-38.pyc,,
  63 +pip/_internal/commands/__pycache__/show.cpython-38.pyc,,
  64 +pip/_internal/commands/__pycache__/uninstall.cpython-38.pyc,,
  65 +pip/_internal/commands/__pycache__/wheel.cpython-38.pyc,,
  66 +pip/_internal/commands/check.py,sha256=mgLNYT3bd6Kmynwh4zzcBmVlFZ-urMo40jTgk6U405E,1505
  67 +pip/_internal/commands/completion.py,sha256=UFQvq0Q4_B96z1bvnQyMOq82aPSu05RejbLmqeTZjC0,2975
  68 +pip/_internal/commands/configuration.py,sha256=6riioZjMhsNSEct7dE-X8SobGodk3WERKJvuyjBje4Q,7226
  69 +pip/_internal/commands/debug.py,sha256=a8llax2hRkxgK-tvwdJgaCaZCYPIx0fDvrlMDoYr8bQ,4209
  70 +pip/_internal/commands/download.py,sha256=zX_0-IeFb4C8dxSmGHxk-6H5kehtyTSsdWpjNpAhSww,5007
  71 +pip/_internal/commands/freeze.py,sha256=QS-4ib8jbKJ2wrDaDbTuyaB3Y_iJ5CQC2gAVHuAv9QU,3481
  72 +pip/_internal/commands/hash.py,sha256=47teimfAPhpkaVbSDaafck51BT3XXYuL83lAqc5lOcE,1735
  73 +pip/_internal/commands/help.py,sha256=Nhecq--ydFn80Gm1Zvbf9943EcRJfO0TnXUhsF0RO7s,1181
  74 +pip/_internal/commands/install.py,sha256=T4P3J1rw7CQrZX4OUamtcoWMkTrJBfUe6gWpTfZW1bQ,27286
  75 +pip/_internal/commands/list.py,sha256=2l0JiqHxjxDHNTCb2HZOjwwdo4duS1R0MsqZb6HSMKk,10660
  76 +pip/_internal/commands/search.py,sha256=7Il8nKZ9mM7qF5jlnBoPvSIFY9f-0-5IbYoX3miTuZY,5148
  77 +pip/_internal/commands/show.py,sha256=Vzsj2oX0JBl94MPyF3LV8YoMcigl8B2UsMM8zp0pH2s,6792
  78 +pip/_internal/commands/uninstall.py,sha256=8mldFbrQecSoWDZRqxBgJkrlvx6Y9Iy7cs-2BIgtXt4,2983
  79 +pip/_internal/commands/wheel.py,sha256=TMU5ZhjLo7BIZQApGPsYfoCsbGTnvP-N9jkgPJXhj1Y,7170
  80 +pip/_internal/configuration.py,sha256=MgKrLFBJBkF3t2VJM4tvlnEspfSuS4scp_LhHWh53nY,14222
  81 +pip/_internal/distributions/__init__.py,sha256=ECBUW5Gtu9TjJwyFLvim-i6kUMYVuikNh9I5asL6tbA,959
  82 +pip/_internal/distributions/__pycache__/__init__.cpython-38.pyc,,
  83 +pip/_internal/distributions/__pycache__/base.cpython-38.pyc,,
  84 +pip/_internal/distributions/__pycache__/installed.cpython-38.pyc,,
  85 +pip/_internal/distributions/__pycache__/sdist.cpython-38.pyc,,
  86 +pip/_internal/distributions/__pycache__/wheel.cpython-38.pyc,,
  87 +pip/_internal/distributions/base.py,sha256=ruprpM_L2T2HNi3KLUHlbHimZ1sWVw-3Q0Lb8O7TDAI,1425
  88 +pip/_internal/distributions/installed.py,sha256=YqlkBKr6TVP1MAYS6SG8ojud21wVOYLMZ8jMLJe9MSU,760
  89 +pip/_internal/distributions/sdist.py,sha256=D4XTMlCwgPlK69l62GLYkNSVTVe99fR5iAcVt2EbGok,4086
  90 +pip/_internal/distributions/wheel.py,sha256=95uD-TfaYoq3KiKBdzk9YMN4RRqJ28LNoSTS2K46gek,1294
  91 +pip/_internal/exceptions.py,sha256=6YRuwXAK6F1iyUWKIkCIpWWN2khkAn1sZOgrFA9S8Ro,10247
  92 +pip/_internal/index/__init__.py,sha256=vpt-JeTZefh8a-FC22ZeBSXFVbuBcXSGiILhQZJaNpQ,30
  93 +pip/_internal/index/__pycache__/__init__.cpython-38.pyc,,
  94 +pip/_internal/index/__pycache__/collector.cpython-38.pyc,,
  95 +pip/_internal/index/__pycache__/package_finder.cpython-38.pyc,,
  96 +pip/_internal/index/collector.py,sha256=YS7Ix4oylU7ZbPTPFugh-244GSRqMvdHsGUG6nmz2gE,17892
  97 +pip/_internal/index/package_finder.py,sha256=2Rg75AOpLj8BN1jyL8EI-Iw-Hv6ibJkrYVARCht3bX8,37542
  98 +pip/_internal/legacy_resolve.py,sha256=L7R72I7CjVgJlPTggmA1j4b-H8NmxNu_dKVhrpGXGps,16277
  99 +pip/_internal/locations.py,sha256=VifFEqhc7FWFV8QGoEM3CpECRY8Doq7kTytytxsEgx0,6734
  100 +pip/_internal/main.py,sha256=IVBnUQ-FG7DK6617uEXRB5_QJqspAsBFmTmTesYkbdQ,437
  101 +pip/_internal/models/__init__.py,sha256=3DHUd_qxpPozfzouoqa9g9ts1Czr5qaHfFxbnxriepM,63
  102 +pip/_internal/models/__pycache__/__init__.cpython-38.pyc,,
  103 +pip/_internal/models/__pycache__/candidate.cpython-38.pyc,,
  104 +pip/_internal/models/__pycache__/format_control.cpython-38.pyc,,
  105 +pip/_internal/models/__pycache__/index.cpython-38.pyc,,
  106 +pip/_internal/models/__pycache__/link.cpython-38.pyc,,
  107 +pip/_internal/models/__pycache__/scheme.cpython-38.pyc,,
  108 +pip/_internal/models/__pycache__/search_scope.cpython-38.pyc,,
  109 +pip/_internal/models/__pycache__/selection_prefs.cpython-38.pyc,,
  110 +pip/_internal/models/__pycache__/target_python.cpython-38.pyc,,
  111 +pip/_internal/models/__pycache__/wheel.cpython-38.pyc,,
  112 +pip/_internal/models/candidate.py,sha256=Y58Bcm6oXUj0iS-yhmerlGo5CQJI2p0Ww9h6hR9zQDw,1150
  113 +pip/_internal/models/format_control.py,sha256=ICzVjjGwfZYdX-eLLKHjMHLutEJlAGpfj09OG_eMqac,2673
  114 +pip/_internal/models/index.py,sha256=K59A8-hVhBM20Xkahr4dTwP7OjkJyEqXH11UwHFVgqM,1060
  115 +pip/_internal/models/link.py,sha256=y0H2ZOk0P6d1lfGUL2Pl09xFgZcRt5HwN2LElMifOpI,6827
  116 +pip/_internal/models/scheme.py,sha256=vvhBrrno7eVDXcdKHiZWwxhPHf4VG5uSCEkC0QDR2RU,679
  117 +pip/_internal/models/search_scope.py,sha256=2LXbU4wV8LwqdtXQXNXFYKv-IxiDI_QwSz9ZgbwtAfk,3898
  118 +pip/_internal/models/selection_prefs.py,sha256=rPeif2KKjhTPXeMoQYffjqh10oWpXhdkxRDaPT1HO8k,1908
  119 +pip/_internal/models/target_python.py,sha256=c-cFi6zCuo5HYbXNS3rVVpKRaHVh5yQlYEjEW23SidQ,3799
  120 +pip/_internal/models/wheel.py,sha256=6KLuLKH5b0C5goWQXGSISRaq2UZtkHUEAU1y1Zsrwms,2766
  121 +pip/_internal/network/__init__.py,sha256=jf6Tt5nV_7zkARBrKojIXItgejvoegVJVKUbhAa5Ioc,50
  122 +pip/_internal/network/__pycache__/__init__.cpython-38.pyc,,
  123 +pip/_internal/network/__pycache__/auth.cpython-38.pyc,,
  124 +pip/_internal/network/__pycache__/cache.cpython-38.pyc,,
  125 +pip/_internal/network/__pycache__/download.cpython-38.pyc,,
  126 +pip/_internal/network/__pycache__/session.cpython-38.pyc,,
  127 +pip/_internal/network/__pycache__/utils.cpython-38.pyc,,
  128 +pip/_internal/network/__pycache__/xmlrpc.cpython-38.pyc,,
  129 +pip/_internal/network/auth.py,sha256=K3G1ukKb3PiH8w_UnpXTz8qQsTULO-qdbfOE9zTo1fE,11119
  130 +pip/_internal/network/cache.py,sha256=51CExcRkXWrgMZ7WsrZ6cmijKfViD5tVgKbBvJHO1IE,2394
  131 +pip/_internal/network/download.py,sha256=3D9vdJmVwmCUMxzC-TaVI_GvVOpQna3BLEYNPCSx3Fc,6260
  132 +pip/_internal/network/session.py,sha256=u1IXQfv21R1xv86ulyiB58-be4sYm90eFB0Wp8fVMYw,14702
  133 +pip/_internal/network/utils.py,sha256=iiixo1OeaQ3niUWiBjg59PN6f1w7vvTww1vFriTD_IU,1959
  134 +pip/_internal/network/xmlrpc.py,sha256=AL115M3vFJ8xiHVJneb8Hi0ZFeRvdPhblC89w25OG5s,1597
  135 +pip/_internal/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  136 +pip/_internal/operations/__pycache__/__init__.cpython-38.pyc,,
  137 +pip/_internal/operations/__pycache__/check.cpython-38.pyc,,
  138 +pip/_internal/operations/__pycache__/freeze.cpython-38.pyc,,
  139 +pip/_internal/operations/__pycache__/prepare.cpython-38.pyc,,
  140 +pip/_internal/operations/build/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  141 +pip/_internal/operations/build/__pycache__/__init__.cpython-38.pyc,,
  142 +pip/_internal/operations/build/__pycache__/metadata.cpython-38.pyc,,
  143 +pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-38.pyc,,
  144 +pip/_internal/operations/build/__pycache__/wheel.cpython-38.pyc,,
  145 +pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-38.pyc,,
  146 +pip/_internal/operations/build/metadata.py,sha256=yHMi5gHYXcXyHcvUPWHdO-UyOo3McFWljn_nHfM1O9c,1307
  147 +pip/_internal/operations/build/metadata_legacy.py,sha256=4n6N7BTysqVmEpITzT2UVClyt0Peij_Im8Qm965IWB4,3957
  148 +pip/_internal/operations/build/wheel.py,sha256=ntltdNP6D2Tpr4V0agssu6rE0F9LaBpJkYT6zSdhEbw,1469
  149 +pip/_internal/operations/build/wheel_legacy.py,sha256=DYSxQKutwSZnmNvWkwsl2HzE2XQBxV0i0wTphjtUe90,3349
  150 +pip/_internal/operations/check.py,sha256=a6uHG0daoWpmSPCdL7iYJaGQYZ-CRvPvTnCv2PnIIs0,5353
  151 +pip/_internal/operations/freeze.py,sha256=td4BeRnW10EXFTZrx6VgygO3CrjqD5B9f0BGzjQm-Ew,10180
  152 +pip/_internal/operations/install/__init__.py,sha256=mX7hyD2GNBO2mFGokDQ30r_GXv7Y_PLdtxcUv144e-s,51
  153 +pip/_internal/operations/install/__pycache__/__init__.cpython-38.pyc,,
  154 +pip/_internal/operations/install/__pycache__/editable_legacy.cpython-38.pyc,,
  155 +pip/_internal/operations/install/__pycache__/legacy.cpython-38.pyc,,
  156 +pip/_internal/operations/install/__pycache__/wheel.cpython-38.pyc,,
  157 +pip/_internal/operations/install/editable_legacy.py,sha256=rJ_xs2qtDUjpY2-n6eYlVyZiNoKbOtZXZrYrcnIELt4,1488
  158 +pip/_internal/operations/install/legacy.py,sha256=eBV8gHbO9sBlBc-4nuR3Sd2nikHgEcnC9khfeLiypio,4566
  159 +pip/_internal/operations/install/wheel.py,sha256=xdCjH6uIUyg39Pf8tUaMFUN4a7eozJAFMb_wKcgQlsY,23012
  160 +pip/_internal/operations/prepare.py,sha256=ro2teBlbBpkRJhBKraP9CoJgVLpueSk62ziWhRToXww,20942
  161 +pip/_internal/pep425tags.py,sha256=SlIQokevkoKnXhoK3PZvXiDoj8hFKoJ7thDifDtga3k,5490
  162 +pip/_internal/pyproject.py,sha256=VJKsrXORGiGoDPVKCQhuu4tWlQSTOhoiRlVLRNu4rx4,7400
  163 +pip/_internal/req/__init__.py,sha256=UVaYPlHZVGRBQQPjvGC_6jJDQtewXm0ws-8Lxhg_TiY,2671
  164 +pip/_internal/req/__pycache__/__init__.cpython-38.pyc,,
  165 +pip/_internal/req/__pycache__/constructors.cpython-38.pyc,,
  166 +pip/_internal/req/__pycache__/req_file.cpython-38.pyc,,
  167 +pip/_internal/req/__pycache__/req_install.cpython-38.pyc,,
  168 +pip/_internal/req/__pycache__/req_set.cpython-38.pyc,,
  169 +pip/_internal/req/__pycache__/req_tracker.cpython-38.pyc,,
  170 +pip/_internal/req/__pycache__/req_uninstall.cpython-38.pyc,,
  171 +pip/_internal/req/constructors.py,sha256=w5-kWWVCqlSqcIBitw86yq7XGMPpKrHDfQZSE2mJ_xc,14388
  172 +pip/_internal/req/req_file.py,sha256=ECqRUicCw5Y08R1YynZAAp8dSKQhDXoc1Q-mY3a9b6I,18485
  173 +pip/_internal/req/req_install.py,sha256=wjsIr4lDpbVSLqANKJI9mXwRVHaRxcnj8q30UiHoLRA,30442
  174 +pip/_internal/req/req_set.py,sha256=GsrKmupRKhNMhjkofVfCEHEHfgEvYBxClaQH5xLBQHg,8066
  175 +pip/_internal/req/req_tracker.py,sha256=27fvVG8Y2MJS1KpU2rBMnQyUEMHG4lkHT_bzbzQK-c0,4723
  176 +pip/_internal/req/req_uninstall.py,sha256=DWnOsuyYGju6-sylyoCm7GtUNevn9qMAVhjAGLcdXUE,23609
  177 +pip/_internal/self_outdated_check.py,sha256=3KO1pTJUuYaiV9X0t87I9PimkGL82HbhLWbocqKZpBU,8009
  178 +pip/_internal/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  179 +pip/_internal/utils/__pycache__/__init__.cpython-38.pyc,,
  180 +pip/_internal/utils/__pycache__/appdirs.cpython-38.pyc,,
  181 +pip/_internal/utils/__pycache__/compat.cpython-38.pyc,,
  182 +pip/_internal/utils/__pycache__/deprecation.cpython-38.pyc,,
  183 +pip/_internal/utils/__pycache__/distutils_args.cpython-38.pyc,,
  184 +pip/_internal/utils/__pycache__/encoding.cpython-38.pyc,,
  185 +pip/_internal/utils/__pycache__/entrypoints.cpython-38.pyc,,
  186 +pip/_internal/utils/__pycache__/filesystem.cpython-38.pyc,,
  187 +pip/_internal/utils/__pycache__/filetypes.cpython-38.pyc,,
  188 +pip/_internal/utils/__pycache__/glibc.cpython-38.pyc,,
  189 +pip/_internal/utils/__pycache__/hashes.cpython-38.pyc,,
  190 +pip/_internal/utils/__pycache__/inject_securetransport.cpython-38.pyc,,
  191 +pip/_internal/utils/__pycache__/logging.cpython-38.pyc,,
  192 +pip/_internal/utils/__pycache__/marker_files.cpython-38.pyc,,
  193 +pip/_internal/utils/__pycache__/misc.cpython-38.pyc,,
  194 +pip/_internal/utils/__pycache__/models.cpython-38.pyc,,
  195 +pip/_internal/utils/__pycache__/packaging.cpython-38.pyc,,
  196 +pip/_internal/utils/__pycache__/pkg_resources.cpython-38.pyc,,
  197 +pip/_internal/utils/__pycache__/setuptools_build.cpython-38.pyc,,
  198 +pip/_internal/utils/__pycache__/subprocess.cpython-38.pyc,,
  199 +pip/_internal/utils/__pycache__/temp_dir.cpython-38.pyc,,
  200 +pip/_internal/utils/__pycache__/typing.cpython-38.pyc,,
  201 +pip/_internal/utils/__pycache__/ui.cpython-38.pyc,,
  202 +pip/_internal/utils/__pycache__/unpacking.cpython-38.pyc,,
  203 +pip/_internal/utils/__pycache__/urls.cpython-38.pyc,,
  204 +pip/_internal/utils/__pycache__/virtualenv.cpython-38.pyc,,
  205 +pip/_internal/utils/__pycache__/wheel.cpython-38.pyc,,
  206 +pip/_internal/utils/appdirs.py,sha256=PVo_7-IQWHa9qNuNbWSFiF2QGqeLbSAR4eLcYYhQ9ek,1307
  207 +pip/_internal/utils/compat.py,sha256=D7FKGLBdQwWH-dHIGaoWMawDZWBYApvtJVL1kFPJ930,8869
  208 +pip/_internal/utils/deprecation.py,sha256=pBnNogoA4UGTxa_JDnPXBRRYpKMbExAhXpBwAwklOBs,3318
  209 +pip/_internal/utils/distutils_args.py,sha256=a56mblNxk9BGifbpEETG61mmBrqhjtjRkJ4HYn-oOEE,1350
  210 +pip/_internal/utils/encoding.py,sha256=hxZz0t3Whw3d4MHQEiofxalTlfKwxFdLc8fpeGfhKo8,1320
  211 +pip/_internal/utils/entrypoints.py,sha256=vHcNpnksCv6mllihU6hfifdsKPEjwcaJ1aLIXEaynaU,1152
  212 +pip/_internal/utils/filesystem.py,sha256=PXa3vMcz4mbEKtkD0joFI8pBwddLQxhfPFOkVH5xjfE,5255
  213 +pip/_internal/utils/filetypes.py,sha256=R2FwzoeX7b-rZALOXx5cuO8VPPMhUQ4ne7wm3n3IcWA,571
  214 +pip/_internal/utils/glibc.py,sha256=LOeNGgawCKS-4ke9fii78fwXD73dtNav3uxz1Bf-Ab8,3297
  215 +pip/_internal/utils/hashes.py,sha256=my-wSnAWEDvl_8rQaOQcVIWjwh1-f_QiEvGy9TPf53U,3942
  216 +pip/_internal/utils/inject_securetransport.py,sha256=M17ZlFVY66ApgeASVjKKLKNz0LAfk-SyU0HZ4ZB6MmI,810
  217 +pip/_internal/utils/logging.py,sha256=aJL7NldPhS5KGFof6Qt3o3MG5cjm5TOoo7bGRu9_wsg,13033
  218 +pip/_internal/utils/marker_files.py,sha256=CO5djQlrPIozJpJybViH_insoAaBGY1aqEt6-cC-iW0,741
  219 +pip/_internal/utils/misc.py,sha256=uIb58Hiu_g2HRORo2aMcgnW_7R5d-5wUAuoW0fA2ZME,26085
  220 +pip/_internal/utils/models.py,sha256=IA0hw_T4awQzui0kqfIEASm5yLtgZAB08ag59Nip5G8,1148
  221 +pip/_internal/utils/packaging.py,sha256=VtiwcAAL7LBi7tGL2je7LeW4bE11KMHGCsJ1NZY5XtM,3035
  222 +pip/_internal/utils/pkg_resources.py,sha256=ZX-k7V5q_aNWyDse92nN7orN1aCpRLsaxzpkBZ1XKzU,1254
  223 +pip/_internal/utils/setuptools_build.py,sha256=DouaVolV9olDDFIIN9IszaL-FHdNaZt10ufOZFH9ZAU,5070
  224 +pip/_internal/utils/subprocess.py,sha256=Ph3x5eHQBxFotyGhpZN8asSMBud-BBkmgaNfARG-di8,9922
  225 +pip/_internal/utils/temp_dir.py,sha256=87Ib8aNic_hoSDEmUYJHTQIn5-prL2AYL5u_yZ3s4sI,7768
  226 +pip/_internal/utils/typing.py,sha256=xkYwOeHlf4zsHXBDC4310HtEqwhQcYXFPq2h35Tcrl0,1401
  227 +pip/_internal/utils/ui.py,sha256=0FNxXlGtbpPtTviv2oXS9t8bQG_NBdfUgP4GbubhS9U,13911
  228 +pip/_internal/utils/unpacking.py,sha256=M944JTSiapBOSKLWu7lbawpVHSE7flfzZTEr3TAG7v8,9438
  229 +pip/_internal/utils/urls.py,sha256=aNV9wq5ClUmrz6sG-al7hEWJ4ToitOy7l82CmFGFNW8,1481
  230 +pip/_internal/utils/virtualenv.py,sha256=Q3S1WPlI7JWpGOT2jUVJ8l2chm_k7VPJ9cHA_cUluEU,3396
  231 +pip/_internal/utils/wheel.py,sha256=grTRwZtMQwApwbbSPmRVLtac6FKy6SVKeCXNkWyyePA,7302
  232 +pip/_internal/vcs/__init__.py,sha256=viJxJRqRE_mVScum85bgQIXAd6o0ozFt18VpC-qIJrM,617
  233 +pip/_internal/vcs/__pycache__/__init__.cpython-38.pyc,,
  234 +pip/_internal/vcs/__pycache__/bazaar.cpython-38.pyc,,
  235 +pip/_internal/vcs/__pycache__/git.cpython-38.pyc,,
  236 +pip/_internal/vcs/__pycache__/mercurial.cpython-38.pyc,,
  237 +pip/_internal/vcs/__pycache__/subversion.cpython-38.pyc,,
  238 +pip/_internal/vcs/__pycache__/versioncontrol.cpython-38.pyc,,
  239 +pip/_internal/vcs/bazaar.py,sha256=84q1-kj1_nJ9AMzMu8RmMp-riRZu81M7K9kowcYgi3U,3957
  240 +pip/_internal/vcs/git.py,sha256=CdLz3DTsZsLMLPZpEuUwiS40npvDaVB1CNRzoXgcuJQ,14352
  241 +pip/_internal/vcs/mercurial.py,sha256=2mg7BdYI_Fe00fF6omaNccFQLPHBsDBG5CAEzvqn5sA,5110
  242 +pip/_internal/vcs/subversion.py,sha256=Fpwy71AmuqXnoKi6h1SrXRtPjEMn8fieuM1O4j01IBg,12292
  243 +pip/_internal/vcs/versioncontrol.py,sha256=nqoaM1_rzx24WnHtihXA8RcPpnUae0sV2sR_LS_5HFA,22600
  244 +pip/_internal/wheel_builder.py,sha256=gr9jE14W5ZuYblpldo-tpRuyG0e0AVmHLttImuAvXlE,9441
  245 +pip/_vendor/__init__.py,sha256=RcHf8jwLPL0ZEaa6uMhTSfyCrA_TpWgDWAW5br9xD7Y,4975
  246 +pip/_vendor/__pycache__/__init__.cpython-38.pyc,,
... ...
  1 +Wheel-Version: 1.0
  2 +Generator: bdist_wheel (0.34.2)
  3 +Root-Is-Purelib: true
  4 +Tag: py2-none-any
  5 +Tag: py3-none-any
  6 +
... ...
  1 +[console_scripts]
  2 +pip = pip._internal.cli.main:main
  3 +pip3 = pip._internal.cli.main:main
  4 +pip3.8 = pip._internal.cli.main:main
  5 +
... ...
  1 +from pip._internal.utils.typing import MYPY_CHECK_RUNNING
  2 +
  3 +if MYPY_CHECK_RUNNING:
  4 + from typing import List, Optional
  5 +
  6 +
  7 +__version__ = "20.0.2"
  8 +
  9 +
def main(args=None):
    # type: (Optional[List[str]]) -> int
    """Internal entry point used by pip's own console scripts.

    This is not a public API; see
    https://github.com/pypa/pip/issues/7498 for the rationale behind
    this level of indirection.
    """
    # Imported lazily so that ``import pip`` stays cheap.
    from pip._internal.utils import entrypoints

    return entrypoints._wrapper(args)
... ...
"""Allow pip to be run as ``python -m pip`` or directly from a wheel."""
from __future__ import absolute_import

import os
import sys

# If we are running from a wheel, add the wheel to sys.path
# This allows the usage python pip-*.whl/pip install pip-*.whl
if __package__ == '':
    # __file__ is pip-*.whl/pip/__main__.py
    # first dirname call strips off '/__main__.py', second strips off '/pip'
    # Resulting path is the name of the wheel itself
    # Add that to sys.path so we can import pip
    path = os.path.dirname(os.path.dirname(__file__))
    sys.path.insert(0, path)

# Import must happen *after* the sys.path fix-up above.
from pip._internal.cli.main import main as _main  # isort:skip  # noqa

if __name__ == '__main__':
    # Propagate pip's exit status to the calling shell.
    sys.exit(_main())
... ...
  1 +#!/usr/bin/env python
  2 +import pip._internal.utils.inject_securetransport # noqa
  3 +from pip._internal.utils.typing import MYPY_CHECK_RUNNING
  4 +
  5 +if MYPY_CHECK_RUNNING:
  6 + from typing import Optional, List
  7 +
  8 +
def main(args=None):
    # type: (Optional[List[str]]) -> int
    """Kept for backwards compatibility with old console scripts that
    may still reference this function.

    For additional details, see https://github.com/pypa/pip/issues/7498.
    """
    # Imported lazily to keep module import side-effect free.
    from pip._internal.utils import entrypoints

    return entrypoints._wrapper(args)
... ...
  1 +"""Build Environment used for isolation during sdist building
  2 +"""
  3 +
  4 +# The following comment should be removed at some point in the future.
  5 +# mypy: strict-optional=False
  6 +# mypy: disallow-untyped-defs=False
  7 +
  8 +import logging
  9 +import os
  10 +import sys
  11 +import textwrap
  12 +from collections import OrderedDict
  13 +from distutils.sysconfig import get_python_lib
  14 +from sysconfig import get_paths
  15 +
  16 +from pip._vendor.pkg_resources import Requirement, VersionConflict, WorkingSet
  17 +
  18 +from pip import __file__ as pip_location
  19 +from pip._internal.utils.subprocess import call_subprocess
  20 +from pip._internal.utils.temp_dir import TempDirectory
  21 +from pip._internal.utils.typing import MYPY_CHECK_RUNNING
  22 +from pip._internal.utils.ui import open_spinner
  23 +
  24 +if MYPY_CHECK_RUNNING:
  25 + from typing import Tuple, Set, Iterable, Optional, List
  26 + from pip._internal.index.package_finder import PackageFinder
  27 +
  28 +logger = logging.getLogger(__name__)
  29 +
  30 +
  31 +class _Prefix:
  32 +
  33 + def __init__(self, path):
  34 + # type: (str) -> None
  35 + self.path = path
  36 + self.setup = False
  37 + self.bin_dir = get_paths(
  38 + 'nt' if os.name == 'nt' else 'posix_prefix',
  39 + vars={'base': path, 'platbase': path}
  40 + )['scripts']
  41 + # Note: prefer distutils' sysconfig to get the
  42 + # library paths so PyPy is correctly supported.
  43 + purelib = get_python_lib(plat_specific=False, prefix=path)
  44 + platlib = get_python_lib(plat_specific=True, prefix=path)
  45 + if purelib == platlib:
  46 + self.lib_dirs = [purelib]
  47 + else:
  48 + self.lib_dirs = [purelib, platlib]
  49 +
  50 +
class BuildEnvironment(object):
    """Creates and manages an isolated environment to install build deps
    """

    def __init__(self):
        # type: () -> None
        self._temp_dir = TempDirectory(kind="build-env")

        # Two install prefixes inside the temp dir: 'normal' for regular
        # build requirements and 'overlay', which is placed ahead of
        # 'normal' on PATH / sys.path and therefore takes precedence.
        self._prefixes = OrderedDict((
            (name, _Prefix(os.path.join(self._temp_dir.path, name)))
            for name in ('normal', 'overlay')
        ))

        self._bin_dirs = []  # type: List[str]
        self._lib_dirs = []  # type: List[str]
        # reversed() puts 'overlay' first, giving it higher priority.
        for prefix in reversed(list(self._prefixes.values())):
            self._bin_dirs.append(prefix.bin_dir)
            self._lib_dirs.extend(prefix.lib_dirs)

        # Customize site to:
        # - ensure .pth files are honored
        # - prevent access to system site packages
        system_sites = {
            os.path.normcase(site) for site in (
                get_python_lib(plat_specific=False),
                get_python_lib(plat_specific=True),
            )
        }
        self._site_dir = os.path.join(self._temp_dir.path, 'site')
        if not os.path.exists(self._site_dir):
            os.mkdir(self._site_dir)
        # This sitecustomize.py runs automatically in the subprocess
        # (its directory goes on PYTHONPATH in __enter__): it strips the
        # system site-packages from sys.path and addsitedir()s our lib
        # dirs so their .pth files get processed.
        with open(os.path.join(self._site_dir, 'sitecustomize.py'), 'w') as fp:
            fp.write(textwrap.dedent(
                '''
                import os, site, sys

                # First, drop system-sites related paths.
                original_sys_path = sys.path[:]
                known_paths = set()
                for path in {system_sites!r}:
                    site.addsitedir(path, known_paths=known_paths)
                system_paths = set(
                    os.path.normcase(path)
                    for path in sys.path[len(original_sys_path):]
                )
                original_sys_path = [
                    path for path in original_sys_path
                    if os.path.normcase(path) not in system_paths
                ]
                sys.path = original_sys_path

                # Second, add lib directories.
                # ensuring .pth file are processed.
                for path in {lib_dirs!r}:
                    assert not path in sys.path
                    site.addsitedir(path)
                '''
            ).format(system_sites=system_sites, lib_dirs=self._lib_dirs))

    def __enter__(self):
        """Activate the environment by pointing PATH / PYTHONPATH at it.

        The previous values are saved so ``__exit__`` can restore them.
        """
        self._save_env = {
            name: os.environ.get(name, None)
            for name in ('PATH', 'PYTHONNOUSERSITE', 'PYTHONPATH')
        }

        # Our bin dirs go first so this environment's scripts win.
        path = self._bin_dirs[:]
        old_path = self._save_env['PATH']
        if old_path:
            path.extend(old_path.split(os.pathsep))

        pythonpath = [self._site_dir]

        os.environ.update({
            'PATH': os.pathsep.join(path),
            'PYTHONNOUSERSITE': '1',
            'PYTHONPATH': os.pathsep.join(pythonpath),
        })

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Restore the variables saved in __enter__, removing any that
        # were previously unset (saved value None).
        for varname, old_value in self._save_env.items():
            if old_value is None:
                os.environ.pop(varname, None)
            else:
                os.environ[varname] = old_value

    def cleanup(self):
        # type: () -> None
        """Remove the temporary directory backing this environment."""
        self._temp_dir.cleanup()

    def check_requirements(self, reqs):
        # type: (Iterable[str]) -> Tuple[Set[Tuple[str, str]], Set[str]]
        """Return 2 sets:
        - conflicting requirements: set of (installed, wanted) reqs tuples
        - missing requirements: set of reqs
        """
        missing = set()
        conflicting = set()
        if reqs:
            # Inspect only this environment's lib dirs, not the host's.
            ws = WorkingSet(self._lib_dirs)
            for req in reqs:
                try:
                    if ws.find(Requirement.parse(req)) is None:
                        missing.add(req)
                except VersionConflict as e:
                    conflicting.add((str(e.args[0].as_requirement()),
                                     str(e.args[1])))
        return conflicting, missing

    def install_requirements(
        self,
        finder,  # type: PackageFinder
        requirements,  # type: Iterable[str]
        prefix_as_string,  # type: str
        message  # type: Optional[str]
    ):
        # type: (...) -> None
        """Install ``requirements`` into the named prefix via a pip
        subprocess, forwarding the finder's index/format settings so the
        child resolves packages the same way as the parent.
        """
        prefix = self._prefixes[prefix_as_string]
        # A prefix may be populated at most once.
        assert not prefix.setup
        prefix.setup = True
        if not requirements:
            return
        # Invoke pip by package directory so the child runs exactly this
        # pip, regardless of what is first on PATH.
        args = [
            sys.executable, os.path.dirname(pip_location), 'install',
            '--ignore-installed', '--no-user', '--prefix', prefix.path,
            '--no-warn-script-location',
        ]  # type: List[str]
        if logger.getEffectiveLevel() <= logging.DEBUG:
            args.append('-v')
        for format_control in ('no_binary', 'only_binary'):
            formats = getattr(finder.format_control, format_control)
            args.extend(('--' + format_control.replace('_', '-'),
                         ','.join(sorted(formats or {':none:'}))))

        index_urls = finder.index_urls
        if index_urls:
            args.extend(['-i', index_urls[0]])
            for extra_index in index_urls[1:]:
                args.extend(['--extra-index-url', extra_index])
        else:
            args.append('--no-index')
        for link in finder.find_links:
            args.extend(['--find-links', link])

        for host in finder.trusted_hosts:
            args.extend(['--trusted-host', host])
        if finder.allow_all_prereleases:
            args.append('--pre')
        # '--' stops option parsing so requirement strings that start
        # with '-' cannot be interpreted as options.
        args.append('--')
        args.extend(requirements)
        with open_spinner(message) as spinner:
            call_subprocess(args, spinner=spinner)
  202 +
  203 +
class NoOpBuildEnvironment(BuildEnvironment):
    """Drop-in replacement for BuildEnvironment that does nothing.

    Used when build isolation is disabled: entering, exiting and cleanup
    are all no-ops, and installing requirements into it is a programming
    error.
    """

    def __init__(self):
        # Deliberately skip BuildEnvironment.__init__: no temp dir,
        # no prefixes, no sitecustomize.
        pass

    def __enter__(self):
        pass

    def __exit__(self, exc_type, exc_val, exc_tb):
        pass

    def cleanup(self):
        pass

    def install_requirements(self, finder, requirements, prefix, message):
        raise NotImplementedError()
... ...
  1 +"""Cache Management
  2 +"""
  3 +
  4 +# The following comment should be removed at some point in the future.
  5 +# mypy: strict-optional=False
  6 +
  7 +import hashlib
  8 +import json
  9 +import logging
  10 +import os
  11 +
  12 +from pip._vendor.packaging.tags import interpreter_name, interpreter_version
  13 +from pip._vendor.packaging.utils import canonicalize_name
  14 +
  15 +from pip._internal.exceptions import InvalidWheelFilename
  16 +from pip._internal.models.link import Link
  17 +from pip._internal.models.wheel import Wheel
  18 +from pip._internal.utils.temp_dir import TempDirectory
  19 +from pip._internal.utils.typing import MYPY_CHECK_RUNNING
  20 +from pip._internal.utils.urls import path_to_url
  21 +
  22 +if MYPY_CHECK_RUNNING:
  23 + from typing import Optional, Set, List, Any, Dict
  24 +
  25 + from pip._vendor.packaging.tags import Tag
  26 +
  27 + from pip._internal.models.format_control import FormatControl
  28 +
  29 +logger = logging.getLogger(__name__)
  30 +
  31 +
  32 +def _hash_dict(d):
  33 + # type: (Dict[str, str]) -> str
  34 + """Return a stable sha224 of a dictionary."""
  35 + s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True)
  36 + return hashlib.sha224(s.encode("ascii")).hexdigest()
  37 +
  38 +
class Cache(object):
    """An abstract class - provides cache directories for data from links

    Subclasses implement the link-to-path mapping and the lookup; this
    base class provides the stable cache-key computation they share.

    :param cache_dir: The root of the cache.
    :param format_control: An object of FormatControl class to limit
        binaries being read from the cache.
    :param allowed_formats: which formats of files the cache should store.
        ('binary' and 'source' are the only allowed values)
    """

    def __init__(self, cache_dir, format_control, allowed_formats):
        # type: (str, FormatControl, Set[str]) -> None
        super(Cache, self).__init__()
        # An empty cache_dir means caching is disabled: it is normalized
        # to None and _get_candidates short-circuits on it.
        assert not cache_dir or os.path.isabs(cache_dir)
        self.cache_dir = cache_dir or None
        self.format_control = format_control
        self.allowed_formats = allowed_formats

        _valid_formats = {"source", "binary"}
        assert self.allowed_formats.union(_valid_formats) == _valid_formats

    def _get_cache_path_parts_legacy(self, link):
        # type: (Link) -> List[str]
        """Get parts of path that must be os.path.joined with cache_dir

        Legacy cache key (pip < 20) for compatibility with older caches.
        """

        # We want to generate an url to use as our cache key, we don't want to
        # just re-use the URL because it might have other items in the fragment
        # and we don't care about those.
        key_parts = [link.url_without_fragment]
        if link.hash_name is not None and link.hash is not None:
            key_parts.append("=".join([link.hash_name, link.hash]))
        key_url = "#".join(key_parts)

        # Encode our key url with sha224, we'll use this because it has similar
        # security properties to sha256, but with a shorter total output (and
        # thus less secure). However the differences don't make a lot of
        # difference for our use case here.
        hashed = hashlib.sha224(key_url.encode()).hexdigest()

        # We want to nest the directories some to prevent having a ton of top
        # level directories where we might run out of sub directories on some
        # FS.
        parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]

        return parts

    def _get_cache_path_parts(self, link):
        # type: (Link) -> List[str]
        """Get parts of path that must be os.path.joined with cache_dir
        """

        # We want to generate an url to use as our cache key, we don't want to
        # just re-use the URL because it might have other items in the fragment
        # and we don't care about those.
        key_parts = {"url": link.url_without_fragment}
        if link.hash_name is not None and link.hash is not None:
            key_parts[link.hash_name] = link.hash
        if link.subdirectory_fragment:
            key_parts["subdirectory"] = link.subdirectory_fragment

        # Include interpreter name, major and minor version in cache key
        # to cope with ill-behaved sdists that build a different wheel
        # depending on the python version their setup.py is being run on,
        # and don't encode the difference in compatibility tags.
        # https://github.com/pypa/pip/issues/7296
        key_parts["interpreter_name"] = interpreter_name()
        key_parts["interpreter_version"] = interpreter_version()

        # Encode our key url with sha224, we'll use this because it has similar
        # security properties to sha256, but with a shorter total output (and
        # thus less secure). However the differences don't make a lot of
        # difference for our use case here.
        hashed = _hash_dict(key_parts)

        # We want to nest the directories some to prevent having a ton of top
        # level directories where we might run out of sub directories on some
        # FS.
        parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]

        return parts

    def _get_candidates(self, link, canonical_package_name):
        # type: (Link, Optional[str]) -> List[Any]
        """Return (filename, directory) pairs of cached files for link.

        Checks both the current cache location and — for compatibility —
        the legacy (pip < 20) location.
        """
        can_not_cache = (
            not self.cache_dir or
            not canonical_package_name or
            not link
        )
        if can_not_cache:
            return []

        # Respect --no-binary / --only-binary settings for this package.
        formats = self.format_control.get_allowed_formats(
            canonical_package_name
        )
        if not self.allowed_formats.intersection(formats):
            return []

        candidates = []
        path = self.get_path_for_link(link)
        if os.path.isdir(path):
            for candidate in os.listdir(path):
                candidates.append((candidate, path))
        # TODO remove legacy path lookup in pip>=21
        legacy_path = self.get_path_for_link_legacy(link)
        if os.path.isdir(legacy_path):
            for candidate in os.listdir(legacy_path):
                candidates.append((candidate, legacy_path))
        return candidates

    def get_path_for_link_legacy(self, link):
        # type: (Link) -> str
        # Subclasses map a link to its pre-pip-20 cache directory.
        raise NotImplementedError()

    def get_path_for_link(self, link):
        # type: (Link) -> str
        """Return a directory to store cached items in for link.
        """
        raise NotImplementedError()

    def get(
        self,
        link,  # type: Link
        package_name,  # type: Optional[str]
        supported_tags,  # type: List[Tag]
    ):
        # type: (...) -> Link
        """Returns a link to a cached item if it exists, otherwise returns the
        passed link.
        """
        raise NotImplementedError()

    def cleanup(self):
        # type: () -> None
        # Default: nothing to clean up; subclasses may override.
        pass
  177 +
  178 +
class SimpleWheelCache(Cache):
    """A cache of wheels for future installs.
    """

    def __init__(self, cache_dir, format_control):
        # type: (str, FormatControl) -> None
        super(SimpleWheelCache, self).__init__(
            cache_dir, format_control, {"binary"}
        )

    def get_path_for_link_legacy(self, link):
        # type: (Link) -> str
        """Directory for this link under the pre-pip-20 key scheme."""
        return os.path.join(
            self.cache_dir, "wheels", *self._get_cache_path_parts_legacy(link)
        )

    def get_path_for_link(self, link):
        # type: (Link) -> str
        """Return a directory to store cached wheels for link

        Because there are M wheels for any one sdist, we provide a directory
        to cache them in, and then consult that directory when looking up
        cache hits.

        We only insert things into the cache if they have plausible version
        numbers, so that we don't contaminate the cache with things that were
        not unique. E.g. ./package might have dozens of installs done for it
        and build a version of 0.0...and if we built and cached a wheel, we'd
        end up using the same wheel even if the source has been edited.

        :param link: The link of the sdist for which this will cache wheels.
        """
        # Wheels live under <cache_dir>/wheels/<nested key parts>.
        return os.path.join(
            self.cache_dir, "wheels", *self._get_cache_path_parts(link)
        )

    def get(
        self,
        link,  # type: Link
        package_name,  # type: Optional[str]
        supported_tags,  # type: List[Tag]
    ):
        # type: (...) -> Link
        """Return a link to the best cached wheel for ``link``, or
        ``link`` itself when nothing usable is cached.
        """
        if not package_name:
            return link

        canonical_name = canonicalize_name(package_name)
        usable = []
        for wheel_name, wheel_dir in self._get_candidates(
            link, canonical_name
        ):
            try:
                wheel = Wheel(wheel_name)
            except InvalidWheelFilename:
                # Not a wheel file at all; skip it.
                continue
            if canonicalize_name(wheel.name) != canonical_name:
                logger.debug(
                    "Ignoring cached wheel {} for {} as it "
                    "does not match the expected distribution name {}.".format(
                        wheel_name, link, package_name
                    )
                )
                continue
            if not wheel.supported(supported_tags):
                # Built for a different python/arch/etc
                continue
            # Rank by tag preference; min() below picks the best match.
            usable.append(
                (wheel.support_index_min(supported_tags), wheel_name, wheel_dir)
            )

        if not usable:
            return link

        _, best_name, best_dir = min(usable)
        return Link(path_to_url(os.path.join(best_dir, best_name)))
  259 +
  260 +
class EphemWheelCache(SimpleWheelCache):
    """A SimpleWheelCache backed by its own temporary directory.

    The directory — and any wheels cached in it — lives only for the
    duration of this process and is removed by ``cleanup()``.
    """

    def __init__(self, format_control):
        # type: (FormatControl) -> None
        self._temp_dir = TempDirectory(kind="ephem-wheel-cache")
        super(EphemWheelCache, self).__init__(
            self._temp_dir.path, format_control
        )

    def cleanup(self):
        # type: () -> None
        self._temp_dir.cleanup()
  276 +
  277 +
class WheelCache(Cache):
    """Wraps EphemWheelCache and SimpleWheelCache into a single Cache

    Lookups consult the persistent SimpleWheelCache first and gracefully
    fall back to the ephemeral per-process cache on a miss.
    """

    def __init__(self, cache_dir, format_control):
        # type: (str, FormatControl) -> None
        super(WheelCache, self).__init__(
            cache_dir, format_control, {'binary'}
        )
        self._wheel_cache = SimpleWheelCache(cache_dir, format_control)
        self._ephem_cache = EphemWheelCache(format_control)

    def get_path_for_link_legacy(self, link):
        # type: (Link) -> str
        return self._wheel_cache.get_path_for_link_legacy(link)

    def get_path_for_link(self, link):
        # type: (Link) -> str
        return self._wheel_cache.get_path_for_link(link)

    def get_ephem_path_for_link(self, link):
        # type: (Link) -> str
        return self._ephem_cache.get_path_for_link(link)

    def get(
        self,
        link,  # type: Link
        package_name,  # type: Optional[str]
        supported_tags,  # type: List[Tag]
    ):
        # type: (...) -> Link
        """Look up ``link``, preferring the persistent cache."""
        persistent = self._wheel_cache.get(
            link=link,
            package_name=package_name,
            supported_tags=supported_tags,
        )
        # Identity comparison: get() returns the passed link on a miss.
        if persistent is not link:
            return persistent
        return self._ephem_cache.get(
            link=link,
            package_name=package_name,
            supported_tags=supported_tags,
        )

    def cleanup(self):
        # type: () -> None
        self._wheel_cache.cleanup()
        self._ephem_cache.cleanup()
... ...
  1 +"""Subpackage containing all of pip's command line interface related code
  2 +"""
  3 +
  4 +# This file intentionally does not import submodules
... ...
  1 +"""Logic that powers autocompletion installed by ``pip completion``.
  2 +"""
  3 +
  4 +import optparse
  5 +import os
  6 +import sys
  7 +from itertools import chain
  8 +
  9 +from pip._internal.cli.main_parser import create_main_parser
  10 +from pip._internal.commands import commands_dict, create_command
  11 +from pip._internal.utils.misc import get_installed_distributions
  12 +from pip._internal.utils.typing import MYPY_CHECK_RUNNING
  13 +
  14 +if MYPY_CHECK_RUNNING:
  15 + from typing import Any, Iterable, List, Optional
  16 +
  17 +
def autocomplete():
    # type: () -> None
    """Entry Point for completion of main and subcommand options.

    Reads the COMP_WORDS/COMP_CWORD protocol variables set by the shell
    completion script, prints candidate completions to stdout, and exits
    the process (the exit status is ignored by the completion machinery).
    """
    # Don't complete if user hasn't sourced bash_completion file.
    if 'PIP_AUTO_COMPLETE' not in os.environ:
        return
    # Drop the leading 'pip' word; cword indexes the word being completed.
    cwords = os.environ['COMP_WORDS'].split()[1:]
    cword = int(os.environ['COMP_CWORD'])
    try:
        current = cwords[cword - 1]
    except IndexError:
        # Cursor is past the last word: complete from an empty prefix.
        current = ''

    parser = create_main_parser()
    subcommands = list(commands_dict)
    options = []

    # subcommand
    subcommand_name = None  # type: Optional[str]
    for word in cwords:
        if word in subcommands:
            subcommand_name = word
            break
    # subcommand options
    if subcommand_name is not None:
        # special case: 'help' subcommand has no options
        if subcommand_name == 'help':
            sys.exit(1)
        # special case: list locally installed dists for show and uninstall
        should_list_installed = (
            subcommand_name in ['show', 'uninstall'] and
            not current.startswith('-')
        )
        if should_list_installed:
            installed = []
            lc = current.lower()
            for dist in get_installed_distributions(local_only=True):
                if dist.key.startswith(lc) and dist.key not in cwords[1:]:
                    installed.append(dist.key)
            # if there are no dists installed, fall back to option completion
            if installed:
                for dist in installed:
                    print(dist)
                sys.exit(1)

        subcommand = create_command(subcommand_name)

        # Collect every non-hidden option string with its arg count.
        for opt in subcommand.parser.option_list_all:
            if opt.help != optparse.SUPPRESS_HELP:
                for opt_str in opt._long_opts + opt._short_opts:
                    options.append((opt_str, opt.nargs))

        # filter out previously specified options from available options
        prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]]
        options = [(x, v) for (x, v) in options if x not in prev_opts]
        # filter options by current input
        options = [(k, v) for k, v in options if k.startswith(current)]
        # get completion type given cwords and available subcommand options
        completion_type = get_path_completion_type(
            cwords, cword, subcommand.parser.option_list_all,
        )
        # get completion files and directories if ``completion_type`` is
        # ``<file>``, ``<dir>`` or ``<path>``
        if completion_type:
            paths = auto_complete_paths(current, completion_type)
            options = [(path, 0) for path in paths]
        for option in options:
            opt_label = option[0]
            # append '=' to options which require args
            if option[1] and option[0][:2] == "--":
                opt_label += '='
            print(opt_label)
    else:
        # show main parser options only when necessary

        opts = [i.option_list for i in parser.option_groups]
        opts.append(parser.option_list)
        flattened_opts = chain.from_iterable(opts)
        if current.startswith('-'):
            # Completing a global option: offer all non-hidden flags.
            for opt in flattened_opts:
                if opt.help != optparse.SUPPRESS_HELP:
                    subcommands += opt._long_opts + opt._short_opts
        else:
            # get completion type given cwords and all available options
            completion_type = get_path_completion_type(cwords, cword,
                                                       flattened_opts)
            if completion_type:
                subcommands = list(auto_complete_paths(current,
                                                       completion_type))

        print(' '.join([x for x in subcommands if x.startswith(current)]))
    sys.exit(1)
  111 +
  112 +
def get_path_completion_type(cwords, cword, opts):
    # type: (List[str], int, Iterable[Any]) -> Optional[str]
    """Return the path completion type for the word being completed.

    :param cwords: same as the environmental variable ``COMP_WORDS``
    :param cword: same as the environmental variable ``COMP_CWORD``
    :param opts: the available options to check
    :return: path completion type (``file``, ``dir``, ``path`` or None)
    """
    # Path completion only makes sense directly after an option flag.
    if cword < 2:
        return None
    previous = cwords[cword - 2]
    if not previous.startswith('-'):
        return None

    # Strip any "=value" part so "--log=/tmp" matches "--log".
    flag = previous.split('=')[0]
    path_kinds = ('path', 'file', 'dir')
    for opt in opts:
        if opt.help == optparse.SUPPRESS_HELP:
            continue
        # str(opt) renders as e.g. "-f/--find-links"; match any spelling.
        if flag not in str(opt).split('/'):
            continue
        if not opt.metavar:
            return opt.metavar
        if any(part in path_kinds for part in opt.metavar.split('/')):
            return opt.metavar
    return None
  134 +
  135 +
def auto_complete_paths(current, completion_type):
    # type: (str, str) -> Iterable[str]
    """Generate filesystem completions for the partial path ``current``.

    Regular files are offered only when ``completion_type`` is ``file`` or
    ``path``; directories are offered for every completion type and are
    yielded with a trailing separator so completion continues inside them.

    :param current: The word to be completed
    :param completion_type: path completion type (``file``, ``path`` or ``dir``)
    :return: A generator of regular files and/or directories
    """
    directory, filename = os.path.split(current)
    abs_dir = os.path.abspath(directory)
    # Don't complete paths if they can't be accessed.
    if not os.access(abs_dir, os.R_OK):
        return
    prefix = os.path.normcase(filename)
    for entry in os.listdir(abs_dir):
        if not os.path.normcase(entry).startswith(prefix):
            continue
        full_path = os.path.join(abs_dir, entry)
        # The completion shown to the user keeps the original (possibly
        # relative) directory part, normalized for case-insensitive FSes.
        candidate = os.path.normcase(os.path.join(directory, entry))
        if completion_type != 'dir' and os.path.isfile(full_path):
            yield candidate
        elif os.path.isdir(full_path):
            # Trailing separator signals "keep completing inside here".
            yield os.path.join(candidate, '')
... ...
  1 +"""Base Command class, and related routines"""
  2 +
  3 +from __future__ import absolute_import, print_function
  4 +
  5 +import logging
  6 +import logging.config
  7 +import optparse
  8 +import os
  9 +import platform
  10 +import sys
  11 +import traceback
  12 +
  13 +from pip._internal.cli import cmdoptions
  14 +from pip._internal.cli.command_context import CommandContextMixIn
  15 +from pip._internal.cli.parser import (
  16 + ConfigOptionParser,
  17 + UpdatingDefaultsHelpFormatter,
  18 +)
  19 +from pip._internal.cli.status_codes import (
  20 + ERROR,
  21 + PREVIOUS_BUILD_DIR_ERROR,
  22 + SUCCESS,
  23 + UNKNOWN_ERROR,
  24 + VIRTUALENV_NOT_FOUND,
  25 +)
  26 +from pip._internal.exceptions import (
  27 + BadCommand,
  28 + CommandError,
  29 + InstallationError,
  30 + PreviousBuildDirError,
  31 + UninstallationError,
  32 +)
  33 +from pip._internal.utils.deprecation import deprecated
  34 +from pip._internal.utils.filesystem import check_path_owner
  35 +from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging
  36 +from pip._internal.utils.misc import get_prog, normalize_path
  37 +from pip._internal.utils.temp_dir import global_tempdir_manager
  38 +from pip._internal.utils.typing import MYPY_CHECK_RUNNING
  39 +from pip._internal.utils.virtualenv import running_under_virtualenv
  40 +
  41 +if MYPY_CHECK_RUNNING:
  42 + from typing import List, Tuple, Any
  43 + from optparse import Values
  44 +
  45 +__all__ = ['Command']
  46 +
  47 +logger = logging.getLogger(__name__)
  48 +
  49 +
class Command(CommandContextMixIn):
    """Base class for pip subcommands.

    Wires up the option parser, logging, environment validation, and the
    translation of exceptions raised by ``run()`` into the process exit
    codes defined in ``status_codes``.
    """

    usage = None  # type: str
    # When True, the command may run outside a virtualenv even if the user
    # passed --require-virtualenv (used by purely informational commands).
    ignore_require_venv = False  # type: bool

    def __init__(self, name, summary, isolated=False):
        # type: (str, str, bool) -> None
        super(Command, self).__init__()
        parser_kw = {
            'usage': self.usage,
            'prog': '%s %s' % (get_prog(), name),
            'formatter': UpdatingDefaultsHelpFormatter(),
            'add_help_option': False,
            'name': name,
            # The subclass docstring doubles as the command description.
            'description': self.__doc__,
            'isolated': isolated,
        }

        self.name = name
        self.summary = summary
        self.parser = ConfigOptionParser(**parser_kw)

        # Commands should add options to this option group
        optgroup_name = '%s Options' % self.name.capitalize()
        self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)

        # Add the general options
        gen_opts = cmdoptions.make_option_group(
            cmdoptions.general_group,
            self.parser,
        )
        self.parser.add_option_group(gen_opts)

    def handle_pip_version_check(self, options):
        # type: (Values) -> None
        """
        This is a no-op so that commands by default do not do the pip version
        check.
        """
        # Make sure we do the pip version check if the index_group options
        # are present.
        assert not hasattr(options, 'no_index')

    def run(self, options, args):
        # type: (Values, List[Any]) -> Any
        # Subclasses implement the actual work here; expected to return an
        # integer exit status (see the FIXME in _main()).
        raise NotImplementedError

    def parse_args(self, args):
        # type: (List[str]) -> Tuple[Any, Any]
        # factored out for testability
        return self.parser.parse_args(args)

    def main(self, args):
        # type: (List[str]) -> int
        """Public entry point: run the command and return its exit code."""
        try:
            with self.main_context():
                return self._main(args)
        finally:
            # Flush and close logging handlers on every exit path.
            logging.shutdown()

    def _main(self, args):
        # type: (List[str]) -> int
        """Parse args, configure the environment, run, map errors to codes."""
        # Intentionally set as early as possible so globally-managed temporary
        # directories are available to the rest of the code.
        self.enter_context(global_tempdir_manager())

        options, args = self.parse_args(args)

        # Set verbosity so that it can be used elsewhere.
        self.verbosity = options.verbose - options.quiet

        level_number = setup_logging(
            verbosity=self.verbosity,
            no_color=options.no_color,
            user_log_file=options.log,
        )

        # Emit a deprecation warning when running under Python 2.7.
        if (
            sys.version_info[:2] == (2, 7) and
            not options.no_python_version_warning
        ):
            message = (
                "A future version of pip will drop support for Python 2.7. "
                "More details about Python 2 support in pip, can be found at "
                "https://pip.pypa.io/en/latest/development/release-process/#python-2-support"  # noqa
            )
            if platform.python_implementation() == "CPython":
                message = (
                    "Python 2.7 reached the end of its life on January "
                    "1st, 2020. Please upgrade your Python as Python 2.7 "
                    "is no longer maintained. "
                ) + message
            deprecated(message, replacement=None, gone_in=None)

        if options.skip_requirements_regex:
            deprecated(
                "--skip-requirements-regex is unsupported and will be removed",
                replacement=(
                    "manage requirements/constraints files explicitly, "
                    "possibly generating them from metadata"
                ),
                gone_in="20.1",
                issue=7297,
            )

        # TODO: Try to get these passing down from the command?
        # without resorting to os.environ to hold these.
        # This also affects isolated builds and it should.

        if options.no_input:
            os.environ['PIP_NO_INPUT'] = '1'

        if options.exists_action:
            os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action)

        if options.require_venv and not self.ignore_require_venv:
            # If a venv is required check if it can really be found
            if not running_under_virtualenv():
                logger.critical(
                    'Could not find an activated virtualenv (required).'
                )
                sys.exit(VIRTUALENV_NOT_FOUND)

        if options.cache_dir:
            options.cache_dir = normalize_path(options.cache_dir)
            # Disable caching when the cache directory is not usable by the
            # current user (a common situation under "sudo" without -H).
            if not check_path_owner(options.cache_dir):
                logger.warning(
                    "The directory '%s' or its parent directory is not owned "
                    "or is not writable by the current user. The cache "
                    "has been disabled. Check the permissions and owner of "
                    "that directory. If executing pip with sudo, you may want "
                    "sudo's -H flag.",
                    options.cache_dir,
                )
                options.cache_dir = None

        try:
            status = self.run(options, args)
            # FIXME: all commands should return an exit status
            # and when it is done, isinstance is not needed anymore
            if isinstance(status, int):
                return status
        except PreviousBuildDirError as exc:
            logger.critical(str(exc))
            logger.debug('Exception information:', exc_info=True)

            return PREVIOUS_BUILD_DIR_ERROR
        except (InstallationError, UninstallationError, BadCommand) as exc:
            logger.critical(str(exc))
            logger.debug('Exception information:', exc_info=True)

            return ERROR
        except CommandError as exc:
            logger.critical('%s', exc)
            logger.debug('Exception information:', exc_info=True)

            return ERROR
        except BrokenStdoutLoggingError:
            # Bypass our logger and write any remaining messages to stderr
            # because stdout no longer works.
            print('ERROR: Pipe to stdout was broken', file=sys.stderr)
            if level_number <= logging.DEBUG:
                traceback.print_exc(file=sys.stderr)

            return ERROR
        except KeyboardInterrupt:
            logger.critical('Operation cancelled by user')
            logger.debug('Exception information:', exc_info=True)

            return ERROR
        except BaseException:
            logger.critical('Exception:', exc_info=True)

            return UNKNOWN_ERROR
        finally:
            # Runs on every path, including sys.exit() above.
            self.handle_pip_version_check(options)

        return SUCCESS
... ...
  1 +"""
  2 +shared options and groups
  3 +
  4 +The principle here is to define options once, but *not* instantiate them
  5 +globally. One reason being that options with action='append' can carry state
  6 +between parses. pip parses general options twice internally, and shouldn't
  7 +pass on state. To be consistent, all options will follow this design.
  8 +"""
  9 +
  10 +# The following comment should be removed at some point in the future.
  11 +# mypy: strict-optional=False
  12 +
  13 +from __future__ import absolute_import
  14 +
  15 +import logging
  16 +import os
  17 +import textwrap
  18 +import warnings
  19 +from distutils.util import strtobool
  20 +from functools import partial
  21 +from optparse import SUPPRESS_HELP, Option, OptionGroup
  22 +from textwrap import dedent
  23 +
  24 +from pip._internal.exceptions import CommandError
  25 +from pip._internal.locations import USER_CACHE_DIR, get_src_prefix
  26 +from pip._internal.models.format_control import FormatControl
  27 +from pip._internal.models.index import PyPI
  28 +from pip._internal.models.target_python import TargetPython
  29 +from pip._internal.utils.hashes import STRONG_HASHES
  30 +from pip._internal.utils.typing import MYPY_CHECK_RUNNING
  31 +from pip._internal.utils.ui import BAR_TYPES
  32 +
  33 +if MYPY_CHECK_RUNNING:
  34 + from typing import Any, Callable, Dict, Optional, Tuple
  35 + from optparse import OptionParser, Values
  36 + from pip._internal.cli.parser import ConfigOptionParser
  37 +
  38 +logger = logging.getLogger(__name__)
  39 +
  40 +
def raise_option_error(parser, option, msg):
    # type: (OptionParser, Option, str) -> None
    """Abort option parsing via ``parser.error()``.

    Args:
        parser: an OptionParser instance.
        option: an Option instance.
        msg: the error text.
    """
    text = '{} error: {}'.format(option, msg)
    # Collapse internal whitespace, then re-wrap to terminal-friendly width.
    collapsed = ' '.join(text.split())
    parser.error(textwrap.fill(collapsed))
  54 +
  55 +
def make_option_group(group, parser):
    # type: (Dict[str, Any], ConfigOptionParser) -> OptionGroup
    """Build an OptionGroup from a group definition.

    group -- a dict with 'name' and 'options' keys; each entry of
        'options' is a zero-argument factory returning an Option
    parser -- an optparse Parser the group belongs to
    """
    result = OptionGroup(parser, group['name'])
    for make_opt in group['options']:
        result.add_option(make_opt())
    return result
  67 +
  68 +
def check_install_build_global(options, check_options=None):
    # type: (Values, Optional[Values]) -> None
    """Disable wheels if per-setup.py call options are set.

    :param options: The OptionParser options to update.
    :param check_options: The options to check, if not supplied defaults to
        options.
    """
    if check_options is None:
        check_options = options

    names = ["build_options", "global_options", "install_options"]
    # Any of the three per-setup.py option lists being non-empty forces
    # source builds, since wheels would ignore those options.
    if any(getattr(check_options, name, None) for name in names):
        control = options.format_control
        control.disallow_binaries()
        warnings.warn(
            'Disabling all use of wheels due to the use of --build-option '
            '/ --global-option / --install-option.', stacklevel=2,
        )
  91 +
  92 +
def check_dist_restriction(options, check_target=False):
    # type: (Values, bool) -> None
    """Validate that platform/abi/interpreter restrictions are used safely.

    :param options: The OptionParser options.
    :param check_target: Whether or not to check if --target is being used.
    :raises CommandError: when the option combination is unsafe.
    """
    restricting_dists = any([
        options.python_version,
        options.platform,
        options.abi,
        options.implementation,
    ])

    binary_only = FormatControl(set(), {':all:'})
    allows_sdist_deps = (
        options.format_control != binary_only and
        not options.ignore_dependencies
    )

    # Installations or downloads using dist restrictions must not combine
    # source distributions and dist-specific wheels, as they are not
    # guaranteed to be locally compatible.
    if restricting_dists and allows_sdist_deps:
        raise CommandError(
            "When restricting platform and interpreter constraints using "
            "--python-version, --platform, --abi, or --implementation, "
            "either --no-deps must be set, or --only-binary=:all: must be "
            "set and --no-binary must not be set (or must be set to "
            ":none:)."
        )

    # Restricted dists may not match the running interpreter, so they can
    # only be installed into a separate --target directory.
    if check_target and restricting_dists and not options.target_dir:
        raise CommandError(
            "Can not use any platform or abi specific options unless "
            "installing via '--target'"
        )
  131 +
  132 +
  133 +def _path_option_check(option, opt, value):
  134 + # type: (Option, str, str) -> str
  135 + return os.path.expanduser(value)
  136 +
  137 +
class PipOption(Option):
    # Option subclass adding a "path" value type whose values get a
    # leading "~" expanded via _path_option_check.
    TYPES = Option.TYPES + ("path",)
    TYPE_CHECKER = Option.TYPE_CHECKER.copy()
    TYPE_CHECKER["path"] = _path_option_check
  142 +
  143 +
###########
# options #
###########
# Each name below is a zero-argument factory (functools.partial) that
# builds a fresh Option on every call; options are deliberately not
# instantiated globally (see the module docstring).

help_ = partial(
    Option,
    '-h', '--help',
    dest='help',
    action='help',
    help='Show help.',
)  # type: Callable[..., Option]

isolated_mode = partial(
    Option,
    "--isolated",
    dest="isolated_mode",
    action="store_true",
    default=False,
    help=(
        "Run pip in an isolated mode, ignoring environment variables and user "
        "configuration."
    ),
)  # type: Callable[..., Option]

require_virtualenv = partial(
    Option,
    # Run only if inside a virtualenv, bail if not.
    '--require-virtualenv', '--require-venv',
    dest='require_venv',
    action='store_true',
    default=False,
    # Hidden option: not shown in --help output.
    help=SUPPRESS_HELP
)  # type: Callable[..., Option]

verbose = partial(
    Option,
    '-v', '--verbose',
    dest='verbose',
    action='count',
    default=0,
    help='Give more output. Option is additive, and can be used up to 3 times.'
)  # type: Callable[..., Option]

no_color = partial(
    Option,
    '--no-color',
    dest='no_color',
    action='store_true',
    default=False,
    help="Suppress colored output",
)  # type: Callable[..., Option]

version = partial(
    Option,
    '-V', '--version',
    dest='version',
    action='store_true',
    help='Show version and exit.',
)  # type: Callable[..., Option]

quiet = partial(
    Option,
    '-q', '--quiet',
    dest='quiet',
    action='count',
    default=0,
    help=(
        'Give less output. Option is additive, and can be used up to 3'
        ' times (corresponding to WARNING, ERROR, and CRITICAL logging'
        ' levels).'
    ),
)  # type: Callable[..., Option]

progress_bar = partial(
    Option,
    '--progress-bar',
    dest='progress_bar',
    type='choice',
    choices=list(BAR_TYPES.keys()),
    default='on',
    help=(
        'Specify type of progress to be displayed [' +
        '|'.join(BAR_TYPES.keys()) + '] (default: %default)'
    ),
)  # type: Callable[..., Option]

log = partial(
    PipOption,
    "--log", "--log-file", "--local-log",
    dest="log",
    metavar="path",
    # "path" type (from PipOption) expands a leading "~".
    type="path",
    help="Path to a verbose appending log."
)  # type: Callable[..., Option]

no_input = partial(
    Option,
    # Don't ask for input
    '--no-input',
    dest='no_input',
    action='store_true',
    default=False,
    # Hidden option: not shown in --help output.
    help=SUPPRESS_HELP
)  # type: Callable[..., Option]

proxy = partial(
    Option,
    '--proxy',
    dest='proxy',
    type='str',
    default='',
    help="Specify a proxy in the form [user:passwd@]proxy.server:port."
)  # type: Callable[..., Option]

retries = partial(
    Option,
    '--retries',
    dest='retries',
    type='int',
    default=5,
    help="Maximum number of retries each connection should attempt "
         "(default %default times).",
)  # type: Callable[..., Option]

timeout = partial(
    Option,
    '--timeout', '--default-timeout',
    metavar='sec',
    dest='timeout',
    type='float',
    default=15,
    help='Set the socket timeout (default %default seconds).',
)  # type: Callable[..., Option]

skip_requirements_regex = partial(
    Option,
    # A regex to be used to skip requirements
    '--skip-requirements-regex',
    dest='skip_requirements_regex',
    type='str',
    default='',
    # Hidden; deprecated (a deprecation warning fires in base_command).
    help=SUPPRESS_HELP,
)  # type: Callable[..., Option]
  287 +
  288 +
def exists_action():
    # type: () -> Option
    """Build the repeatable ``--exists-action`` option: what to do when a
    destination path already exists."""
    kwargs = dict(
        dest='exists_action',
        type='choice',
        choices=['s', 'i', 'w', 'b', 'a'],
        default=[],
        action='append',
        metavar='action',
        help="Default action when a path already exists: "
             "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.",
    )
    return Option('--exists-action', **kwargs)
  303 +
  304 +
# TLS / index location options.

cert = partial(
    PipOption,
    '--cert',
    dest='cert',
    type='path',
    metavar='path',
    help="Path to alternate CA bundle.",
)  # type: Callable[..., Option]

client_cert = partial(
    PipOption,
    '--client-cert',
    dest='client_cert',
    type='path',
    default=None,
    metavar='path',
    help="Path to SSL client certificate, a single file containing the "
         "private key and the certificate in PEM format.",
)  # type: Callable[..., Option]

index_url = partial(
    Option,
    '-i', '--index-url', '--pypi-url',
    dest='index_url',
    metavar='URL',
    default=PyPI.simple_url,
    help="Base URL of the Python Package Index (default %default). "
         "This should point to a repository compliant with PEP 503 "
         "(the simple repository API) or a local directory laid out "
         "in the same format.",
)  # type: Callable[..., Option]
  336 +
  337 +
def extra_index_url():
    # type: () -> Option
    """Build the repeatable ``--extra-index-url`` option."""
    kwargs = dict(
        dest='extra_index_urls',
        metavar='URL',
        action='append',
        default=[],
        help="Extra URLs of package indexes to use in addition to "
             "--index-url. Should follow the same rules as "
             "--index-url.",
    )
    return Option('--extra-index-url', **kwargs)
  350 +
  351 +
# Disables all index lookups; only --find-links sources are consulted.
no_index = partial(
    Option,
    '--no-index',
    dest='no_index',
    action='store_true',
    default=False,
    help='Ignore package index (only looking at --find-links URLs instead).',
)  # type: Callable[..., Option]
  360 +
  361 +
def find_links():
    # type: () -> Option
    """Build the repeatable ``-f``/``--find-links`` option."""
    kwargs = dict(
        dest='find_links',
        action='append',
        default=[],
        metavar='url',
        help="If a url or path to an html file, then parse for links to "
             "archives. If a local path or file:// url that's a directory, "
             "then look for archives in the directory listing.",
    )
    return Option('-f', '--find-links', **kwargs)
  374 +
  375 +
def trusted_host():
    # type: () -> Option
    """Build the repeatable ``--trusted-host`` option."""
    kwargs = dict(
        dest="trusted_hosts",
        action="append",
        metavar="HOSTNAME",
        default=[],
        help="Mark this host or host:port pair as trusted, even though it "
             "does not have valid or any HTTPS.",
    )
    return Option("--trusted-host", **kwargs)
  387 +
  388 +
def constraints():
    # type: () -> Option
    """Build the repeatable ``-c``/``--constraint`` option."""
    kwargs = dict(
        dest='constraints',
        action='append',
        default=[],
        metavar='file',
        help='Constrain versions using the given constraints file. '
             'This option can be used multiple times.',
    )
    return Option('-c', '--constraint', **kwargs)
  400 +
  401 +
def requirements():
    # type: () -> Option
    """Build the repeatable ``-r``/``--requirement`` option."""
    kwargs = dict(
        dest='requirements',
        action='append',
        default=[],
        metavar='file',
        help='Install from the given requirements file. '
             'This option can be used multiple times.',
    )
    return Option('-r', '--requirement', **kwargs)
  413 +
  414 +
def editable():
    # type: () -> Option
    """Build the repeatable ``-e``/``--editable`` option."""
    help_text = ('Install a project in editable mode (i.e. setuptools '
                 '"develop mode") from a local project path or a VCS url.')
    return Option(
        '-e', '--editable',
        dest='editables',
        action='append',
        default=[],
        metavar='path/url',
        help=help_text,
    )
  426 +
  427 +
  428 +def _handle_src(option, opt_str, value, parser):
  429 + # type: (Option, str, str, OptionParser) -> None
  430 + value = os.path.abspath(value)
  431 + setattr(parser.values, option.dest, value)
  432 +
  433 +
# Checkout directory for editable (-e) projects.
src = partial(
    PipOption,
    '--src', '--source', '--source-dir', '--source-directory',
    dest='src_dir',
    type='path',
    metavar='dir',
    default=get_src_prefix(),
    action='callback',
    # Callback normalizes the value to an absolute path (see _handle_src).
    callback=_handle_src,
    help='Directory to check out editable projects into. '
         'The default in a virtualenv is "<venv path>/src". '
         'The default for global installs is "<current dir>/src".'
)  # type: Callable[..., Option]
  447 +
  448 +
  449 +def _get_format_control(values, option):
  450 + # type: (Values, Option) -> Any
  451 + """Get a format_control object."""
  452 + return getattr(values, option.dest)
  453 +
  454 +
def _handle_no_binary(option, opt_str, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    # Fold the new --no-binary value into the shared FormatControl:
    # additions go into no_binary; conflicts are dropped from only_binary.
    fmt = _get_format_control(parser.values, option)
    FormatControl.handle_mutual_excludes(
        value, fmt.no_binary, fmt.only_binary,
    )
  461 +
  462 +
def _handle_only_binary(option, opt_str, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    # Mirror image of _handle_no_binary: additions go into only_binary;
    # conflicts are dropped from no_binary.
    fmt = _get_format_control(parser.values, option)
    FormatControl.handle_mutual_excludes(
        value, fmt.only_binary, fmt.no_binary,
    )
  469 +
  470 +
def no_binary():
    # type: () -> Option
    """Build the repeatable ``--no-binary`` option."""
    help_text = (
        "Do not use binary packages. Can be supplied multiple times, and "
        "each time adds to the existing value. Accepts either :all: to "
        "disable all binary packages, :none: to empty the set, or one or "
        "more package names with commas between them (no colons). Note "
        "that some packages are tricky to compile and may fail to "
        "install when this option is used on them."
    )
    return Option(
        "--no-binary",
        dest="format_control",
        action="callback",
        callback=_handle_no_binary,
        type="str",
        # Fresh FormatControl per Option; state must not leak between parses.
        default=FormatControl(set(), set()),
        help=help_text,
    )
  485 +
  486 +
def only_binary():
    # type: () -> Option
    """Build the repeatable ``--only-binary`` option."""
    help_text = (
        "Do not use source packages. Can be supplied multiple times, and "
        "each time adds to the existing value. Accepts either :all: to "
        "disable all source packages, :none: to empty the set, or one or "
        "more package names with commas between them. Packages without "
        "binary distributions will fail to install when this option is "
        "used on them."
    )
    return Option(
        "--only-binary",
        dest="format_control",
        action="callback",
        callback=_handle_only_binary,
        type="str",
        # Fresh FormatControl per Option; state must not leak between parses.
        default=FormatControl(set(), set()),
        help=help_text,
    )
  501 +
  502 +
# Target-interpreter selection options; consumed by make_target_python().
platform = partial(
    Option,
    '--platform',
    dest='platform',
    metavar='platform',
    default=None,
    help=("Only use wheels compatible with <platform>. "
          "Defaults to the platform of the running system."),
)  # type: Callable[..., Option]
  512 +
  513 +
  514 +# This was made a separate function for unit-testing purposes.
  515 +def _convert_python_version(value):
  516 + # type: (str) -> Tuple[Tuple[int, ...], Optional[str]]
  517 + """
  518 + Convert a version string like "3", "37", or "3.7.3" into a tuple of ints.
  519 +
  520 + :return: A 2-tuple (version_info, error_msg), where `error_msg` is
  521 + non-None if and only if there was a parsing error.
  522 + """
  523 + if not value:
  524 + # The empty string is the same as not providing a value.
  525 + return (None, None)
  526 +
  527 + parts = value.split('.')
  528 + if len(parts) > 3:
  529 + return ((), 'at most three version parts are allowed')
  530 +
  531 + if len(parts) == 1:
  532 + # Then we are in the case of "3" or "37".
  533 + value = parts[0]
  534 + if len(value) > 1:
  535 + parts = [value[0], value[1:]]
  536 +
  537 + try:
  538 + version_info = tuple(int(part) for part in parts)
  539 + except ValueError:
  540 + return ((), 'each version part must be an integer')
  541 +
  542 + return (version_info, None)
  543 +
  544 +
def _handle_python_version(option, opt_str, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    """
    Handle a provided --python-version value.
    """
    version_info, error_msg = _convert_python_version(value)
    if error_msg is not None:
        # raise_option_error() exits via parser.error(), so the assignment
        # below is only reached for valid values.
        raise_option_error(
            parser,
            option=option,
            msg='invalid --python-version value: {!r}: {}'.format(
                value, error_msg,
            ),
        )

    parser.values.python_version = version_info
  560 +
  561 +
python_version = partial(
    Option,
    '--python-version',
    dest='python_version',
    metavar='python_version',
    action='callback',
    # Callback parses and validates the string (see _handle_python_version).
    callback=_handle_python_version, type='str',
    default=None,
    help=dedent("""\
    The Python interpreter version to use for wheel and "Requires-Python"
    compatibility checks. Defaults to a version derived from the running
    interpreter. The version can be specified using up to three dot-separated
    integers (e.g. "3" for 3.0.0, "3.7" for 3.7.0, or "3.7.3"). A major-minor
    version can also be given as a string without dots (e.g. "37" for 3.7.0).
    """),
)  # type: Callable[..., Option]
  578 +
  579 +
# More target-interpreter selection options (see make_target_python()).

implementation = partial(
    Option,
    '--implementation',
    dest='implementation',
    metavar='implementation',
    default=None,
    help=("Only use wheels compatible with Python "
          "implementation <implementation>, e.g. 'pp', 'jy', 'cp', "
          " or 'ip'. If not specified, then the current "
          "interpreter implementation is used. Use 'py' to force "
          "implementation-agnostic wheels."),
)  # type: Callable[..., Option]


abi = partial(
    Option,
    '--abi',
    dest='abi',
    metavar='abi',
    default=None,
    help=("Only use wheels compatible with Python "
          "abi <abi>, e.g. 'pypy_41'. If not specified, then the "
          "current interpreter abi tag is used. Generally "
          "you will need to specify --implementation, "
          "--platform, and --python-version when using "
          "this option."),
)  # type: Callable[..., Option]
  607 +
  608 +
def add_target_python_options(cmd_opts):
    # type: (OptionGroup) -> None
    """Attach the four target-interpreter options to ``cmd_opts``."""
    for factory in (platform, python_version, implementation, abi):
        cmd_opts.add_option(factory())
  615 +
  616 +
def make_target_python(options):
    # type: (Values) -> TargetPython
    """Build a TargetPython from the parsed --platform, --python-version,
    --abi and --implementation options."""
    return TargetPython(
        platform=options.platform,
        py_version_info=options.python_version,
        abi=options.abi,
        implementation=options.implementation,
    )
  627 +
  628 +
def prefer_binary():
    # type: () -> Option
    """Return the --prefer-binary option (a plain boolean flag)."""
    opt = Option(
        '--prefer-binary',
        dest='prefer_binary',
        action='store_true',
        default=False,
        help='Prefer older binary packages over newer source packages.',
    )
    return opt
  638 +
  639 +
# --cache-dir: where pip keeps its cache; defaults to the per-user
# cache location.
cache_dir = partial(
    PipOption,
    "--cache-dir",
    dest="cache_dir",
    metavar="dir",
    type="path",
    default=USER_CACHE_DIR,
    help="Store the cache data in <dir>.",
)  # type: Callable[..., Option]
  649 +
  650 +
  651 +def _handle_no_cache_dir(option, opt, value, parser):
  652 + # type: (Option, str, str, OptionParser) -> None
  653 + """
  654 + Process a value provided for the --no-cache-dir option.
  655 +
  656 + This is an optparse.Option callback for the --no-cache-dir option.
  657 + """
  658 + # The value argument will be None if --no-cache-dir is passed via the
  659 + # command-line, since the option doesn't accept arguments. However,
  660 + # the value can be non-None if the option is triggered e.g. by an
  661 + # environment variable, like PIP_NO_CACHE_DIR=true.
  662 + if value is not None:
  663 + # Then parse the string value to get argument error-checking.
  664 + try:
  665 + strtobool(value)
  666 + except ValueError as exc:
  667 + raise_option_error(parser, option=option, msg=str(exc))
  668 +
  669 + # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool()
  670 + # converted to 0 (like "false" or "no") caused cache_dir to be disabled
  671 + # rather than enabled (logic would say the latter). Thus, we disable
  672 + # the cache directory not just on values that parse to True, but (for
  673 + # backwards compatibility reasons) also on values that parse to False.
  674 + # In other words, always set it to False if the option is provided in
  675 + # some (valid) form.
  676 + parser.values.cache_dir = False
  677 +
  678 +
# --no-cache-dir goes through a callback so values supplied via the
# PIP_NO_CACHE_DIR environment variable are validated as well.
no_cache = partial(
    Option,
    "--no-cache-dir",
    dest="cache_dir",
    help="Disable the cache.",
    action="callback",
    callback=_handle_no_cache_dir,
)  # type: Callable[..., Option]
  687 +
# --no-deps: install only the named packages themselves.
no_deps = partial(
    Option,
    "--no-deps", "--no-dependencies",
    dest="ignore_dependencies",
    default=False,
    action="store_true",
    help="Don't install package dependencies.",
)  # type: Callable[..., Option]
  696 +
  697 +
  698 +def _handle_build_dir(option, opt, value, parser):
  699 + # type: (Option, str, str, OptionParser) -> None
  700 + if value:
  701 + value = os.path.abspath(value)
  702 + setattr(parser.values, option.dest, value)
  703 +
  704 +
# -b/--build: explicit build directory; normalized to an absolute path
# by the _handle_build_dir callback.
build_dir = partial(
    PipOption,
    '-b', '--build', '--build-dir', '--build-directory',
    dest='build_dir',
    metavar='dir',
    type='path',
    action='callback',
    callback=_handle_build_dir,
    help=(
        "Directory to unpack packages into and build in. Note that an "
        "initial build still takes place in a temporary directory. The "
        "location of temporary directories can be controlled by setting "
        "the TMPDIR environment variable (TEMP on Windows) "
        "appropriately. When passed, build directories are not cleaned "
        "in case of failures."
    ),
)  # type: Callable[..., Option]
  719 +
# --ignore-requires-python: skip "Requires-Python" metadata checks.
ignore_requires_python = partial(
    Option,
    "--ignore-requires-python",
    action="store_true",
    dest="ignore_requires_python",
    help="Ignore the Requires-Python information.",
)  # type: Callable[..., Option]
  727 +
# --no-build-isolation flips build_isolation (default True) to False.
no_build_isolation = partial(
    Option,
    "--no-build-isolation",
    dest="build_isolation",
    action="store_false",
    default=True,
    help=(
        "Disable isolation when building a modern source "
        "distribution. Build dependencies specified by PEP 518 must "
        "be already installed if this option is used."
    ),
)  # type: Callable[..., Option]
  738 +
  739 +
  740 +def _handle_no_use_pep517(option, opt, value, parser):
  741 + # type: (Option, str, str, OptionParser) -> None
  742 + """
  743 + Process a value provided for the --no-use-pep517 option.
  744 +
  745 + This is an optparse.Option callback for the no_use_pep517 option.
  746 + """
  747 + # Since --no-use-pep517 doesn't accept arguments, the value argument
  748 + # will be None if --no-use-pep517 is passed via the command-line.
  749 + # However, the value can be non-None if the option is triggered e.g.
  750 + # by an environment variable, for example "PIP_NO_USE_PEP517=true".
  751 + if value is not None:
  752 + msg = """A value was passed for --no-use-pep517,
  753 + probably using either the PIP_NO_USE_PEP517 environment variable
  754 + or the "no-use-pep517" config file option. Use an appropriate value
  755 + of the PIP_USE_PEP517 environment variable or the "use-pep517"
  756 + config file option instead.
  757 + """
  758 + raise_option_error(parser, option=option, msg=msg)
  759 +
  760 + # Otherwise, --no-use-pep517 was passed via the command-line.
  761 + parser.values.use_pep517 = False
  762 +
  763 +
# --use-pep517: default None means "auto-detect per project".
use_pep517 = partial(
    Option,
    "--use-pep517",
    dest="use_pep517",
    action="store_true",
    default=None,
    help=(
        "Use PEP 517 for building source distributions (use "
        "--no-use-pep517 to force legacy behaviour)."
    ),
)  # type: Any
  773 +
# Hidden companion of --use-pep517; validated by its callback.
no_use_pep517 = partial(
    Option,
    "--no-use-pep517",
    dest="use_pep517",
    default=None,
    action="callback",
    callback=_handle_no_use_pep517,
    help=SUPPRESS_HELP,
)  # type: Any
  783 +
# --install-option: raw extra arguments forwarded to `setup.py install`.
install_options = partial(
    Option,
    '--install-option',
    dest='install_options',
    metavar='options',
    action='append',
    help=(
        'Extra arguments to be supplied to the setup.py install '
        'command (use like --install-option="--install-scripts='
        '/usr/local/bin"). Use multiple --install-option options to '
        'pass multiple options to setup.py install. If you are using '
        'an option with a directory path, be sure to use absolute '
        'path.'
    ),
)  # type: Callable[..., Option]
  796 +
# --global-option: extra options placed before the setup.py command.
global_options = partial(
    Option,
    '--global-option',
    dest='global_options',
    metavar='options',
    action='append',
    help=(
        'Extra global options to be supplied to the setup.py call '
        'before the install command.'
    ),
)  # type: Callable[..., Option]
  806 +
# --no-clean: keep build directories around after a run.
no_clean = partial(
    Option,
    "--no-clean",
    action="store_true",
    default=False,
    help="Don't clean up build directories.",
)  # type: Callable[..., Option]
  814 +
# --pre: also consider pre-release / dev versions of packages.
pre = partial(
    Option,
    "--pre",
    action="store_true",
    default=False,
    help=(
        "Include pre-release and development versions. By default, "
        "pip only finds stable versions."
    ),
)  # type: Callable[..., Option]
  823 +
# NOTE(review): default is True in this copy, so the periodic PyPI
# version check is off unless explicitly re-enabled -- confirm intended.
disable_pip_version_check = partial(
    Option,
    "--disable-pip-version-check",
    dest="disable_pip_version_check",
    action="store_true",
    default=True,
    help=(
        "Don't periodically check PyPI to determine whether a new "
        "version of pip is available for download. Implied with "
        "--no-index."
    ),
)  # type: Callable[..., Option]
  833 +
  834 +
# Deprecated, Remove later: kept only so old command lines don't break;
# the option is hidden from --help output.
always_unzip = partial(
    Option,
    "-Z", "--always-unzip",
    action="store_true",
    dest="always_unzip",
    help=SUPPRESS_HELP,
)  # type: Callable[..., Option]
  843 +
  844 +
  845 +def _handle_merge_hash(option, opt_str, value, parser):
  846 + # type: (Option, str, str, OptionParser) -> None
  847 + """Given a value spelled "algo:digest", append the digest to a list
  848 + pointed to in a dict by the algo name."""
  849 + if not parser.values.hashes:
  850 + parser.values.hashes = {}
  851 + try:
  852 + algo, digest = value.split(':', 1)
  853 + except ValueError:
  854 + parser.error('Arguments to %s must be a hash name '
  855 + 'followed by a value, like --hash=sha256:abcde...' %
  856 + opt_str)
  857 + if algo not in STRONG_HASHES:
  858 + parser.error('Allowed hash algorithms for %s are %s.' %
  859 + (opt_str, ', '.join(STRONG_HASHES)))
  860 + parser.values.hashes.setdefault(algo, []).append(digest)
  861 +
  862 +
hash = partial(
    Option,
    '--hash',
    # Hash values eventually end up in InstallRequirement.hashes due to
    # __dict__ copying in process_line().
    dest='hashes',
    type='string',
    action='callback',
    callback=_handle_merge_hash,
    help="Verify that the package's archive matches this "
         'hash before installing. Example: --hash=sha256:abcdef...',
)  # type: Callable[..., Option]
  875 +
  876 +
# --require-hashes: demand a --hash for every requirement.
require_hashes = partial(
    Option,
    "--require-hashes",
    dest="require_hashes",
    action="store_true",
    default=False,
    help=(
        "Require a hash to check each requirement against, for "
        "repeatable installs. This option is implied when any package "
        "in a requirements file has a --hash option."
    ),
)  # type: Callable[..., Option]
  887 +
  888 +
# --path (for `pip list`): limit listing to specific install roots.
list_path = partial(
    PipOption,
    '--path',
    dest='path',
    action='append',
    type='path',
    help='Restrict to the specified installation path for listing '
         'packages (can be used multiple times).'
)  # type: Callable[..., Option]
  898 +
  899 +
def check_list_path_option(options):
    # type: (Values) -> None
    """Reject combining --path with --user or --local for `pip list`."""
    if not options.path:
        return
    if options.user or options.local:
        raise CommandError(
            "Cannot combine '--path' with '--user' or '--local'"
        )
  906 +
  907 +
# --no-python-version-warning: mute end-of-life interpreter warnings.
no_python_version_warning = partial(
    Option,
    "--no-python-version-warning",
    dest="no_python_version_warning",
    action="store_true",
    default=False,
    help="Silence deprecation warnings for upcoming unsupported Pythons.",
)  # type: Callable[..., Option]
  916 +
  917 +
  918 +##########
  919 +# groups #
  920 +##########
  921 +
# Options shown under "General Options" in every command's help.  Each
# entry is a zero-argument factory (a partial or function defined above)
# that produces a fresh Option; consumers pass this dict to
# cmdoptions.make_option_group() to instantiate them onto a parser.
general_group = {
    'name': 'General Options',
    'options': [
        help_,
        isolated_mode,
        require_virtualenv,
        verbose,
        version,
        quiet,
        log,
        no_input,
        proxy,
        retries,
        timeout,
        skip_requirements_regex,
        exists_action,
        trusted_host,
        cert,
        client_cert,
        cache_dir,
        no_cache,
        disable_pip_version_check,
        no_color,
        no_python_version_warning,
    ]
}  # type: Dict[str, Any]
  948 +
# Options shown under "Package Index Options": where to look for
# distributions.  Same factory-dict shape as general_group above.
index_group = {
    'name': 'Package Index Options',
    'options': [
        index_url,
        extra_index_url,
        no_index,
        find_links,
    ]
}  # type: Dict[str, Any]
... ...
  1 +from contextlib import contextmanager
  2 +
  3 +from pip._vendor.contextlib2 import ExitStack
  4 +
  5 +from pip._internal.utils.typing import MYPY_CHECK_RUNNING
  6 +
  7 +if MYPY_CHECK_RUNNING:
  8 + from typing import Iterator, ContextManager, TypeVar
  9 +
  10 + _T = TypeVar('_T', covariant=True)
  11 +
  12 +
class CommandContextMixIn(object):
    """Mixin giving a command one shared ExitStack-backed "main context".

    Context managers registered via enter_context() stay open until the
    enclosing main_context() block exits, at which point they are all
    unwound together.
    """

    def __init__(self):
        # type: () -> None
        super(CommandContextMixIn, self).__init__()
        # True only while execution is inside a main_context() block.
        self._in_main_context = False
        # Collects every context manager handed to enter_context().
        self._main_context = ExitStack()

    @contextmanager
    def main_context(self):
        # type: () -> Iterator[None]
        # Not re-entrant: a second entry would reuse the already-exited
        # ExitStack, so guard against nesting.
        assert not self._in_main_context

        self._in_main_context = True
        try:
            # Leaving the `with` unwinds everything registered via
            # enter_context(), in LIFO order.
            with self._main_context:
                yield
        finally:
            self._in_main_context = False

    def enter_context(self, context_provider):
        # type: (ContextManager[_T]) -> _T
        # Only legal inside main_context(); otherwise nothing would ever
        # unwind the registered context manager.
        assert self._in_main_context

        return self._main_context.enter_context(context_provider)
... ...
  1 +"""Primary application entrypoint.
  2 +"""
  3 +from __future__ import absolute_import
  4 +
  5 +import locale
  6 +import logging
  7 +import os
  8 +import sys
  9 +
  10 +from pip._internal.cli.autocompletion import autocomplete
  11 +from pip._internal.cli.main_parser import parse_command
  12 +from pip._internal.commands import create_command
  13 +from pip._internal.exceptions import PipError
  14 +from pip._internal.utils import deprecation
  15 +from pip._internal.utils.typing import MYPY_CHECK_RUNNING
  16 +
  17 +if MYPY_CHECK_RUNNING:
  18 + from typing import List, Optional
  19 +
  20 +logger = logging.getLogger(__name__)
  21 +
  22 +
  23 +# Do not import and use main() directly! Using it directly is actively
  24 +# discouraged by pip's maintainers. The name, location and behavior of
  25 +# this function is subject to change, so calling it directly is not
  26 +# portable across different pip versions.
  27 +
  28 +# In addition, running pip in-process is unsupported and unsafe. This is
  29 +# elaborated in detail at
  30 +# https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program.
  31 +# That document also provides suggestions that should work for nearly
  32 +# all users that are considering importing and using main() directly.
  33 +
  34 +# However, we know that certain users will still want to invoke pip
  35 +# in-process. If you understand and accept the implications of using pip
  36 +# in an unsupported manner, the best approach is to use runpy to avoid
  37 +# depending on the exact location of this entry point.
  38 +
  39 +# The following example shows how to use runpy to invoke pip in that
  40 +# case:
  41 +#
  42 +# sys.argv = ["pip", your, args, here]
  43 +# runpy.run_module("pip", run_name="__main__")
  44 +#
  45 +# Note that this will exit the process after running, unlike a direct
  46 +# call to main. As it is not safe to do any processing after calling
  47 +# main, this should not be an issue in practice.
  48 +
def main(args=None):
    # type: (Optional[List[str]]) -> int
    """pip's primary entry point: parse arguments and run a subcommand.

    Returns the subcommand's exit status; exits directly on parse errors
    or --version/--help style requests handled by parse_command().
    """
    if args is None:
        args = sys.argv[1:]

    # Route DeprecationWarnings through the logging machinery.
    deprecation.install_warning_logger()

    autocomplete()

    try:
        cmd_name, cmd_args = parse_command(args)
    except PipError as error:
        sys.stderr.write("ERROR: %s" % error)
        sys.stderr.write(os.linesep)
        sys.exit(1)

    # Needed for locale.getpreferredencoding(False) to work
    # in pip._internal.utils.encoding.auto_decode
    try:
        locale.setlocale(locale.LC_ALL, '')
    except locale.Error as err:
        # setlocale can apparently crash if locale are uninitialized
        logger.debug("Ignoring error %s when setting locale", err)

    is_isolated = "--isolated" in cmd_args
    command = create_command(cmd_name, isolated=is_isolated)
    return command.main(cmd_args)
... ...
  1 +"""A single place for constructing and exposing the main parser
  2 +"""
  3 +
  4 +import os
  5 +import sys
  6 +
  7 +from pip._internal.cli import cmdoptions
  8 +from pip._internal.cli.parser import (
  9 + ConfigOptionParser,
  10 + UpdatingDefaultsHelpFormatter,
  11 +)
  12 +from pip._internal.commands import commands_dict, get_similar_commands
  13 +from pip._internal.exceptions import CommandError
  14 +from pip._internal.utils.misc import get_pip_version, get_prog
  15 +from pip._internal.utils.typing import MYPY_CHECK_RUNNING
  16 +
  17 +if MYPY_CHECK_RUNNING:
  18 + from typing import Tuple, List
  19 +
  20 +
  21 +__all__ = ["create_main_parser", "parse_command"]
  22 +
  23 +
def create_main_parser():
    # type: () -> ConfigOptionParser
    """Creates and returns the main parser for pip's CLI
    """
    parser = ConfigOptionParser(
        usage='\n%prog <command> [options]',
        add_help_option=False,
        formatter=UpdatingDefaultsHelpFormatter(),
        name='global',
        prog=get_prog(),
    )
    # Stop option parsing at the first positional (the subcommand name)
    # so subcommand flags are left untouched.
    parser.disable_interspersed_args()

    parser.version = get_pip_version()

    # add the general options
    gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
    parser.add_option_group(gen_opts)

    # so the help formatter knows
    parser.main = True  # type: ignore

    # Build the command listing shown in the parser description.
    lines = ['']
    for name, command_info in commands_dict.items():
        lines.append('%-27s %s' % (name, command_info.summary))
    parser.description = '\n'.join(lines)

    return parser
  57 +
  58 +
def parse_command(args):
    # type: (List[str]) -> Tuple[str, List[str]]
    """Split raw CLI arguments into a subcommand name and its arguments.

    Raises CommandError for unknown subcommands; exits directly for
    --version and bare `pip`/`pip help` invocations.
    """
    parser = create_main_parser()

    # The parser has interspersed args disabled, so parse_args() splits
    # the input into the general options that precede the subcommand and
    # everything that follows it.  For example:
    #   args: ['--timeout=5', 'install', '--user', 'INITools']
    #   general_options: ['--timeout=5']
    #   args_else: ['install', '--user', 'INITools']
    general_options, args_else = parser.parse_args(args)

    # --version
    if general_options.version:
        sys.stdout.write(parser.version)  # type: ignore
        sys.stdout.write(os.linesep)
        sys.exit()

    # Bare `pip` or `pip help` prints the top-level help text.
    if not args_else or (args_else[0] == 'help' and len(args_else) == 1):
        parser.print_help()
        sys.exit()

    # the subcommand name
    cmd_name = args_else[0]

    if cmd_name not in commands_dict:
        guess = get_similar_commands(cmd_name)

        msg = ['unknown command "%s"' % cmd_name]
        if guess:
            msg.append('maybe you meant "%s"' % guess)

        raise CommandError(' - '.join(msg))

    # Everything except the first occurrence of the subcommand name.
    cmd_args = args[:]
    cmd_args.remove(cmd_name)

    return cmd_name, cmd_args
... ...