diff --git a/.gitignore b/.gitignore
index 285015e..35f6300 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,2 @@
token.txt
-
+json/venv
diff --git a/.idea/inspectionProfiles/Project_Default.xml b/.idea/inspectionProfiles/Project_Default.xml
new file mode 100644
index 0000000..8a98d84
--- /dev/null
+++ b/.idea/inspectionProfiles/Project_Default.xml
diff --git a/.idea/workspace.xml b/.idea/workspace.xml
new file mode 100644
index 0000000..8406a03
--- /dev/null
+++ b/.idea/workspace.xml
diff --git a/algorithm.txt b/algorithm.txt
deleted file mode 100644
index 341babc..0000000
--- a/algorithm.txt
+++ /dev/null
@@ -1,12 +0,0 @@
-
-
-
-
-
-# Iterate over 'top' results
-# jsonResponse=json.loads(decoded_response)
-top_results = jsonResponse["top"]
-for item in top_results:
-    domains += item.get("domain") + ","
-    urls += item.get("url") + ","
-
diff --git a/json/parse.py b/json/parse.py
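The deleted algorithm.txt snippet was never runnable on its own: its imports and the `decoded_response` it references were stripped, and it indexed `jsonResponse["top"]` while json/parse.py reads `result.top`. For reference, a minimal self-contained sketch of the same "iterate over top results" idea, using a hypothetical sample payload shaped like the one parse.py expects:

    import json

    # Hypothetical sample payload; the real response comes from the API.
    decoded_response = (
        '{"result": {"top": ['
        '{"domain": "example.com", "url": "https://example.com/a"},'
        '{"domain": "example.org", "url": "https://example.org/b"}'
        ']}}'
    )

    jsonResponse = json.loads(decoded_response)

    # Iterate over 'top' results, accumulating comma-separated domains and URLs.
    top_results = jsonResponse["result"]["top"]
    domains = ",".join(item.get("domain", "") for item in top_results)
    urls = ",".join(item.get("url", "") for item in top_results)

    print(domains)  # example.com,example.org
    print(urls)     # https://example.com/a,https://example.org/b
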
index 36e92fc..598abd6 100644
--- a/json/parse.py
+++ b/json/parse.py
@@ -1,38 +1,127 @@
#!/usr/bin/python
import json
-import pprint
+import openpyxl
+from datetime import datetime
+from tkinter import Tk, Label, Button, Entry, IntVar, END, W, E
+
+
+class Calculator:
+
+ def __init__(self, master):
+ self.master = master
+ master.title("Calculator")
+
+        self.total = 0
+        self.entered_number = 0
+
+        # Filled in by ok(); initialised so module-level access never fails.
+        self.token = ""
+        self.filename = ""
+
+        self.total_label_text = IntVar()
+
+ #vcmd = master.register(self.validate) # we have to wrap the command
+ self.labelT = Label(master, text="Token:")
+ self.labelF = Label(master, text="File Path:")
+ self.entry1 = Entry(master) #, validate="key", validatecommand=(vcmd, '%P'))
+ self.entry2 = Entry(master)
+
+        self.ok_button = Button(master, text="OK", command=self.ok)
+ self.choose_button = Button(master, text="Choose file", command=lambda: self.choose())
+ #self.add_button = Button(master, text="+", command=lambda: self.update("add"))
+ #self.subtract_button = Button(master, text="-", command=lambda: self.update("subtract"))
+ #self.reset_button = Button(master, text="Reset", command=lambda: self.update("reset"))
+
+ # LAYOUT
+
+ self.entry1.grid(row=1, column=1, columnspan=3, sticky=W+E)
+ self.entry2.grid(row=2, column=1, columnspan=3, sticky=W + E)
+ self.labelT.grid(row=1, column=0)
+ self.labelF.grid(row=2, column=0)
+
+ self.ok_button.grid(row=3, column=0)
+ self.choose_button.grid(row=2, column=5)
+
+    def choose(self):
+        # Stub: a full implementation would open a file-picker dialog
+        # (e.g. tkinter.filedialog.askopenfilename) and fill entry2.
+        return True
+
+    def ok(self):
+        # Store the entered values so they are readable after mainloop exits.
+        self.token = self.entry1.get()
+        self.filename = self.entry2.get()
+
+ def update(self, method):
+ if method == "add":
+ self.total += self.entered_number
+ elif method == "subtract":
+ self.total -= self.entered_number
+ else: # reset
+ self.total = 0
+
+ self.total_label_text.set(self.total)
+        self.entry1.delete(0, END)
+
+root = Tk()
+my_gui = Calculator(root)
+root.mainloop()
+
+print(my_gui.token)
+
+
+#root = Tk()
+#d = MyDialog(root)
+#root.mainloop()
+
+# Output
+def output_to_excel():
+ now = datetime.now()
+    # Avoid ':' in the timestamp; it is not a legal filename character on Windows.
+    output_file = 'results_' + now.strftime("%Y-%m-%d_%H-%M-%S") + '.xlsx'
+    wb = openpyxl.Workbook()
+    engine = "g_us"
+    sheetname = "Top_keyword" + engine
+    wb.create_sheet(sheetname)
+    sheet = wb[sheetname]  # get_sheet_by_name() is deprecated in openpyxl
+
+    # Bold header row; the original wrote 'Keyword' to row 2 by mistake.
+    headers = ['Keyword', 'Query engine', 'Response Message', 'Queries count',
+               'Found Results', 'Domains', 'Full URLs']
+    for col, title in enumerate(headers, start=1):
+        sheet.cell(row=1, column=col).value = title
+        sheet.cell(row=1, column=col).font = openpyxl.styles.Font(bold=True)
+
+    wb.save(output_file)
file = open("json.example","r")
#print(file.read())
jsondata = json.loads(file.read())
-# print(jsondata)
-
-# output to file example
-# output = open("json.example2", "w")
-# pprint.pprint(jsondata, output)
-
-# left_lines = jsondata["left_lines"]
-# print(left_lines)
-# easier than above
domains = ""
-# print("\ngetting all domains:")
for item in jsondata['result']['top']:
domains += item.get("domain") + ","
-
-# print(domains)
-
urls = ""
-# print("\ngetting all URLS:")
for item in jsondata['result']['top']:
urls += item.get("url") + ","
-
-# print(urls)
-
print(jsondata['left_lines'])
print(jsondata['result']['results'])
-# print(jsondata['result']['top'][0]['domain'] + ", " + jsondata['result']['top'][0]['url'])
print("found_domains:" + domains)
print("found_urls:" + urls)
-print()
\ No newline at end of file
+print(len(jsondata['result']['top']))
+if jsondata['status_code'] == 200:
+ print("status code is 200:")
+ print(jsondata['status_code'])
+
+print(jsondata['status_msg'])
+status_msg = "%i, %s" % (jsondata['status_code'], jsondata['status_msg'])
+print(status_msg)
+
+
+
+# Output
+# output_to_excel()
\ No newline at end of file
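parse.py now mixes three concerns: the Tk token/file dialog, the module-level JSON parsing, and the still-unused output_to_excel() writer. A minimal sketch of how they could be glued together once the dialog closes — the run() helper and the idea of passing the dialog's File Path into the parser are assumptions for illustration, not code in this diff:

    import json

    def run(token, filename):
        # Hypothetical glue: parse the file chosen in the dialog and reuse
        # the same domain/URL accumulation as the module-level code above.
        with open(filename) as fh:
            data = json.load(fh)
        top = data['result']['top']
        domains = ",".join(item.get("domain", "") for item in top)
        urls = ",".join(item.get("url", "") for item in top)
        print("found_domains:" + domains)
        print("found_urls:" + urls)
        # output_to_excel() would be called here once it accepts these values.

    # e.g. after root.mainloop() returns:
    # run(my_gui.token, my_gui.filename)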
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/EGG-INFO/not-zip-safe b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/EGG-INFO/not-zip-safe
index 8b13789..d3f5a12 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/EGG-INFO/not-zip-safe
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/EGG-INFO/not-zip-safe
@@ -1 +1 @@
-
+
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/__init__.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/__init__.py
index ab64964..0a3b850 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/__init__.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/__init__.py
@@ -1 +1 @@
-__version__ = "10.0.1"
+__version__ = "10.0.1"
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/__main__.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/__main__.py
index 4609582..a128ee3 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/__main__.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/__main__.py
@@ -1,19 +1,19 @@
-from __future__ import absolute_import
-
-import os
-import sys
-
-# If we are running from a wheel, add the wheel to sys.path
-# This allows the usage python pip-*.whl/pip install pip-*.whl
-if __package__ == '':
- # __file__ is pip-*.whl/pip/__main__.py
- # first dirname call strips of '/__main__.py', second strips off '/pip'
- # Resulting path is the name of the wheel itself
- # Add that to sys.path so we can import pip
- path = os.path.dirname(os.path.dirname(__file__))
- sys.path.insert(0, path)
-
-from pip._internal import main as _main # noqa
-
-if __name__ == '__main__':
- sys.exit(_main())
+from __future__ import absolute_import
+
+import os
+import sys
+
+# If we are running from a wheel, add the wheel to sys.path
+# This allows the usage python pip-*.whl/pip install pip-*.whl
+if __package__ == '':
+ # __file__ is pip-*.whl/pip/__main__.py
+ # first dirname call strips of '/__main__.py', second strips off '/pip'
+ # Resulting path is the name of the wheel itself
+ # Add that to sys.path so we can import pip
+ path = os.path.dirname(os.path.dirname(__file__))
+ sys.path.insert(0, path)
+
+from pip._internal import main as _main # noqa
+
+if __name__ == '__main__':
+ sys.exit(_main())
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/__init__.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/__init__.py
index 865d9ec..d713b0d 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/__init__.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/__init__.py
@@ -1,246 +1,246 @@
-#!/usr/bin/env python
-from __future__ import absolute_import
-
-import locale
-import logging
-import os
-import optparse
-import warnings
-
-import sys
-
-# 2016-06-17 barry@debian.org: urllib3 1.14 added optional support for socks,
-# but if invoked (i.e. imported), it will issue a warning to stderr if socks
-# isn't available. requests unconditionally imports urllib3's socks contrib
-# module, triggering this warning. The warning breaks DEP-8 tests (because of
-# the stderr output) and is just plain annoying in normal usage. I don't want
-# to add socks as yet another dependency for pip, nor do I want to allow-stder
-# in the DEP-8 tests, so just suppress the warning. pdb tells me this has to
-# be done before the import of pip.vcs.
-from pip._vendor.urllib3.exceptions import DependencyWarning
-warnings.filterwarnings("ignore", category=DependencyWarning) # noqa
-
-# We want to inject the use of SecureTransport as early as possible so that any
-# references or sessions or what have you are ensured to have it, however we
-# only want to do this in the case that we're running on macOS and the linked
-# OpenSSL is too old to handle TLSv1.2
-try:
- import ssl
-except ImportError:
- pass
-else:
- # Checks for OpenSSL 1.0.1 on MacOS
- if sys.platform == "darwin" and ssl.OPENSSL_VERSION_NUMBER < 0x1000100f:
- try:
- from pip._vendor.urllib3.contrib import securetransport
- except (ImportError, OSError):
- pass
- else:
- securetransport.inject_into_urllib3()
-
-from pip import __version__
-from pip._internal import cmdoptions
-from pip._internal.exceptions import CommandError, PipError
-from pip._internal.utils.misc import get_installed_distributions, get_prog
-from pip._internal.utils import deprecation
-from pip._internal.vcs import git, mercurial, subversion, bazaar # noqa
-from pip._internal.baseparser import (
- ConfigOptionParser, UpdatingDefaultsHelpFormatter,
-)
-from pip._internal.commands import get_summaries, get_similar_commands
-from pip._internal.commands import commands_dict
-from pip._vendor.urllib3.exceptions import InsecureRequestWarning
-
-logger = logging.getLogger(__name__)
-
-# Hide the InsecureRequestWarning from urllib3
-warnings.filterwarnings("ignore", category=InsecureRequestWarning)
-
-
-def autocomplete():
- """Command and option completion for the main option parser (and options)
- and its subcommands (and options).
-
- Enable by sourcing one of the completion shell scripts (bash, zsh or fish).
- """
- # Don't complete if user hasn't sourced bash_completion file.
- if 'PIP_AUTO_COMPLETE' not in os.environ:
- return
- cwords = os.environ['COMP_WORDS'].split()[1:]
- cword = int(os.environ['COMP_CWORD'])
- try:
- current = cwords[cword - 1]
- except IndexError:
- current = ''
-
- subcommands = [cmd for cmd, summary in get_summaries()]
- options = []
- # subcommand
- try:
- subcommand_name = [w for w in cwords if w in subcommands][0]
- except IndexError:
- subcommand_name = None
-
- parser = create_main_parser()
- # subcommand options
- if subcommand_name:
- # special case: 'help' subcommand has no options
- if subcommand_name == 'help':
- sys.exit(1)
- # special case: list locally installed dists for show and uninstall
- should_list_installed = (
- subcommand_name in ['show', 'uninstall'] and
- not current.startswith('-')
- )
- if should_list_installed:
- installed = []
- lc = current.lower()
- for dist in get_installed_distributions(local_only=True):
- if dist.key.startswith(lc) and dist.key not in cwords[1:]:
- installed.append(dist.key)
- # if there are no dists installed, fall back to option completion
- if installed:
- for dist in installed:
- print(dist)
- sys.exit(1)
-
- subcommand = commands_dict[subcommand_name]()
-
- for opt in subcommand.parser.option_list_all:
- if opt.help != optparse.SUPPRESS_HELP:
- for opt_str in opt._long_opts + opt._short_opts:
- options.append((opt_str, opt.nargs))
-
- # filter out previously specified options from available options
- prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]]
- options = [(x, v) for (x, v) in options if x not in prev_opts]
- # filter options by current input
- options = [(k, v) for k, v in options if k.startswith(current)]
- for option in options:
- opt_label = option[0]
- # append '=' to options which require args
- if option[1] and option[0][:2] == "--":
- opt_label += '='
- print(opt_label)
- else:
- # show main parser options only when necessary
- if current.startswith('-') or current.startswith('--'):
- opts = [i.option_list for i in parser.option_groups]
- opts.append(parser.option_list)
- opts = (o for it in opts for o in it)
-
- for opt in opts:
- if opt.help != optparse.SUPPRESS_HELP:
- subcommands += opt._long_opts + opt._short_opts
-
- print(' '.join([x for x in subcommands if x.startswith(current)]))
- sys.exit(1)
-
-
-def create_main_parser():
- parser_kw = {
- 'usage': '\n%prog [options]',
- 'add_help_option': False,
- 'formatter': UpdatingDefaultsHelpFormatter(),
- 'name': 'global',
- 'prog': get_prog(),
- }
-
- parser = ConfigOptionParser(**parser_kw)
- parser.disable_interspersed_args()
-
- pip_pkg_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
- parser.version = 'pip %s from %s (python %s)' % (
- __version__, pip_pkg_dir, sys.version[:3],
- )
-
- # add the general options
- gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
- parser.add_option_group(gen_opts)
-
- parser.main = True # so the help formatter knows
-
- # create command listing for description
- command_summaries = get_summaries()
- description = [''] + ['%-27s %s' % (i, j) for i, j in command_summaries]
- parser.description = '\n'.join(description)
-
- return parser
-
-
-def parseopts(args):
- parser = create_main_parser()
-
- # Note: parser calls disable_interspersed_args(), so the result of this
- # call is to split the initial args into the general options before the
- # subcommand and everything else.
- # For example:
- # args: ['--timeout=5', 'install', '--user', 'INITools']
- # general_options: ['--timeout==5']
- # args_else: ['install', '--user', 'INITools']
- general_options, args_else = parser.parse_args(args)
-
- # --version
- if general_options.version:
- sys.stdout.write(parser.version)
- sys.stdout.write(os.linesep)
- sys.exit()
-
- # pip || pip help -> print_help()
- if not args_else or (args_else[0] == 'help' and len(args_else) == 1):
- parser.print_help()
- sys.exit()
-
- # the subcommand name
- cmd_name = args_else[0]
-
- if cmd_name not in commands_dict:
- guess = get_similar_commands(cmd_name)
-
- msg = ['unknown command "%s"' % cmd_name]
- if guess:
- msg.append('maybe you meant "%s"' % guess)
-
- raise CommandError(' - '.join(msg))
-
- # all the args without the subcommand
- cmd_args = args[:]
- cmd_args.remove(cmd_name)
-
- return cmd_name, cmd_args
-
-
-def check_isolated(args):
- isolated = False
-
- if "--isolated" in args:
- isolated = True
-
- return isolated
-
-
-def main(args=None):
- if args is None:
- args = sys.argv[1:]
-
- # Configure our deprecation warnings to be sent through loggers
- deprecation.install_warning_logger()
-
- autocomplete()
-
- try:
- cmd_name, cmd_args = parseopts(args)
- except PipError as exc:
- sys.stderr.write("ERROR: %s" % exc)
- sys.stderr.write(os.linesep)
- sys.exit(1)
-
- # Needed for locale.getpreferredencoding(False) to work
- # in pip._internal.utils.encoding.auto_decode
- try:
- locale.setlocale(locale.LC_ALL, '')
- except locale.Error as e:
- # setlocale can apparently crash if locale are uninitialized
- logger.debug("Ignoring error %s when setting locale", e)
- command = commands_dict[cmd_name](isolated=check_isolated(cmd_args))
- return command.main(cmd_args)
+#!/usr/bin/env python
+from __future__ import absolute_import
+
+import locale
+import logging
+import os
+import optparse
+import warnings
+
+import sys
+
+# 2016-06-17 barry@debian.org: urllib3 1.14 added optional support for socks,
+# but if invoked (i.e. imported), it will issue a warning to stderr if socks
+# isn't available. requests unconditionally imports urllib3's socks contrib
+# module, triggering this warning. The warning breaks DEP-8 tests (because of
+# the stderr output) and is just plain annoying in normal usage. I don't want
+# to add socks as yet another dependency for pip, nor do I want to allow-stder
+# in the DEP-8 tests, so just suppress the warning. pdb tells me this has to
+# be done before the import of pip.vcs.
+from pip._vendor.urllib3.exceptions import DependencyWarning
+warnings.filterwarnings("ignore", category=DependencyWarning) # noqa
+
+# We want to inject the use of SecureTransport as early as possible so that any
+# references or sessions or what have you are ensured to have it, however we
+# only want to do this in the case that we're running on macOS and the linked
+# OpenSSL is too old to handle TLSv1.2
+try:
+ import ssl
+except ImportError:
+ pass
+else:
+ # Checks for OpenSSL 1.0.1 on MacOS
+ if sys.platform == "darwin" and ssl.OPENSSL_VERSION_NUMBER < 0x1000100f:
+ try:
+ from pip._vendor.urllib3.contrib import securetransport
+ except (ImportError, OSError):
+ pass
+ else:
+ securetransport.inject_into_urllib3()
+
+from pip import __version__
+from pip._internal import cmdoptions
+from pip._internal.exceptions import CommandError, PipError
+from pip._internal.utils.misc import get_installed_distributions, get_prog
+from pip._internal.utils import deprecation
+from pip._internal.vcs import git, mercurial, subversion, bazaar # noqa
+from pip._internal.baseparser import (
+ ConfigOptionParser, UpdatingDefaultsHelpFormatter,
+)
+from pip._internal.commands import get_summaries, get_similar_commands
+from pip._internal.commands import commands_dict
+from pip._vendor.urllib3.exceptions import InsecureRequestWarning
+
+logger = logging.getLogger(__name__)
+
+# Hide the InsecureRequestWarning from urllib3
+warnings.filterwarnings("ignore", category=InsecureRequestWarning)
+
+
+def autocomplete():
+ """Command and option completion for the main option parser (and options)
+ and its subcommands (and options).
+
+ Enable by sourcing one of the completion shell scripts (bash, zsh or fish).
+ """
+ # Don't complete if user hasn't sourced bash_completion file.
+ if 'PIP_AUTO_COMPLETE' not in os.environ:
+ return
+ cwords = os.environ['COMP_WORDS'].split()[1:]
+ cword = int(os.environ['COMP_CWORD'])
+ try:
+ current = cwords[cword - 1]
+ except IndexError:
+ current = ''
+
+ subcommands = [cmd for cmd, summary in get_summaries()]
+ options = []
+ # subcommand
+ try:
+ subcommand_name = [w for w in cwords if w in subcommands][0]
+ except IndexError:
+ subcommand_name = None
+
+ parser = create_main_parser()
+ # subcommand options
+ if subcommand_name:
+ # special case: 'help' subcommand has no options
+ if subcommand_name == 'help':
+ sys.exit(1)
+ # special case: list locally installed dists for show and uninstall
+ should_list_installed = (
+ subcommand_name in ['show', 'uninstall'] and
+ not current.startswith('-')
+ )
+ if should_list_installed:
+ installed = []
+ lc = current.lower()
+ for dist in get_installed_distributions(local_only=True):
+ if dist.key.startswith(lc) and dist.key not in cwords[1:]:
+ installed.append(dist.key)
+ # if there are no dists installed, fall back to option completion
+ if installed:
+ for dist in installed:
+ print(dist)
+ sys.exit(1)
+
+ subcommand = commands_dict[subcommand_name]()
+
+ for opt in subcommand.parser.option_list_all:
+ if opt.help != optparse.SUPPRESS_HELP:
+ for opt_str in opt._long_opts + opt._short_opts:
+ options.append((opt_str, opt.nargs))
+
+ # filter out previously specified options from available options
+ prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]]
+ options = [(x, v) for (x, v) in options if x not in prev_opts]
+ # filter options by current input
+ options = [(k, v) for k, v in options if k.startswith(current)]
+ for option in options:
+ opt_label = option[0]
+ # append '=' to options which require args
+ if option[1] and option[0][:2] == "--":
+ opt_label += '='
+ print(opt_label)
+ else:
+ # show main parser options only when necessary
+ if current.startswith('-') or current.startswith('--'):
+ opts = [i.option_list for i in parser.option_groups]
+ opts.append(parser.option_list)
+ opts = (o for it in opts for o in it)
+
+ for opt in opts:
+ if opt.help != optparse.SUPPRESS_HELP:
+ subcommands += opt._long_opts + opt._short_opts
+
+ print(' '.join([x for x in subcommands if x.startswith(current)]))
+ sys.exit(1)
+
+
+def create_main_parser():
+ parser_kw = {
+ 'usage': '\n%prog [options]',
+ 'add_help_option': False,
+ 'formatter': UpdatingDefaultsHelpFormatter(),
+ 'name': 'global',
+ 'prog': get_prog(),
+ }
+
+ parser = ConfigOptionParser(**parser_kw)
+ parser.disable_interspersed_args()
+
+ pip_pkg_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+ parser.version = 'pip %s from %s (python %s)' % (
+ __version__, pip_pkg_dir, sys.version[:3],
+ )
+
+ # add the general options
+ gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
+ parser.add_option_group(gen_opts)
+
+ parser.main = True # so the help formatter knows
+
+ # create command listing for description
+ command_summaries = get_summaries()
+ description = [''] + ['%-27s %s' % (i, j) for i, j in command_summaries]
+ parser.description = '\n'.join(description)
+
+ return parser
+
+
+def parseopts(args):
+ parser = create_main_parser()
+
+ # Note: parser calls disable_interspersed_args(), so the result of this
+ # call is to split the initial args into the general options before the
+ # subcommand and everything else.
+ # For example:
+ # args: ['--timeout=5', 'install', '--user', 'INITools']
+ # general_options: ['--timeout==5']
+ # args_else: ['install', '--user', 'INITools']
+ general_options, args_else = parser.parse_args(args)
+
+ # --version
+ if general_options.version:
+ sys.stdout.write(parser.version)
+ sys.stdout.write(os.linesep)
+ sys.exit()
+
+ # pip || pip help -> print_help()
+ if not args_else or (args_else[0] == 'help' and len(args_else) == 1):
+ parser.print_help()
+ sys.exit()
+
+ # the subcommand name
+ cmd_name = args_else[0]
+
+ if cmd_name not in commands_dict:
+ guess = get_similar_commands(cmd_name)
+
+ msg = ['unknown command "%s"' % cmd_name]
+ if guess:
+ msg.append('maybe you meant "%s"' % guess)
+
+ raise CommandError(' - '.join(msg))
+
+ # all the args without the subcommand
+ cmd_args = args[:]
+ cmd_args.remove(cmd_name)
+
+ return cmd_name, cmd_args
+
+
+def check_isolated(args):
+ isolated = False
+
+ if "--isolated" in args:
+ isolated = True
+
+ return isolated
+
+
+def main(args=None):
+ if args is None:
+ args = sys.argv[1:]
+
+ # Configure our deprecation warnings to be sent through loggers
+ deprecation.install_warning_logger()
+
+ autocomplete()
+
+ try:
+ cmd_name, cmd_args = parseopts(args)
+ except PipError as exc:
+ sys.stderr.write("ERROR: %s" % exc)
+ sys.stderr.write(os.linesep)
+ sys.exit(1)
+
+ # Needed for locale.getpreferredencoding(False) to work
+ # in pip._internal.utils.encoding.auto_decode
+ try:
+ locale.setlocale(locale.LC_ALL, '')
+ except locale.Error as e:
+ # setlocale can apparently crash if locale are uninitialized
+ logger.debug("Ignoring error %s when setting locale", e)
+ command = commands_dict[cmd_name](isolated=check_isolated(cmd_args))
+ return command.main(cmd_args)
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/basecommand.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/basecommand.py
index 2503f36..e900928 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/basecommand.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/basecommand.py
@@ -1,373 +1,373 @@
-"""Base Command class, and related routines"""
-from __future__ import absolute_import
-
-import logging
-import logging.config
-import optparse
-import os
-import sys
-import warnings
-
-from pip._internal import cmdoptions
-from pip._internal.baseparser import (
- ConfigOptionParser, UpdatingDefaultsHelpFormatter,
-)
-from pip._internal.compat import WINDOWS
-from pip._internal.download import PipSession
-from pip._internal.exceptions import (
- BadCommand, CommandError, InstallationError, PreviousBuildDirError,
- UninstallationError,
-)
-from pip._internal.index import PackageFinder
-from pip._internal.locations import running_under_virtualenv
-from pip._internal.req.req_file import parse_requirements
-from pip._internal.req.req_install import InstallRequirement
-from pip._internal.status_codes import (
- ERROR, PREVIOUS_BUILD_DIR_ERROR, SUCCESS, UNKNOWN_ERROR,
- VIRTUALENV_NOT_FOUND,
-)
-from pip._internal.utils import deprecation
-from pip._internal.utils.logging import IndentingFormatter
-from pip._internal.utils.misc import get_prog, normalize_path
-from pip._internal.utils.outdated import pip_version_check
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import Optional
-
-__all__ = ['Command']
-
-logger = logging.getLogger(__name__)
-
-
-class Command(object):
- name = None # type: Optional[str]
- usage = None # type: Optional[str]
- hidden = False # type: bool
- ignore_require_venv = False # type: bool
- log_streams = ("ext://sys.stdout", "ext://sys.stderr")
-
- def __init__(self, isolated=False):
- parser_kw = {
- 'usage': self.usage,
- 'prog': '%s %s' % (get_prog(), self.name),
- 'formatter': UpdatingDefaultsHelpFormatter(),
- 'add_help_option': False,
- 'name': self.name,
- 'description': self.__doc__,
- 'isolated': isolated,
- }
-
- self.parser = ConfigOptionParser(**parser_kw)
-
- # Commands should add options to this option group
- optgroup_name = '%s Options' % self.name.capitalize()
- self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)
-
- # Add the general options
- gen_opts = cmdoptions.make_option_group(
- cmdoptions.general_group,
- self.parser,
- )
- self.parser.add_option_group(gen_opts)
-
- def _build_session(self, options, retries=None, timeout=None):
- session = PipSession(
- cache=(
- normalize_path(os.path.join(options.cache_dir, "http"))
- if options.cache_dir else None
- ),
- retries=retries if retries is not None else options.retries,
- insecure_hosts=options.trusted_hosts,
- )
-
- # Handle custom ca-bundles from the user
- if options.cert:
- session.verify = options.cert
-
- # Handle SSL client certificate
- if options.client_cert:
- session.cert = options.client_cert
-
- # Handle timeouts
- if options.timeout or timeout:
- session.timeout = (
- timeout if timeout is not None else options.timeout
- )
-
- # Handle configured proxies
- if options.proxy:
- session.proxies = {
- "http": options.proxy,
- "https": options.proxy,
- }
-
- # Determine if we can prompt the user for authentication or not
- session.auth.prompting = not options.no_input
-
- return session
-
- def parse_args(self, args):
- # factored out for testability
- return self.parser.parse_args(args)
-
- def main(self, args):
- options, args = self.parse_args(args)
-
- # Set verbosity so that it can be used elsewhere.
- self.verbosity = options.verbose - options.quiet
-
- if self.verbosity >= 1:
- level = "DEBUG"
- elif self.verbosity == -1:
- level = "WARNING"
- elif self.verbosity == -2:
- level = "ERROR"
- elif self.verbosity <= -3:
- level = "CRITICAL"
- else:
- level = "INFO"
-
- # The root logger should match the "console" level *unless* we
- # specified "--log" to send debug logs to a file.
- root_level = level
- if options.log:
- root_level = "DEBUG"
-
- logger_class = "pip._internal.utils.logging.ColorizedStreamHandler"
- handler_class = "pip._internal.utils.logging.BetterRotatingFileHandler"
-
- logging.config.dictConfig({
- "version": 1,
- "disable_existing_loggers": False,
- "filters": {
- "exclude_warnings": {
- "()": "pip._internal.utils.logging.MaxLevelFilter",
- "level": logging.WARNING,
- },
- },
- "formatters": {
- "indent": {
- "()": IndentingFormatter,
- "format": "%(message)s",
- },
- },
- "handlers": {
- "console": {
- "level": level,
- "class": logger_class,
- "no_color": options.no_color,
- "stream": self.log_streams[0],
- "filters": ["exclude_warnings"],
- "formatter": "indent",
- },
- "console_errors": {
- "level": "WARNING",
- "class": logger_class,
- "no_color": options.no_color,
- "stream": self.log_streams[1],
- "formatter": "indent",
- },
- "user_log": {
- "level": "DEBUG",
- "class": handler_class,
- "filename": options.log or "/dev/null",
- "delay": True,
- "formatter": "indent",
- },
- },
- "root": {
- "level": root_level,
- "handlers": list(filter(None, [
- "console",
- "console_errors",
- "user_log" if options.log else None,
- ])),
- },
- # Disable any logging besides WARNING unless we have DEBUG level
- # logging enabled. These use both pip._vendor and the bare names
- # for the case where someone unbundles our libraries.
- "loggers": {
- name: {
- "level": (
- "WARNING" if level in ["INFO", "ERROR"] else "DEBUG"
- )
- } for name in [
- "pip._vendor", "distlib", "requests", "urllib3"
- ]
- },
- })
-
- if sys.version_info[:2] == (3, 3):
- warnings.warn(
- "Python 3.3 supported has been deprecated and support for it "
- "will be dropped in the future. Please upgrade your Python.",
- deprecation.RemovedInPip11Warning,
- )
-
- # TODO: try to get these passing down from the command?
- # without resorting to os.environ to hold these.
-
- if options.no_input:
- os.environ['PIP_NO_INPUT'] = '1'
-
- if options.exists_action:
- os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action)
-
- if options.require_venv and not self.ignore_require_venv:
- # If a venv is required check if it can really be found
- if not running_under_virtualenv():
- logger.critical(
- 'Could not find an activated virtualenv (required).'
- )
- sys.exit(VIRTUALENV_NOT_FOUND)
-
- original_root_handlers = set(logging.root.handlers)
-
- try:
- status = self.run(options, args)
- # FIXME: all commands should return an exit status
- # and when it is done, isinstance is not needed anymore
- if isinstance(status, int):
- return status
- except PreviousBuildDirError as exc:
- logger.critical(str(exc))
- logger.debug('Exception information:', exc_info=True)
-
- return PREVIOUS_BUILD_DIR_ERROR
- except (InstallationError, UninstallationError, BadCommand) as exc:
- logger.critical(str(exc))
- logger.debug('Exception information:', exc_info=True)
-
- return ERROR
- except CommandError as exc:
- logger.critical('ERROR: %s', exc)
- logger.debug('Exception information:', exc_info=True)
-
- return ERROR
- except KeyboardInterrupt:
- logger.critical('Operation cancelled by user')
- logger.debug('Exception information:', exc_info=True)
-
- return ERROR
- except:
- logger.critical('Exception:', exc_info=True)
-
- return UNKNOWN_ERROR
- finally:
- # Check if we're using the latest version of pip available
- if (not options.disable_pip_version_check and not
- getattr(options, "no_index", False)):
- with self._build_session(
- options,
- retries=0,
- timeout=min(5, options.timeout)) as session:
- pip_version_check(session, options)
- # Avoid leaking loggers
- for handler in set(logging.root.handlers) - original_root_handlers:
- # this method benefit from the Logger class internal lock
- logging.root.removeHandler(handler)
-
- return SUCCESS
-
-
-class RequirementCommand(Command):
-
- @staticmethod
- def populate_requirement_set(requirement_set, args, options, finder,
- session, name, wheel_cache):
- """
- Marshal cmd line args into a requirement set.
- """
- # NOTE: As a side-effect, options.require_hashes and
- # requirement_set.require_hashes may be updated
-
- for filename in options.constraints:
- for req_to_add in parse_requirements(
- filename,
- constraint=True, finder=finder, options=options,
- session=session, wheel_cache=wheel_cache):
- req_to_add.is_direct = True
- requirement_set.add_requirement(req_to_add)
-
- for req in args:
- req_to_add = InstallRequirement.from_line(
- req, None, isolated=options.isolated_mode,
- wheel_cache=wheel_cache
- )
- req_to_add.is_direct = True
- requirement_set.add_requirement(req_to_add)
-
- for req in options.editables:
- req_to_add = InstallRequirement.from_editable(
- req,
- isolated=options.isolated_mode,
- wheel_cache=wheel_cache
- )
- req_to_add.is_direct = True
- requirement_set.add_requirement(req_to_add)
-
- for filename in options.requirements:
- for req_to_add in parse_requirements(
- filename,
- finder=finder, options=options, session=session,
- wheel_cache=wheel_cache):
- req_to_add.is_direct = True
- requirement_set.add_requirement(req_to_add)
- # If --require-hashes was a line in a requirements file, tell
- # RequirementSet about it:
- requirement_set.require_hashes = options.require_hashes
-
- if not (args or options.editables or options.requirements):
- opts = {'name': name}
- if options.find_links:
- raise CommandError(
- 'You must give at least one requirement to %(name)s '
- '(maybe you meant "pip %(name)s %(links)s"?)' %
- dict(opts, links=' '.join(options.find_links)))
- else:
- raise CommandError(
- 'You must give at least one requirement to %(name)s '
- '(see "pip help %(name)s")' % opts)
-
- # On Windows, any operation modifying pip should be run as:
- # python -m pip ...
- # See https://github.com/pypa/pip/issues/1299 for more discussion
- should_show_use_python_msg = (
- WINDOWS and
- requirement_set.has_requirement("pip") and
- os.path.basename(sys.argv[0]).startswith("pip")
- )
- if should_show_use_python_msg:
- new_command = [
- sys.executable, "-m", "pip"
- ] + sys.argv[1:]
- raise CommandError(
- 'To modify pip, please run the following command:\n{}'
- .format(" ".join(new_command))
- )
-
- def _build_package_finder(self, options, session,
- platform=None, python_versions=None,
- abi=None, implementation=None):
- """
- Create a package finder appropriate to this requirement command.
- """
- index_urls = [options.index_url] + options.extra_index_urls
- if options.no_index:
- logger.debug('Ignoring indexes: %s', ','.join(index_urls))
- index_urls = []
-
- return PackageFinder(
- find_links=options.find_links,
- format_control=options.format_control,
- index_urls=index_urls,
- trusted_hosts=options.trusted_hosts,
- allow_all_prereleases=options.pre,
- process_dependency_links=options.process_dependency_links,
- session=session,
- platform=platform,
- versions=python_versions,
- abi=abi,
- implementation=implementation,
- )
+"""Base Command class, and related routines"""
+from __future__ import absolute_import
+
+import logging
+import logging.config
+import optparse
+import os
+import sys
+import warnings
+
+from pip._internal import cmdoptions
+from pip._internal.baseparser import (
+ ConfigOptionParser, UpdatingDefaultsHelpFormatter,
+)
+from pip._internal.compat import WINDOWS
+from pip._internal.download import PipSession
+from pip._internal.exceptions import (
+ BadCommand, CommandError, InstallationError, PreviousBuildDirError,
+ UninstallationError,
+)
+from pip._internal.index import PackageFinder
+from pip._internal.locations import running_under_virtualenv
+from pip._internal.req.req_file import parse_requirements
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.status_codes import (
+ ERROR, PREVIOUS_BUILD_DIR_ERROR, SUCCESS, UNKNOWN_ERROR,
+ VIRTUALENV_NOT_FOUND,
+)
+from pip._internal.utils import deprecation
+from pip._internal.utils.logging import IndentingFormatter
+from pip._internal.utils.misc import get_prog, normalize_path
+from pip._internal.utils.outdated import pip_version_check
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional
+
+__all__ = ['Command']
+
+logger = logging.getLogger(__name__)
+
+
+class Command(object):
+ name = None # type: Optional[str]
+ usage = None # type: Optional[str]
+ hidden = False # type: bool
+ ignore_require_venv = False # type: bool
+ log_streams = ("ext://sys.stdout", "ext://sys.stderr")
+
+ def __init__(self, isolated=False):
+ parser_kw = {
+ 'usage': self.usage,
+ 'prog': '%s %s' % (get_prog(), self.name),
+ 'formatter': UpdatingDefaultsHelpFormatter(),
+ 'add_help_option': False,
+ 'name': self.name,
+ 'description': self.__doc__,
+ 'isolated': isolated,
+ }
+
+ self.parser = ConfigOptionParser(**parser_kw)
+
+ # Commands should add options to this option group
+ optgroup_name = '%s Options' % self.name.capitalize()
+ self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)
+
+ # Add the general options
+ gen_opts = cmdoptions.make_option_group(
+ cmdoptions.general_group,
+ self.parser,
+ )
+ self.parser.add_option_group(gen_opts)
+
+ def _build_session(self, options, retries=None, timeout=None):
+ session = PipSession(
+ cache=(
+ normalize_path(os.path.join(options.cache_dir, "http"))
+ if options.cache_dir else None
+ ),
+ retries=retries if retries is not None else options.retries,
+ insecure_hosts=options.trusted_hosts,
+ )
+
+ # Handle custom ca-bundles from the user
+ if options.cert:
+ session.verify = options.cert
+
+ # Handle SSL client certificate
+ if options.client_cert:
+ session.cert = options.client_cert
+
+ # Handle timeouts
+ if options.timeout or timeout:
+ session.timeout = (
+ timeout if timeout is not None else options.timeout
+ )
+
+ # Handle configured proxies
+ if options.proxy:
+ session.proxies = {
+ "http": options.proxy,
+ "https": options.proxy,
+ }
+
+ # Determine if we can prompt the user for authentication or not
+ session.auth.prompting = not options.no_input
+
+ return session
+
+ def parse_args(self, args):
+ # factored out for testability
+ return self.parser.parse_args(args)
+
+ def main(self, args):
+ options, args = self.parse_args(args)
+
+ # Set verbosity so that it can be used elsewhere.
+ self.verbosity = options.verbose - options.quiet
+
+ if self.verbosity >= 1:
+ level = "DEBUG"
+ elif self.verbosity == -1:
+ level = "WARNING"
+ elif self.verbosity == -2:
+ level = "ERROR"
+ elif self.verbosity <= -3:
+ level = "CRITICAL"
+ else:
+ level = "INFO"
+
+ # The root logger should match the "console" level *unless* we
+ # specified "--log" to send debug logs to a file.
+ root_level = level
+ if options.log:
+ root_level = "DEBUG"
+
+ logger_class = "pip._internal.utils.logging.ColorizedStreamHandler"
+ handler_class = "pip._internal.utils.logging.BetterRotatingFileHandler"
+
+ logging.config.dictConfig({
+ "version": 1,
+ "disable_existing_loggers": False,
+ "filters": {
+ "exclude_warnings": {
+ "()": "pip._internal.utils.logging.MaxLevelFilter",
+ "level": logging.WARNING,
+ },
+ },
+ "formatters": {
+ "indent": {
+ "()": IndentingFormatter,
+ "format": "%(message)s",
+ },
+ },
+ "handlers": {
+ "console": {
+ "level": level,
+ "class": logger_class,
+ "no_color": options.no_color,
+ "stream": self.log_streams[0],
+ "filters": ["exclude_warnings"],
+ "formatter": "indent",
+ },
+ "console_errors": {
+ "level": "WARNING",
+ "class": logger_class,
+ "no_color": options.no_color,
+ "stream": self.log_streams[1],
+ "formatter": "indent",
+ },
+ "user_log": {
+ "level": "DEBUG",
+ "class": handler_class,
+ "filename": options.log or "/dev/null",
+ "delay": True,
+ "formatter": "indent",
+ },
+ },
+ "root": {
+ "level": root_level,
+ "handlers": list(filter(None, [
+ "console",
+ "console_errors",
+ "user_log" if options.log else None,
+ ])),
+ },
+ # Disable any logging besides WARNING unless we have DEBUG level
+ # logging enabled. These use both pip._vendor and the bare names
+ # for the case where someone unbundles our libraries.
+ "loggers": {
+ name: {
+ "level": (
+ "WARNING" if level in ["INFO", "ERROR"] else "DEBUG"
+ )
+ } for name in [
+ "pip._vendor", "distlib", "requests", "urllib3"
+ ]
+ },
+ })
+
+ if sys.version_info[:2] == (3, 3):
+ warnings.warn(
+ "Python 3.3 supported has been deprecated and support for it "
+ "will be dropped in the future. Please upgrade your Python.",
+ deprecation.RemovedInPip11Warning,
+ )
+
+ # TODO: try to get these passing down from the command?
+ # without resorting to os.environ to hold these.
+
+ if options.no_input:
+ os.environ['PIP_NO_INPUT'] = '1'
+
+ if options.exists_action:
+ os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action)
+
+ if options.require_venv and not self.ignore_require_venv:
+ # If a venv is required check if it can really be found
+ if not running_under_virtualenv():
+ logger.critical(
+ 'Could not find an activated virtualenv (required).'
+ )
+ sys.exit(VIRTUALENV_NOT_FOUND)
+
+ original_root_handlers = set(logging.root.handlers)
+
+ try:
+ status = self.run(options, args)
+ # FIXME: all commands should return an exit status
+ # and when it is done, isinstance is not needed anymore
+ if isinstance(status, int):
+ return status
+ except PreviousBuildDirError as exc:
+ logger.critical(str(exc))
+ logger.debug('Exception information:', exc_info=True)
+
+ return PREVIOUS_BUILD_DIR_ERROR
+ except (InstallationError, UninstallationError, BadCommand) as exc:
+ logger.critical(str(exc))
+ logger.debug('Exception information:', exc_info=True)
+
+ return ERROR
+ except CommandError as exc:
+ logger.critical('ERROR: %s', exc)
+ logger.debug('Exception information:', exc_info=True)
+
+ return ERROR
+ except KeyboardInterrupt:
+ logger.critical('Operation cancelled by user')
+ logger.debug('Exception information:', exc_info=True)
+
+ return ERROR
+ except:
+ logger.critical('Exception:', exc_info=True)
+
+ return UNKNOWN_ERROR
+ finally:
+ # Check if we're using the latest version of pip available
+ if (not options.disable_pip_version_check and not
+ getattr(options, "no_index", False)):
+ with self._build_session(
+ options,
+ retries=0,
+ timeout=min(5, options.timeout)) as session:
+ pip_version_check(session, options)
+ # Avoid leaking loggers
+ for handler in set(logging.root.handlers) - original_root_handlers:
+ # this method benefit from the Logger class internal lock
+ logging.root.removeHandler(handler)
+
+ return SUCCESS
+
+
+class RequirementCommand(Command):
+
+ @staticmethod
+ def populate_requirement_set(requirement_set, args, options, finder,
+ session, name, wheel_cache):
+ """
+ Marshal cmd line args into a requirement set.
+ """
+ # NOTE: As a side-effect, options.require_hashes and
+ # requirement_set.require_hashes may be updated
+
+ for filename in options.constraints:
+ for req_to_add in parse_requirements(
+ filename,
+ constraint=True, finder=finder, options=options,
+ session=session, wheel_cache=wheel_cache):
+ req_to_add.is_direct = True
+ requirement_set.add_requirement(req_to_add)
+
+ for req in args:
+ req_to_add = InstallRequirement.from_line(
+ req, None, isolated=options.isolated_mode,
+ wheel_cache=wheel_cache
+ )
+ req_to_add.is_direct = True
+ requirement_set.add_requirement(req_to_add)
+
+ for req in options.editables:
+ req_to_add = InstallRequirement.from_editable(
+ req,
+ isolated=options.isolated_mode,
+ wheel_cache=wheel_cache
+ )
+ req_to_add.is_direct = True
+ requirement_set.add_requirement(req_to_add)
+
+ for filename in options.requirements:
+ for req_to_add in parse_requirements(
+ filename,
+ finder=finder, options=options, session=session,
+ wheel_cache=wheel_cache):
+ req_to_add.is_direct = True
+ requirement_set.add_requirement(req_to_add)
+ # If --require-hashes was a line in a requirements file, tell
+ # RequirementSet about it:
+ requirement_set.require_hashes = options.require_hashes
+
+ if not (args or options.editables or options.requirements):
+ opts = {'name': name}
+ if options.find_links:
+ raise CommandError(
+ 'You must give at least one requirement to %(name)s '
+ '(maybe you meant "pip %(name)s %(links)s"?)' %
+ dict(opts, links=' '.join(options.find_links)))
+ else:
+ raise CommandError(
+ 'You must give at least one requirement to %(name)s '
+ '(see "pip help %(name)s")' % opts)
+
+ # On Windows, any operation modifying pip should be run as:
+ # python -m pip ...
+ # See https://github.com/pypa/pip/issues/1299 for more discussion
+ should_show_use_python_msg = (
+ WINDOWS and
+ requirement_set.has_requirement("pip") and
+ os.path.basename(sys.argv[0]).startswith("pip")
+ )
+ if should_show_use_python_msg:
+ new_command = [
+ sys.executable, "-m", "pip"
+ ] + sys.argv[1:]
+ raise CommandError(
+ 'To modify pip, please run the following command:\n{}'
+ .format(" ".join(new_command))
+ )
+
+ def _build_package_finder(self, options, session,
+ platform=None, python_versions=None,
+ abi=None, implementation=None):
+ """
+ Create a package finder appropriate to this requirement command.
+ """
+ index_urls = [options.index_url] + options.extra_index_urls
+ if options.no_index:
+ logger.debug('Ignoring indexes: %s', ','.join(index_urls))
+ index_urls = []
+
+ return PackageFinder(
+ find_links=options.find_links,
+ format_control=options.format_control,
+ index_urls=index_urls,
+ trusted_hosts=options.trusted_hosts,
+ allow_all_prereleases=options.pre,
+ process_dependency_links=options.process_dependency_links,
+ session=session,
+ platform=platform,
+ versions=python_versions,
+ abi=abi,
+ implementation=implementation,
+ )
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/baseparser.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/baseparser.py
index 9a8d129..ed28a1b 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/baseparser.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/baseparser.py
@@ -1,240 +1,240 @@
-"""Base option parser setup"""
-from __future__ import absolute_import
-
-import logging
-import optparse
-import sys
-import textwrap
-from distutils.util import strtobool
-
-from pip._vendor.six import string_types
-
-from pip._internal.compat import get_terminal_size
-from pip._internal.configuration import Configuration, ConfigurationError
-
-logger = logging.getLogger(__name__)
-
-
-class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
- """A prettier/less verbose help formatter for optparse."""
-
- def __init__(self, *args, **kwargs):
- # help position must be aligned with __init__.parseopts.description
- kwargs['max_help_position'] = 30
- kwargs['indent_increment'] = 1
- kwargs['width'] = get_terminal_size()[0] - 2
- optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs)
-
- def format_option_strings(self, option):
- return self._format_option_strings(option, ' <%s>', ', ')
-
- def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '):
- """
- Return a comma-separated list of option strings and metavars.
-
- :param option: tuple of (short opt, long opt), e.g: ('-f', '--format')
- :param mvarfmt: metavar format string - evaluated as mvarfmt % metavar
- :param optsep: separator
- """
- opts = []
-
- if option._short_opts:
- opts.append(option._short_opts[0])
- if option._long_opts:
- opts.append(option._long_opts[0])
- if len(opts) > 1:
- opts.insert(1, optsep)
-
- if option.takes_value():
- metavar = option.metavar or option.dest.lower()
- opts.append(mvarfmt % metavar.lower())
-
- return ''.join(opts)
-
- def format_heading(self, heading):
- if heading == 'Options':
- return ''
- return heading + ':\n'
-
- def format_usage(self, usage):
- """
- Ensure there is only one newline between usage and the first heading
- if there is no description.
- """
- msg = '\nUsage: %s\n' % self.indent_lines(textwrap.dedent(usage), " ")
- return msg
-
- def format_description(self, description):
- # leave full control over description to us
- if description:
- if hasattr(self.parser, 'main'):
- label = 'Commands'
- else:
- label = 'Description'
- # some doc strings have initial newlines, some don't
- description = description.lstrip('\n')
- # some doc strings have final newlines and spaces, some don't
- description = description.rstrip()
- # dedent, then reindent
- description = self.indent_lines(textwrap.dedent(description), " ")
- description = '%s:\n%s\n' % (label, description)
- return description
- else:
- return ''
-
- def format_epilog(self, epilog):
- # leave full control over epilog to us
- if epilog:
- return epilog
- else:
- return ''
-
- def indent_lines(self, text, indent):
- new_lines = [indent + line for line in text.split('\n')]
- return "\n".join(new_lines)
-
-
-class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
- """Custom help formatter for use in ConfigOptionParser.
-
- This is updates the defaults before expanding them, allowing
- them to show up correctly in the help listing.
- """
-
- def expand_default(self, option):
- if self.parser is not None:
- self.parser._update_defaults(self.parser.defaults)
- return optparse.IndentedHelpFormatter.expand_default(self, option)
-
-
-class CustomOptionParser(optparse.OptionParser):
-
- def insert_option_group(self, idx, *args, **kwargs):
- """Insert an OptionGroup at a given position."""
- group = self.add_option_group(*args, **kwargs)
-
- self.option_groups.pop()
- self.option_groups.insert(idx, group)
-
- return group
-
- @property
- def option_list_all(self):
- """Get a list of all options, including those in option groups."""
- res = self.option_list[:]
- for i in self.option_groups:
- res.extend(i.option_list)
-
- return res
-
-
-class ConfigOptionParser(CustomOptionParser):
- """Custom option parser which updates its defaults by checking the
- configuration files and environmental variables"""
-
- def __init__(self, *args, **kwargs):
- self.name = kwargs.pop('name')
-
- isolated = kwargs.pop("isolated", False)
- self.config = Configuration(isolated)
-
- assert self.name
- optparse.OptionParser.__init__(self, *args, **kwargs)
-
- def check_default(self, option, key, val):
- try:
- return option.check_value(key, val)
- except optparse.OptionValueError as exc:
- print("An error occurred during configuration: %s" % exc)
- sys.exit(3)
-
- def _get_ordered_configuration_items(self):
- # Configuration gives keys in an unordered manner. Order them.
- override_order = ["global", self.name, ":env:"]
-
- # Pool the options into different groups
- section_items = {name: [] for name in override_order}
- for section_key, val in self.config.items():
- # ignore empty values
- if not val:
- logger.debug(
- "Ignoring configuration key '%s' as it's value is empty.",
- section_key
- )
- continue
-
- section, key = section_key.split(".", 1)
- if section in override_order:
- section_items[section].append((key, val))
-
- # Yield each group in their override order
- for section in override_order:
- for key, val in section_items[section]:
- yield key, val
-
- def _update_defaults(self, defaults):
- """Updates the given defaults with values from the config files and
- the environ. Does a little special handling for certain types of
- options (lists)."""
-
- # Accumulate complex default state.
- self.values = optparse.Values(self.defaults)
- late_eval = set()
- # Then set the options with those values
- for key, val in self._get_ordered_configuration_items():
- # '--' because configuration supports only long names
- option = self.get_option('--' + key)
-
- # Ignore options not present in this parser. E.g. non-globals put
- # in [global] by users that want them to apply to all applicable
- # commands.
- if option is None:
- continue
-
- if option.action in ('store_true', 'store_false', 'count'):
- val = strtobool(val)
- elif option.action == 'append':
- val = val.split()
- val = [self.check_default(option, key, v) for v in val]
- elif option.action == 'callback':
- late_eval.add(option.dest)
- opt_str = option.get_opt_string()
- val = option.convert_value(opt_str, val)
- # From take_action
- args = option.callback_args or ()
- kwargs = option.callback_kwargs or {}
- option.callback(option, opt_str, val, self, *args, **kwargs)
- else:
- val = self.check_default(option, key, val)
-
- defaults[option.dest] = val
-
- for key in late_eval:
- defaults[key] = getattr(self.values, key)
- self.values = None
- return defaults
-
- def get_default_values(self):
- """Overriding to make updating the defaults after instantiation of
- the option parser possible, _update_defaults() does the dirty work."""
- if not self.process_default_values:
- # Old, pre-Optik 1.5 behaviour.
- return optparse.Values(self.defaults)
-
- # Load the configuration, or error out in case of an error
- try:
- self.config.load()
- except ConfigurationError as err:
- self.exit(2, err.args[0])
-
- defaults = self._update_defaults(self.defaults.copy()) # ours
- for option in self._get_all_options():
- default = defaults.get(option.dest)
- if isinstance(default, string_types):
- opt_str = option.get_opt_string()
- defaults[option.dest] = option.check_value(opt_str, default)
- return optparse.Values(defaults)
-
- def error(self, msg):
- self.print_usage(sys.stderr)
- self.exit(2, "%s\n" % msg)
+"""Base option parser setup"""
+from __future__ import absolute_import
+
+import logging
+import optparse
+import sys
+import textwrap
+from distutils.util import strtobool
+
+from pip._vendor.six import string_types
+
+from pip._internal.compat import get_terminal_size
+from pip._internal.configuration import Configuration, ConfigurationError
+
+logger = logging.getLogger(__name__)
+
+
+class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
+ """A prettier/less verbose help formatter for optparse."""
+
+ def __init__(self, *args, **kwargs):
+ # help position must be aligned with __init__.parseopts.description
+ kwargs['max_help_position'] = 30
+ kwargs['indent_increment'] = 1
+ kwargs['width'] = get_terminal_size()[0] - 2
+ optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs)
+
+ def format_option_strings(self, option):
+ return self._format_option_strings(option, ' <%s>', ', ')
+
+ def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '):
+ """
+ Return a comma-separated list of option strings and metavars.
+
+ :param option: tuple of (short opt, long opt), e.g: ('-f', '--format')
+ :param mvarfmt: metavar format string - evaluated as mvarfmt % metavar
+ :param optsep: separator
+ """
+ opts = []
+
+ if option._short_opts:
+ opts.append(option._short_opts[0])
+ if option._long_opts:
+ opts.append(option._long_opts[0])
+ if len(opts) > 1:
+ opts.insert(1, optsep)
+
+ if option.takes_value():
+ metavar = option.metavar or option.dest.lower()
+ opts.append(mvarfmt % metavar.lower())
+
+ return ''.join(opts)
+
+ def format_heading(self, heading):
+ if heading == 'Options':
+ return ''
+ return heading + ':\n'
+
+ def format_usage(self, usage):
+ """
+ Ensure there is only one newline between usage and the first heading
+ if there is no description.
+ """
+ msg = '\nUsage: %s\n' % self.indent_lines(textwrap.dedent(usage), " ")
+ return msg
+
+ def format_description(self, description):
+ # leave full control over description to us
+ if description:
+ if hasattr(self.parser, 'main'):
+ label = 'Commands'
+ else:
+ label = 'Description'
+ # some doc strings have initial newlines, some don't
+ description = description.lstrip('\n')
+ # some doc strings have final newlines and spaces, some don't
+ description = description.rstrip()
+ # dedent, then reindent
+ description = self.indent_lines(textwrap.dedent(description), " ")
+ description = '%s:\n%s\n' % (label, description)
+ return description
+ else:
+ return ''
+
+ def format_epilog(self, epilog):
+ # leave full control over epilog to us
+ if epilog:
+ return epilog
+ else:
+ return ''
+
+ def indent_lines(self, text, indent):
+ new_lines = [indent + line for line in text.split('\n')]
+ return "\n".join(new_lines)
+
+
+class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
+ """Custom help formatter for use in ConfigOptionParser.
+
+    This updates the defaults before expanding them, allowing
+ them to show up correctly in the help listing.
+ """
+
+ def expand_default(self, option):
+ if self.parser is not None:
+ self.parser._update_defaults(self.parser.defaults)
+ return optparse.IndentedHelpFormatter.expand_default(self, option)
+
+
+class CustomOptionParser(optparse.OptionParser):
+
+ def insert_option_group(self, idx, *args, **kwargs):
+ """Insert an OptionGroup at a given position."""
+ group = self.add_option_group(*args, **kwargs)
+
+ self.option_groups.pop()
+ self.option_groups.insert(idx, group)
+
+ return group
+
+ @property
+ def option_list_all(self):
+ """Get a list of all options, including those in option groups."""
+ res = self.option_list[:]
+ for i in self.option_groups:
+ res.extend(i.option_list)
+
+ return res
+
+
+class ConfigOptionParser(CustomOptionParser):
+ """Custom option parser which updates its defaults by checking the
+    configuration files and environment variables."""
+
+ def __init__(self, *args, **kwargs):
+ self.name = kwargs.pop('name')
+
+ isolated = kwargs.pop("isolated", False)
+ self.config = Configuration(isolated)
+
+ assert self.name
+ optparse.OptionParser.__init__(self, *args, **kwargs)
+
+ def check_default(self, option, key, val):
+ try:
+ return option.check_value(key, val)
+ except optparse.OptionValueError as exc:
+ print("An error occurred during configuration: %s" % exc)
+ sys.exit(3)
+
+ def _get_ordered_configuration_items(self):
+ # Configuration gives keys in an unordered manner. Order them.
+ override_order = ["global", self.name, ":env:"]
+
+ # Pool the options into different groups
+ section_items = {name: [] for name in override_order}
+ for section_key, val in self.config.items():
+ # ignore empty values
+ if not val:
+ logger.debug(
+ "Ignoring configuration key '%s' as it's value is empty.",
+ section_key
+ )
+ continue
+
+ section, key = section_key.split(".", 1)
+ if section in override_order:
+ section_items[section].append((key, val))
+
+ # Yield each group in their override order
+ for section in override_order:
+ for key, val in section_items[section]:
+ yield key, val
+
+ def _update_defaults(self, defaults):
+ """Updates the given defaults with values from the config files and
+ the environ. Does a little special handling for certain types of
+ options (lists)."""
+
+ # Accumulate complex default state.
+ self.values = optparse.Values(self.defaults)
+ late_eval = set()
+ # Then set the options with those values
+ for key, val in self._get_ordered_configuration_items():
+ # '--' because configuration supports only long names
+ option = self.get_option('--' + key)
+
+ # Ignore options not present in this parser. E.g. non-globals put
+ # in [global] by users that want them to apply to all applicable
+ # commands.
+ if option is None:
+ continue
+
+ if option.action in ('store_true', 'store_false', 'count'):
+ val = strtobool(val)
+ elif option.action == 'append':
+ val = val.split()
+ val = [self.check_default(option, key, v) for v in val]
+ elif option.action == 'callback':
+ late_eval.add(option.dest)
+ opt_str = option.get_opt_string()
+ val = option.convert_value(opt_str, val)
+ # From take_action
+ args = option.callback_args or ()
+ kwargs = option.callback_kwargs or {}
+ option.callback(option, opt_str, val, self, *args, **kwargs)
+ else:
+ val = self.check_default(option, key, val)
+
+ defaults[option.dest] = val
+
+ for key in late_eval:
+ defaults[key] = getattr(self.values, key)
+ self.values = None
+ return defaults
+
+ def get_default_values(self):
+ """Overriding to make updating the defaults after instantiation of
+        the option parser possible; _update_defaults() does the dirty work."""
+ if not self.process_default_values:
+ # Old, pre-Optik 1.5 behaviour.
+ return optparse.Values(self.defaults)
+
+        # Load the configuration, or error out if it cannot be loaded
+ try:
+ self.config.load()
+ except ConfigurationError as err:
+ self.exit(2, err.args[0])
+
+ defaults = self._update_defaults(self.defaults.copy()) # ours
+ for option in self._get_all_options():
+ default = defaults.get(option.dest)
+ if isinstance(default, string_types):
+ opt_str = option.get_opt_string()
+ defaults[option.dest] = option.check_value(opt_str, default)
+ return optparse.Values(defaults)
+
+ def error(self, msg):
+ self.print_usage(sys.stderr)
+ self.exit(2, "%s\n" % msg)
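The override logic above is easier to see in isolation. Below is a minimal standalone sketch (assumed section names and toy values, not pip's actual Configuration machinery) of the order _get_ordered_configuration_items applies: [global] first, then the command's own section, then :env:, so later sources win.

    # Toy stand-ins: "install" plays the role of self.name, and the dict
    # replaces the items a real Configuration object would yield.
    override_order = ["global", "install", ":env:"]

    config_items = {
        "global.timeout": "30",    # [global] section of a config file
        "install.timeout": "60",   # the command's own section
        ":env:.timeout": "15",     # e.g. a PIP_TIMEOUT environment variable
    }

    defaults = {}
    for section in override_order:
        for section_key, val in config_items.items():
            sect, _, key = section_key.partition(".")
            if sect == section:
                defaults[key] = val

    print(defaults)  # {'timeout': '15'}: the :env: value overrides the rest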
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/build_env.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/build_env.py
index 791d734..8ad7735 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/build_env.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/build_env.py
@@ -1,92 +1,92 @@
-"""Build Environment used for isolation during sdist building
-"""
-
-import os
-from distutils.sysconfig import get_python_lib
-from sysconfig import get_paths
-
-from pip._internal.utils.temp_dir import TempDirectory
-
-
-class BuildEnvironment(object):
- """Creates and manages an isolated environment to install build deps
- """
-
- def __init__(self, no_clean):
- self._temp_dir = TempDirectory(kind="build-env")
- self._no_clean = no_clean
-
- @property
- def path(self):
- return self._temp_dir.path
-
- def __enter__(self):
- self._temp_dir.create()
-
- self.save_path = os.environ.get('PATH', None)
- self.save_pythonpath = os.environ.get('PYTHONPATH', None)
- self.save_nousersite = os.environ.get('PYTHONNOUSERSITE', None)
-
- install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix'
- install_dirs = get_paths(install_scheme, vars={
- 'base': self.path,
- 'platbase': self.path,
- })
-
- scripts = install_dirs['scripts']
- if self.save_path:
- os.environ['PATH'] = scripts + os.pathsep + self.save_path
- else:
- os.environ['PATH'] = scripts + os.pathsep + os.defpath
-
- # Note: prefer distutils' sysconfig to get the
- # library paths so PyPy is correctly supported.
- purelib = get_python_lib(plat_specific=0, prefix=self.path)
- platlib = get_python_lib(plat_specific=1, prefix=self.path)
- if purelib == platlib:
- lib_dirs = purelib
- else:
- lib_dirs = purelib + os.pathsep + platlib
- if self.save_pythonpath:
- os.environ['PYTHONPATH'] = lib_dirs + os.pathsep + \
- self.save_pythonpath
- else:
- os.environ['PYTHONPATH'] = lib_dirs
-
- os.environ['PYTHONNOUSERSITE'] = '1'
-
- return self.path
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- if not self._no_clean:
- self._temp_dir.cleanup()
-
- def restore_var(varname, old_value):
- if old_value is None:
- os.environ.pop(varname, None)
- else:
- os.environ[varname] = old_value
-
- restore_var('PATH', self.save_path)
- restore_var('PYTHONPATH', self.save_pythonpath)
- restore_var('PYTHONNOUSERSITE', self.save_nousersite)
-
- def cleanup(self):
- self._temp_dir.cleanup()
-
-
-class NoOpBuildEnvironment(BuildEnvironment):
- """A no-op drop-in replacement for BuildEnvironment
- """
-
- def __init__(self, no_clean):
- pass
-
- def __enter__(self):
- pass
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- pass
-
- def cleanup(self):
- pass
+"""Build Environment used for isolation during sdist building
+"""
+
+import os
+from distutils.sysconfig import get_python_lib
+from sysconfig import get_paths
+
+from pip._internal.utils.temp_dir import TempDirectory
+
+
+class BuildEnvironment(object):
+ """Creates and manages an isolated environment to install build deps
+ """
+
+ def __init__(self, no_clean):
+ self._temp_dir = TempDirectory(kind="build-env")
+ self._no_clean = no_clean
+
+ @property
+ def path(self):
+ return self._temp_dir.path
+
+ def __enter__(self):
+ self._temp_dir.create()
+
+ self.save_path = os.environ.get('PATH', None)
+ self.save_pythonpath = os.environ.get('PYTHONPATH', None)
+ self.save_nousersite = os.environ.get('PYTHONNOUSERSITE', None)
+
+ install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix'
+ install_dirs = get_paths(install_scheme, vars={
+ 'base': self.path,
+ 'platbase': self.path,
+ })
+
+ scripts = install_dirs['scripts']
+ if self.save_path:
+ os.environ['PATH'] = scripts + os.pathsep + self.save_path
+ else:
+ os.environ['PATH'] = scripts + os.pathsep + os.defpath
+
+ # Note: prefer distutils' sysconfig to get the
+ # library paths so PyPy is correctly supported.
+ purelib = get_python_lib(plat_specific=0, prefix=self.path)
+ platlib = get_python_lib(plat_specific=1, prefix=self.path)
+ if purelib == platlib:
+ lib_dirs = purelib
+ else:
+ lib_dirs = purelib + os.pathsep + platlib
+ if self.save_pythonpath:
+ os.environ['PYTHONPATH'] = lib_dirs + os.pathsep + \
+ self.save_pythonpath
+ else:
+ os.environ['PYTHONPATH'] = lib_dirs
+
+ os.environ['PYTHONNOUSERSITE'] = '1'
+
+ return self.path
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ if not self._no_clean:
+ self._temp_dir.cleanup()
+
+ def restore_var(varname, old_value):
+ if old_value is None:
+ os.environ.pop(varname, None)
+ else:
+ os.environ[varname] = old_value
+
+ restore_var('PATH', self.save_path)
+ restore_var('PYTHONPATH', self.save_pythonpath)
+ restore_var('PYTHONNOUSERSITE', self.save_nousersite)
+
+ def cleanup(self):
+ self._temp_dir.cleanup()
+
+
+class NoOpBuildEnvironment(BuildEnvironment):
+ """A no-op drop-in replacement for BuildEnvironment
+ """
+
+ def __init__(self, no_clean):
+ pass
+
+ def __enter__(self):
+ pass
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ pass
+
+ def cleanup(self):
+ pass
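A usage sketch for the context manager above; BuildEnvironment is pip-internal API, so this illustrates the protocol rather than a supported interface:

    from pip._internal.build_env import BuildEnvironment

    # __enter__ prepends the isolated prefix's scripts dir to PATH and its
    # purelib/platlib dirs to PYTHONPATH; __exit__ restores the saved
    # values and, unless no_clean=True, removes the temporary directory.
    with BuildEnvironment(no_clean=False) as prefix:
        print("building inside isolated prefix:", prefix)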
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/cache.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/cache.py
index 1aa17aa..5547d73 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/cache.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/cache.py
@@ -1,202 +1,202 @@
-"""Cache Management
-"""
-
-import errno
-import hashlib
-import logging
-import os
-
-from pip._vendor.packaging.utils import canonicalize_name
-
-from pip._internal import index
-from pip._internal.compat import expanduser
-from pip._internal.download import path_to_url
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.wheel import InvalidWheelFilename, Wheel
-
-logger = logging.getLogger(__name__)
-
-
-class Cache(object):
- """An abstract class - provides cache directories for data from links
-
- :param cache_dir: The root of the cache.
- :param format_control: A pip.index.FormatControl object to limit
- binaries being read from the cache.
- :param allowed_formats: which formats of files the cache should store.
- ('binary' and 'source' are the only allowed values)
- """
-
- def __init__(self, cache_dir, format_control, allowed_formats):
- super(Cache, self).__init__()
- self.cache_dir = expanduser(cache_dir) if cache_dir else None
- self.format_control = format_control
- self.allowed_formats = allowed_formats
-
- _valid_formats = {"source", "binary"}
- assert self.allowed_formats.union(_valid_formats) == _valid_formats
-
- def _get_cache_path_parts(self, link):
- """Get parts of part that must be os.path.joined with cache_dir
- """
-
-        # We want to generate a URL to use as our cache key; we don't want to
-        # just re-use the URL because it might have other items in the
-        # fragment that we don't care about.
- key_parts = [link.url_without_fragment]
- if link.hash_name is not None and link.hash is not None:
- key_parts.append("=".join([link.hash_name, link.hash]))
- key_url = "#".join(key_parts)
-
-        # Encode our key URL with sha224; we use it because it has similar
-        # security properties to sha256 but a shorter total output (and is
-        # thus less secure). However, the difference does not matter for
-        # our use case here.
- hashed = hashlib.sha224(key_url.encode()).hexdigest()
-
-        # We want to nest the directories to avoid having a ton of top-level
-        # directories, where we might run out of sub-directory entries on
-        # some filesystems.
- parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
-
- return parts
-
- def _get_candidates(self, link, package_name):
- can_not_cache = (
- not self.cache_dir or
- not package_name or
- not link
- )
- if can_not_cache:
- return []
-
- canonical_name = canonicalize_name(package_name)
- formats = index.fmt_ctl_formats(
- self.format_control, canonical_name
- )
- if not self.allowed_formats.intersection(formats):
- return []
-
- root = self.get_path_for_link(link)
- try:
- return os.listdir(root)
- except OSError as err:
- if err.errno in {errno.ENOENT, errno.ENOTDIR}:
- return []
- raise
-
- def get_path_for_link(self, link):
- """Return a directory to store cached items in for link.
- """
- raise NotImplementedError()
-
- def get(self, link, package_name):
- """Returns a link to a cached item if it exists, otherwise returns the
- passed link.
- """
- raise NotImplementedError()
-
- def _link_for_candidate(self, link, candidate):
- root = self.get_path_for_link(link)
- path = os.path.join(root, candidate)
-
- return index.Link(path_to_url(path))
-
- def cleanup(self):
- pass
-
-
-class SimpleWheelCache(Cache):
- """A cache of wheels for future installs.
- """
-
- def __init__(self, cache_dir, format_control):
- super(SimpleWheelCache, self).__init__(
- cache_dir, format_control, {"binary"}
- )
-
- def get_path_for_link(self, link):
- """Return a directory to store cached wheels for link
-
- Because there are M wheels for any one sdist, we provide a directory
- to cache them in, and then consult that directory when looking up
- cache hits.
-
- We only insert things into the cache if they have plausible version
- numbers, so that we don't contaminate the cache with things that were
- not unique. E.g. ./package might have dozens of installs done for it
- and build a version of 0.0...and if we built and cached a wheel, we'd
- end up using the same wheel even if the source has been edited.
-
- :param link: The link of the sdist for which this will cache wheels.
- """
- parts = self._get_cache_path_parts(link)
-
- # Store wheels within the root cache_dir
- return os.path.join(self.cache_dir, "wheels", *parts)
-
- def get(self, link, package_name):
- candidates = []
-
- for wheel_name in self._get_candidates(link, package_name):
- try:
- wheel = Wheel(wheel_name)
- except InvalidWheelFilename:
- continue
- if not wheel.supported():
- # Built for a different python/arch/etc
- continue
- candidates.append((wheel.support_index_min(), wheel_name))
-
- if not candidates:
- return link
-
- return self._link_for_candidate(link, min(candidates)[1])
-
-
-class EphemWheelCache(SimpleWheelCache):
- """A SimpleWheelCache that creates it's own temporary cache directory
- """
-
- def __init__(self, format_control):
- self._temp_dir = TempDirectory(kind="ephem-wheel-cache")
- self._temp_dir.create()
-
- super(EphemWheelCache, self).__init__(
- self._temp_dir.path, format_control
- )
-
- def cleanup(self):
- self._temp_dir.cleanup()
-
-
-class WheelCache(Cache):
- """Wraps EphemWheelCache and SimpleWheelCache into a single Cache
-
-    This Cache allows for graceful degradation, using the ephem wheel cache
- when a certain link is not found in the simple wheel cache first.
- """
-
- def __init__(self, cache_dir, format_control):
- super(WheelCache, self).__init__(
- cache_dir, format_control, {'binary'}
- )
- self._wheel_cache = SimpleWheelCache(cache_dir, format_control)
- self._ephem_cache = EphemWheelCache(format_control)
-
- def get_path_for_link(self, link):
- return self._wheel_cache.get_path_for_link(link)
-
- def get_ephem_path_for_link(self, link):
- return self._ephem_cache.get_path_for_link(link)
-
- def get(self, link, package_name):
- retval = self._wheel_cache.get(link, package_name)
- if retval is link:
- retval = self._ephem_cache.get(link, package_name)
- return retval
-
- def cleanup(self):
- self._wheel_cache.cleanup()
- self._ephem_cache.cleanup()
+"""Cache Management
+"""
+
+import errno
+import hashlib
+import logging
+import os
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal import index
+from pip._internal.compat import expanduser
+from pip._internal.download import path_to_url
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.wheel import InvalidWheelFilename, Wheel
+
+logger = logging.getLogger(__name__)
+
+
+class Cache(object):
+ """An abstract class - provides cache directories for data from links
+
+ :param cache_dir: The root of the cache.
+ :param format_control: A pip.index.FormatControl object to limit
+ binaries being read from the cache.
+ :param allowed_formats: which formats of files the cache should store.
+ ('binary' and 'source' are the only allowed values)
+ """
+
+ def __init__(self, cache_dir, format_control, allowed_formats):
+ super(Cache, self).__init__()
+ self.cache_dir = expanduser(cache_dir) if cache_dir else None
+ self.format_control = format_control
+ self.allowed_formats = allowed_formats
+
+ _valid_formats = {"source", "binary"}
+ assert self.allowed_formats.union(_valid_formats) == _valid_formats
+
+ def _get_cache_path_parts(self, link):
+ """Get parts of part that must be os.path.joined with cache_dir
+ """
+
+        # We want to generate a URL to use as our cache key; we don't want to
+        # just re-use the URL because it might have other items in the
+        # fragment that we don't care about.
+ key_parts = [link.url_without_fragment]
+ if link.hash_name is not None and link.hash is not None:
+ key_parts.append("=".join([link.hash_name, link.hash]))
+ key_url = "#".join(key_parts)
+
+        # Encode our key URL with sha224; we use it because it has similar
+        # security properties to sha256 but a shorter total output (and is
+        # thus less secure). However, the difference does not matter for
+        # our use case here.
+ hashed = hashlib.sha224(key_url.encode()).hexdigest()
+
+        # We want to nest the directories to avoid having a ton of top-level
+        # directories, where we might run out of sub-directory entries on
+        # some filesystems.
+ parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
+
+ return parts
+
+ def _get_candidates(self, link, package_name):
+ can_not_cache = (
+ not self.cache_dir or
+ not package_name or
+ not link
+ )
+ if can_not_cache:
+ return []
+
+ canonical_name = canonicalize_name(package_name)
+ formats = index.fmt_ctl_formats(
+ self.format_control, canonical_name
+ )
+ if not self.allowed_formats.intersection(formats):
+ return []
+
+ root = self.get_path_for_link(link)
+ try:
+ return os.listdir(root)
+ except OSError as err:
+ if err.errno in {errno.ENOENT, errno.ENOTDIR}:
+ return []
+ raise
+
+ def get_path_for_link(self, link):
+ """Return a directory to store cached items in for link.
+ """
+ raise NotImplementedError()
+
+ def get(self, link, package_name):
+ """Returns a link to a cached item if it exists, otherwise returns the
+ passed link.
+ """
+ raise NotImplementedError()
+
+ def _link_for_candidate(self, link, candidate):
+ root = self.get_path_for_link(link)
+ path = os.path.join(root, candidate)
+
+ return index.Link(path_to_url(path))
+
+ def cleanup(self):
+ pass
+
+
+class SimpleWheelCache(Cache):
+ """A cache of wheels for future installs.
+ """
+
+ def __init__(self, cache_dir, format_control):
+ super(SimpleWheelCache, self).__init__(
+ cache_dir, format_control, {"binary"}
+ )
+
+ def get_path_for_link(self, link):
+ """Return a directory to store cached wheels for link
+
+ Because there are M wheels for any one sdist, we provide a directory
+ to cache them in, and then consult that directory when looking up
+ cache hits.
+
+ We only insert things into the cache if they have plausible version
+ numbers, so that we don't contaminate the cache with things that were
+ not unique. E.g. ./package might have dozens of installs done for it
+ and build a version of 0.0...and if we built and cached a wheel, we'd
+ end up using the same wheel even if the source has been edited.
+
+ :param link: The link of the sdist for which this will cache wheels.
+ """
+ parts = self._get_cache_path_parts(link)
+
+ # Store wheels within the root cache_dir
+ return os.path.join(self.cache_dir, "wheels", *parts)
+
+ def get(self, link, package_name):
+ candidates = []
+
+ for wheel_name in self._get_candidates(link, package_name):
+ try:
+ wheel = Wheel(wheel_name)
+ except InvalidWheelFilename:
+ continue
+ if not wheel.supported():
+ # Built for a different python/arch/etc
+ continue
+ candidates.append((wheel.support_index_min(), wheel_name))
+
+ if not candidates:
+ return link
+
+ return self._link_for_candidate(link, min(candidates)[1])
+
+
+class EphemWheelCache(SimpleWheelCache):
+ """A SimpleWheelCache that creates it's own temporary cache directory
+ """
+
+ def __init__(self, format_control):
+ self._temp_dir = TempDirectory(kind="ephem-wheel-cache")
+ self._temp_dir.create()
+
+ super(EphemWheelCache, self).__init__(
+ self._temp_dir.path, format_control
+ )
+
+ def cleanup(self):
+ self._temp_dir.cleanup()
+
+
+class WheelCache(Cache):
+ """Wraps EphemWheelCache and SimpleWheelCache into a single Cache
+
+    This Cache allows for graceful degradation, using the ephem wheel cache
+ when a certain link is not found in the simple wheel cache first.
+ """
+
+ def __init__(self, cache_dir, format_control):
+ super(WheelCache, self).__init__(
+ cache_dir, format_control, {'binary'}
+ )
+ self._wheel_cache = SimpleWheelCache(cache_dir, format_control)
+ self._ephem_cache = EphemWheelCache(format_control)
+
+ def get_path_for_link(self, link):
+ return self._wheel_cache.get_path_for_link(link)
+
+ def get_ephem_path_for_link(self, link):
+ return self._ephem_cache.get_path_for_link(link)
+
+ def get(self, link, package_name):
+ retval = self._wheel_cache.get(link, package_name)
+ if retval is link:
+ retval = self._ephem_cache.get(link, package_name)
+ return retval
+
+ def cleanup(self):
+ self._wheel_cache.cleanup()
+ self._ephem_cache.cleanup()
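The key-derivation scheme in _get_cache_path_parts can be reproduced standalone; the URL and cache root below are assumed placeholders:

    import hashlib
    import os

    key_url = "https://files.example.invalid/pkg-1.0.tar.gz"  # assumed URL
    hashed = hashlib.sha224(key_url.encode()).hexdigest()

    # Split the digest into nested parts so that no single directory has
    # to hold an entry for every cached wheel.
    parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
    print(os.path.join("~/.cache/pip", "wheels", *parts))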
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/cmdoptions.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/cmdoptions.py
index 6319995..58854e3 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/cmdoptions.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/cmdoptions.py
@@ -1,609 +1,609 @@
-"""
-shared options and groups
-
-The principle here is to define options once, but *not* instantiate them
-globally. One reason being that options with action='append' can carry state
-between parses. pip parses general options twice internally, and shouldn't
-pass on state. To be consistent, all options will follow this design.
-
-"""
-from __future__ import absolute_import
-
-import warnings
-from functools import partial
-from optparse import SUPPRESS_HELP, Option, OptionGroup
-
-from pip._internal.index import (
- FormatControl, fmt_ctl_handle_mutual_exclude, fmt_ctl_no_binary,
-)
-from pip._internal.locations import USER_CACHE_DIR, src_prefix
-from pip._internal.models import PyPI
-from pip._internal.utils.hashes import STRONG_HASHES
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.utils.ui import BAR_TYPES
-
-if MYPY_CHECK_RUNNING:
- from typing import Any
-
-
-def make_option_group(group, parser):
- """
- Return an OptionGroup object
- group -- assumed to be dict with 'name' and 'options' keys
- parser -- an optparse Parser
- """
- option_group = OptionGroup(parser, group['name'])
- for option in group['options']:
- option_group.add_option(option())
- return option_group
-
-
-def check_install_build_global(options, check_options=None):
- """Disable wheels if per-setup.py call options are set.
-
- :param options: The OptionParser options to update.
-    :param check_options: The options to check; if not supplied, defaults
-        to options.
- """
- if check_options is None:
- check_options = options
-
- def getname(n):
- return getattr(check_options, n, None)
- names = ["build_options", "global_options", "install_options"]
- if any(map(getname, names)):
- control = options.format_control
- fmt_ctl_no_binary(control)
- warnings.warn(
- 'Disabling all use of wheels due to the use of --build-options '
- '/ --global-options / --install-options.', stacklevel=2,
- )
-
-
-###########
-# options #
-###########
-
-help_ = partial(
- Option,
- '-h', '--help',
- dest='help',
- action='help',
- help='Show help.',
-) # type: Any
-
-isolated_mode = partial(
- Option,
- "--isolated",
- dest="isolated_mode",
- action="store_true",
- default=False,
- help=(
- "Run pip in an isolated mode, ignoring environment variables and user "
- "configuration."
- ),
-)
-
-require_virtualenv = partial(
- Option,
- # Run only if inside a virtualenv, bail if not.
- '--require-virtualenv', '--require-venv',
- dest='require_venv',
- action='store_true',
- default=False,
- help=SUPPRESS_HELP
-) # type: Any
-
-verbose = partial(
- Option,
- '-v', '--verbose',
- dest='verbose',
- action='count',
- default=0,
- help='Give more output. Option is additive, and can be used up to 3 times.'
-)
-
-no_color = partial(
- Option,
- '--no-color',
- dest='no_color',
- action='store_true',
- default=False,
- help="Suppress colored output",
-)
-
-version = partial(
- Option,
- '-V', '--version',
- dest='version',
- action='store_true',
- help='Show version and exit.',
-) # type: Any
-
-quiet = partial(
- Option,
- '-q', '--quiet',
- dest='quiet',
- action='count',
- default=0,
- help=(
- 'Give less output. Option is additive, and can be used up to 3'
- ' times (corresponding to WARNING, ERROR, and CRITICAL logging'
- ' levels).'
- ),
-) # type: Any
-
-progress_bar = partial(
- Option,
- '--progress-bar',
- dest='progress_bar',
- type='choice',
- choices=list(BAR_TYPES.keys()),
- default='on',
- help=(
- 'Specify type of progress to be displayed [' +
- '|'.join(BAR_TYPES.keys()) + '] (default: %default)'
- ),
-) # type: Any
-
-log = partial(
- Option,
- "--log", "--log-file", "--local-log",
- dest="log",
- metavar="path",
- help="Path to a verbose appending log."
-) # type: Any
-
-no_input = partial(
- Option,
- # Don't ask for input
- '--no-input',
- dest='no_input',
- action='store_true',
- default=False,
- help=SUPPRESS_HELP
-) # type: Any
-
-proxy = partial(
- Option,
- '--proxy',
- dest='proxy',
- type='str',
- default='',
- help="Specify a proxy in the form [user:passwd@]proxy.server:port."
-) # type: Any
-
-retries = partial(
- Option,
- '--retries',
- dest='retries',
- type='int',
- default=5,
- help="Maximum number of retries each connection should attempt "
- "(default %default times).",
-) # type: Any
-
-timeout = partial(
- Option,
- '--timeout', '--default-timeout',
- metavar='sec',
- dest='timeout',
- type='float',
- default=15,
- help='Set the socket timeout (default %default seconds).',
-) # type: Any
-
-skip_requirements_regex = partial(
- Option,
- # A regex to be used to skip requirements
- '--skip-requirements-regex',
- dest='skip_requirements_regex',
- type='str',
- default='',
- help=SUPPRESS_HELP,
-) # type: Any
-
-
-def exists_action():
- return Option(
- # Option when path already exist
- '--exists-action',
- dest='exists_action',
- type='choice',
- choices=['s', 'i', 'w', 'b', 'a'],
- default=[],
- action='append',
- metavar='action',
- help="Default action when a path already exists: "
- "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort).",
- )
-
-
-cert = partial(
- Option,
- '--cert',
- dest='cert',
- type='str',
- metavar='path',
- help="Path to alternate CA bundle.",
-) # type: Any
-
-client_cert = partial(
- Option,
- '--client-cert',
- dest='client_cert',
- type='str',
- default=None,
- metavar='path',
- help="Path to SSL client certificate, a single file containing the "
- "private key and the certificate in PEM format.",
-) # type: Any
-
-index_url = partial(
- Option,
- '-i', '--index-url', '--pypi-url',
- dest='index_url',
- metavar='URL',
- default=PyPI.simple_url,
- help="Base URL of Python Package Index (default %default). "
- "This should point to a repository compliant with PEP 503 "
- "(the simple repository API) or a local directory laid out "
- "in the same format.",
-) # type: Any
-
-
-def extra_index_url():
- return Option(
- '--extra-index-url',
- dest='extra_index_urls',
- metavar='URL',
- action='append',
- default=[],
- help="Extra URLs of package indexes to use in addition to "
- "--index-url. Should follow the same rules as "
- "--index-url.",
- )
-
-
-no_index = partial(
- Option,
- '--no-index',
- dest='no_index',
- action='store_true',
- default=False,
- help='Ignore package index (only looking at --find-links URLs instead).',
-) # type: Any
-
-
-def find_links():
- return Option(
- '-f', '--find-links',
- dest='find_links',
- action='append',
- default=[],
- metavar='url',
- help="If a url or path to an html file, then parse for links to "
- "archives. If a local path or file:// url that's a directory, "
- "then look for archives in the directory listing.",
- )
-
-
-def trusted_host():
- return Option(
- "--trusted-host",
- dest="trusted_hosts",
- action="append",
- metavar="HOSTNAME",
- default=[],
- help="Mark this host as trusted, even though it does not have valid "
- "or any HTTPS.",
- )
-
-
-# Remove after 1.5
-process_dependency_links = partial(
- Option,
- "--process-dependency-links",
- dest="process_dependency_links",
- action="store_true",
- default=False,
- help="Enable the processing of dependency links.",
-) # type: Any
-
-
-def constraints():
- return Option(
- '-c', '--constraint',
- dest='constraints',
- action='append',
- default=[],
- metavar='file',
- help='Constrain versions using the given constraints file. '
- 'This option can be used multiple times.'
- )
-
-
-def requirements():
- return Option(
- '-r', '--requirement',
- dest='requirements',
- action='append',
- default=[],
- metavar='file',
- help='Install from the given requirements file. '
- 'This option can be used multiple times.'
- )
-
-
-def editable():
- return Option(
- '-e', '--editable',
- dest='editables',
- action='append',
- default=[],
- metavar='path/url',
- help=('Install a project in editable mode (i.e. setuptools '
- '"develop mode") from a local project path or a VCS url.'),
- )
-
-
-src = partial(
- Option,
- '--src', '--source', '--source-dir', '--source-directory',
- dest='src_dir',
- metavar='dir',
- default=src_prefix,
- help='Directory to check out editable projects into. '
-         'The default in a virtualenv is "<venv path>/src". '
-         'The default for global installs is "<current dir>/src".'
-) # type: Any
-
-
-def _get_format_control(values, option):
- """Get a format_control object."""
- return getattr(values, option.dest)
-
-
-def _handle_no_binary(option, opt_str, value, parser):
- existing = getattr(parser.values, option.dest)
- fmt_ctl_handle_mutual_exclude(
- value, existing.no_binary, existing.only_binary,
- )
-
-
-def _handle_only_binary(option, opt_str, value, parser):
- existing = getattr(parser.values, option.dest)
- fmt_ctl_handle_mutual_exclude(
- value, existing.only_binary, existing.no_binary,
- )
-
-
-def no_binary():
- return Option(
- "--no-binary", dest="format_control", action="callback",
- callback=_handle_no_binary, type="str",
- default=FormatControl(set(), set()),
- help="Do not use binary packages. Can be supplied multiple times, and "
- "each time adds to the existing value. Accepts either :all: to "
- "disable all binary packages, :none: to empty the set, or one or "
- "more package names with commas between them. Note that some "
- "packages are tricky to compile and may fail to install when "
- "this option is used on them.",
- )
-
-
-def only_binary():
- return Option(
- "--only-binary", dest="format_control", action="callback",
- callback=_handle_only_binary, type="str",
- default=FormatControl(set(), set()),
- help="Do not use source packages. Can be supplied multiple times, and "
- "each time adds to the existing value. Accepts either :all: to "
- "disable all source packages, :none: to empty the set, or one or "
- "more package names with commas between them. Packages without "
- "binary distributions will fail to install when this option is "
- "used on them.",
- )
-
-
-cache_dir = partial(
- Option,
- "--cache-dir",
- dest="cache_dir",
- default=USER_CACHE_DIR,
- metavar="dir",
- help="Store the cache data in ."
-)
-
-no_cache = partial(
- Option,
- "--no-cache-dir",
- dest="cache_dir",
- action="store_false",
- help="Disable the cache.",
-)
-
-no_deps = partial(
- Option,
- '--no-deps', '--no-dependencies',
- dest='ignore_dependencies',
- action='store_true',
- default=False,
- help="Don't install package dependencies.",
-) # type: Any
-
-build_dir = partial(
- Option,
- '-b', '--build', '--build-dir', '--build-directory',
- dest='build_dir',
- metavar='dir',
- help='Directory to unpack packages into and build in. Note that '
- 'an initial build still takes place in a temporary directory. '
- 'The location of temporary directories can be controlled by setting '
- 'the TMPDIR environment variable (TEMP on Windows) appropriately. '
- 'When passed, build directories are not cleaned in case of failures.'
-) # type: Any
-
-ignore_requires_python = partial(
- Option,
- '--ignore-requires-python',
- dest='ignore_requires_python',
- action='store_true',
- help='Ignore the Requires-Python information.'
-) # type: Any
-
-no_build_isolation = partial(
- Option,
- '--no-build-isolation',
- dest='build_isolation',
- action='store_false',
- default=True,
- help='Disable isolation when building a modern source distribution. '
- 'Build dependencies specified by PEP 518 must be already installed '
- 'if this option is used.'
-) # type: Any
-
-install_options = partial(
- Option,
- '--install-option',
- dest='install_options',
- action='append',
- metavar='options',
- help="Extra arguments to be supplied to the setup.py install "
- "command (use like --install-option=\"--install-scripts=/usr/local/"
- "bin\"). Use multiple --install-option options to pass multiple "
- "options to setup.py install. If you are using an option with a "
- "directory path, be sure to use absolute path.",
-) # type: Any
-
-global_options = partial(
- Option,
- '--global-option',
- dest='global_options',
- action='append',
- metavar='options',
- help="Extra global options to be supplied to the setup.py "
- "call before the install command.",
-) # type: Any
-
-no_clean = partial(
- Option,
- '--no-clean',
- action='store_true',
- default=False,
- help="Don't clean up build directories)."
-) # type: Any
-
-pre = partial(
- Option,
- '--pre',
- action='store_true',
- default=False,
- help="Include pre-release and development versions. By default, "
- "pip only finds stable versions.",
-) # type: Any
-
-disable_pip_version_check = partial(
- Option,
- "--disable-pip-version-check",
- dest="disable_pip_version_check",
- action="store_true",
- default=False,
- help="Don't periodically check PyPI to determine whether a new version "
- "of pip is available for download. Implied with --no-index.",
-) # type: Any
-
-
-# Deprecated, Remove later
-always_unzip = partial(
- Option,
- '-Z', '--always-unzip',
- dest='always_unzip',
- action='store_true',
- help=SUPPRESS_HELP,
-) # type: Any
-
-
-def _merge_hash(option, opt_str, value, parser):
- """Given a value spelled "algo:digest", append the digest to a list
- pointed to in a dict by the algo name."""
- if not parser.values.hashes:
- parser.values.hashes = {}
- try:
- algo, digest = value.split(':', 1)
- except ValueError:
- parser.error('Arguments to %s must be a hash name '
- 'followed by a value, like --hash=sha256:abcde...' %
- opt_str)
- if algo not in STRONG_HASHES:
- parser.error('Allowed hash algorithms for %s are %s.' %
- (opt_str, ', '.join(STRONG_HASHES)))
- parser.values.hashes.setdefault(algo, []).append(digest)
-
-
-hash = partial(
- Option,
- '--hash',
- # Hash values eventually end up in InstallRequirement.hashes due to
- # __dict__ copying in process_line().
- dest='hashes',
- action='callback',
- callback=_merge_hash,
- type='string',
- help="Verify that the package's archive matches this "
- 'hash before installing. Example: --hash=sha256:abcdef...',
-) # type: Any
-
-
-require_hashes = partial(
- Option,
- '--require-hashes',
- dest='require_hashes',
- action='store_true',
- default=False,
- help='Require a hash to check each requirement against, for '
- 'repeatable installs. This option is implied when any package in a '
- 'requirements file has a --hash option.',
-) # type: Any
-
-
-##########
-# groups #
-##########
-
-general_group = {
- 'name': 'General Options',
- 'options': [
- help_,
- isolated_mode,
- require_virtualenv,
- verbose,
- version,
- quiet,
- log,
- no_input,
- proxy,
- retries,
- timeout,
- skip_requirements_regex,
- exists_action,
- trusted_host,
- cert,
- client_cert,
- cache_dir,
- no_cache,
- disable_pip_version_check,
- no_color,
- ]
-}
-
-index_group = {
- 'name': 'Package Index Options',
- 'options': [
- index_url,
- extra_index_url,
- no_index,
- find_links,
- process_dependency_links,
- ]
-}
+"""
+shared options and groups
+
+The principle here is to define options once, but *not* instantiate them
+globally. One reason being that options with action='append' can carry state
+between parses. pip parses general options twice internally, and shouldn't
+pass on state. To be consistent, all options will follow this design.
+
+"""
+from __future__ import absolute_import
+
+import warnings
+from functools import partial
+from optparse import SUPPRESS_HELP, Option, OptionGroup
+
+from pip._internal.index import (
+ FormatControl, fmt_ctl_handle_mutual_exclude, fmt_ctl_no_binary,
+)
+from pip._internal.locations import USER_CACHE_DIR, src_prefix
+from pip._internal.models import PyPI
+from pip._internal.utils.hashes import STRONG_HASHES
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.ui import BAR_TYPES
+
+if MYPY_CHECK_RUNNING:
+ from typing import Any
+
+
+def make_option_group(group, parser):
+ """
+ Return an OptionGroup object
+ group -- assumed to be dict with 'name' and 'options' keys
+ parser -- an optparse Parser
+ """
+ option_group = OptionGroup(parser, group['name'])
+ for option in group['options']:
+ option_group.add_option(option())
+ return option_group
+
+
+def check_install_build_global(options, check_options=None):
+ """Disable wheels if per-setup.py call options are set.
+
+ :param options: The OptionParser options to update.
+    :param check_options: The options to check; if not supplied, defaults
+        to options.
+ """
+ if check_options is None:
+ check_options = options
+
+ def getname(n):
+ return getattr(check_options, n, None)
+ names = ["build_options", "global_options", "install_options"]
+ if any(map(getname, names)):
+ control = options.format_control
+ fmt_ctl_no_binary(control)
+ warnings.warn(
+ 'Disabling all use of wheels due to the use of --build-options '
+ '/ --global-options / --install-options.', stacklevel=2,
+ )
+
+
+###########
+# options #
+###########
+
+help_ = partial(
+ Option,
+ '-h', '--help',
+ dest='help',
+ action='help',
+ help='Show help.',
+) # type: Any
+
+isolated_mode = partial(
+ Option,
+ "--isolated",
+ dest="isolated_mode",
+ action="store_true",
+ default=False,
+ help=(
+ "Run pip in an isolated mode, ignoring environment variables and user "
+ "configuration."
+ ),
+)
+
+require_virtualenv = partial(
+ Option,
+ # Run only if inside a virtualenv, bail if not.
+ '--require-virtualenv', '--require-venv',
+ dest='require_venv',
+ action='store_true',
+ default=False,
+ help=SUPPRESS_HELP
+) # type: Any
+
+verbose = partial(
+ Option,
+ '-v', '--verbose',
+ dest='verbose',
+ action='count',
+ default=0,
+ help='Give more output. Option is additive, and can be used up to 3 times.'
+)
+
+no_color = partial(
+ Option,
+ '--no-color',
+ dest='no_color',
+ action='store_true',
+ default=False,
+ help="Suppress colored output",
+)
+
+version = partial(
+ Option,
+ '-V', '--version',
+ dest='version',
+ action='store_true',
+ help='Show version and exit.',
+) # type: Any
+
+quiet = partial(
+ Option,
+ '-q', '--quiet',
+ dest='quiet',
+ action='count',
+ default=0,
+ help=(
+ 'Give less output. Option is additive, and can be used up to 3'
+ ' times (corresponding to WARNING, ERROR, and CRITICAL logging'
+ ' levels).'
+ ),
+) # type: Any
+
+progress_bar = partial(
+ Option,
+ '--progress-bar',
+ dest='progress_bar',
+ type='choice',
+ choices=list(BAR_TYPES.keys()),
+ default='on',
+ help=(
+ 'Specify type of progress to be displayed [' +
+ '|'.join(BAR_TYPES.keys()) + '] (default: %default)'
+ ),
+) # type: Any
+
+log = partial(
+ Option,
+ "--log", "--log-file", "--local-log",
+ dest="log",
+ metavar="path",
+ help="Path to a verbose appending log."
+) # type: Any
+
+no_input = partial(
+ Option,
+ # Don't ask for input
+ '--no-input',
+ dest='no_input',
+ action='store_true',
+ default=False,
+ help=SUPPRESS_HELP
+) # type: Any
+
+proxy = partial(
+ Option,
+ '--proxy',
+ dest='proxy',
+ type='str',
+ default='',
+ help="Specify a proxy in the form [user:passwd@]proxy.server:port."
+) # type: Any
+
+retries = partial(
+ Option,
+ '--retries',
+ dest='retries',
+ type='int',
+ default=5,
+ help="Maximum number of retries each connection should attempt "
+ "(default %default times).",
+) # type: Any
+
+timeout = partial(
+ Option,
+ '--timeout', '--default-timeout',
+ metavar='sec',
+ dest='timeout',
+ type='float',
+ default=15,
+ help='Set the socket timeout (default %default seconds).',
+) # type: Any
+
+skip_requirements_regex = partial(
+ Option,
+ # A regex to be used to skip requirements
+ '--skip-requirements-regex',
+ dest='skip_requirements_regex',
+ type='str',
+ default='',
+ help=SUPPRESS_HELP,
+) # type: Any
+
+
+def exists_action():
+ return Option(
+ # Option when path already exist
+ '--exists-action',
+ dest='exists_action',
+ type='choice',
+ choices=['s', 'i', 'w', 'b', 'a'],
+ default=[],
+ action='append',
+ metavar='action',
+ help="Default action when a path already exists: "
+ "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort).",
+ )
+
+
+cert = partial(
+ Option,
+ '--cert',
+ dest='cert',
+ type='str',
+ metavar='path',
+ help="Path to alternate CA bundle.",
+) # type: Any
+
+client_cert = partial(
+ Option,
+ '--client-cert',
+ dest='client_cert',
+ type='str',
+ default=None,
+ metavar='path',
+ help="Path to SSL client certificate, a single file containing the "
+ "private key and the certificate in PEM format.",
+) # type: Any
+
+index_url = partial(
+ Option,
+ '-i', '--index-url', '--pypi-url',
+ dest='index_url',
+ metavar='URL',
+ default=PyPI.simple_url,
+ help="Base URL of Python Package Index (default %default). "
+ "This should point to a repository compliant with PEP 503 "
+ "(the simple repository API) or a local directory laid out "
+ "in the same format.",
+) # type: Any
+
+
+def extra_index_url():
+ return Option(
+ '--extra-index-url',
+ dest='extra_index_urls',
+ metavar='URL',
+ action='append',
+ default=[],
+ help="Extra URLs of package indexes to use in addition to "
+ "--index-url. Should follow the same rules as "
+ "--index-url.",
+ )
+
+
+no_index = partial(
+ Option,
+ '--no-index',
+ dest='no_index',
+ action='store_true',
+ default=False,
+ help='Ignore package index (only looking at --find-links URLs instead).',
+) # type: Any
+
+
+def find_links():
+ return Option(
+ '-f', '--find-links',
+ dest='find_links',
+ action='append',
+ default=[],
+ metavar='url',
+ help="If a url or path to an html file, then parse for links to "
+ "archives. If a local path or file:// url that's a directory, "
+ "then look for archives in the directory listing.",
+ )
+
+
+def trusted_host():
+ return Option(
+ "--trusted-host",
+ dest="trusted_hosts",
+ action="append",
+ metavar="HOSTNAME",
+ default=[],
+ help="Mark this host as trusted, even though it does not have valid "
+ "or any HTTPS.",
+ )
+
+
+# Remove after 1.5
+process_dependency_links = partial(
+ Option,
+ "--process-dependency-links",
+ dest="process_dependency_links",
+ action="store_true",
+ default=False,
+ help="Enable the processing of dependency links.",
+) # type: Any
+
+
+def constraints():
+ return Option(
+ '-c', '--constraint',
+ dest='constraints',
+ action='append',
+ default=[],
+ metavar='file',
+ help='Constrain versions using the given constraints file. '
+ 'This option can be used multiple times.'
+ )
+
+
+def requirements():
+ return Option(
+ '-r', '--requirement',
+ dest='requirements',
+ action='append',
+ default=[],
+ metavar='file',
+ help='Install from the given requirements file. '
+ 'This option can be used multiple times.'
+ )
+
+
+def editable():
+ return Option(
+ '-e', '--editable',
+ dest='editables',
+ action='append',
+ default=[],
+ metavar='path/url',
+ help=('Install a project in editable mode (i.e. setuptools '
+ '"develop mode") from a local project path or a VCS url.'),
+ )
+
+
+src = partial(
+ Option,
+ '--src', '--source', '--source-dir', '--source-directory',
+ dest='src_dir',
+ metavar='dir',
+ default=src_prefix,
+ help='Directory to check out editable projects into. '
+         'The default in a virtualenv is "<venv path>/src". '
+         'The default for global installs is "<current dir>/src".'
+) # type: Any
+
+
+def _get_format_control(values, option):
+ """Get a format_control object."""
+ return getattr(values, option.dest)
+
+
+def _handle_no_binary(option, opt_str, value, parser):
+ existing = getattr(parser.values, option.dest)
+ fmt_ctl_handle_mutual_exclude(
+ value, existing.no_binary, existing.only_binary,
+ )
+
+
+def _handle_only_binary(option, opt_str, value, parser):
+ existing = getattr(parser.values, option.dest)
+ fmt_ctl_handle_mutual_exclude(
+ value, existing.only_binary, existing.no_binary,
+ )
+
+
+def no_binary():
+ return Option(
+ "--no-binary", dest="format_control", action="callback",
+ callback=_handle_no_binary, type="str",
+ default=FormatControl(set(), set()),
+ help="Do not use binary packages. Can be supplied multiple times, and "
+ "each time adds to the existing value. Accepts either :all: to "
+ "disable all binary packages, :none: to empty the set, or one or "
+ "more package names with commas between them. Note that some "
+ "packages are tricky to compile and may fail to install when "
+ "this option is used on them.",
+ )
+
+
+def only_binary():
+ return Option(
+ "--only-binary", dest="format_control", action="callback",
+ callback=_handle_only_binary, type="str",
+ default=FormatControl(set(), set()),
+ help="Do not use source packages. Can be supplied multiple times, and "
+ "each time adds to the existing value. Accepts either :all: to "
+ "disable all source packages, :none: to empty the set, or one or "
+ "more package names with commas between them. Packages without "
+ "binary distributions will fail to install when this option is "
+ "used on them.",
+ )
+
+
+cache_dir = partial(
+ Option,
+ "--cache-dir",
+ dest="cache_dir",
+ default=USER_CACHE_DIR,
+ metavar="dir",
+ help="Store the cache data in ."
+)
+
+no_cache = partial(
+ Option,
+ "--no-cache-dir",
+ dest="cache_dir",
+ action="store_false",
+ help="Disable the cache.",
+)
+
+no_deps = partial(
+ Option,
+ '--no-deps', '--no-dependencies',
+ dest='ignore_dependencies',
+ action='store_true',
+ default=False,
+ help="Don't install package dependencies.",
+) # type: Any
+
+build_dir = partial(
+ Option,
+ '-b', '--build', '--build-dir', '--build-directory',
+ dest='build_dir',
+ metavar='dir',
+ help='Directory to unpack packages into and build in. Note that '
+ 'an initial build still takes place in a temporary directory. '
+ 'The location of temporary directories can be controlled by setting '
+ 'the TMPDIR environment variable (TEMP on Windows) appropriately. '
+ 'When passed, build directories are not cleaned in case of failures.'
+) # type: Any
+
+ignore_requires_python = partial(
+ Option,
+ '--ignore-requires-python',
+ dest='ignore_requires_python',
+ action='store_true',
+ help='Ignore the Requires-Python information.'
+) # type: Any
+
+no_build_isolation = partial(
+ Option,
+ '--no-build-isolation',
+ dest='build_isolation',
+ action='store_false',
+ default=True,
+ help='Disable isolation when building a modern source distribution. '
+ 'Build dependencies specified by PEP 518 must be already installed '
+ 'if this option is used.'
+) # type: Any
+
+install_options = partial(
+ Option,
+ '--install-option',
+ dest='install_options',
+ action='append',
+ metavar='options',
+ help="Extra arguments to be supplied to the setup.py install "
+ "command (use like --install-option=\"--install-scripts=/usr/local/"
+ "bin\"). Use multiple --install-option options to pass multiple "
+ "options to setup.py install. If you are using an option with a "
+ "directory path, be sure to use absolute path.",
+) # type: Any
+
+global_options = partial(
+ Option,
+ '--global-option',
+ dest='global_options',
+ action='append',
+ metavar='options',
+ help="Extra global options to be supplied to the setup.py "
+ "call before the install command.",
+) # type: Any
+
+no_clean = partial(
+ Option,
+ '--no-clean',
+ action='store_true',
+ default=False,
+ help="Don't clean up build directories)."
+) # type: Any
+
+pre = partial(
+ Option,
+ '--pre',
+ action='store_true',
+ default=False,
+ help="Include pre-release and development versions. By default, "
+ "pip only finds stable versions.",
+) # type: Any
+
+disable_pip_version_check = partial(
+ Option,
+ "--disable-pip-version-check",
+ dest="disable_pip_version_check",
+ action="store_true",
+ default=False,
+ help="Don't periodically check PyPI to determine whether a new version "
+ "of pip is available for download. Implied with --no-index.",
+) # type: Any
+
+
+# Deprecated, Remove later
+always_unzip = partial(
+ Option,
+ '-Z', '--always-unzip',
+ dest='always_unzip',
+ action='store_true',
+ help=SUPPRESS_HELP,
+) # type: Any
+
+
+def _merge_hash(option, opt_str, value, parser):
+ """Given a value spelled "algo:digest", append the digest to a list
+ pointed to in a dict by the algo name."""
+ if not parser.values.hashes:
+ parser.values.hashes = {}
+ try:
+ algo, digest = value.split(':', 1)
+ except ValueError:
+ parser.error('Arguments to %s must be a hash name '
+ 'followed by a value, like --hash=sha256:abcde...' %
+ opt_str)
+ if algo not in STRONG_HASHES:
+ parser.error('Allowed hash algorithms for %s are %s.' %
+ (opt_str, ', '.join(STRONG_HASHES)))
+ parser.values.hashes.setdefault(algo, []).append(digest)
+
+
+hash = partial(
+ Option,
+ '--hash',
+ # Hash values eventually end up in InstallRequirement.hashes due to
+ # __dict__ copying in process_line().
+ dest='hashes',
+ action='callback',
+ callback=_merge_hash,
+ type='string',
+ help="Verify that the package's archive matches this "
+ 'hash before installing. Example: --hash=sha256:abcdef...',
+) # type: Any
+
+
+require_hashes = partial(
+ Option,
+ '--require-hashes',
+ dest='require_hashes',
+ action='store_true',
+ default=False,
+ help='Require a hash to check each requirement against, for '
+ 'repeatable installs. This option is implied when any package in a '
+ 'requirements file has a --hash option.',
+) # type: Any
+
+
+##########
+# groups #
+##########
+
+general_group = {
+ 'name': 'General Options',
+ 'options': [
+ help_,
+ isolated_mode,
+ require_virtualenv,
+ verbose,
+ version,
+ quiet,
+ log,
+ no_input,
+ proxy,
+ retries,
+ timeout,
+ skip_requirements_regex,
+ exists_action,
+ trusted_host,
+ cert,
+ client_cert,
+ cache_dir,
+ no_cache,
+ disable_pip_version_check,
+ no_color,
+ ]
+}
+
+index_group = {
+ 'name': 'Package Index Options',
+ 'options': [
+ index_url,
+ extra_index_url,
+ no_index,
+ find_links,
+ process_dependency_links,
+ ]
+}
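To see why the group dicts above hold callables (partials and factory functions) rather than Option instances, here is a self-contained sketch of the pattern make_option_group consumes, using plain optparse and a trimmed stand-in option:

    import optparse
    from functools import partial

    verbose = partial(
        optparse.Option,
        '-v', '--verbose',
        dest='verbose', action='count', default=0,
        help='Give more output.',
    )

    group = {'name': 'General Options', 'options': [verbose]}

    parser = optparse.OptionParser()
    option_group = optparse.OptionGroup(parser, group['name'])
    for option in group['options']:
        option_group.add_option(option())  # a fresh Option per parse
    parser.add_option_group(option_group)

    opts, args = parser.parse_args(['-vv'])
    print(opts.verbose)  # 2, with no state carried into a later parse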
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/__init__.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/__init__.py
index d79c48e..d44e6f1 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/__init__.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/__init__.py
@@ -1,79 +1,79 @@
-"""
-Package containing all pip commands
-"""
-from __future__ import absolute_import
-
-from pip._internal.commands.completion import CompletionCommand
-from pip._internal.commands.configuration import ConfigurationCommand
-from pip._internal.commands.download import DownloadCommand
-from pip._internal.commands.freeze import FreezeCommand
-from pip._internal.commands.hash import HashCommand
-from pip._internal.commands.help import HelpCommand
-from pip._internal.commands.list import ListCommand
-from pip._internal.commands.check import CheckCommand
-from pip._internal.commands.search import SearchCommand
-from pip._internal.commands.show import ShowCommand
-from pip._internal.commands.install import InstallCommand
-from pip._internal.commands.uninstall import UninstallCommand
-from pip._internal.commands.wheel import WheelCommand
-
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import List, Type
- from pip._internal.basecommand import Command
-
-commands_order = [
- InstallCommand,
- DownloadCommand,
- UninstallCommand,
- FreezeCommand,
- ListCommand,
- ShowCommand,
- CheckCommand,
- ConfigurationCommand,
- SearchCommand,
- WheelCommand,
- HashCommand,
- CompletionCommand,
- HelpCommand,
-] # type: List[Type[Command]]
-
-commands_dict = {c.name: c for c in commands_order}
-
-
-def get_summaries(ordered=True):
- """Yields sorted (command name, command summary) tuples."""
-
- if ordered:
- cmditems = _sort_commands(commands_dict, commands_order)
- else:
- cmditems = commands_dict.items()
-
- for name, command_class in cmditems:
- yield (name, command_class.summary)
-
-
-def get_similar_commands(name):
- """Command name auto-correct."""
- from difflib import get_close_matches
-
- name = name.lower()
-
- close_commands = get_close_matches(name, commands_dict.keys())
-
- if close_commands:
- return close_commands[0]
- else:
- return False
-
-
-def _sort_commands(cmddict, order):
- def keyfn(key):
- try:
- return order.index(key[1])
- except ValueError:
- # unordered items should come last
- return 0xff
-
- return sorted(cmddict.items(), key=keyfn)
+"""
+Package containing all pip commands
+"""
+from __future__ import absolute_import
+
+from pip._internal.commands.completion import CompletionCommand
+from pip._internal.commands.configuration import ConfigurationCommand
+from pip._internal.commands.download import DownloadCommand
+from pip._internal.commands.freeze import FreezeCommand
+from pip._internal.commands.hash import HashCommand
+from pip._internal.commands.help import HelpCommand
+from pip._internal.commands.list import ListCommand
+from pip._internal.commands.check import CheckCommand
+from pip._internal.commands.search import SearchCommand
+from pip._internal.commands.show import ShowCommand
+from pip._internal.commands.install import InstallCommand
+from pip._internal.commands.uninstall import UninstallCommand
+from pip._internal.commands.wheel import WheelCommand
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import List, Type
+ from pip._internal.basecommand import Command
+
+commands_order = [
+ InstallCommand,
+ DownloadCommand,
+ UninstallCommand,
+ FreezeCommand,
+ ListCommand,
+ ShowCommand,
+ CheckCommand,
+ ConfigurationCommand,
+ SearchCommand,
+ WheelCommand,
+ HashCommand,
+ CompletionCommand,
+ HelpCommand,
+] # type: List[Type[Command]]
+
+commands_dict = {c.name: c for c in commands_order}
+
+
+def get_summaries(ordered=True):
+ """Yields sorted (command name, command summary) tuples."""
+
+ if ordered:
+ cmditems = _sort_commands(commands_dict, commands_order)
+ else:
+ cmditems = commands_dict.items()
+
+ for name, command_class in cmditems:
+ yield (name, command_class.summary)
+
+
+def get_similar_commands(name):
+ """Command name auto-correct."""
+ from difflib import get_close_matches
+
+ name = name.lower()
+
+ close_commands = get_close_matches(name, commands_dict.keys())
+
+ if close_commands:
+ return close_commands[0]
+ else:
+ return False
+
+
+def _sort_commands(cmddict, order):
+ def keyfn(key):
+ try:
+ return order.index(key[1])
+ except ValueError:
+ # unordered items should come last
+ return 0xff
+
+ return sorted(cmddict.items(), key=keyfn)
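The name auto-correct in get_similar_commands is a thin wrapper over difflib; a standalone sketch with an assumed command list:

    from difflib import get_close_matches

    commands = ['install', 'download', 'uninstall', 'freeze', 'list']
    close = get_close_matches('instal', commands)
    print(close[0] if close else False)  # 'install'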
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/check.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/check.py
index 88db510..b1bf38a 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/check.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/check.py
@@ -1,42 +1,42 @@
-import logging
-
-from pip._internal.basecommand import Command
-from pip._internal.operations.check import (
- check_package_set, create_package_set_from_installed,
-)
-from pip._internal.utils.misc import get_installed_distributions
-
-logger = logging.getLogger(__name__)
-
-
-class CheckCommand(Command):
- """Verify installed packages have compatible dependencies."""
- name = 'check'
- usage = """
- %prog [options]"""
- summary = 'Verify installed packages have compatible dependencies.'
-
- def run(self, options, args):
- package_set = create_package_set_from_installed()
- missing, conflicting = check_package_set(package_set)
-
- for project_name in missing:
- version = package_set[project_name].version
- for dependency in missing[project_name]:
- logger.info(
- "%s %s requires %s, which is not installed.",
- project_name, version, dependency[0],
- )
-
- for project_name in conflicting:
- version = package_set[project_name].version
- for dep_name, dep_version, req in conflicting[project_name]:
- logger.info(
- "%s %s has requirement %s, but you have %s %s.",
- project_name, version, req, dep_name, dep_version,
- )
-
- if missing or conflicting:
- return 1
- else:
- logger.info("No broken requirements found.")
+import logging
+
+from pip._internal.basecommand import Command
+from pip._internal.operations.check import (
+ check_package_set, create_package_set_from_installed,
+)
+from pip._internal.utils.misc import get_installed_distributions
+
+logger = logging.getLogger(__name__)
+
+
+class CheckCommand(Command):
+ """Verify installed packages have compatible dependencies."""
+ name = 'check'
+ usage = """
+ %prog [options]"""
+ summary = 'Verify installed packages have compatible dependencies.'
+
+ def run(self, options, args):
+ package_set = create_package_set_from_installed()
+ missing, conflicting = check_package_set(package_set)
+
+ for project_name in missing:
+ version = package_set[project_name].version
+ for dependency in missing[project_name]:
+ logger.info(
+ "%s %s requires %s, which is not installed.",
+ project_name, version, dependency[0],
+ )
+
+ for project_name in conflicting:
+ version = package_set[project_name].version
+ for dep_name, dep_version, req in conflicting[project_name]:
+ logger.info(
+ "%s %s has requirement %s, but you have %s %s.",
+ project_name, version, req, dep_name, dep_version,
+ )
+
+ if missing or conflicting:
+ return 1
+ else:
+ logger.info("No broken requirements found.")
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/completion.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/completion.py
index c4b3873..8da1e83 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/completion.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/completion.py
@@ -1,94 +1,94 @@
-from __future__ import absolute_import
-
-import sys
-import textwrap
-
-from pip._internal.basecommand import Command
-from pip._internal.utils.misc import get_prog
-
-BASE_COMPLETION = """
-# pip %(shell)s completion start%(script)s# pip %(shell)s completion end
-"""
-
-COMPLETION_SCRIPTS = {
- 'bash': """
- _pip_completion()
- {
- COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\
- COMP_CWORD=$COMP_CWORD \\
- PIP_AUTO_COMPLETE=1 $1 ) )
- }
- complete -o default -F _pip_completion %(prog)s
- """,
- 'zsh': """
- function _pip_completion {
- local words cword
- read -Ac words
- read -cn cword
- reply=( $( COMP_WORDS="$words[*]" \\
- COMP_CWORD=$(( cword-1 )) \\
- PIP_AUTO_COMPLETE=1 $words[1] ) )
- }
- compctl -K _pip_completion %(prog)s
- """,
- 'fish': """
- function __fish_complete_pip
- set -lx COMP_WORDS (commandline -o) ""
- set -lx COMP_CWORD ( \\
- math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\
- )
- set -lx PIP_AUTO_COMPLETE 1
- string split \\ -- (eval $COMP_WORDS[1])
- end
- complete -fa "(__fish_complete_pip)" -c %(prog)s
- """,
-}
-
-
-class CompletionCommand(Command):
- """A helper command to be used for command completion."""
- name = 'completion'
- summary = 'A helper command used for command completion.'
- ignore_require_venv = True
-
- def __init__(self, *args, **kw):
- super(CompletionCommand, self).__init__(*args, **kw)
-
- cmd_opts = self.cmd_opts
-
- cmd_opts.add_option(
- '--bash', '-b',
- action='store_const',
- const='bash',
- dest='shell',
- help='Emit completion code for bash')
- cmd_opts.add_option(
- '--zsh', '-z',
- action='store_const',
- const='zsh',
- dest='shell',
- help='Emit completion code for zsh')
- cmd_opts.add_option(
- '--fish', '-f',
- action='store_const',
- const='fish',
- dest='shell',
- help='Emit completion code for fish')
-
- self.parser.insert_option_group(0, cmd_opts)
-
- def run(self, options, args):
- """Prints the completion code of the given shell"""
- shells = COMPLETION_SCRIPTS.keys()
- shell_options = ['--' + shell for shell in sorted(shells)]
- if options.shell in shells:
- script = textwrap.dedent(
- COMPLETION_SCRIPTS.get(options.shell, '') % {
- 'prog': get_prog(),
- }
- )
- print(BASE_COMPLETION % {'script': script, 'shell': options.shell})
- else:
- sys.stderr.write(
- 'ERROR: You must pass %s\n' % ' or '.join(shell_options)
- )
+from __future__ import absolute_import
+
+import sys
+import textwrap
+
+from pip._internal.basecommand import Command
+from pip._internal.utils.misc import get_prog
+
+BASE_COMPLETION = """
+# pip %(shell)s completion start%(script)s# pip %(shell)s completion end
+"""
+
+COMPLETION_SCRIPTS = {
+ 'bash': """
+ _pip_completion()
+ {
+ COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\
+ COMP_CWORD=$COMP_CWORD \\
+ PIP_AUTO_COMPLETE=1 $1 ) )
+ }
+ complete -o default -F _pip_completion %(prog)s
+ """,
+ 'zsh': """
+ function _pip_completion {
+ local words cword
+ read -Ac words
+ read -cn cword
+ reply=( $( COMP_WORDS="$words[*]" \\
+ COMP_CWORD=$(( cword-1 )) \\
+ PIP_AUTO_COMPLETE=1 $words[1] ) )
+ }
+ compctl -K _pip_completion %(prog)s
+ """,
+ 'fish': """
+ function __fish_complete_pip
+ set -lx COMP_WORDS (commandline -o) ""
+ set -lx COMP_CWORD ( \\
+ math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\
+ )
+ set -lx PIP_AUTO_COMPLETE 1
+ string split \\ -- (eval $COMP_WORDS[1])
+ end
+ complete -fa "(__fish_complete_pip)" -c %(prog)s
+ """,
+}
+
+
+class CompletionCommand(Command):
+ """A helper command to be used for command completion."""
+ name = 'completion'
+ summary = 'A helper command used for command completion.'
+ ignore_require_venv = True
+
+ def __init__(self, *args, **kw):
+ super(CompletionCommand, self).__init__(*args, **kw)
+
+ cmd_opts = self.cmd_opts
+
+ cmd_opts.add_option(
+ '--bash', '-b',
+ action='store_const',
+ const='bash',
+ dest='shell',
+ help='Emit completion code for bash')
+ cmd_opts.add_option(
+ '--zsh', '-z',
+ action='store_const',
+ const='zsh',
+ dest='shell',
+ help='Emit completion code for zsh')
+ cmd_opts.add_option(
+ '--fish', '-f',
+ action='store_const',
+ const='fish',
+ dest='shell',
+ help='Emit completion code for fish')
+
+ self.parser.insert_option_group(0, cmd_opts)
+
+ def run(self, options, args):
+ """Prints the completion code of the given shell"""
+ shells = COMPLETION_SCRIPTS.keys()
+ shell_options = ['--' + shell for shell in sorted(shells)]
+ if options.shell in shells:
+ script = textwrap.dedent(
+ COMPLETION_SCRIPTS.get(options.shell, '') % {
+ 'prog': get_prog(),
+ }
+ )
+ print(BASE_COMPLETION % {'script': script, 'shell': options.shell})
+ else:
+ sys.stderr.write(
+ 'ERROR: You must pass %s\n' % ' or '.join(shell_options)
+ )
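CompletionCommand is mostly string templating: pick a script by shell name, interpolate the program name, and wrap the result in start/end markers so a later run can find and replace the block. A minimal sketch of that rendering step (the one-entry script table is illustrative only):

    import textwrap

    scripts = {'bash': """
        complete -o default -F _pip_completion %(prog)s
        """}

    shell = 'bash'
    script = textwrap.dedent(scripts[shell] % {'prog': 'pip'})
    print('\n# pip %(shell)s completion start%(script)s'
          '# pip %(shell)s completion end\n'
          % {'script': script, 'shell': shell})

The emitted block is typically appended to a shell startup file, e.g. with something like pip completion --bash >> ~/.bashrc.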
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/configuration.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/configuration.py
index 57448cb..e10d9a9 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/configuration.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/configuration.py
@@ -1,227 +1,227 @@
-import logging
-import os
-import subprocess
-
-from pip._internal.basecommand import Command
-from pip._internal.configuration import Configuration, kinds
-from pip._internal.exceptions import PipError
-from pip._internal.locations import venv_config_file
-from pip._internal.status_codes import ERROR, SUCCESS
-from pip._internal.utils.misc import get_prog
-
-logger = logging.getLogger(__name__)
-
-
-class ConfigurationCommand(Command):
- """Manage local and global configuration.
-
- Subcommands:
-
- list: List the active configuration (or from the file specified)
- edit: Edit the configuration file in an editor
- get: Get the value associated with name
- set: Set the name=value
- unset: Unset the value associated with name
-
- If none of --user, --global and --venv are passed, a virtual
- environment configuration file is used if one is active and the file
- exists. Otherwise, all modifications happen to the user file by
- default.
- """
-
- name = 'config'
- usage = """
- %prog [<file-option>] list
- %prog [<file-option>] [--editor <editor-path>] edit
-
- %prog [<file-option>] get name
- %prog [<file-option>] set name value
- %prog [<file-option>] unset name
- """
-
- summary = "Manage local and global configuration."
-
- def __init__(self, *args, **kwargs):
- super(ConfigurationCommand, self).__init__(*args, **kwargs)
-
- self.configuration = None
-
- self.cmd_opts.add_option(
- '--editor',
- dest='editor',
- action='store',
- default=None,
- help=(
- 'Editor to use to edit the file. Uses VISUAL or EDITOR '
- 'environment variables if not provided.'
- )
- )
-
- self.cmd_opts.add_option(
- '--global',
- dest='global_file',
- action='store_true',
- default=False,
- help='Use the system-wide configuration file only'
- )
-
- self.cmd_opts.add_option(
- '--user',
- dest='user_file',
- action='store_true',
- default=False,
- help='Use the user configuration file only'
- )
-
- self.cmd_opts.add_option(
- '--venv',
- dest='venv_file',
- action='store_true',
- default=False,
- help='Use the virtualenv configuration file only'
- )
-
- self.parser.insert_option_group(0, self.cmd_opts)
-
- def run(self, options, args):
- handlers = {
- "list": self.list_values,
- "edit": self.open_in_editor,
- "get": self.get_name,
- "set": self.set_name_value,
- "unset": self.unset_name
- }
-
- # Determine action
- if not args or args[0] not in handlers:
- logger.error("Need an action ({}) to perform.".format(
- ", ".join(sorted(handlers)))
- )
- return ERROR
-
- action = args[0]
-
- # Determine which configuration files are to be loaded
- # Depends on whether the command is modifying.
- try:
- load_only = self._determine_file(
- options, need_value=(action in ["get", "set", "unset", "edit"])
- )
- except PipError as e:
- logger.error(e.args[0])
- return ERROR
-
- # Load a new configuration
- self.configuration = Configuration(
- isolated=options.isolated_mode, load_only=load_only
- )
- self.configuration.load()
-
- # Error handling happens here, not in the action-handlers.
- try:
- handlers[action](options, args[1:])
- except PipError as e:
- logger.error(e.args[0])
- return ERROR
-
- return SUCCESS
-
- def _determine_file(self, options, need_value):
- file_options = {
- kinds.USER: options.user_file,
- kinds.GLOBAL: options.global_file,
- kinds.VENV: options.venv_file
- }
-
- if sum(file_options.values()) == 0:
- if not need_value:
- return None
- # Default to user, unless there's a virtualenv file.
- elif os.path.exists(venv_config_file):
- return kinds.VENV
- else:
- return kinds.USER
- elif sum(file_options.values()) == 1:
- # There's probably a better expression for this.
- return [key for key in file_options if file_options[key]][0]
-
- raise PipError(
- "Need exactly one file to operate upon "
- "(--user, --venv, --global) to perform."
- )
-
- def list_values(self, options, args):
- self._get_n_args(args, "list", n=0)
-
- for key, value in sorted(self.configuration.items()):
- logger.info("%s=%r", key, value)
-
- def get_name(self, options, args):
- key = self._get_n_args(args, "get [name]", n=1)
- value = self.configuration.get_value(key)
-
- logger.info("%s", value)
-
- def set_name_value(self, options, args):
- key, value = self._get_n_args(args, "set [name] [value]", n=2)
- self.configuration.set_value(key, value)
-
- self._save_configuration()
-
- def unset_name(self, options, args):
- key = self._get_n_args(args, "unset [name]", n=1)
- self.configuration.unset_value(key)
-
- self._save_configuration()
-
- def open_in_editor(self, options, args):
- editor = self._determine_editor(options)
-
- fname = self.configuration.get_file_to_edit()
- if fname is None:
- raise PipError("Could not determine appropriate file.")
-
- try:
- subprocess.check_call([editor, fname])
- except subprocess.CalledProcessError as e:
- raise PipError(
- "Editor Subprocess exited with exit code {}"
- .format(e.returncode)
- )
-
- def _get_n_args(self, args, example, n):
- """Helper to make sure the command got the right number of arguments
- """
- if len(args) != n:
- msg = (
- 'Got unexpected number of arguments, expected {}. '
- '(example: "{} config {}")'
- ).format(n, get_prog(), example)
- raise PipError(msg)
-
- if n == 1:
- return args[0]
- else:
- return args
-
- def _save_configuration(self):
- # We successfully ran a modifying command. Need to save the
- # configuration.
- try:
- self.configuration.save()
- except Exception:
- logger.error(
- "Unable to save configuration. Please report this as a bug.",
- exc_info=1
- )
- raise PipError("Internal Error.")
-
- def _determine_editor(self, options):
- if options.editor is not None:
- return options.editor
- elif "VISUAL" in os.environ:
- return os.environ["VISUAL"]
- elif "EDITOR" in os.environ:
- return os.environ["EDITOR"]
- else:
- raise PipError("Could not determine editor to use.")
+import logging
+import os
+import subprocess
+
+from pip._internal.basecommand import Command
+from pip._internal.configuration import Configuration, kinds
+from pip._internal.exceptions import PipError
+from pip._internal.locations import venv_config_file
+from pip._internal.status_codes import ERROR, SUCCESS
+from pip._internal.utils.misc import get_prog
+
+logger = logging.getLogger(__name__)
+
+
+class ConfigurationCommand(Command):
+ """Manage local and global configuration.
+
+ Subcommands:
+
+ list: List the active configuration (or from the file specified)
+ edit: Edit the configuration file in an editor
+ get: Get the value associated with name
+ set: Set the name=value
+ unset: Unset the value associated with name
+
+ If none of --user, --global and --venv are passed, a virtual
+ environment configuration file is used if one is active and the file
+ exists. Otherwise, all modifications happen to the user file by
+ default.
+ """
+
+ name = 'config'
+ usage = """
+ %prog [<file-option>] list
+ %prog [<file-option>] [--editor <editor-path>] edit
+
+ %prog [<file-option>] get name
+ %prog [<file-option>] set name value
+ %prog [<file-option>] unset name
+ """
+
+ summary = "Manage local and global configuration."
+
+ def __init__(self, *args, **kwargs):
+ super(ConfigurationCommand, self).__init__(*args, **kwargs)
+
+ self.configuration = None
+
+ self.cmd_opts.add_option(
+ '--editor',
+ dest='editor',
+ action='store',
+ default=None,
+ help=(
+ 'Editor to use to edit the file. Uses VISUAL or EDITOR '
+ 'environment variables if not provided.'
+ )
+ )
+
+ self.cmd_opts.add_option(
+ '--global',
+ dest='global_file',
+ action='store_true',
+ default=False,
+ help='Use the system-wide configuration file only'
+ )
+
+ self.cmd_opts.add_option(
+ '--user',
+ dest='user_file',
+ action='store_true',
+ default=False,
+ help='Use the user configuration file only'
+ )
+
+ self.cmd_opts.add_option(
+ '--venv',
+ dest='venv_file',
+ action='store_true',
+ default=False,
+ help='Use the virtualenv configuration file only'
+ )
+
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ def run(self, options, args):
+ handlers = {
+ "list": self.list_values,
+ "edit": self.open_in_editor,
+ "get": self.get_name,
+ "set": self.set_name_value,
+ "unset": self.unset_name
+ }
+
+ # Determine action
+ if not args or args[0] not in handlers:
+ logger.error("Need an action ({}) to perform.".format(
+ ", ".join(sorted(handlers)))
+ )
+ return ERROR
+
+ action = args[0]
+
+ # Determine which configuration files are to be loaded
+ # Depends on whether the command is modifying.
+ try:
+ load_only = self._determine_file(
+ options, need_value=(action in ["get", "set", "unset", "edit"])
+ )
+ except PipError as e:
+ logger.error(e.args[0])
+ return ERROR
+
+ # Load a new configuration
+ self.configuration = Configuration(
+ isolated=options.isolated_mode, load_only=load_only
+ )
+ self.configuration.load()
+
+ # Error handling happens here, not in the action-handlers.
+ try:
+ handlers[action](options, args[1:])
+ except PipError as e:
+ logger.error(e.args[0])
+ return ERROR
+
+ return SUCCESS
+
+ def _determine_file(self, options, need_value):
+ file_options = {
+ kinds.USER: options.user_file,
+ kinds.GLOBAL: options.global_file,
+ kinds.VENV: options.venv_file
+ }
+
+ if sum(file_options.values()) == 0:
+ if not need_value:
+ return None
+ # Default to user, unless there's a virtualenv file.
+ elif os.path.exists(venv_config_file):
+ return kinds.VENV
+ else:
+ return kinds.USER
+ elif sum(file_options.values()) == 1:
+ # There's probably a better expression for this.
+ return [key for key in file_options if file_options[key]][0]
+
+ raise PipError(
+ "Need exactly one file to operate upon "
+ "(--user, --venv, --global) to perform."
+ )
+
+ def list_values(self, options, args):
+ self._get_n_args(args, "list", n=0)
+
+ for key, value in sorted(self.configuration.items()):
+ logger.info("%s=%r", key, value)
+
+ def get_name(self, options, args):
+ key = self._get_n_args(args, "get [name]", n=1)
+ value = self.configuration.get_value(key)
+
+ logger.info("%s", value)
+
+ def set_name_value(self, options, args):
+ key, value = self._get_n_args(args, "set [name] [value]", n=2)
+ self.configuration.set_value(key, value)
+
+ self._save_configuration()
+
+ def unset_name(self, options, args):
+ key = self._get_n_args(args, "unset [name]", n=1)
+ self.configuration.unset_value(key)
+
+ self._save_configuration()
+
+ def open_in_editor(self, options, args):
+ editor = self._determine_editor(options)
+
+ fname = self.configuration.get_file_to_edit()
+ if fname is None:
+ raise PipError("Could not determine appropriate file.")
+
+ try:
+ subprocess.check_call([editor, fname])
+ except subprocess.CalledProcessError as e:
+ raise PipError(
+ "Editor Subprocess exited with exit code {}"
+ .format(e.returncode)
+ )
+
+ def _get_n_args(self, args, example, n):
+ """Helper to make sure the command got the right number of arguments
+ """
+ if len(args) != n:
+ msg = (
+ 'Got unexpected number of arguments, expected {}. '
+ '(example: "{} config {}")'
+ ).format(n, get_prog(), example)
+ raise PipError(msg)
+
+ if n == 1:
+ return args[0]
+ else:
+ return args
+
+ def _save_configuration(self):
+ # We successfully ran a modifying command. Need to save the
+ # configuration.
+ try:
+ self.configuration.save()
+ except Exception:
+ logger.error(
+ "Unable to save configuration. Please report this as a bug.",
+ exc_info=1
+ )
+ raise PipError("Internal Error.")
+
+ def _determine_editor(self, options):
+ if options.editor is not None:
+ return options.editor
+ elif "VISUAL" in os.environ:
+ return os.environ["VISUAL"]
+ elif "EDITOR" in os.environ:
+ return os.environ["EDITOR"]
+ else:
+ raise PipError("Could not determine editor to use.")
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/download.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/download.py
index 5713d07..916a470 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/download.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/download.py
@@ -1,233 +1,233 @@
-from __future__ import absolute_import
-
-import logging
-import os
-
-from pip._internal import cmdoptions
-from pip._internal.basecommand import RequirementCommand
-from pip._internal.exceptions import CommandError
-from pip._internal.index import FormatControl
-from pip._internal.operations.prepare import RequirementPreparer
-from pip._internal.req import RequirementSet
-from pip._internal.resolve import Resolver
-from pip._internal.utils.filesystem import check_path_owner
-from pip._internal.utils.misc import ensure_dir, normalize_path
-from pip._internal.utils.temp_dir import TempDirectory
-
-logger = logging.getLogger(__name__)
-
-
-class DownloadCommand(RequirementCommand):
- """
- Download packages from:
-
- - PyPI (and other indexes) using requirement specifiers.
- - VCS project urls.
- - Local project directories.
- - Local or remote source archives.
-
- pip also supports downloading from "requirements files", which provide
- an easy way to specify a whole environment to be downloaded.
- """
- name = 'download'
-
- usage = """
- %prog [options] <requirement specifier> [package-index-options] ...
- %prog [options] -r <requirements file> [package-index-options] ...
- %prog [options] <vcs project url> ...
- %prog [options] <local project path> ...
- %prog [options] <archive url/path> ..."""
-
- summary = 'Download packages.'
-
- def __init__(self, *args, **kw):
- super(DownloadCommand, self).__init__(*args, **kw)
-
- cmd_opts = self.cmd_opts
-
- cmd_opts.add_option(cmdoptions.constraints())
- cmd_opts.add_option(cmdoptions.requirements())
- cmd_opts.add_option(cmdoptions.build_dir())
- cmd_opts.add_option(cmdoptions.no_deps())
- cmd_opts.add_option(cmdoptions.global_options())
- cmd_opts.add_option(cmdoptions.no_binary())
- cmd_opts.add_option(cmdoptions.only_binary())
- cmd_opts.add_option(cmdoptions.src())
- cmd_opts.add_option(cmdoptions.pre())
- cmd_opts.add_option(cmdoptions.no_clean())
- cmd_opts.add_option(cmdoptions.require_hashes())
- cmd_opts.add_option(cmdoptions.progress_bar())
- cmd_opts.add_option(cmdoptions.no_build_isolation())
-
- cmd_opts.add_option(
- '-d', '--dest', '--destination-dir', '--destination-directory',
- dest='download_dir',
- metavar='dir',
- default=os.curdir,
- help=("Download packages into ."),
- )
-
- cmd_opts.add_option(
- '--platform',
- dest='platform',
- metavar='platform',
- default=None,
- help=("Only download wheels compatible with . "
- "Defaults to the platform of the running system."),
- )
-
- cmd_opts.add_option(
- '--python-version',
- dest='python_version',
- metavar='python_version',
- default=None,
- help=("Only download wheels compatible with Python "
- "interpreter version . If not specified, then the "
- "current system interpreter minor version is used. A major "
- "version (e.g. '2') can be specified to match all "
- "minor revs of that major version. A minor version "
- "(e.g. '34') can also be specified."),
- )
-
- cmd_opts.add_option(
- '--implementation',
- dest='implementation',
- metavar='implementation',
- default=None,
- help=("Only download wheels compatible with Python "
- "implementation , e.g. 'pp', 'jy', 'cp', "
- " or 'ip'. If not specified, then the current "
- "interpreter implementation is used. Use 'py' to force "
- "implementation-agnostic wheels."),
- )
-
- cmd_opts.add_option(
- '--abi',
- dest='abi',
- metavar='abi',
- default=None,
- help=("Only download wheels compatible with Python "
- "abi , e.g. 'pypy_41'. If not specified, then the "
- "current interpreter abi tag is used. Generally "
- "you will need to specify --implementation, "
- "--platform, and --python-version when using "
- "this option."),
- )
-
- index_opts = cmdoptions.make_option_group(
- cmdoptions.index_group,
- self.parser,
- )
-
- self.parser.insert_option_group(0, index_opts)
- self.parser.insert_option_group(0, cmd_opts)
-
- def run(self, options, args):
- options.ignore_installed = True
- # editable doesn't really make sense for `pip download`, but the bowels
- # of the RequirementSet code require that property.
- options.editables = []
-
- if options.python_version:
- python_versions = [options.python_version]
- else:
- python_versions = None
-
- dist_restriction_set = any([
- options.python_version,
- options.platform,
- options.abi,
- options.implementation,
- ])
- binary_only = FormatControl(set(), {':all:'})
- no_sdist_dependencies = (
- options.format_control != binary_only and
- not options.ignore_dependencies
- )
- if dist_restriction_set and no_sdist_dependencies:
- raise CommandError(
- "When restricting platform and interpreter constraints using "
- "--python-version, --platform, --abi, or --implementation, "
- "either --no-deps must be set, or --only-binary=:all: must be "
- "set and --no-binary must not be set (or must be set to "
- ":none:)."
- )
-
- options.src_dir = os.path.abspath(options.src_dir)
- options.download_dir = normalize_path(options.download_dir)
-
- ensure_dir(options.download_dir)
-
- with self._build_session(options) as session:
- finder = self._build_package_finder(
- options=options,
- session=session,
- platform=options.platform,
- python_versions=python_versions,
- abi=options.abi,
- implementation=options.implementation,
- )
- build_delete = (not (options.no_clean or options.build_dir))
- if options.cache_dir and not check_path_owner(options.cache_dir):
- logger.warning(
- "The directory '%s' or its parent directory is not owned "
- "by the current user and caching wheels has been "
- "disabled. check the permissions and owner of that "
- "directory. If executing pip with sudo, you may want "
- "sudo's -H flag.",
- options.cache_dir,
- )
- options.cache_dir = None
-
- with TempDirectory(
- options.build_dir, delete=build_delete, kind="download"
- ) as directory:
-
- requirement_set = RequirementSet(
- require_hashes=options.require_hashes,
- )
- self.populate_requirement_set(
- requirement_set,
- args,
- options,
- finder,
- session,
- self.name,
- None
- )
-
- preparer = RequirementPreparer(
- build_dir=directory.path,
- src_dir=options.src_dir,
- download_dir=options.download_dir,
- wheel_download_dir=None,
- progress_bar=options.progress_bar,
- build_isolation=options.build_isolation,
- )
-
- resolver = Resolver(
- preparer=preparer,
- finder=finder,
- session=session,
- wheel_cache=None,
- use_user_site=False,
- upgrade_strategy="to-satisfy-only",
- force_reinstall=False,
- ignore_dependencies=options.ignore_dependencies,
- ignore_requires_python=False,
- ignore_installed=True,
- isolated=options.isolated_mode,
- )
- resolver.resolve(requirement_set)
-
- downloaded = ' '.join([
- req.name for req in requirement_set.successfully_downloaded
- ])
- if downloaded:
- logger.info('Successfully downloaded %s', downloaded)
-
- # Clean up
- if not options.no_clean:
- requirement_set.cleanup_files()
-
- return requirement_set
+from __future__ import absolute_import
+
+import logging
+import os
+
+from pip._internal import cmdoptions
+from pip._internal.basecommand import RequirementCommand
+from pip._internal.exceptions import CommandError
+from pip._internal.index import FormatControl
+from pip._internal.operations.prepare import RequirementPreparer
+from pip._internal.req import RequirementSet
+from pip._internal.resolve import Resolver
+from pip._internal.utils.filesystem import check_path_owner
+from pip._internal.utils.misc import ensure_dir, normalize_path
+from pip._internal.utils.temp_dir import TempDirectory
+
+logger = logging.getLogger(__name__)
+
+
+class DownloadCommand(RequirementCommand):
+ """
+ Download packages from:
+
+ - PyPI (and other indexes) using requirement specifiers.
+ - VCS project urls.
+ - Local project directories.
+ - Local or remote source archives.
+
+ pip also supports downloading from "requirements files", which provide
+ an easy way to specify a whole environment to be downloaded.
+ """
+ name = 'download'
+
+ usage = """
+ %prog [options] <requirement specifier> [package-index-options] ...
+ %prog [options] -r <requirements file> [package-index-options] ...
+ %prog [options] <vcs project url> ...
+ %prog [options] <local project path> ...
+ %prog [options] <archive url/path> ..."""
+
+ summary = 'Download packages.'
+
+ def __init__(self, *args, **kw):
+ super(DownloadCommand, self).__init__(*args, **kw)
+
+ cmd_opts = self.cmd_opts
+
+ cmd_opts.add_option(cmdoptions.constraints())
+ cmd_opts.add_option(cmdoptions.requirements())
+ cmd_opts.add_option(cmdoptions.build_dir())
+ cmd_opts.add_option(cmdoptions.no_deps())
+ cmd_opts.add_option(cmdoptions.global_options())
+ cmd_opts.add_option(cmdoptions.no_binary())
+ cmd_opts.add_option(cmdoptions.only_binary())
+ cmd_opts.add_option(cmdoptions.src())
+ cmd_opts.add_option(cmdoptions.pre())
+ cmd_opts.add_option(cmdoptions.no_clean())
+ cmd_opts.add_option(cmdoptions.require_hashes())
+ cmd_opts.add_option(cmdoptions.progress_bar())
+ cmd_opts.add_option(cmdoptions.no_build_isolation())
+
+ cmd_opts.add_option(
+ '-d', '--dest', '--destination-dir', '--destination-directory',
+ dest='download_dir',
+ metavar='dir',
+ default=os.curdir,
+ help=("Download packages into ."),
+ )
+
+ cmd_opts.add_option(
+ '--platform',
+ dest='platform',
+ metavar='platform',
+ default=None,
+ help=("Only download wheels compatible with . "
+ "Defaults to the platform of the running system."),
+ )
+
+ cmd_opts.add_option(
+ '--python-version',
+ dest='python_version',
+ metavar='python_version',
+ default=None,
+ help=("Only download wheels compatible with Python "
+ "interpreter version . If not specified, then the "
+ "current system interpreter minor version is used. A major "
+ "version (e.g. '2') can be specified to match all "
+ "minor revs of that major version. A minor version "
+ "(e.g. '34') can also be specified."),
+ )
+
+ cmd_opts.add_option(
+ '--implementation',
+ dest='implementation',
+ metavar='implementation',
+ default=None,
+ help=("Only download wheels compatible with Python "
+ "implementation , e.g. 'pp', 'jy', 'cp', "
+ " or 'ip'. If not specified, then the current "
+ "interpreter implementation is used. Use 'py' to force "
+ "implementation-agnostic wheels."),
+ )
+
+ cmd_opts.add_option(
+ '--abi',
+ dest='abi',
+ metavar='abi',
+ default=None,
+ help=("Only download wheels compatible with Python "
+ "abi , e.g. 'pypy_41'. If not specified, then the "
+ "current interpreter abi tag is used. Generally "
+ "you will need to specify --implementation, "
+ "--platform, and --python-version when using "
+ "this option."),
+ )
+
+ index_opts = cmdoptions.make_option_group(
+ cmdoptions.index_group,
+ self.parser,
+ )
+
+ self.parser.insert_option_group(0, index_opts)
+ self.parser.insert_option_group(0, cmd_opts)
+
+ def run(self, options, args):
+ options.ignore_installed = True
+ # editable doesn't really make sense for `pip download`, but the bowels
+ # of the RequirementSet code require that property.
+ options.editables = []
+
+ if options.python_version:
+ python_versions = [options.python_version]
+ else:
+ python_versions = None
+
+ dist_restriction_set = any([
+ options.python_version,
+ options.platform,
+ options.abi,
+ options.implementation,
+ ])
+ binary_only = FormatControl(set(), {':all:'})
+ no_sdist_dependencies = (
+ options.format_control != binary_only and
+ not options.ignore_dependencies
+ )
+ if dist_restriction_set and no_sdist_dependencies:
+ raise CommandError(
+ "When restricting platform and interpreter constraints using "
+ "--python-version, --platform, --abi, or --implementation, "
+ "either --no-deps must be set, or --only-binary=:all: must be "
+ "set and --no-binary must not be set (or must be set to "
+ ":none:)."
+ )
+
+ options.src_dir = os.path.abspath(options.src_dir)
+ options.download_dir = normalize_path(options.download_dir)
+
+ ensure_dir(options.download_dir)
+
+ with self._build_session(options) as session:
+ finder = self._build_package_finder(
+ options=options,
+ session=session,
+ platform=options.platform,
+ python_versions=python_versions,
+ abi=options.abi,
+ implementation=options.implementation,
+ )
+ build_delete = (not (options.no_clean or options.build_dir))
+ if options.cache_dir and not check_path_owner(options.cache_dir):
+ logger.warning(
+ "The directory '%s' or its parent directory is not owned "
+ "by the current user and caching wheels has been "
+ "disabled. check the permissions and owner of that "
+ "directory. If executing pip with sudo, you may want "
+ "sudo's -H flag.",
+ options.cache_dir,
+ )
+ options.cache_dir = None
+
+ with TempDirectory(
+ options.build_dir, delete=build_delete, kind="download"
+ ) as directory:
+
+ requirement_set = RequirementSet(
+ require_hashes=options.require_hashes,
+ )
+ self.populate_requirement_set(
+ requirement_set,
+ args,
+ options,
+ finder,
+ session,
+ self.name,
+ None
+ )
+
+ preparer = RequirementPreparer(
+ build_dir=directory.path,
+ src_dir=options.src_dir,
+ download_dir=options.download_dir,
+ wheel_download_dir=None,
+ progress_bar=options.progress_bar,
+ build_isolation=options.build_isolation,
+ )
+
+ resolver = Resolver(
+ preparer=preparer,
+ finder=finder,
+ session=session,
+ wheel_cache=None,
+ use_user_site=False,
+ upgrade_strategy="to-satisfy-only",
+ force_reinstall=False,
+ ignore_dependencies=options.ignore_dependencies,
+ ignore_requires_python=False,
+ ignore_installed=True,
+ isolated=options.isolated_mode,
+ )
+ resolver.resolve(requirement_set)
+
+ downloaded = ' '.join([
+ req.name for req in requirement_set.successfully_downloaded
+ ])
+ if downloaded:
+ logger.info('Successfully downloaded %s', downloaded)
+
+ # Clean up
+ if not options.no_clean:
+ requirement_set.cleanup_files()
+
+ return requirement_set
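The guard in DownloadCommand.run encodes one rule: --python-version/--platform/--abi/--implementation only make sense when no sdist can enter the picture, because an sdist would be built for the running interpreter rather than the requested target. Reduced to a standalone predicate (the parameter names are mine, mirroring the option dests above):

    def restriction_ok(python_version, platform, abi, implementation,
                       binary_only, no_deps):
        restricted = any([python_version, platform, abi, implementation])
        # Safe only if deps are skipped or everything resolves from wheels.
        return (not restricted) or no_deps or binary_only

    assert restriction_ok('27', None, None, None, binary_only=True, no_deps=False)
    assert not restriction_ok('27', None, None, None, binary_only=False, no_deps=False)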
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/freeze.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/freeze.py
index 0d3d4ae..ac562d7 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/freeze.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/freeze.py
@@ -1,96 +1,96 @@
-from __future__ import absolute_import
-
-import sys
-
-from pip._internal import index
-from pip._internal.basecommand import Command
-from pip._internal.cache import WheelCache
-from pip._internal.compat import stdlib_pkgs
-from pip._internal.operations.freeze import freeze
-
-DEV_PKGS = {'pip', 'setuptools', 'distribute', 'wheel'}
-
-
-class FreezeCommand(Command):
- """
- Output installed packages in requirements format.
-
- Packages are listed in a case-insensitive sorted order.
- """
- name = 'freeze'
- usage = """
- %prog [options]"""
- summary = 'Output installed packages in requirements format.'
- log_streams = ("ext://sys.stderr", "ext://sys.stderr")
-
- def __init__(self, *args, **kw):
- super(FreezeCommand, self).__init__(*args, **kw)
-
- self.cmd_opts.add_option(
- '-r', '--requirement',
- dest='requirements',
- action='append',
- default=[],
- metavar='file',
- help="Use the order in the given requirements file and its "
- "comments when generating output. This option can be "
- "used multiple times.")
- self.cmd_opts.add_option(
- '-f', '--find-links',
- dest='find_links',
- action='append',
- default=[],
- metavar='URL',
- help='URL for finding packages, which will be added to the '
- 'output.')
- self.cmd_opts.add_option(
- '-l', '--local',
- dest='local',
- action='store_true',
- default=False,
- help='If in a virtualenv that has global access, do not output '
- 'globally-installed packages.')
- self.cmd_opts.add_option(
- '--user',
- dest='user',
- action='store_true',
- default=False,
- help='Only output packages installed in user-site.')
- self.cmd_opts.add_option(
- '--all',
- dest='freeze_all',
- action='store_true',
- help='Do not skip these packages in the output:'
- ' %s' % ', '.join(DEV_PKGS))
- self.cmd_opts.add_option(
- '--exclude-editable',
- dest='exclude_editable',
- action='store_true',
- help='Exclude editable packages from output.')
-
- self.parser.insert_option_group(0, self.cmd_opts)
-
- def run(self, options, args):
- format_control = index.FormatControl(set(), set())
- wheel_cache = WheelCache(options.cache_dir, format_control)
- skip = set(stdlib_pkgs)
- if not options.freeze_all:
- skip.update(DEV_PKGS)
-
- freeze_kwargs = dict(
- requirement=options.requirements,
- find_links=options.find_links,
- local_only=options.local,
- user_only=options.user,
- skip_regex=options.skip_requirements_regex,
- isolated=options.isolated_mode,
- wheel_cache=wheel_cache,
- skip=skip,
- exclude_editable=options.exclude_editable,
- )
-
- try:
- for line in freeze(**freeze_kwargs):
- sys.stdout.write(line + '\n')
- finally:
- wheel_cache.cleanup()
+from __future__ import absolute_import
+
+import sys
+
+from pip._internal import index
+from pip._internal.basecommand import Command
+from pip._internal.cache import WheelCache
+from pip._internal.compat import stdlib_pkgs
+from pip._internal.operations.freeze import freeze
+
+DEV_PKGS = {'pip', 'setuptools', 'distribute', 'wheel'}
+
+
+class FreezeCommand(Command):
+ """
+ Output installed packages in requirements format.
+
+ Packages are listed in a case-insensitive sorted order.
+ """
+ name = 'freeze'
+ usage = """
+ %prog [options]"""
+ summary = 'Output installed packages in requirements format.'
+ log_streams = ("ext://sys.stderr", "ext://sys.stderr")
+
+ def __init__(self, *args, **kw):
+ super(FreezeCommand, self).__init__(*args, **kw)
+
+ self.cmd_opts.add_option(
+ '-r', '--requirement',
+ dest='requirements',
+ action='append',
+ default=[],
+ metavar='file',
+ help="Use the order in the given requirements file and its "
+ "comments when generating output. This option can be "
+ "used multiple times.")
+ self.cmd_opts.add_option(
+ '-f', '--find-links',
+ dest='find_links',
+ action='append',
+ default=[],
+ metavar='URL',
+ help='URL for finding packages, which will be added to the '
+ 'output.')
+ self.cmd_opts.add_option(
+ '-l', '--local',
+ dest='local',
+ action='store_true',
+ default=False,
+ help='If in a virtualenv that has global access, do not output '
+ 'globally-installed packages.')
+ self.cmd_opts.add_option(
+ '--user',
+ dest='user',
+ action='store_true',
+ default=False,
+ help='Only output packages installed in user-site.')
+ self.cmd_opts.add_option(
+ '--all',
+ dest='freeze_all',
+ action='store_true',
+ help='Do not skip these packages in the output:'
+ ' %s' % ', '.join(DEV_PKGS))
+ self.cmd_opts.add_option(
+ '--exclude-editable',
+ dest='exclude_editable',
+ action='store_true',
+ help='Exclude editable packages from output.')
+
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ def run(self, options, args):
+ format_control = index.FormatControl(set(), set())
+ wheel_cache = WheelCache(options.cache_dir, format_control)
+ skip = set(stdlib_pkgs)
+ if not options.freeze_all:
+ skip.update(DEV_PKGS)
+
+ freeze_kwargs = dict(
+ requirement=options.requirements,
+ find_links=options.find_links,
+ local_only=options.local,
+ user_only=options.user,
+ skip_regex=options.skip_requirements_regex,
+ isolated=options.isolated_mode,
+ wheel_cache=wheel_cache,
+ skip=skip,
+ exclude_editable=options.exclude_editable,
+ )
+
+ try:
+ for line in freeze(**freeze_kwargs):
+ sys.stdout.write(line + '\n')
+ finally:
+ wheel_cache.cleanup()
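One design point worth flagging: log_streams pins both logging streams to stderr because freeze's real payload goes through sys.stdout.write, and pip freeze > requirements.txt must produce a clean file. A minimal analogue of that stdout/stderr split:

    import sys

    def emit(lines):
        for line in lines:
            sys.stdout.write(line + '\n')               # machine-readable output
        sys.stderr.write('%d packages\n' % len(lines))  # human-facing chatter

    emit(['requests==2.18.4', 'six==1.11.0'])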
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/hash.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/hash.py
index 95353b0..0ce1419 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/hash.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/hash.py
@@ -1,57 +1,57 @@
-from __future__ import absolute_import
-
-import hashlib
-import logging
-import sys
-
-from pip._internal.basecommand import Command
-from pip._internal.status_codes import ERROR
-from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES
-from pip._internal.utils.misc import read_chunks
-
-logger = logging.getLogger(__name__)
-
-
-class HashCommand(Command):
- """
- Compute a hash of a local package archive.
-
- These can be used with --hash in a requirements file to do repeatable
- installs.
-
- """
- name = 'hash'
- usage = '%prog [options] <file> ...'
- summary = 'Compute hashes of package archives.'
- ignore_require_venv = True
-
- def __init__(self, *args, **kw):
- super(HashCommand, self).__init__(*args, **kw)
- self.cmd_opts.add_option(
- '-a', '--algorithm',
- dest='algorithm',
- choices=STRONG_HASHES,
- action='store',
- default=FAVORITE_HASH,
- help='The hash algorithm to use: one of %s' %
- ', '.join(STRONG_HASHES))
- self.parser.insert_option_group(0, self.cmd_opts)
-
- def run(self, options, args):
- if not args:
- self.parser.print_usage(sys.stderr)
- return ERROR
-
- algorithm = options.algorithm
- for path in args:
- logger.info('%s:\n--hash=%s:%s',
- path, algorithm, _hash_of_file(path, algorithm))
-
-
-def _hash_of_file(path, algorithm):
- """Return the hash digest of a file."""
- with open(path, 'rb') as archive:
- hash = hashlib.new(algorithm)
- for chunk in read_chunks(archive):
- hash.update(chunk)
- return hash.hexdigest()
+from __future__ import absolute_import
+
+import hashlib
+import logging
+import sys
+
+from pip._internal.basecommand import Command
+from pip._internal.status_codes import ERROR
+from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES
+from pip._internal.utils.misc import read_chunks
+
+logger = logging.getLogger(__name__)
+
+
+class HashCommand(Command):
+ """
+ Compute a hash of a local package archive.
+
+ These can be used with --hash in a requirements file to do repeatable
+ installs.
+
+ """
+ name = 'hash'
+ usage = '%prog [options] <file> ...'
+ summary = 'Compute hashes of package archives.'
+ ignore_require_venv = True
+
+ def __init__(self, *args, **kw):
+ super(HashCommand, self).__init__(*args, **kw)
+ self.cmd_opts.add_option(
+ '-a', '--algorithm',
+ dest='algorithm',
+ choices=STRONG_HASHES,
+ action='store',
+ default=FAVORITE_HASH,
+ help='The hash algorithm to use: one of %s' %
+ ', '.join(STRONG_HASHES))
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ def run(self, options, args):
+ if not args:
+ self.parser.print_usage(sys.stderr)
+ return ERROR
+
+ algorithm = options.algorithm
+ for path in args:
+ logger.info('%s:\n--hash=%s:%s',
+ path, algorithm, _hash_of_file(path, algorithm))
+
+
+def _hash_of_file(path, algorithm):
+ """Return the hash digest of a file."""
+ with open(path, 'rb') as archive:
+ hash = hashlib.new(algorithm)
+ for chunk in read_chunks(archive):
+ hash.update(chunk)
+ return hash.hexdigest()
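_hash_of_file streams the archive in chunks so large files never sit in memory at once. A dependency-free equivalent, with pip's read_chunks replaced by the plain iter()-with-sentinel idiom (the chunk size is an arbitrary choice here):

    import hashlib

    def hash_of_file(path, algorithm='sha256'):
        h = hashlib.new(algorithm)
        with open(path, 'rb') as f:
            for chunk in iter(lambda: f.read(8192), b''):  # read until EOF
                h.update(chunk)
        return h.hexdigest()

The resulting digest is what a requirements line consumes, e.g. somepkg==1.0 --hash=sha256:<digest>.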
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/help.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/help.py
index 06ca2c1..f4a0e40 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/help.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/help.py
@@ -1,36 +1,36 @@
-from __future__ import absolute_import
-
-from pip._internal.basecommand import SUCCESS, Command
-from pip._internal.exceptions import CommandError
-
-
-class HelpCommand(Command):
- """Show help for commands"""
- name = 'help'
- usage = """
- %prog """
- summary = 'Show help for commands.'
- ignore_require_venv = True
-
- def run(self, options, args):
- from pip._internal.commands import commands_dict, get_similar_commands
-
- try:
- # 'pip help' with no args is handled by pip.__init__.parseopt()
- cmd_name = args[0] # the command we need help for
- except IndexError:
- return SUCCESS
-
- if cmd_name not in commands_dict:
- guess = get_similar_commands(cmd_name)
-
- msg = ['unknown command "%s"' % cmd_name]
- if guess:
- msg.append('maybe you meant "%s"' % guess)
-
- raise CommandError(' - '.join(msg))
-
- command = commands_dict[cmd_name]()
- command.parser.print_help()
-
- return SUCCESS
+from __future__ import absolute_import
+
+from pip._internal.basecommand import SUCCESS, Command
+from pip._internal.exceptions import CommandError
+
+
+class HelpCommand(Command):
+ """Show help for commands"""
+ name = 'help'
+ usage = """
+ %prog """
+ summary = 'Show help for commands.'
+ ignore_require_venv = True
+
+ def run(self, options, args):
+ from pip._internal.commands import commands_dict, get_similar_commands
+
+ try:
+ # 'pip help' with no args is handled by pip.__init__.parseopt()
+ cmd_name = args[0] # the command we need help for
+ except IndexError:
+ return SUCCESS
+
+ if cmd_name not in commands_dict:
+ guess = get_similar_commands(cmd_name)
+
+ msg = ['unknown command "%s"' % cmd_name]
+ if guess:
+ msg.append('maybe you meant "%s"' % guess)
+
+ raise CommandError(' - '.join(msg))
+
+ command = commands_dict[cmd_name]()
+ command.parser.print_help()
+
+ return SUCCESS
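HelpCommand reuses get_similar_commands so a typo fails with a suggestion rather than a bare error. A sketch of just that message-assembly path (the registry and exception type are stubs, not pip's):

    commands_dict = {'install': None, 'help': None}

    def help_for(cmd_name, guess):
        if cmd_name not in commands_dict:
            msg = ['unknown command "%s"' % cmd_name]
            if guess:
                msg.append('maybe you meant "%s"' % guess)
            raise ValueError(' - '.join(msg))  # pip raises CommandError here

    try:
        help_for('instal', guess='install')
    except ValueError as e:
        print(e)   # unknown command "instal" - maybe you meant "install"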
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/install.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/install.py
index 9138683..057a64e 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/install.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/install.py
@@ -1,502 +1,502 @@
-from __future__ import absolute_import
-
-import errno
-import logging
-import operator
-import os
-import shutil
-from optparse import SUPPRESS_HELP
-
-from pip._internal import cmdoptions
-from pip._internal.basecommand import RequirementCommand
-from pip._internal.cache import WheelCache
-from pip._internal.exceptions import (
- CommandError, InstallationError, PreviousBuildDirError,
-)
-from pip._internal.locations import distutils_scheme, virtualenv_no_global
-from pip._internal.operations.check import check_install_conflicts
-from pip._internal.operations.prepare import RequirementPreparer
-from pip._internal.req import RequirementSet, install_given_reqs
-from pip._internal.resolve import Resolver
-from pip._internal.status_codes import ERROR
-from pip._internal.utils.filesystem import check_path_owner
-from pip._internal.utils.misc import ensure_dir, get_installed_version
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.wheel import WheelBuilder
-
-try:
- import wheel
-except ImportError:
- wheel = None
-
-
-logger = logging.getLogger(__name__)
-
-
-class InstallCommand(RequirementCommand):
- """
- Install packages from:
-
- - PyPI (and other indexes) using requirement specifiers.
- - VCS project urls.
- - Local project directories.
- - Local or remote source archives.
-
- pip also supports installing from "requirements files", which provide
- an easy way to specify a whole environment to be installed.
- """
- name = 'install'
-
- usage = """
- %prog [options] <requirement specifier> [package-index-options] ...
- %prog [options] -r <requirements file> [package-index-options] ...
- %prog [options] [-e] <vcs project url> ...
- %prog [options] [-e] <local project path> ...
- %prog [options] <archive url/path> ..."""
-
- summary = 'Install packages.'
-
- def __init__(self, *args, **kw):
- super(InstallCommand, self).__init__(*args, **kw)
-
- cmd_opts = self.cmd_opts
-
- cmd_opts.add_option(cmdoptions.requirements())
- cmd_opts.add_option(cmdoptions.constraints())
- cmd_opts.add_option(cmdoptions.no_deps())
- cmd_opts.add_option(cmdoptions.pre())
-
- cmd_opts.add_option(cmdoptions.editable())
- cmd_opts.add_option(
- '-t', '--target',
- dest='target_dir',
- metavar='dir',
- default=None,
- help='Install packages into <dir>. '
- 'By default this will not replace existing files/folders in '
- '<dir>. Use --upgrade to replace existing packages in <dir> '
- 'with new versions.'
- )
- cmd_opts.add_option(
- '--user',
- dest='use_user_site',
- action='store_true',
- help="Install to the Python user install directory for your "
- "platform. Typically ~/.local/, or %APPDATA%\\Python on "
- "Windows. (See the Python documentation for site.USER_BASE "
- "for full details.)")
- cmd_opts.add_option(
- '--no-user',
- dest='use_user_site',
- action='store_false',
- help=SUPPRESS_HELP)
- cmd_opts.add_option(
- '--root',
- dest='root_path',
- metavar='dir',
- default=None,
- help="Install everything relative to this alternate root "
- "directory.")
- cmd_opts.add_option(
- '--prefix',
- dest='prefix_path',
- metavar='dir',
- default=None,
- help="Installation prefix where lib, bin and other top-level "
- "folders are placed")
-
- cmd_opts.add_option(cmdoptions.build_dir())
-
- cmd_opts.add_option(cmdoptions.src())
-
- cmd_opts.add_option(
- '-U', '--upgrade',
- dest='upgrade',
- action='store_true',
- help='Upgrade all specified packages to the newest available '
- 'version. The handling of dependencies depends on the '
- 'upgrade-strategy used.'
- )
-
- cmd_opts.add_option(
- '--upgrade-strategy',
- dest='upgrade_strategy',
- default='only-if-needed',
- choices=['only-if-needed', 'eager'],
- help='Determines how dependency upgrading should be handled '
- '[default: %default]. '
- '"eager" - dependencies are upgraded regardless of '
- 'whether the currently installed version satisfies the '
- 'requirements of the upgraded package(s). '
- '"only-if-needed" - are upgraded only when they do not '
- 'satisfy the requirements of the upgraded package(s).'
- )
-
- cmd_opts.add_option(
- '--force-reinstall',
- dest='force_reinstall',
- action='store_true',
- help='Reinstall all packages even if they are already '
- 'up-to-date.')
-
- cmd_opts.add_option(
- '-I', '--ignore-installed',
- dest='ignore_installed',
- action='store_true',
- help='Ignore the installed packages (reinstalling instead).')
-
- cmd_opts.add_option(cmdoptions.ignore_requires_python())
- cmd_opts.add_option(cmdoptions.no_build_isolation())
-
- cmd_opts.add_option(cmdoptions.install_options())
- cmd_opts.add_option(cmdoptions.global_options())
-
- cmd_opts.add_option(
- "--compile",
- action="store_true",
- dest="compile",
- default=True,
- help="Compile Python source files to bytecode",
- )
-
- cmd_opts.add_option(
- "--no-compile",
- action="store_false",
- dest="compile",
- help="Do not compile Python source files to bytecode",
- )
-
- cmd_opts.add_option(
- "--no-warn-script-location",
- action="store_false",
- dest="warn_script_location",
- default=True,
- help="Do not warn when installing scripts outside PATH",
- )
- cmd_opts.add_option(
- "--no-warn-conflicts",
- action="store_false",
- dest="warn_about_conflicts",
- default=True,
- help="Do not warn about broken dependencies",
- )
-
- cmd_opts.add_option(cmdoptions.no_binary())
- cmd_opts.add_option(cmdoptions.only_binary())
- cmd_opts.add_option(cmdoptions.no_clean())
- cmd_opts.add_option(cmdoptions.require_hashes())
- cmd_opts.add_option(cmdoptions.progress_bar())
-
- index_opts = cmdoptions.make_option_group(
- cmdoptions.index_group,
- self.parser,
- )
-
- self.parser.insert_option_group(0, index_opts)
- self.parser.insert_option_group(0, cmd_opts)
-
- def run(self, options, args):
- cmdoptions.check_install_build_global(options)
-
- upgrade_strategy = "to-satisfy-only"
- if options.upgrade:
- upgrade_strategy = options.upgrade_strategy
-
- if options.build_dir:
- options.build_dir = os.path.abspath(options.build_dir)
-
- options.src_dir = os.path.abspath(options.src_dir)
- install_options = options.install_options or []
- if options.use_user_site:
- if options.prefix_path:
- raise CommandError(
- "Can not combine '--user' and '--prefix' as they imply "
- "different installation locations"
- )
- if virtualenv_no_global():
- raise InstallationError(
- "Can not perform a '--user' install. User site-packages "
- "are not visible in this virtualenv."
- )
- install_options.append('--user')
- install_options.append('--prefix=')
-
- target_temp_dir = TempDirectory(kind="target")
- if options.target_dir:
- options.ignore_installed = True
- options.target_dir = os.path.abspath(options.target_dir)
- if (os.path.exists(options.target_dir) and not
- os.path.isdir(options.target_dir)):
- raise CommandError(
- "Target path exists but is not a directory, will not "
- "continue."
- )
-
- # Create a target directory for using with the target option
- target_temp_dir.create()
- install_options.append('--home=' + target_temp_dir.path)
-
- global_options = options.global_options or []
-
- with self._build_session(options) as session:
- finder = self._build_package_finder(options, session)
- build_delete = (not (options.no_clean or options.build_dir))
- wheel_cache = WheelCache(options.cache_dir, options.format_control)
-
- if options.cache_dir and not check_path_owner(options.cache_dir):
- logger.warning(
- "The directory '%s' or its parent directory is not owned "
- "by the current user and caching wheels has been "
- "disabled. check the permissions and owner of that "
- "directory. If executing pip with sudo, you may want "
- "sudo's -H flag.",
- options.cache_dir,
- )
- options.cache_dir = None
-
- with TempDirectory(
- options.build_dir, delete=build_delete, kind="install"
- ) as directory:
- requirement_set = RequirementSet(
- require_hashes=options.require_hashes,
- )
-
- try:
- self.populate_requirement_set(
- requirement_set, args, options, finder, session,
- self.name, wheel_cache
- )
- preparer = RequirementPreparer(
- build_dir=directory.path,
- src_dir=options.src_dir,
- download_dir=None,
- wheel_download_dir=None,
- progress_bar=options.progress_bar,
- build_isolation=options.build_isolation,
- )
-
- resolver = Resolver(
- preparer=preparer,
- finder=finder,
- session=session,
- wheel_cache=wheel_cache,
- use_user_site=options.use_user_site,
- upgrade_strategy=upgrade_strategy,
- force_reinstall=options.force_reinstall,
- ignore_dependencies=options.ignore_dependencies,
- ignore_requires_python=options.ignore_requires_python,
- ignore_installed=options.ignore_installed,
- isolated=options.isolated_mode,
- )
- resolver.resolve(requirement_set)
-
- # If caching is disabled or wheel is not installed don't
- # try to build wheels.
- if wheel and options.cache_dir:
- # build wheels before install.
- wb = WheelBuilder(
- finder, preparer, wheel_cache,
- build_options=[], global_options=[],
- )
- # Ignore the result: a failed wheel will be
- # installed from the sdist/vcs whatever.
- wb.build(
- requirement_set.requirements.values(),
- session=session, autobuilding=True
- )
-
- to_install = resolver.get_installation_order(
- requirement_set
- )
-
- # Consistency Checking of the package set we're installing.
- should_warn_about_conflicts = (
- not options.ignore_dependencies and
- options.warn_about_conflicts
- )
- if should_warn_about_conflicts:
- self._warn_about_conflicts(to_install)
-
- # Don't warn about script install locations if
- # --target has been specified
- warn_script_location = options.warn_script_location
- if options.target_dir:
- warn_script_location = False
-
- installed = install_given_reqs(
- to_install,
- install_options,
- global_options,
- root=options.root_path,
- home=target_temp_dir.path,
- prefix=options.prefix_path,
- pycompile=options.compile,
- warn_script_location=warn_script_location,
- use_user_site=options.use_user_site,
- )
-
- possible_lib_locations = get_lib_location_guesses(
- user=options.use_user_site,
- home=target_temp_dir.path,
- root=options.root_path,
- prefix=options.prefix_path,
- isolated=options.isolated_mode,
- )
- reqs = sorted(installed, key=operator.attrgetter('name'))
- items = []
- for req in reqs:
- item = req.name
- try:
- installed_version = get_installed_version(
- req.name, possible_lib_locations
- )
- if installed_version:
- item += '-' + installed_version
- except Exception:
- pass
- items.append(item)
- installed = ' '.join(items)
- if installed:
- logger.info('Successfully installed %s', installed)
- except EnvironmentError as error:
- show_traceback = (self.verbosity >= 1)
-
- message = create_env_error_message(
- error, show_traceback, options.use_user_site,
- )
- logger.error(message, exc_info=show_traceback)
-
- return ERROR
- except PreviousBuildDirError:
- options.no_clean = True
- raise
- finally:
- # Clean up
- if not options.no_clean:
- requirement_set.cleanup_files()
- wheel_cache.cleanup()
-
- if options.target_dir:
- self._handle_target_dir(
- options.target_dir, target_temp_dir, options.upgrade
- )
- return requirement_set
-
- def _handle_target_dir(self, target_dir, target_temp_dir, upgrade):
- ensure_dir(target_dir)
-
- # Checking both purelib and platlib directories for installed
- # packages to be moved to target directory
- lib_dir_list = []
-
- with target_temp_dir:
- # Checking both purelib and platlib directories for installed
- # packages to be moved to target directory
- scheme = distutils_scheme('', home=target_temp_dir.path)
- purelib_dir = scheme['purelib']
- platlib_dir = scheme['platlib']
- data_dir = scheme['data']
-
- if os.path.exists(purelib_dir):
- lib_dir_list.append(purelib_dir)
- if os.path.exists(platlib_dir) and platlib_dir != purelib_dir:
- lib_dir_list.append(platlib_dir)
- if os.path.exists(data_dir):
- lib_dir_list.append(data_dir)
-
- for lib_dir in lib_dir_list:
- for item in os.listdir(lib_dir):
- if lib_dir == data_dir:
- ddir = os.path.join(data_dir, item)
- if any(s.startswith(ddir) for s in lib_dir_list[:-1]):
- continue
- target_item_dir = os.path.join(target_dir, item)
- if os.path.exists(target_item_dir):
- if not upgrade:
- logger.warning(
- 'Target directory %s already exists. Specify '
- '--upgrade to force replacement.',
- target_item_dir
- )
- continue
- if os.path.islink(target_item_dir):
- logger.warning(
- 'Target directory %s already exists and is '
- 'a link. Pip will not automatically replace '
- 'links; please remove it if replacement is '
- 'desired.',
- target_item_dir
- )
- continue
- if os.path.isdir(target_item_dir):
- shutil.rmtree(target_item_dir)
- else:
- os.remove(target_item_dir)
-
- shutil.move(
- os.path.join(lib_dir, item),
- target_item_dir
- )
-
- def _warn_about_conflicts(self, to_install):
- package_set, _dep_info = check_install_conflicts(to_install)
- missing, conflicting = _dep_info
-
- # NOTE: There is some duplication here from pip check
- for project_name in missing:
- version = package_set[project_name][0]
- for dependency in missing[project_name]:
- logger.critical(
- "%s %s requires %s, which is not installed.",
- project_name, version, dependency[1],
- )
-
- for project_name in conflicting:
- version = package_set[project_name][0]
- for dep_name, dep_version, req in conflicting[project_name]:
- logger.critical(
- "%s %s has requirement %s, but you'll have %s %s which is "
- "incompatible.",
- project_name, version, req, dep_name, dep_version,
- )
-
-
-def get_lib_location_guesses(*args, **kwargs):
- scheme = distutils_scheme('', *args, **kwargs)
- return [scheme['purelib'], scheme['platlib']]
-
-
-def create_env_error_message(error, show_traceback, using_user_site):
- """Format an error message for an EnvironmentError
-
- It may occur anytime during the execution of the install command.
- """
- parts = []
-
- # Mention the error if we are not going to show a traceback
- parts.append("Could not install packages due to an EnvironmentError")
- if not show_traceback:
- parts.append(": ")
- parts.append(str(error))
- else:
- parts.append(".")
-
- # Split the error indication from a helper message (if any)
- parts[-1] += "\n"
-
- # Suggest useful actions to the user:
- # (1) using user site-packages or (2) verifying the permissions
- if error.errno == errno.EACCES:
- user_option_part = "Consider using the `--user` option"
- permissions_part = "Check the permissions"
-
- if not using_user_site:
- parts.extend([
- user_option_part, " or ",
- permissions_part.lower(),
- ])
- else:
- parts.append(permissions_part)
- parts.append(".\n")
-
- return "".join(parts).strip() + "\n"
+from __future__ import absolute_import
+
+import errno
+import logging
+import operator
+import os
+import shutil
+from optparse import SUPPRESS_HELP
+
+from pip._internal import cmdoptions
+from pip._internal.basecommand import RequirementCommand
+from pip._internal.cache import WheelCache
+from pip._internal.exceptions import (
+ CommandError, InstallationError, PreviousBuildDirError,
+)
+from pip._internal.locations import distutils_scheme, virtualenv_no_global
+from pip._internal.operations.check import check_install_conflicts
+from pip._internal.operations.prepare import RequirementPreparer
+from pip._internal.req import RequirementSet, install_given_reqs
+from pip._internal.resolve import Resolver
+from pip._internal.status_codes import ERROR
+from pip._internal.utils.filesystem import check_path_owner
+from pip._internal.utils.misc import ensure_dir, get_installed_version
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.wheel import WheelBuilder
+
+try:
+ import wheel
+except ImportError:
+ wheel = None
+
+
+logger = logging.getLogger(__name__)
+
+
+class InstallCommand(RequirementCommand):
+ """
+ Install packages from:
+
+ - PyPI (and other indexes) using requirement specifiers.
+ - VCS project urls.
+ - Local project directories.
+ - Local or remote source archives.
+
+ pip also supports installing from "requirements files", which provide
+ an easy way to specify a whole environment to be installed.
+ """
+ name = 'install'
+
+ usage = """
+ %prog [options] <requirement specifier> [package-index-options] ...
+ %prog [options] -r <requirements file> [package-index-options] ...
+ %prog [options] [-e] <vcs project url> ...
+ %prog [options] [-e] <local project path> ...
+ %prog [options] <archive url/path> ..."""
+
+ summary = 'Install packages.'
+
+ def __init__(self, *args, **kw):
+ super(InstallCommand, self).__init__(*args, **kw)
+
+ cmd_opts = self.cmd_opts
+
+ cmd_opts.add_option(cmdoptions.requirements())
+ cmd_opts.add_option(cmdoptions.constraints())
+ cmd_opts.add_option(cmdoptions.no_deps())
+ cmd_opts.add_option(cmdoptions.pre())
+
+ cmd_opts.add_option(cmdoptions.editable())
+ cmd_opts.add_option(
+ '-t', '--target',
+ dest='target_dir',
+ metavar='dir',
+ default=None,
+ help='Install packages into <dir>. '
+ 'By default this will not replace existing files/folders in '
+ '<dir>. Use --upgrade to replace existing packages in <dir> '
+ 'with new versions.'
+ )
+ cmd_opts.add_option(
+ '--user',
+ dest='use_user_site',
+ action='store_true',
+ help="Install to the Python user install directory for your "
+ "platform. Typically ~/.local/, or %APPDATA%\\Python on "
+ "Windows. (See the Python documentation for site.USER_BASE "
+ "for full details.)")
+ cmd_opts.add_option(
+ '--no-user',
+ dest='use_user_site',
+ action='store_false',
+ help=SUPPRESS_HELP)
+ cmd_opts.add_option(
+ '--root',
+ dest='root_path',
+ metavar='dir',
+ default=None,
+ help="Install everything relative to this alternate root "
+ "directory.")
+ cmd_opts.add_option(
+ '--prefix',
+ dest='prefix_path',
+ metavar='dir',
+ default=None,
+ help="Installation prefix where lib, bin and other top-level "
+ "folders are placed")
+
+ cmd_opts.add_option(cmdoptions.build_dir())
+
+ cmd_opts.add_option(cmdoptions.src())
+
+ cmd_opts.add_option(
+ '-U', '--upgrade',
+ dest='upgrade',
+ action='store_true',
+ help='Upgrade all specified packages to the newest available '
+ 'version. The handling of dependencies depends on the '
+ 'upgrade-strategy used.'
+ )
+
+ cmd_opts.add_option(
+ '--upgrade-strategy',
+ dest='upgrade_strategy',
+ default='only-if-needed',
+ choices=['only-if-needed', 'eager'],
+ help='Determines how dependency upgrading should be handled '
+ '[default: %default]. '
+ '"eager" - dependencies are upgraded regardless of '
+ 'whether the currently installed version satisfies the '
+ 'requirements of the upgraded package(s). '
+ '"only-if-needed" - are upgraded only when they do not '
+ 'satisfy the requirements of the upgraded package(s).'
+ )
+
+ cmd_opts.add_option(
+ '--force-reinstall',
+ dest='force_reinstall',
+ action='store_true',
+ help='Reinstall all packages even if they are already '
+ 'up-to-date.')
+
+ cmd_opts.add_option(
+ '-I', '--ignore-installed',
+ dest='ignore_installed',
+ action='store_true',
+ help='Ignore the installed packages (reinstalling instead).')
+
+ cmd_opts.add_option(cmdoptions.ignore_requires_python())
+ cmd_opts.add_option(cmdoptions.no_build_isolation())
+
+ cmd_opts.add_option(cmdoptions.install_options())
+ cmd_opts.add_option(cmdoptions.global_options())
+
+ cmd_opts.add_option(
+ "--compile",
+ action="store_true",
+ dest="compile",
+ default=True,
+ help="Compile Python source files to bytecode",
+ )
+
+ cmd_opts.add_option(
+ "--no-compile",
+ action="store_false",
+ dest="compile",
+ help="Do not compile Python source files to bytecode",
+ )
+
+ cmd_opts.add_option(
+ "--no-warn-script-location",
+ action="store_false",
+ dest="warn_script_location",
+ default=True,
+ help="Do not warn when installing scripts outside PATH",
+ )
+ cmd_opts.add_option(
+ "--no-warn-conflicts",
+ action="store_false",
+ dest="warn_about_conflicts",
+ default=True,
+ help="Do not warn about broken dependencies",
+ )
+
+ cmd_opts.add_option(cmdoptions.no_binary())
+ cmd_opts.add_option(cmdoptions.only_binary())
+ cmd_opts.add_option(cmdoptions.no_clean())
+ cmd_opts.add_option(cmdoptions.require_hashes())
+ cmd_opts.add_option(cmdoptions.progress_bar())
+
+ index_opts = cmdoptions.make_option_group(
+ cmdoptions.index_group,
+ self.parser,
+ )
+
+ self.parser.insert_option_group(0, index_opts)
+ self.parser.insert_option_group(0, cmd_opts)
+
+ def run(self, options, args):
+ cmdoptions.check_install_build_global(options)
+
+ upgrade_strategy = "to-satisfy-only"
+ if options.upgrade:
+ upgrade_strategy = options.upgrade_strategy
+
+ if options.build_dir:
+ options.build_dir = os.path.abspath(options.build_dir)
+
+ options.src_dir = os.path.abspath(options.src_dir)
+ install_options = options.install_options or []
+ if options.use_user_site:
+ if options.prefix_path:
+ raise CommandError(
+ "Can not combine '--user' and '--prefix' as they imply "
+ "different installation locations"
+ )
+ if virtualenv_no_global():
+ raise InstallationError(
+ "Can not perform a '--user' install. User site-packages "
+ "are not visible in this virtualenv."
+ )
+ install_options.append('--user')
+ install_options.append('--prefix=')
+
+ target_temp_dir = TempDirectory(kind="target")
+ if options.target_dir:
+ options.ignore_installed = True
+ options.target_dir = os.path.abspath(options.target_dir)
+ if (os.path.exists(options.target_dir) and not
+ os.path.isdir(options.target_dir)):
+ raise CommandError(
+ "Target path exists but is not a directory, will not "
+ "continue."
+ )
+
+ # Create a target directory for use with the target option
+ target_temp_dir.create()
+ install_options.append('--home=' + target_temp_dir.path)
+
+ global_options = options.global_options or []
+
+ with self._build_session(options) as session:
+ finder = self._build_package_finder(options, session)
+ build_delete = (not (options.no_clean or options.build_dir))
+ wheel_cache = WheelCache(options.cache_dir, options.format_control)
+
+ if options.cache_dir and not check_path_owner(options.cache_dir):
+ logger.warning(
+ "The directory '%s' or its parent directory is not owned "
+ "by the current user and caching wheels has been "
+ "disabled. check the permissions and owner of that "
+ "directory. If executing pip with sudo, you may want "
+ "sudo's -H flag.",
+ options.cache_dir,
+ )
+ options.cache_dir = None
+
+ with TempDirectory(
+ options.build_dir, delete=build_delete, kind="install"
+ ) as directory:
+ requirement_set = RequirementSet(
+ require_hashes=options.require_hashes,
+ )
+
+ try:
+ self.populate_requirement_set(
+ requirement_set, args, options, finder, session,
+ self.name, wheel_cache
+ )
+ preparer = RequirementPreparer(
+ build_dir=directory.path,
+ src_dir=options.src_dir,
+ download_dir=None,
+ wheel_download_dir=None,
+ progress_bar=options.progress_bar,
+ build_isolation=options.build_isolation,
+ )
+
+ resolver = Resolver(
+ preparer=preparer,
+ finder=finder,
+ session=session,
+ wheel_cache=wheel_cache,
+ use_user_site=options.use_user_site,
+ upgrade_strategy=upgrade_strategy,
+ force_reinstall=options.force_reinstall,
+ ignore_dependencies=options.ignore_dependencies,
+ ignore_requires_python=options.ignore_requires_python,
+ ignore_installed=options.ignore_installed,
+ isolated=options.isolated_mode,
+ )
+ resolver.resolve(requirement_set)
+
+ # If caching is disabled or wheel is not installed don't
+ # try to build wheels.
+ if wheel and options.cache_dir:
+ # build wheels before install.
+ wb = WheelBuilder(
+ finder, preparer, wheel_cache,
+ build_options=[], global_options=[],
+ )
+ # Ignore the result: a failed wheel will be
+ # installed from the sdist/vcs source instead.
+ wb.build(
+ requirement_set.requirements.values(),
+ session=session, autobuilding=True
+ )
+
+ to_install = resolver.get_installation_order(
+ requirement_set
+ )
+
+ # Consistency checking of the package set we're installing.
+ should_warn_about_conflicts = (
+ not options.ignore_dependencies and
+ options.warn_about_conflicts
+ )
+ if should_warn_about_conflicts:
+ self._warn_about_conflicts(to_install)
+
+ # Don't warn about script install locations if
+ # --target has been specified
+ warn_script_location = options.warn_script_location
+ if options.target_dir:
+ warn_script_location = False
+
+ installed = install_given_reqs(
+ to_install,
+ install_options,
+ global_options,
+ root=options.root_path,
+ home=target_temp_dir.path,
+ prefix=options.prefix_path,
+ pycompile=options.compile,
+ warn_script_location=warn_script_location,
+ use_user_site=options.use_user_site,
+ )
+
+ possible_lib_locations = get_lib_location_guesses(
+ user=options.use_user_site,
+ home=target_temp_dir.path,
+ root=options.root_path,
+ prefix=options.prefix_path,
+ isolated=options.isolated_mode,
+ )
+ reqs = sorted(installed, key=operator.attrgetter('name'))
+ items = []
+ for req in reqs:
+ item = req.name
+ try:
+ installed_version = get_installed_version(
+ req.name, possible_lib_locations
+ )
+ if installed_version:
+ item += '-' + installed_version
+ except Exception:
+ pass
+ items.append(item)
+ installed = ' '.join(items)
+ if installed:
+ logger.info('Successfully installed %s', installed)
+ except EnvironmentError as error:
+ show_traceback = (self.verbosity >= 1)
+
+ message = create_env_error_message(
+ error, show_traceback, options.use_user_site,
+ )
+ logger.error(message, exc_info=show_traceback)
+
+ return ERROR
+ except PreviousBuildDirError:
+ options.no_clean = True
+ raise
+ finally:
+ # Clean up
+ if not options.no_clean:
+ requirement_set.cleanup_files()
+ wheel_cache.cleanup()
+
+ if options.target_dir:
+ self._handle_target_dir(
+ options.target_dir, target_temp_dir, options.upgrade
+ )
+ return requirement_set
+
+ def _handle_target_dir(self, target_dir, target_temp_dir, upgrade):
+ ensure_dir(target_dir)
+
+ lib_dir_list = []
+
+ with target_temp_dir:
+ # Checking both purelib and platlib directories for installed
+ # packages to be moved to target directory
+ scheme = distutils_scheme('', home=target_temp_dir.path)
+ purelib_dir = scheme['purelib']
+ platlib_dir = scheme['platlib']
+ data_dir = scheme['data']
+
+ if os.path.exists(purelib_dir):
+ lib_dir_list.append(purelib_dir)
+ if os.path.exists(platlib_dir) and platlib_dir != purelib_dir:
+ lib_dir_list.append(platlib_dir)
+ if os.path.exists(data_dir):
+ lib_dir_list.append(data_dir)
+
+ for lib_dir in lib_dir_list:
+ for item in os.listdir(lib_dir):
+ if lib_dir == data_dir:
+ ddir = os.path.join(data_dir, item)
+ if any(s.startswith(ddir) for s in lib_dir_list[:-1]):
+ continue
+ target_item_dir = os.path.join(target_dir, item)
+ if os.path.exists(target_item_dir):
+ if not upgrade:
+ logger.warning(
+ 'Target directory %s already exists. Specify '
+ '--upgrade to force replacement.',
+ target_item_dir
+ )
+ continue
+ if os.path.islink(target_item_dir):
+ logger.warning(
+ 'Target directory %s already exists and is '
+ 'a link. Pip will not automatically replace '
+ 'links; please remove it if replacement is '
+ 'desired.',
+ target_item_dir
+ )
+ continue
+ if os.path.isdir(target_item_dir):
+ shutil.rmtree(target_item_dir)
+ else:
+ os.remove(target_item_dir)
+
+ shutil.move(
+ os.path.join(lib_dir, item),
+ target_item_dir
+ )
+
+ def _warn_about_conflicts(self, to_install):
+ package_set, _dep_info = check_install_conflicts(to_install)
+ missing, conflicting = _dep_info
+
+ # NOTE: There is some duplication here from pip check
+ for project_name in missing:
+ version = package_set[project_name][0]
+ for dependency in missing[project_name]:
+ logger.critical(
+ "%s %s requires %s, which is not installed.",
+ project_name, version, dependency[1],
+ )
+
+ for project_name in conflicting:
+ version = package_set[project_name][0]
+ for dep_name, dep_version, req in conflicting[project_name]:
+ logger.critical(
+ "%s %s has requirement %s, but you'll have %s %s which is "
+ "incompatible.",
+ project_name, version, req, dep_name, dep_version,
+ )
+
+
+def get_lib_location_guesses(*args, **kwargs):
+ scheme = distutils_scheme('', *args, **kwargs)
+ return [scheme['purelib'], scheme['platlib']]
+
+
+def create_env_error_message(error, show_traceback, using_user_site):
+ """Format an error message for an EnvironmentError
+
+ It may occur anytime during the execution of the install command.
+ """
+ parts = []
+
+ # Mention the error if we are not going to show a traceback
+ parts.append("Could not install packages due to an EnvironmentError")
+ if not show_traceback:
+ parts.append(": ")
+ parts.append(str(error))
+ else:
+ parts.append(".")
+
+ # Split the error indication from a helper message (if any)
+ parts[-1] += "\n"
+
+ # Suggest useful actions to the user:
+ # (1) using user site-packages or (2) verifying the permissions
+ if error.errno == errno.EACCES:
+ user_option_part = "Consider using the `--user` option"
+ permissions_part = "Check the permissions"
+
+ if not using_user_site:
+ parts.extend([
+ user_option_part, " or ",
+ permissions_part.lower(),
+ ])
+ else:
+ parts.append(permissions_part)
+ parts.append(".\n")
+
+ return "".join(parts).strip() + "\n"
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/list.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/list.py
index 09f633f..1b46c6f 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/list.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/list.py
@@ -1,343 +1,343 @@
-from __future__ import absolute_import
-
-import json
-import logging
-import warnings
-
-from pip._vendor import six
-from pip._vendor.six.moves import zip_longest
-
-from pip._internal.basecommand import Command
-from pip._internal.cmdoptions import index_group, make_option_group
-from pip._internal.exceptions import CommandError
-from pip._internal.index import PackageFinder
-from pip._internal.utils.deprecation import RemovedInPip11Warning
-from pip._internal.utils.misc import (
- dist_is_editable, get_installed_distributions,
-)
-from pip._internal.utils.packaging import get_installer
-
-logger = logging.getLogger(__name__)
-
-
-class ListCommand(Command):
- """
- List installed packages, including editables.
-
- Packages are listed in a case-insensitive sorted order.
- """
- name = 'list'
- usage = """
- %prog [options]"""
- summary = 'List installed packages.'
-
- def __init__(self, *args, **kw):
- super(ListCommand, self).__init__(*args, **kw)
-
- cmd_opts = self.cmd_opts
-
- cmd_opts.add_option(
- '-o', '--outdated',
- action='store_true',
- default=False,
- help='List outdated packages')
- cmd_opts.add_option(
- '-u', '--uptodate',
- action='store_true',
- default=False,
- help='List uptodate packages')
- cmd_opts.add_option(
- '-e', '--editable',
- action='store_true',
- default=False,
- help='List editable projects.')
- cmd_opts.add_option(
- '-l', '--local',
- action='store_true',
- default=False,
- help=('If in a virtualenv that has global access, do not list '
- 'globally-installed packages.'),
- )
- self.cmd_opts.add_option(
- '--user',
- dest='user',
- action='store_true',
- default=False,
- help='Only output packages installed in user-site.')
-
- cmd_opts.add_option(
- '--pre',
- action='store_true',
- default=False,
- help=("Include pre-release and development versions. By default, "
- "pip only finds stable versions."),
- )
-
- cmd_opts.add_option(
- '--format',
- action='store',
- dest='list_format',
- default="columns",
- choices=('legacy', 'columns', 'freeze', 'json'),
- help="Select the output format among: columns (default), freeze, "
- "json, or legacy.",
- )
-
- cmd_opts.add_option(
- '--not-required',
- action='store_true',
- dest='not_required',
- help="List packages that are not dependencies of "
- "installed packages.",
- )
-
- cmd_opts.add_option(
- '--exclude-editable',
- action='store_false',
- dest='include_editable',
- help='Exclude editable packages from output.',
- )
- cmd_opts.add_option(
- '--include-editable',
- action='store_true',
- dest='include_editable',
- help='Include editable packages in output.',
- default=True,
- )
- index_opts = make_option_group(index_group, self.parser)
-
- self.parser.insert_option_group(0, index_opts)
- self.parser.insert_option_group(0, cmd_opts)
-
- def _build_package_finder(self, options, index_urls, session):
- """
- Create a package finder appropriate to this list command.
- """
- return PackageFinder(
- find_links=options.find_links,
- index_urls=index_urls,
- allow_all_prereleases=options.pre,
- trusted_hosts=options.trusted_hosts,
- process_dependency_links=options.process_dependency_links,
- session=session,
- )
-
- def run(self, options, args):
- if options.list_format == "legacy":
- warnings.warn(
- "The legacy format has been deprecated and will be removed "
- "in the future.",
- RemovedInPip11Warning,
- )
-
- if options.outdated and options.uptodate:
- raise CommandError(
- "Options --outdated and --uptodate cannot be combined.")
-
- packages = get_installed_distributions(
- local_only=options.local,
- user_only=options.user,
- editables_only=options.editable,
- include_editables=options.include_editable,
- )
-
- if options.outdated:
- packages = self.get_outdated(packages, options)
- elif options.uptodate:
- packages = self.get_uptodate(packages, options)
-
- if options.not_required:
- packages = self.get_not_required(packages, options)
-
- self.output_package_listing(packages, options)
-
- def get_outdated(self, packages, options):
- return [
- dist for dist in self.iter_packages_latest_infos(packages, options)
- if dist.latest_version > dist.parsed_version
- ]
-
- def get_uptodate(self, packages, options):
- return [
- dist for dist in self.iter_packages_latest_infos(packages, options)
- if dist.latest_version == dist.parsed_version
- ]
-
- def get_not_required(self, packages, options):
- dep_keys = set()
- for dist in packages:
- dep_keys.update(requirement.key for requirement in dist.requires())
- return {pkg for pkg in packages if pkg.key not in dep_keys}
-
- def iter_packages_latest_infos(self, packages, options):
- index_urls = [options.index_url] + options.extra_index_urls
- if options.no_index:
- logger.debug('Ignoring indexes: %s', ','.join(index_urls))
- index_urls = []
-
- dependency_links = []
- for dist in packages:
- if dist.has_metadata('dependency_links.txt'):
- dependency_links.extend(
- dist.get_metadata_lines('dependency_links.txt'),
- )
-
- with self._build_session(options) as session:
- finder = self._build_package_finder(options, index_urls, session)
- finder.add_dependency_links(dependency_links)
-
- for dist in packages:
- typ = 'unknown'
- all_candidates = finder.find_all_candidates(dist.key)
- if not options.pre:
- # Remove prereleases
- all_candidates = [candidate for candidate in all_candidates
- if not candidate.version.is_prerelease]
-
- if not all_candidates:
- continue
- best_candidate = max(all_candidates,
- key=finder._candidate_sort_key)
- remote_version = best_candidate.version
- if best_candidate.location.is_wheel:
- typ = 'wheel'
- else:
- typ = 'sdist'
- # This is dirty but makes the rest of the code much cleaner
- dist.latest_version = remote_version
- dist.latest_filetype = typ
- yield dist
-
- def output_legacy(self, dist, options):
- if options.verbose >= 1:
- return '%s (%s, %s, %s)' % (
- dist.project_name,
- dist.version,
- dist.location,
- get_installer(dist),
- )
- elif dist_is_editable(dist):
- return '%s (%s, %s)' % (
- dist.project_name,
- dist.version,
- dist.location,
- )
- else:
- return '%s (%s)' % (dist.project_name, dist.version)
-
- def output_legacy_latest(self, dist, options):
- return '%s - Latest: %s [%s]' % (
- self.output_legacy(dist, options),
- dist.latest_version,
- dist.latest_filetype,
- )
-
- def output_package_listing(self, packages, options):
- packages = sorted(
- packages,
- key=lambda dist: dist.project_name.lower(),
- )
- if options.list_format == 'columns' and packages:
- data, header = format_for_columns(packages, options)
- self.output_package_listing_columns(data, header)
- elif options.list_format == 'freeze':
- for dist in packages:
- if options.verbose >= 1:
- logger.info("%s==%s (%s)", dist.project_name,
- dist.version, dist.location)
- else:
- logger.info("%s==%s", dist.project_name, dist.version)
- elif options.list_format == 'json':
- logger.info(format_for_json(packages, options))
- elif options.list_format == "legacy":
- for dist in packages:
- if options.outdated:
- logger.info(self.output_legacy_latest(dist, options))
- else:
- logger.info(self.output_legacy(dist, options))
-
- def output_package_listing_columns(self, data, header):
- # insert the header first: we need to know the size of column names
- if len(data) > 0:
- data.insert(0, header)
-
- pkg_strings, sizes = tabulate(data)
-
- # Create and add a separator.
- if len(data) > 0:
- pkg_strings.insert(1, " ".join(map(lambda x: '-' * x, sizes)))
-
- for val in pkg_strings:
- logger.info(val)
-
-
-def tabulate(vals):
- # From pfmoore on GitHub:
- # https://github.com/pypa/pip/issues/3651#issuecomment-216932564
- assert len(vals) > 0
-
- sizes = [0] * max(len(x) for x in vals)
- for row in vals:
- sizes = [max(s, len(str(c))) for s, c in zip_longest(sizes, row)]
-
- result = []
- for row in vals:
- display = " ".join([str(c).ljust(s) if c is not None else ''
- for s, c in zip_longest(sizes, row)])
- result.append(display)
-
- return result, sizes
-
-
-def format_for_columns(pkgs, options):
- """
- Convert the package data into something usable
- by output_package_listing_columns.
- """
- running_outdated = options.outdated
- # Adjust the header for the `pip list --outdated` case.
- if running_outdated:
- header = ["Package", "Version", "Latest", "Type"]
- else:
- header = ["Package", "Version"]
-
- data = []
- if options.verbose >= 1 or any(dist_is_editable(x) for x in pkgs):
- header.append("Location")
- if options.verbose >= 1:
- header.append("Installer")
-
- for proj in pkgs:
- # if we're working on the 'outdated' list, separate out the
- # latest_version and type
- row = [proj.project_name, proj.version]
-
- if running_outdated:
- row.append(proj.latest_version)
- row.append(proj.latest_filetype)
-
- if options.verbose >= 1 or dist_is_editable(proj):
- row.append(proj.location)
- if options.verbose >= 1:
- row.append(get_installer(proj))
-
- data.append(row)
-
- return data, header
-
-
-def format_for_json(packages, options):
- data = []
- for dist in packages:
- info = {
- 'name': dist.project_name,
- 'version': six.text_type(dist.version),
- }
- if options.verbose >= 1:
- info['location'] = dist.location
- info['installer'] = get_installer(dist)
- if options.outdated:
- info['latest_version'] = six.text_type(dist.latest_version)
- info['latest_filetype'] = dist.latest_filetype
- data.append(info)
- return json.dumps(data)
+from __future__ import absolute_import
+
+import json
+import logging
+import warnings
+
+from pip._vendor import six
+from pip._vendor.six.moves import zip_longest
+
+from pip._internal.basecommand import Command
+from pip._internal.cmdoptions import index_group, make_option_group
+from pip._internal.exceptions import CommandError
+from pip._internal.index import PackageFinder
+from pip._internal.utils.deprecation import RemovedInPip11Warning
+from pip._internal.utils.misc import (
+ dist_is_editable, get_installed_distributions,
+)
+from pip._internal.utils.packaging import get_installer
+
+logger = logging.getLogger(__name__)
+
+
+class ListCommand(Command):
+ """
+ List installed packages, including editables.
+
+ Packages are listed in a case-insensitive sorted order.
+ """
+ name = 'list'
+ usage = """
+ %prog [options]"""
+ summary = 'List installed packages.'
+
+ def __init__(self, *args, **kw):
+ super(ListCommand, self).__init__(*args, **kw)
+
+ cmd_opts = self.cmd_opts
+
+ cmd_opts.add_option(
+ '-o', '--outdated',
+ action='store_true',
+ default=False,
+ help='List outdated packages')
+ cmd_opts.add_option(
+ '-u', '--uptodate',
+ action='store_true',
+ default=False,
+ help='List uptodate packages')
+ cmd_opts.add_option(
+ '-e', '--editable',
+ action='store_true',
+ default=False,
+ help='List editable projects.')
+ cmd_opts.add_option(
+ '-l', '--local',
+ action='store_true',
+ default=False,
+ help=('If in a virtualenv that has global access, do not list '
+ 'globally-installed packages.'),
+ )
+ self.cmd_opts.add_option(
+ '--user',
+ dest='user',
+ action='store_true',
+ default=False,
+ help='Only output packages installed in user-site.')
+
+ cmd_opts.add_option(
+ '--pre',
+ action='store_true',
+ default=False,
+ help=("Include pre-release and development versions. By default, "
+ "pip only finds stable versions."),
+ )
+
+ cmd_opts.add_option(
+ '--format',
+ action='store',
+ dest='list_format',
+ default="columns",
+ choices=('legacy', 'columns', 'freeze', 'json'),
+ help="Select the output format among: columns (default), freeze, "
+ "json, or legacy.",
+ )
+
+ cmd_opts.add_option(
+ '--not-required',
+ action='store_true',
+ dest='not_required',
+ help="List packages that are not dependencies of "
+ "installed packages.",
+ )
+
+ cmd_opts.add_option(
+ '--exclude-editable',
+ action='store_false',
+ dest='include_editable',
+ help='Exclude editable packages from output.',
+ )
+ cmd_opts.add_option(
+ '--include-editable',
+ action='store_true',
+ dest='include_editable',
+ help='Include editable packages in output.',
+ default=True,
+ )
+ index_opts = make_option_group(index_group, self.parser)
+
+ self.parser.insert_option_group(0, index_opts)
+ self.parser.insert_option_group(0, cmd_opts)
+
+ def _build_package_finder(self, options, index_urls, session):
+ """
+ Create a package finder appropriate to this list command.
+ """
+ return PackageFinder(
+ find_links=options.find_links,
+ index_urls=index_urls,
+ allow_all_prereleases=options.pre,
+ trusted_hosts=options.trusted_hosts,
+ process_dependency_links=options.process_dependency_links,
+ session=session,
+ )
+
+ def run(self, options, args):
+ if options.list_format == "legacy":
+ warnings.warn(
+ "The legacy format has been deprecated and will be removed "
+ "in the future.",
+ RemovedInPip11Warning,
+ )
+
+ if options.outdated and options.uptodate:
+ raise CommandError(
+ "Options --outdated and --uptodate cannot be combined.")
+
+ packages = get_installed_distributions(
+ local_only=options.local,
+ user_only=options.user,
+ editables_only=options.editable,
+ include_editables=options.include_editable,
+ )
+
+ if options.outdated:
+ packages = self.get_outdated(packages, options)
+ elif options.uptodate:
+ packages = self.get_uptodate(packages, options)
+
+ if options.not_required:
+ packages = self.get_not_required(packages, options)
+
+ self.output_package_listing(packages, options)
+
+ def get_outdated(self, packages, options):
+ return [
+ dist for dist in self.iter_packages_latest_infos(packages, options)
+ if dist.latest_version > dist.parsed_version
+ ]
+
+ def get_uptodate(self, packages, options):
+ return [
+ dist for dist in self.iter_packages_latest_infos(packages, options)
+ if dist.latest_version == dist.parsed_version
+ ]
+
+ def get_not_required(self, packages, options):
+ dep_keys = set()
+ for dist in packages:
+ dep_keys.update(requirement.key for requirement in dist.requires())
+ return {pkg for pkg in packages if pkg.key not in dep_keys}
+
+ def iter_packages_latest_infos(self, packages, options):
+ index_urls = [options.index_url] + options.extra_index_urls
+ if options.no_index:
+ logger.debug('Ignoring indexes: %s', ','.join(index_urls))
+ index_urls = []
+
+ dependency_links = []
+ for dist in packages:
+ if dist.has_metadata('dependency_links.txt'):
+ dependency_links.extend(
+ dist.get_metadata_lines('dependency_links.txt'),
+ )
+
+ with self._build_session(options) as session:
+ finder = self._build_package_finder(options, index_urls, session)
+ finder.add_dependency_links(dependency_links)
+
+ for dist in packages:
+ typ = 'unknown'
+ all_candidates = finder.find_all_candidates(dist.key)
+ if not options.pre:
+ # Remove prereleases
+ all_candidates = [candidate for candidate in all_candidates
+ if not candidate.version.is_prerelease]
+
+ if not all_candidates:
+ continue
+ best_candidate = max(all_candidates,
+ key=finder._candidate_sort_key)
+ remote_version = best_candidate.version
+ if best_candidate.location.is_wheel:
+ typ = 'wheel'
+ else:
+ typ = 'sdist'
+ # This is dirty but makes the rest of the code much cleaner
+ dist.latest_version = remote_version
+ dist.latest_filetype = typ
+ yield dist
+
+ def output_legacy(self, dist, options):
+ if options.verbose >= 1:
+ return '%s (%s, %s, %s)' % (
+ dist.project_name,
+ dist.version,
+ dist.location,
+ get_installer(dist),
+ )
+ elif dist_is_editable(dist):
+ return '%s (%s, %s)' % (
+ dist.project_name,
+ dist.version,
+ dist.location,
+ )
+ else:
+ return '%s (%s)' % (dist.project_name, dist.version)
+
+ def output_legacy_latest(self, dist, options):
+ return '%s - Latest: %s [%s]' % (
+ self.output_legacy(dist, options),
+ dist.latest_version,
+ dist.latest_filetype,
+ )
+
+ def output_package_listing(self, packages, options):
+ packages = sorted(
+ packages,
+ key=lambda dist: dist.project_name.lower(),
+ )
+ if options.list_format == 'columns' and packages:
+ data, header = format_for_columns(packages, options)
+ self.output_package_listing_columns(data, header)
+ elif options.list_format == 'freeze':
+ for dist in packages:
+ if options.verbose >= 1:
+ logger.info("%s==%s (%s)", dist.project_name,
+ dist.version, dist.location)
+ else:
+ logger.info("%s==%s", dist.project_name, dist.version)
+ elif options.list_format == 'json':
+ logger.info(format_for_json(packages, options))
+ elif options.list_format == "legacy":
+ for dist in packages:
+ if options.outdated:
+ logger.info(self.output_legacy_latest(dist, options))
+ else:
+ logger.info(self.output_legacy(dist, options))
+
+ def output_package_listing_columns(self, data, header):
+ # insert the header first: we need to know the size of column names
+ if len(data) > 0:
+ data.insert(0, header)
+
+ pkg_strings, sizes = tabulate(data)
+
+ # Create and add a separator.
+ if len(data) > 0:
+ pkg_strings.insert(1, " ".join(map(lambda x: '-' * x, sizes)))
+
+ for val in pkg_strings:
+ logger.info(val)
+
+
+def tabulate(vals):
+ # From pfmoore on GitHub:
+ # https://github.com/pypa/pip/issues/3651#issuecomment-216932564
+ assert len(vals) > 0
+
+ sizes = [0] * max(len(x) for x in vals)
+ for row in vals:
+ sizes = [max(s, len(str(c))) for s, c in zip_longest(sizes, row)]
+
+ result = []
+ for row in vals:
+ display = " ".join([str(c).ljust(s) if c is not None else ''
+ for s, c in zip_longest(sizes, row)])
+ result.append(display)
+
+ return result, sizes
+
+
+def format_for_columns(pkgs, options):
+ """
+ Convert the package data into something usable
+ by output_package_listing_columns.
+ """
+ running_outdated = options.outdated
+ # Adjust the header for the `pip list --outdated` case.
+ if running_outdated:
+ header = ["Package", "Version", "Latest", "Type"]
+ else:
+ header = ["Package", "Version"]
+
+ data = []
+ if options.verbose >= 1 or any(dist_is_editable(x) for x in pkgs):
+ header.append("Location")
+ if options.verbose >= 1:
+ header.append("Installer")
+
+ for proj in pkgs:
+ # if we're working on the 'outdated' list, separate out the
+ # latest_version and type
+ row = [proj.project_name, proj.version]
+
+ if running_outdated:
+ row.append(proj.latest_version)
+ row.append(proj.latest_filetype)
+
+ if options.verbose >= 1 or dist_is_editable(proj):
+ row.append(proj.location)
+ if options.verbose >= 1:
+ row.append(get_installer(proj))
+
+ data.append(row)
+
+ return data, header
+
+
+def format_for_json(packages, options):
+ data = []
+ for dist in packages:
+ info = {
+ 'name': dist.project_name,
+ 'version': six.text_type(dist.version),
+ }
+ if options.verbose >= 1:
+ info['location'] = dist.location
+ info['installer'] = get_installer(dist)
+ if options.outdated:
+ info['latest_version'] = six.text_type(dist.latest_version)
+ info['latest_filetype'] = dist.latest_filetype
+ data.append(info)
+ return json.dumps(data)
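
For reference, a self-contained restatement (outside the vendored module) of the tabulate helper above, showing how zip_longest pads short rows while column widths are computed; the sample rows are made up:

    from itertools import zip_longest  # stdlib twin of six.moves.zip_longest

    def tabulate(vals):
        # Each column is as wide as the longest cell seen at that index.
        sizes = [0] * max(len(x) for x in vals)
        for row in vals:
            sizes = [max(s, len(str(c))) for s, c in zip_longest(sizes, row)]
        result = []
        for row in vals:
            result.append(" ".join(str(c).ljust(s) if c is not None else ''
                                   for s, c in zip_longest(sizes, row)))
        return result, sizes

    rows, sizes = tabulate([["Package", "Version"], ["requests", "2.18.4"]])
    print("\n".join(rows))
    # Package  Version
    # requests 2.18.4
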
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/search.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/search.py
index 3abdf59..83895ce 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/search.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/search.py
@@ -1,135 +1,135 @@
-from __future__ import absolute_import
-
-import logging
-import sys
-import textwrap
-from collections import OrderedDict
-
-from pip._vendor import pkg_resources
-from pip._vendor.packaging.version import parse as parse_version
-# NOTE: XMLRPC Client is not annotated in typeshed as of 2017-07-17, which is
-# why we ignore the type on this import
-from pip._vendor.six.moves import xmlrpc_client # type: ignore
-
-from pip._internal.basecommand import SUCCESS, Command
-from pip._internal.compat import get_terminal_size
-from pip._internal.download import PipXmlrpcTransport
-from pip._internal.exceptions import CommandError
-from pip._internal.models import PyPI
-from pip._internal.status_codes import NO_MATCHES_FOUND
-from pip._internal.utils.logging import indent_log
-
-logger = logging.getLogger(__name__)
-
-
-class SearchCommand(Command):
- """Search for PyPI packages whose name or summary contains ."""
- name = 'search'
- usage = """
- %prog [options] <query>"""
- summary = 'Search PyPI for packages.'
- ignore_require_venv = True
-
- def __init__(self, *args, **kw):
- super(SearchCommand, self).__init__(*args, **kw)
- self.cmd_opts.add_option(
- '-i', '--index',
- dest='index',
- metavar='URL',
- default=PyPI.pypi_url,
- help='Base URL of Python Package Index (default %default)')
-
- self.parser.insert_option_group(0, self.cmd_opts)
-
- def run(self, options, args):
- if not args:
- raise CommandError('Missing required argument (search query).')
- query = args
- pypi_hits = self.search(query, options)
- hits = transform_hits(pypi_hits)
-
- terminal_width = None
- if sys.stdout.isatty():
- terminal_width = get_terminal_size()[0]
-
- print_results(hits, terminal_width=terminal_width)
- if pypi_hits:
- return SUCCESS
- return NO_MATCHES_FOUND
-
- def search(self, query, options):
- index_url = options.index
- with self._build_session(options) as session:
- transport = PipXmlrpcTransport(index_url, session)
- pypi = xmlrpc_client.ServerProxy(index_url, transport)
- hits = pypi.search({'name': query, 'summary': query}, 'or')
- return hits
-
-
-def transform_hits(hits):
- """
- The list from pypi is really a list of versions. We want a list of
- packages with the list of versions stored inline. This converts the
- list from pypi into one we can use.
- """
- packages = OrderedDict()
- for hit in hits:
- name = hit['name']
- summary = hit['summary']
- version = hit['version']
-
- if name not in packages.keys():
- packages[name] = {
- 'name': name,
- 'summary': summary,
- 'versions': [version],
- }
- else:
- packages[name]['versions'].append(version)
-
- # if this is the highest version, replace summary and score
- if version == highest_version(packages[name]['versions']):
- packages[name]['summary'] = summary
-
- return list(packages.values())
-
-
-def print_results(hits, name_column_width=None, terminal_width=None):
- if not hits:
- return
- if name_column_width is None:
- name_column_width = max([
- len(hit['name']) + len(highest_version(hit.get('versions', ['-'])))
- for hit in hits
- ]) + 4
-
- installed_packages = [p.project_name for p in pkg_resources.working_set]
- for hit in hits:
- name = hit['name']
- summary = hit['summary'] or ''
- latest = highest_version(hit.get('versions', ['-']))
- if terminal_width is not None:
- target_width = terminal_width - name_column_width - 5
- if target_width > 10:
- # wrap and indent summary to fit terminal
- summary = textwrap.wrap(summary, target_width)
- summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)
-
- line = '%-*s - %s' % (name_column_width,
- '%s (%s)' % (name, latest), summary)
- try:
- logger.info(line)
- if name in installed_packages:
- dist = pkg_resources.get_distribution(name)
- with indent_log():
- if dist.version == latest:
- logger.info('INSTALLED: %s (latest)', dist.version)
- else:
- logger.info('INSTALLED: %s', dist.version)
- logger.info('LATEST: %s', latest)
- except UnicodeEncodeError:
- pass
-
-
-def highest_version(versions):
- return max(versions, key=parse_version)
+from __future__ import absolute_import
+
+import logging
+import sys
+import textwrap
+from collections import OrderedDict
+
+from pip._vendor import pkg_resources
+from pip._vendor.packaging.version import parse as parse_version
+# NOTE: XMLRPC Client is not annotated in typeshed as of 2017-07-17, which is
+# why we ignore the type on this import
+from pip._vendor.six.moves import xmlrpc_client # type: ignore
+
+from pip._internal.basecommand import SUCCESS, Command
+from pip._internal.compat import get_terminal_size
+from pip._internal.download import PipXmlrpcTransport
+from pip._internal.exceptions import CommandError
+from pip._internal.models import PyPI
+from pip._internal.status_codes import NO_MATCHES_FOUND
+from pip._internal.utils.logging import indent_log
+
+logger = logging.getLogger(__name__)
+
+
+class SearchCommand(Command):
+ """Search for PyPI packages whose name or summary contains ."""
+ name = 'search'
+ usage = """
+ %prog [options] <query>"""
+ summary = 'Search PyPI for packages.'
+ ignore_require_venv = True
+
+ def __init__(self, *args, **kw):
+ super(SearchCommand, self).__init__(*args, **kw)
+ self.cmd_opts.add_option(
+ '-i', '--index',
+ dest='index',
+ metavar='URL',
+ default=PyPI.pypi_url,
+ help='Base URL of Python Package Index (default %default)')
+
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ def run(self, options, args):
+ if not args:
+ raise CommandError('Missing required argument (search query).')
+ query = args
+ pypi_hits = self.search(query, options)
+ hits = transform_hits(pypi_hits)
+
+ terminal_width = None
+ if sys.stdout.isatty():
+ terminal_width = get_terminal_size()[0]
+
+ print_results(hits, terminal_width=terminal_width)
+ if pypi_hits:
+ return SUCCESS
+ return NO_MATCHES_FOUND
+
+ def search(self, query, options):
+ index_url = options.index
+ with self._build_session(options) as session:
+ transport = PipXmlrpcTransport(index_url, session)
+ pypi = xmlrpc_client.ServerProxy(index_url, transport)
+ hits = pypi.search({'name': query, 'summary': query}, 'or')
+ return hits
+
+
+def transform_hits(hits):
+ """
+ The list from pypi is really a list of versions. We want a list of
+ packages with the list of versions stored inline. This converts the
+ list from pypi into one we can use.
+ """
+ packages = OrderedDict()
+ for hit in hits:
+ name = hit['name']
+ summary = hit['summary']
+ version = hit['version']
+
+ if name not in packages.keys():
+ packages[name] = {
+ 'name': name,
+ 'summary': summary,
+ 'versions': [version],
+ }
+ else:
+ packages[name]['versions'].append(version)
+
+ # if this is the highest version, replace summary and score
+ if version == highest_version(packages[name]['versions']):
+ packages[name]['summary'] = summary
+
+ return list(packages.values())
+
+
+def print_results(hits, name_column_width=None, terminal_width=None):
+ if not hits:
+ return
+ if name_column_width is None:
+ name_column_width = max([
+ len(hit['name']) + len(highest_version(hit.get('versions', ['-'])))
+ for hit in hits
+ ]) + 4
+
+ installed_packages = [p.project_name for p in pkg_resources.working_set]
+ for hit in hits:
+ name = hit['name']
+ summary = hit['summary'] or ''
+ latest = highest_version(hit.get('versions', ['-']))
+ if terminal_width is not None:
+ target_width = terminal_width - name_column_width - 5
+ if target_width > 10:
+ # wrap and indent summary to fit terminal
+ summary = textwrap.wrap(summary, target_width)
+ summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)
+
+ line = '%-*s - %s' % (name_column_width,
+ '%s (%s)' % (name, latest), summary)
+ try:
+ logger.info(line)
+ if name in installed_packages:
+ dist = pkg_resources.get_distribution(name)
+ with indent_log():
+ if dist.version == latest:
+ logger.info('INSTALLED: %s (latest)', dist.version)
+ else:
+ logger.info('INSTALLED: %s', dist.version)
+ logger.info('LATEST: %s', latest)
+ except UnicodeEncodeError:
+ pass
+
+
+def highest_version(versions):
+ return max(versions, key=parse_version)
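
A small sketch (not part of the diff) of why highest_version sorts with packaging's parse rather than plain string comparison, and what transform_hits produces; the import path assumes pip 10.x internals and the hits are fabricated:

    from pip._internal.commands.search import highest_version, transform_hits

    # One XML-RPC hit per release; string max() would wrongly rank
    # '0.9.1' above '0.10.0', but PEP 440 parsing does not.
    hits = [
        {'name': 'demo', 'summary': 'old summary', 'version': '0.9.1'},
        {'name': 'demo', 'summary': 'new summary', 'version': '0.10.0'},
    ]
    print(highest_version(['0.9.1', '0.10.0']))  # 0.10.0
    print(transform_hits(hits))
    # [{'name': 'demo', 'summary': 'new summary',
    #   'versions': ['0.9.1', '0.10.0']}]
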
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/show.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/show.py
index 1a8d968..bad9628 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/show.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/show.py
@@ -1,164 +1,164 @@
-from __future__ import absolute_import
-
-import logging
-import os
-from email.parser import FeedParser # type: ignore
-
-from pip._vendor import pkg_resources
-from pip._vendor.packaging.utils import canonicalize_name
-
-from pip._internal.basecommand import Command
-from pip._internal.status_codes import ERROR, SUCCESS
-
-logger = logging.getLogger(__name__)
-
-
-class ShowCommand(Command):
- """Show information about one or more installed packages."""
- name = 'show'
- usage = """
- %prog [options] <package> ..."""
- summary = 'Show information about installed packages.'
- ignore_require_venv = True
-
- def __init__(self, *args, **kw):
- super(ShowCommand, self).__init__(*args, **kw)
- self.cmd_opts.add_option(
- '-f', '--files',
- dest='files',
- action='store_true',
- default=False,
- help='Show the full list of installed files for each package.')
-
- self.parser.insert_option_group(0, self.cmd_opts)
-
- def run(self, options, args):
- if not args:
- logger.warning('ERROR: Please provide a package name or names.')
- return ERROR
- query = args
-
- results = search_packages_info(query)
- if not print_results(
- results, list_files=options.files, verbose=options.verbose):
- return ERROR
- return SUCCESS
-
-
-def search_packages_info(query):
- """
- Gather details from installed distributions. Print distribution name,
- version, location, and installed files. Listing installed files requires
- a pip-generated 'installed-files.txt' in the distribution's '.egg-info'
- directory.
- """
- installed = {}
- for p in pkg_resources.working_set:
- installed[canonicalize_name(p.project_name)] = p
-
- query_names = [canonicalize_name(name) for name in query]
-
- for dist in [installed[pkg] for pkg in query_names if pkg in installed]:
- package = {
- 'name': dist.project_name,
- 'version': dist.version,
- 'location': dist.location,
- 'requires': [dep.project_name for dep in dist.requires()],
- }
- file_list = None
- metadata = None
- if isinstance(dist, pkg_resources.DistInfoDistribution):
- # RECORDs should be part of .dist-info metadata
- if dist.has_metadata('RECORD'):
- lines = dist.get_metadata_lines('RECORD')
- paths = [l.split(',')[0] for l in lines]
- paths = [os.path.join(dist.location, p) for p in paths]
- file_list = [os.path.relpath(p, dist.location) for p in paths]
-
- if dist.has_metadata('METADATA'):
- metadata = dist.get_metadata('METADATA')
- else:
- # Otherwise use pip's log for .egg-info's
- if dist.has_metadata('installed-files.txt'):
- paths = dist.get_metadata_lines('installed-files.txt')
- paths = [os.path.join(dist.egg_info, p) for p in paths]
- file_list = [os.path.relpath(p, dist.location) for p in paths]
-
- if dist.has_metadata('PKG-INFO'):
- metadata = dist.get_metadata('PKG-INFO')
-
- if dist.has_metadata('entry_points.txt'):
- entry_points = dist.get_metadata_lines('entry_points.txt')
- package['entry_points'] = entry_points
-
- if dist.has_metadata('INSTALLER'):
- for line in dist.get_metadata_lines('INSTALLER'):
- if line.strip():
- package['installer'] = line.strip()
- break
-
- # @todo: Should pkg_resources.Distribution have a
- # `get_pkg_info` method?
- feed_parser = FeedParser()
- feed_parser.feed(metadata)
- pkg_info_dict = feed_parser.close()
- for key in ('metadata-version', 'summary',
- 'home-page', 'author', 'author-email', 'license'):
- package[key] = pkg_info_dict.get(key)
-
- # It looks like FeedParser cannot deal with repeated headers
- classifiers = []
- for line in metadata.splitlines():
- if line.startswith('Classifier: '):
- classifiers.append(line[len('Classifier: '):])
- package['classifiers'] = classifiers
-
- if file_list:
- package['files'] = sorted(file_list)
- yield package
-
-
-def print_results(distributions, list_files=False, verbose=False):
- """
- Print the information from the installed distributions found.
- """
- results_printed = False
- for i, dist in enumerate(distributions):
- results_printed = True
- if i > 0:
- logger.info("---")
-
- name = dist.get('name', '')
- required_by = [
- pkg.project_name for pkg in pkg_resources.working_set
- if name in [required.name for required in pkg.requires()]
- ]
-
- logger.info("Name: %s", name)
- logger.info("Version: %s", dist.get('version', ''))
- logger.info("Summary: %s", dist.get('summary', ''))
- logger.info("Home-page: %s", dist.get('home-page', ''))
- logger.info("Author: %s", dist.get('author', ''))
- logger.info("Author-email: %s", dist.get('author-email', ''))
- logger.info("License: %s", dist.get('license', ''))
- logger.info("Location: %s", dist.get('location', ''))
- logger.info("Requires: %s", ', '.join(dist.get('requires', [])))
- logger.info("Required-by: %s", ', '.join(required_by))
-
- if verbose:
- logger.info("Metadata-Version: %s",
- dist.get('metadata-version', ''))
- logger.info("Installer: %s", dist.get('installer', ''))
- logger.info("Classifiers:")
- for classifier in dist.get('classifiers', []):
- logger.info(" %s", classifier)
- logger.info("Entry-points:")
- for entry in dist.get('entry_points', []):
- logger.info(" %s", entry.strip())
- if list_files:
- logger.info("Files:")
- for line in dist.get('files', []):
- logger.info(" %s", line.strip())
- if "files" not in dist:
- logger.info("Cannot locate installed-files.txt")
- return results_printed
+from __future__ import absolute_import
+
+import logging
+import os
+from email.parser import FeedParser # type: ignore
+
+from pip._vendor import pkg_resources
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.basecommand import Command
+from pip._internal.status_codes import ERROR, SUCCESS
+
+logger = logging.getLogger(__name__)
+
+
+class ShowCommand(Command):
+ """Show information about one or more installed packages."""
+ name = 'show'
+ usage = """
+ %prog [options] <package> ..."""
+ summary = 'Show information about installed packages.'
+ ignore_require_venv = True
+
+ def __init__(self, *args, **kw):
+ super(ShowCommand, self).__init__(*args, **kw)
+ self.cmd_opts.add_option(
+ '-f', '--files',
+ dest='files',
+ action='store_true',
+ default=False,
+ help='Show the full list of installed files for each package.')
+
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ def run(self, options, args):
+ if not args:
+ logger.warning('ERROR: Please provide a package name or names.')
+ return ERROR
+ query = args
+
+ results = search_packages_info(query)
+ if not print_results(
+ results, list_files=options.files, verbose=options.verbose):
+ return ERROR
+ return SUCCESS
+
+
+def search_packages_info(query):
+ """
+ Gather details from installed distributions. Print distribution name,
+ version, location, and installed files. Listing installed files requires
+ a pip-generated 'installed-files.txt' in the distribution's '.egg-info'
+ directory.
+ """
+ installed = {}
+ for p in pkg_resources.working_set:
+ installed[canonicalize_name(p.project_name)] = p
+
+ query_names = [canonicalize_name(name) for name in query]
+
+ for dist in [installed[pkg] for pkg in query_names if pkg in installed]:
+ package = {
+ 'name': dist.project_name,
+ 'version': dist.version,
+ 'location': dist.location,
+ 'requires': [dep.project_name for dep in dist.requires()],
+ }
+ file_list = None
+ metadata = None
+ if isinstance(dist, pkg_resources.DistInfoDistribution):
+ # RECORDs should be part of .dist-info metadata
+ if dist.has_metadata('RECORD'):
+ lines = dist.get_metadata_lines('RECORD')
+ paths = [l.split(',')[0] for l in lines]
+ paths = [os.path.join(dist.location, p) for p in paths]
+ file_list = [os.path.relpath(p, dist.location) for p in paths]
+
+ if dist.has_metadata('METADATA'):
+ metadata = dist.get_metadata('METADATA')
+ else:
+ # Otherwise use pip's log for .egg-info's
+ if dist.has_metadata('installed-files.txt'):
+ paths = dist.get_metadata_lines('installed-files.txt')
+ paths = [os.path.join(dist.egg_info, p) for p in paths]
+ file_list = [os.path.relpath(p, dist.location) for p in paths]
+
+ if dist.has_metadata('PKG-INFO'):
+ metadata = dist.get_metadata('PKG-INFO')
+
+ if dist.has_metadata('entry_points.txt'):
+ entry_points = dist.get_metadata_lines('entry_points.txt')
+ package['entry_points'] = entry_points
+
+ if dist.has_metadata('INSTALLER'):
+ for line in dist.get_metadata_lines('INSTALLER'):
+ if line.strip():
+ package['installer'] = line.strip()
+ break
+
+ # @todo: Should pkg_resources.Distribution have a
+ # `get_pkg_info` method?
+ feed_parser = FeedParser()
+ feed_parser.feed(metadata)
+ pkg_info_dict = feed_parser.close()
+ for key in ('metadata-version', 'summary',
+ 'home-page', 'author', 'author-email', 'license'):
+ package[key] = pkg_info_dict.get(key)
+
+ # It looks like FeedParser cannot deal with repeated headers
+ classifiers = []
+ for line in metadata.splitlines():
+ if line.startswith('Classifier: '):
+ classifiers.append(line[len('Classifier: '):])
+ package['classifiers'] = classifiers
+
+ if file_list:
+ package['files'] = sorted(file_list)
+ yield package
+
+
+def print_results(distributions, list_files=False, verbose=False):
+ """
+ Print the information from the installed distributions found.
+ """
+ results_printed = False
+ for i, dist in enumerate(distributions):
+ results_printed = True
+ if i > 0:
+ logger.info("---")
+
+ name = dist.get('name', '')
+ required_by = [
+ pkg.project_name for pkg in pkg_resources.working_set
+ if name in [required.name for required in pkg.requires()]
+ ]
+
+ logger.info("Name: %s", name)
+ logger.info("Version: %s", dist.get('version', ''))
+ logger.info("Summary: %s", dist.get('summary', ''))
+ logger.info("Home-page: %s", dist.get('home-page', ''))
+ logger.info("Author: %s", dist.get('author', ''))
+ logger.info("Author-email: %s", dist.get('author-email', ''))
+ logger.info("License: %s", dist.get('license', ''))
+ logger.info("Location: %s", dist.get('location', ''))
+ logger.info("Requires: %s", ', '.join(dist.get('requires', [])))
+ logger.info("Required-by: %s", ', '.join(required_by))
+
+ if verbose:
+ logger.info("Metadata-Version: %s",
+ dist.get('metadata-version', ''))
+ logger.info("Installer: %s", dist.get('installer', ''))
+ logger.info("Classifiers:")
+ for classifier in dist.get('classifiers', []):
+ logger.info(" %s", classifier)
+ logger.info("Entry-points:")
+ for entry in dist.get('entry_points', []):
+ logger.info(" %s", entry.strip())
+ if list_files:
+ logger.info("Files:")
+ for line in dist.get('files', []):
+ logger.info(" %s", line.strip())
+ if "files" not in dist:
+ logger.info("Cannot locate installed-files.txt")
+ return results_printed
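
A stdlib-only sketch (not part of the diff) of the FeedParser usage in search_packages_info above: PKG-INFO/METADATA files are RFC 822 headers, and repeated Classifier headers are why the code scans lines manually (plain .get() returns only the first match). The metadata text here is fabricated:

    from email.parser import FeedParser

    metadata = (
        "Metadata-Version: 1.2\n"
        "Name: demo\n"
        "Summary: A demo package\n"
        "Classifier: Programming Language :: Python :: 3\n"
        "Classifier: License :: OSI Approved :: MIT License\n"
    )
    feed_parser = FeedParser()
    feed_parser.feed(metadata)
    pkg_info = feed_parser.close()
    print(pkg_info.get('summary'))         # case-insensitive: 'A demo package'
    print(pkg_info.get_all('classifier'))  # both Classifier values as a list
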
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/uninstall.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/uninstall.py
index 7476fa6..3bfa07f 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/uninstall.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/uninstall.py
@@ -1,71 +1,71 @@
-from __future__ import absolute_import
-
-from pip._vendor.packaging.utils import canonicalize_name
-
-from pip._internal.basecommand import Command
-from pip._internal.exceptions import InstallationError
-from pip._internal.req import InstallRequirement, parse_requirements
-
-
-class UninstallCommand(Command):
- """
- Uninstall packages.
-
- pip is able to uninstall most installed packages. Known exceptions are:
-
- - Pure distutils packages installed with ``python setup.py install``, which
- leave behind no metadata to determine what files were installed.
- - Script wrappers installed by ``python setup.py develop``.
- """
- name = 'uninstall'
- usage = """
-      %prog [options] <package> ...
-      %prog [options] -r <requirements file> ..."""
- summary = 'Uninstall packages.'
-
- def __init__(self, *args, **kw):
- super(UninstallCommand, self).__init__(*args, **kw)
- self.cmd_opts.add_option(
- '-r', '--requirement',
- dest='requirements',
- action='append',
- default=[],
- metavar='file',
- help='Uninstall all the packages listed in the given requirements '
- 'file. This option can be used multiple times.',
- )
- self.cmd_opts.add_option(
- '-y', '--yes',
- dest='yes',
- action='store_true',
- help="Don't ask for confirmation of uninstall deletions.")
-
- self.parser.insert_option_group(0, self.cmd_opts)
-
- def run(self, options, args):
- with self._build_session(options) as session:
- reqs_to_uninstall = {}
- for name in args:
- req = InstallRequirement.from_line(
- name, isolated=options.isolated_mode,
- )
- if req.name:
- reqs_to_uninstall[canonicalize_name(req.name)] = req
- for filename in options.requirements:
- for req in parse_requirements(
- filename,
- options=options,
- session=session):
- if req.name:
- reqs_to_uninstall[canonicalize_name(req.name)] = req
- if not reqs_to_uninstall:
- raise InstallationError(
- 'You must give at least one requirement to %(name)s (see '
- '"pip help %(name)s")' % dict(name=self.name)
- )
- for req in reqs_to_uninstall.values():
- uninstall_pathset = req.uninstall(
- auto_confirm=options.yes, verbose=self.verbosity > 0,
- )
- if uninstall_pathset:
- uninstall_pathset.commit()
+from __future__ import absolute_import
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.basecommand import Command
+from pip._internal.exceptions import InstallationError
+from pip._internal.req import InstallRequirement, parse_requirements
+
+
+class UninstallCommand(Command):
+ """
+ Uninstall packages.
+
+ pip is able to uninstall most installed packages. Known exceptions are:
+
+ - Pure distutils packages installed with ``python setup.py install``, which
+ leave behind no metadata to determine what files were installed.
+ - Script wrappers installed by ``python setup.py develop``.
+ """
+ name = 'uninstall'
+ usage = """
+      %prog [options] <package> ...
+      %prog [options] -r <requirements file> ..."""
+ summary = 'Uninstall packages.'
+
+ def __init__(self, *args, **kw):
+ super(UninstallCommand, self).__init__(*args, **kw)
+ self.cmd_opts.add_option(
+ '-r', '--requirement',
+ dest='requirements',
+ action='append',
+ default=[],
+ metavar='file',
+ help='Uninstall all the packages listed in the given requirements '
+ 'file. This option can be used multiple times.',
+ )
+ self.cmd_opts.add_option(
+ '-y', '--yes',
+ dest='yes',
+ action='store_true',
+ help="Don't ask for confirmation of uninstall deletions.")
+
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ def run(self, options, args):
+ with self._build_session(options) as session:
+ reqs_to_uninstall = {}
+ for name in args:
+ req = InstallRequirement.from_line(
+ name, isolated=options.isolated_mode,
+ )
+ if req.name:
+ reqs_to_uninstall[canonicalize_name(req.name)] = req
+ for filename in options.requirements:
+ for req in parse_requirements(
+ filename,
+ options=options,
+ session=session):
+ if req.name:
+ reqs_to_uninstall[canonicalize_name(req.name)] = req
+ if not reqs_to_uninstall:
+ raise InstallationError(
+ 'You must give at least one requirement to %(name)s (see '
+ '"pip help %(name)s")' % dict(name=self.name)
+ )
+ for req in reqs_to_uninstall.values():
+ uninstall_pathset = req.uninstall(
+ auto_confirm=options.yes, verbose=self.verbosity > 0,
+ )
+ if uninstall_pathset:
+ uninstall_pathset.commit()
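
The dict keyed on canonicalize_name() above is what lets "pip uninstall
Django django" collapse to a single requirement. A standalone sketch of that
normalization, using the separately installable "packaging" project (the
same code pip vendors here):

    from packaging.utils import canonicalize_name

    names = ["Django", "django", "Zope.Interface", "zope-interface"]
    unique = {canonicalize_name(n) for n in names}
    print(sorted(unique))  # ['django', 'zope-interface']
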
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/wheel.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/wheel.py
index ac55f91..ed8cdfc 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/wheel.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/wheel.py
@@ -1,179 +1,179 @@
-# -*- coding: utf-8 -*-
-from __future__ import absolute_import
-
-import logging
-import os
-
-from pip._internal import cmdoptions
-from pip._internal.basecommand import RequirementCommand
-from pip._internal.cache import WheelCache
-from pip._internal.exceptions import CommandError, PreviousBuildDirError
-from pip._internal.operations.prepare import RequirementPreparer
-from pip._internal.req import RequirementSet
-from pip._internal.resolve import Resolver
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.wheel import WheelBuilder
-
-logger = logging.getLogger(__name__)
-
-
-class WheelCommand(RequirementCommand):
- """
- Build Wheel archives for your requirements and dependencies.
-
- Wheel is a built-package format, and offers the advantage of not
- recompiling your software during every install. For more details, see the
- wheel docs: https://wheel.readthedocs.io/en/latest/
-
- Requirements: setuptools>=0.8, and wheel.
-
- 'pip wheel' uses the bdist_wheel setuptools extension from the wheel
- package to build individual wheels.
-
- """
-
- name = 'wheel'
- usage = """
-      %prog [options] <requirement specifier> ...
-      %prog [options] -r <requirements file> ...
-      %prog [options] [-e] <vcs project url> ...
-      %prog [options] [-e] <local project path> ...
-      %prog [options] <archive url/path> ..."""
-
- summary = 'Build wheels from your requirements.'
-
- def __init__(self, *args, **kw):
- super(WheelCommand, self).__init__(*args, **kw)
-
- cmd_opts = self.cmd_opts
-
- cmd_opts.add_option(
- '-w', '--wheel-dir',
- dest='wheel_dir',
- metavar='dir',
- default=os.curdir,
- help=("Build wheels into , where the default is the "
- "current working directory."),
- )
- cmd_opts.add_option(cmdoptions.no_binary())
- cmd_opts.add_option(cmdoptions.only_binary())
- cmd_opts.add_option(
- '--build-option',
- dest='build_options',
- metavar='options',
- action='append',
- help="Extra arguments to be supplied to 'setup.py bdist_wheel'.",
- )
- cmd_opts.add_option(cmdoptions.no_build_isolation())
- cmd_opts.add_option(cmdoptions.constraints())
- cmd_opts.add_option(cmdoptions.editable())
- cmd_opts.add_option(cmdoptions.requirements())
- cmd_opts.add_option(cmdoptions.src())
- cmd_opts.add_option(cmdoptions.ignore_requires_python())
- cmd_opts.add_option(cmdoptions.no_deps())
- cmd_opts.add_option(cmdoptions.build_dir())
- cmd_opts.add_option(cmdoptions.progress_bar())
-
- cmd_opts.add_option(
- '--global-option',
- dest='global_options',
- action='append',
- metavar='options',
- help="Extra global options to be supplied to the setup.py "
- "call before the 'bdist_wheel' command.")
-
- cmd_opts.add_option(
- '--pre',
- action='store_true',
- default=False,
- help=("Include pre-release and development versions. By default, "
- "pip only finds stable versions."),
- )
-
- cmd_opts.add_option(cmdoptions.no_clean())
- cmd_opts.add_option(cmdoptions.require_hashes())
-
- index_opts = cmdoptions.make_option_group(
- cmdoptions.index_group,
- self.parser,
- )
-
- self.parser.insert_option_group(0, index_opts)
- self.parser.insert_option_group(0, cmd_opts)
-
- def run(self, options, args):
- cmdoptions.check_install_build_global(options)
-
- index_urls = [options.index_url] + options.extra_index_urls
- if options.no_index:
- logger.debug('Ignoring indexes: %s', ','.join(index_urls))
- index_urls = []
-
- if options.build_dir:
- options.build_dir = os.path.abspath(options.build_dir)
-
- options.src_dir = os.path.abspath(options.src_dir)
-
- with self._build_session(options) as session:
- finder = self._build_package_finder(options, session)
- build_delete = (not (options.no_clean or options.build_dir))
- wheel_cache = WheelCache(options.cache_dir, options.format_control)
-
- with TempDirectory(
- options.build_dir, delete=build_delete, kind="wheel"
- ) as directory:
- requirement_set = RequirementSet(
- require_hashes=options.require_hashes,
- )
-
- try:
- self.populate_requirement_set(
- requirement_set, args, options, finder, session,
- self.name, wheel_cache
- )
-
- preparer = RequirementPreparer(
- build_dir=directory.path,
- src_dir=options.src_dir,
- download_dir=None,
- wheel_download_dir=options.wheel_dir,
- progress_bar=options.progress_bar,
- build_isolation=options.build_isolation,
- )
-
- resolver = Resolver(
- preparer=preparer,
- finder=finder,
- session=session,
- wheel_cache=wheel_cache,
- use_user_site=False,
- upgrade_strategy="to-satisfy-only",
- force_reinstall=False,
- ignore_dependencies=options.ignore_dependencies,
- ignore_requires_python=options.ignore_requires_python,
- ignore_installed=True,
- isolated=options.isolated_mode,
- )
- resolver.resolve(requirement_set)
-
- # build wheels
- wb = WheelBuilder(
- finder, preparer, wheel_cache,
- build_options=options.build_options or [],
- global_options=options.global_options or [],
- no_clean=options.no_clean,
- )
- wheels_built_successfully = wb.build(
- requirement_set.requirements.values(), session=session,
- )
- if not wheels_built_successfully:
- raise CommandError(
- "Failed to build one or more wheels"
- )
- except PreviousBuildDirError:
- options.no_clean = True
- raise
- finally:
- if not options.no_clean:
- requirement_set.cleanup_files()
- wheel_cache.cleanup()
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+import logging
+import os
+
+from pip._internal import cmdoptions
+from pip._internal.basecommand import RequirementCommand
+from pip._internal.cache import WheelCache
+from pip._internal.exceptions import CommandError, PreviousBuildDirError
+from pip._internal.operations.prepare import RequirementPreparer
+from pip._internal.req import RequirementSet
+from pip._internal.resolve import Resolver
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.wheel import WheelBuilder
+
+logger = logging.getLogger(__name__)
+
+
+class WheelCommand(RequirementCommand):
+ """
+ Build Wheel archives for your requirements and dependencies.
+
+ Wheel is a built-package format, and offers the advantage of not
+ recompiling your software during every install. For more details, see the
+ wheel docs: https://wheel.readthedocs.io/en/latest/
+
+ Requirements: setuptools>=0.8, and wheel.
+
+ 'pip wheel' uses the bdist_wheel setuptools extension from the wheel
+ package to build individual wheels.
+
+ """
+
+ name = 'wheel'
+ usage = """
+      %prog [options] <requirement specifier> ...
+      %prog [options] -r <requirements file> ...
+      %prog [options] [-e] <vcs project url> ...
+      %prog [options] [-e] <local project path> ...
+      %prog [options] <archive url/path> ..."""
+
+ summary = 'Build wheels from your requirements.'
+
+ def __init__(self, *args, **kw):
+ super(WheelCommand, self).__init__(*args, **kw)
+
+ cmd_opts = self.cmd_opts
+
+ cmd_opts.add_option(
+ '-w', '--wheel-dir',
+ dest='wheel_dir',
+ metavar='dir',
+ default=os.curdir,
+ help=("Build wheels into , where the default is the "
+ "current working directory."),
+ )
+ cmd_opts.add_option(cmdoptions.no_binary())
+ cmd_opts.add_option(cmdoptions.only_binary())
+ cmd_opts.add_option(
+ '--build-option',
+ dest='build_options',
+ metavar='options',
+ action='append',
+ help="Extra arguments to be supplied to 'setup.py bdist_wheel'.",
+ )
+ cmd_opts.add_option(cmdoptions.no_build_isolation())
+ cmd_opts.add_option(cmdoptions.constraints())
+ cmd_opts.add_option(cmdoptions.editable())
+ cmd_opts.add_option(cmdoptions.requirements())
+ cmd_opts.add_option(cmdoptions.src())
+ cmd_opts.add_option(cmdoptions.ignore_requires_python())
+ cmd_opts.add_option(cmdoptions.no_deps())
+ cmd_opts.add_option(cmdoptions.build_dir())
+ cmd_opts.add_option(cmdoptions.progress_bar())
+
+ cmd_opts.add_option(
+ '--global-option',
+ dest='global_options',
+ action='append',
+ metavar='options',
+ help="Extra global options to be supplied to the setup.py "
+ "call before the 'bdist_wheel' command.")
+
+ cmd_opts.add_option(
+ '--pre',
+ action='store_true',
+ default=False,
+ help=("Include pre-release and development versions. By default, "
+ "pip only finds stable versions."),
+ )
+
+ cmd_opts.add_option(cmdoptions.no_clean())
+ cmd_opts.add_option(cmdoptions.require_hashes())
+
+ index_opts = cmdoptions.make_option_group(
+ cmdoptions.index_group,
+ self.parser,
+ )
+
+ self.parser.insert_option_group(0, index_opts)
+ self.parser.insert_option_group(0, cmd_opts)
+
+ def run(self, options, args):
+ cmdoptions.check_install_build_global(options)
+
+ index_urls = [options.index_url] + options.extra_index_urls
+ if options.no_index:
+ logger.debug('Ignoring indexes: %s', ','.join(index_urls))
+ index_urls = []
+
+ if options.build_dir:
+ options.build_dir = os.path.abspath(options.build_dir)
+
+ options.src_dir = os.path.abspath(options.src_dir)
+
+ with self._build_session(options) as session:
+ finder = self._build_package_finder(options, session)
+ build_delete = (not (options.no_clean or options.build_dir))
+ wheel_cache = WheelCache(options.cache_dir, options.format_control)
+
+ with TempDirectory(
+ options.build_dir, delete=build_delete, kind="wheel"
+ ) as directory:
+ requirement_set = RequirementSet(
+ require_hashes=options.require_hashes,
+ )
+
+ try:
+ self.populate_requirement_set(
+ requirement_set, args, options, finder, session,
+ self.name, wheel_cache
+ )
+
+ preparer = RequirementPreparer(
+ build_dir=directory.path,
+ src_dir=options.src_dir,
+ download_dir=None,
+ wheel_download_dir=options.wheel_dir,
+ progress_bar=options.progress_bar,
+ build_isolation=options.build_isolation,
+ )
+
+ resolver = Resolver(
+ preparer=preparer,
+ finder=finder,
+ session=session,
+ wheel_cache=wheel_cache,
+ use_user_site=False,
+ upgrade_strategy="to-satisfy-only",
+ force_reinstall=False,
+ ignore_dependencies=options.ignore_dependencies,
+ ignore_requires_python=options.ignore_requires_python,
+ ignore_installed=True,
+ isolated=options.isolated_mode,
+ )
+ resolver.resolve(requirement_set)
+
+ # build wheels
+ wb = WheelBuilder(
+ finder, preparer, wheel_cache,
+ build_options=options.build_options or [],
+ global_options=options.global_options or [],
+ no_clean=options.no_clean,
+ )
+ wheels_built_successfully = wb.build(
+ requirement_set.requirements.values(), session=session,
+ )
+ if not wheels_built_successfully:
+ raise CommandError(
+ "Failed to build one or more wheels"
+ )
+ except PreviousBuildDirError:
+ options.no_clean = True
+ raise
+ finally:
+ if not options.no_clean:
+ requirement_set.cleanup_files()
+ wheel_cache.cleanup()
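
Driving this command from another script is usually done through a pip
subprocess rather than these internal classes, which carry no stability
guarantee. A hedged sketch; "requirements.txt" and "wheelhouse" are
illustrative names:

    import subprocess
    import sys

    subprocess.check_call([
        sys.executable, "-m", "pip", "wheel",
        "--wheel-dir", "wheelhouse",   # -w/--wheel-dir; default is the cwd
        "-r", "requirements.txt",
    ])
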
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/compat.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/compat.py
index 4aefd58..064717d 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/compat.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/compat.py
@@ -1,235 +1,235 @@
-"""Stuff that differs in different Python versions and platform
-distributions."""
-from __future__ import absolute_import, division
-
-import codecs
-import locale
-import logging
-import os
-import shutil
-import sys
-
-from pip._vendor.six import text_type
-
-try:
- import ipaddress
-except ImportError:
- try:
- from pip._vendor import ipaddress # type: ignore
- except ImportError:
- import ipaddr as ipaddress # type: ignore
- ipaddress.ip_address = ipaddress.IPAddress
- ipaddress.ip_network = ipaddress.IPNetwork
-
-
-__all__ = [
- "ipaddress", "uses_pycache", "console_to_str", "native_str",
- "get_path_uid", "stdlib_pkgs", "WINDOWS", "samefile", "get_terminal_size",
-]
-
-
-logger = logging.getLogger(__name__)
-
-if sys.version_info >= (3, 4):
- uses_pycache = True
- from importlib.util import cache_from_source
-else:
- import imp
-
- try:
- cache_from_source = imp.cache_from_source # type: ignore
- except AttributeError:
- # does not use __pycache__
- cache_from_source = None
-
- uses_pycache = cache_from_source is not None
-
-
-if sys.version_info >= (3, 5):
- backslashreplace_decode = "backslashreplace"
-else:
- # In version 3.4 and older, backslashreplace exists
- # but does not support use for decoding.
- # We implement our own replace handler for this
- # situation, so that we can consistently use
- # backslash replacement for all versions.
- def backslashreplace_decode_fn(err):
- raw_bytes = (err.object[i] for i in range(err.start, err.end))
- if sys.version_info[0] == 2:
- # Python 2 gave us characters - convert to numeric bytes
- raw_bytes = (ord(b) for b in raw_bytes)
- return u"".join(u"\\x%x" % c for c in raw_bytes), err.end
- codecs.register_error(
- "backslashreplace_decode",
- backslashreplace_decode_fn,
- )
- backslashreplace_decode = "backslashreplace_decode"
-
-
-def console_to_str(data):
- """Return a string, safe for output, of subprocess output.
-
- We assume the data is in the locale preferred encoding.
- If it won't decode properly, we warn the user but decode as
- best we can.
-
- We also ensure that the output can be safely written to
- standard output without encoding errors.
- """
-
- # First, get the encoding we assume. This is the preferred
- # encoding for the locale, unless that is not found, or
- # it is ASCII, in which case assume UTF-8
- encoding = locale.getpreferredencoding()
- if (not encoding) or codecs.lookup(encoding).name == "ascii":
- encoding = "utf-8"
-
- # Now try to decode the data - if we fail, warn the user and
- # decode with replacement.
- try:
- s = data.decode(encoding)
- except UnicodeDecodeError:
- logger.warning(
- "Subprocess output does not appear to be encoded as %s",
- encoding,
- )
- s = data.decode(encoding, errors=backslashreplace_decode)
-
- # Make sure we can print the output, by encoding it to the output
- # encoding with replacement of unencodable characters, and then
- # decoding again.
- # We use stderr's encoding because it's less likely to be
- # redirected and if we don't find an encoding we skip this
- # step (on the assumption that output is wrapped by something
- # that won't fail).
- # The double getattr is to deal with the possibility that we're
- # being called in a situation where sys.__stderr__ doesn't exist,
- # or doesn't have an encoding attribute. Neither of these cases
- # should occur in normal pip use, but there's no harm in checking
- # in case people use pip in (unsupported) unusual situations.
- output_encoding = getattr(getattr(sys, "__stderr__", None),
- "encoding", None)
-
- if output_encoding:
- s = s.encode(output_encoding, errors="backslashreplace")
- s = s.decode(output_encoding)
-
- return s
-
-
-if sys.version_info >= (3,):
- def native_str(s, replace=False):
- if isinstance(s, bytes):
- return s.decode('utf-8', 'replace' if replace else 'strict')
- return s
-
-else:
- def native_str(s, replace=False):
- # Replace is ignored -- unicode to UTF-8 can't fail
- if isinstance(s, text_type):
- return s.encode('utf-8')
- return s
-
-
-def get_path_uid(path):
- """
- Return path's uid.
-
- Does not follow symlinks:
- https://github.com/pypa/pip/pull/935#discussion_r5307003
-
- Placed this function in compat due to differences on AIX and
- Jython, that should eventually go away.
-
- :raises OSError: When path is a symlink or can't be read.
- """
- if hasattr(os, 'O_NOFOLLOW'):
- fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW)
- file_uid = os.fstat(fd).st_uid
- os.close(fd)
- else: # AIX and Jython
- # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW
- if not os.path.islink(path):
- # older versions of Jython don't have `os.fstat`
- file_uid = os.stat(path).st_uid
- else:
- # raise OSError for parity with os.O_NOFOLLOW above
- raise OSError(
- "%s is a symlink; Will not return uid for symlinks" % path
- )
- return file_uid
-
-
-def expanduser(path):
- """
- Expand ~ and ~user constructions.
-
- Includes a workaround for http://bugs.python.org/issue14768
- """
- expanded = os.path.expanduser(path)
- if path.startswith('~/') and expanded.startswith('//'):
- expanded = expanded[1:]
- return expanded
-
-
-# packages in the stdlib that may have installation metadata, but should not be
-# considered 'installed'. this theoretically could be determined based on
-# dist.location (py27:`sysconfig.get_paths()['stdlib']`,
-# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may
-# make this ineffective, so hard-coding
-stdlib_pkgs = {"python", "wsgiref", "argparse"}
-
-
-# windows detection, covers cpython and ironpython
-WINDOWS = (sys.platform.startswith("win") or
- (sys.platform == 'cli' and os.name == 'nt'))
-
-
-def samefile(file1, file2):
- """Provide an alternative for os.path.samefile on Windows/Python2"""
- if hasattr(os.path, 'samefile'):
- return os.path.samefile(file1, file2)
- else:
- path1 = os.path.normcase(os.path.abspath(file1))
- path2 = os.path.normcase(os.path.abspath(file2))
- return path1 == path2
-
-
-if hasattr(shutil, 'get_terminal_size'):
- def get_terminal_size():
- """
- Returns a tuple (x, y) representing the width(x) and the height(y)
- in characters of the terminal window.
- """
- return tuple(shutil.get_terminal_size())
-else:
- def get_terminal_size():
- """
- Returns a tuple (x, y) representing the width(x) and the height(y)
- in characters of the terminal window.
- """
- def ioctl_GWINSZ(fd):
- try:
- import fcntl
- import termios
- import struct
- cr = struct.unpack_from(
- 'hh',
- fcntl.ioctl(fd, termios.TIOCGWINSZ, '12345678')
- )
- except:
- return None
- if cr == (0, 0):
- return None
- return cr
- cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
- if not cr:
- try:
- fd = os.open(os.ctermid(), os.O_RDONLY)
- cr = ioctl_GWINSZ(fd)
- os.close(fd)
- except:
- pass
- if not cr:
- cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
- return int(cr[1]), int(cr[0])
+"""Stuff that differs in different Python versions and platform
+distributions."""
+from __future__ import absolute_import, division
+
+import codecs
+import locale
+import logging
+import os
+import shutil
+import sys
+
+from pip._vendor.six import text_type
+
+try:
+ import ipaddress
+except ImportError:
+ try:
+ from pip._vendor import ipaddress # type: ignore
+ except ImportError:
+ import ipaddr as ipaddress # type: ignore
+ ipaddress.ip_address = ipaddress.IPAddress
+ ipaddress.ip_network = ipaddress.IPNetwork
+
+
+__all__ = [
+ "ipaddress", "uses_pycache", "console_to_str", "native_str",
+ "get_path_uid", "stdlib_pkgs", "WINDOWS", "samefile", "get_terminal_size",
+]
+
+
+logger = logging.getLogger(__name__)
+
+if sys.version_info >= (3, 4):
+ uses_pycache = True
+ from importlib.util import cache_from_source
+else:
+ import imp
+
+ try:
+ cache_from_source = imp.cache_from_source # type: ignore
+ except AttributeError:
+ # does not use __pycache__
+ cache_from_source = None
+
+ uses_pycache = cache_from_source is not None
+
+
+if sys.version_info >= (3, 5):
+ backslashreplace_decode = "backslashreplace"
+else:
+ # In version 3.4 and older, backslashreplace exists
+ # but does not support use for decoding.
+ # We implement our own replace handler for this
+ # situation, so that we can consistently use
+ # backslash replacement for all versions.
+ def backslashreplace_decode_fn(err):
+ raw_bytes = (err.object[i] for i in range(err.start, err.end))
+ if sys.version_info[0] == 2:
+ # Python 2 gave us characters - convert to numeric bytes
+ raw_bytes = (ord(b) for b in raw_bytes)
+ return u"".join(u"\\x%x" % c for c in raw_bytes), err.end
+ codecs.register_error(
+ "backslashreplace_decode",
+ backslashreplace_decode_fn,
+ )
+ backslashreplace_decode = "backslashreplace_decode"
+
+
+def console_to_str(data):
+ """Return a string, safe for output, of subprocess output.
+
+ We assume the data is in the locale preferred encoding.
+ If it won't decode properly, we warn the user but decode as
+ best we can.
+
+ We also ensure that the output can be safely written to
+ standard output without encoding errors.
+ """
+
+ # First, get the encoding we assume. This is the preferred
+ # encoding for the locale, unless that is not found, or
+ # it is ASCII, in which case assume UTF-8
+ encoding = locale.getpreferredencoding()
+ if (not encoding) or codecs.lookup(encoding).name == "ascii":
+ encoding = "utf-8"
+
+ # Now try to decode the data - if we fail, warn the user and
+ # decode with replacement.
+ try:
+ s = data.decode(encoding)
+ except UnicodeDecodeError:
+ logger.warning(
+ "Subprocess output does not appear to be encoded as %s",
+ encoding,
+ )
+ s = data.decode(encoding, errors=backslashreplace_decode)
+
+ # Make sure we can print the output, by encoding it to the output
+ # encoding with replacement of unencodable characters, and then
+ # decoding again.
+ # We use stderr's encoding because it's less likely to be
+ # redirected and if we don't find an encoding we skip this
+ # step (on the assumption that output is wrapped by something
+ # that won't fail).
+ # The double getattr is to deal with the possibility that we're
+ # being called in a situation where sys.__stderr__ doesn't exist,
+ # or doesn't have an encoding attribute. Neither of these cases
+ # should occur in normal pip use, but there's no harm in checking
+ # in case people use pip in (unsupported) unusual situations.
+ output_encoding = getattr(getattr(sys, "__stderr__", None),
+ "encoding", None)
+
+ if output_encoding:
+ s = s.encode(output_encoding, errors="backslashreplace")
+ s = s.decode(output_encoding)
+
+ return s
+
+
+if sys.version_info >= (3,):
+ def native_str(s, replace=False):
+ if isinstance(s, bytes):
+ return s.decode('utf-8', 'replace' if replace else 'strict')
+ return s
+
+else:
+ def native_str(s, replace=False):
+ # Replace is ignored -- unicode to UTF-8 can't fail
+ if isinstance(s, text_type):
+ return s.encode('utf-8')
+ return s
+
+
+def get_path_uid(path):
+ """
+ Return path's uid.
+
+ Does not follow symlinks:
+ https://github.com/pypa/pip/pull/935#discussion_r5307003
+
+ Placed this function in compat due to differences on AIX and
+ Jython, that should eventually go away.
+
+ :raises OSError: When path is a symlink or can't be read.
+ """
+ if hasattr(os, 'O_NOFOLLOW'):
+ fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW)
+ file_uid = os.fstat(fd).st_uid
+ os.close(fd)
+ else: # AIX and Jython
+ # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW
+ if not os.path.islink(path):
+ # older versions of Jython don't have `os.fstat`
+ file_uid = os.stat(path).st_uid
+ else:
+ # raise OSError for parity with os.O_NOFOLLOW above
+ raise OSError(
+ "%s is a symlink; Will not return uid for symlinks" % path
+ )
+ return file_uid
+
+
+def expanduser(path):
+ """
+ Expand ~ and ~user constructions.
+
+ Includes a workaround for http://bugs.python.org/issue14768
+ """
+ expanded = os.path.expanduser(path)
+ if path.startswith('~/') and expanded.startswith('//'):
+ expanded = expanded[1:]
+ return expanded
+
+
+# packages in the stdlib that may have installation metadata, but should not be
+# considered 'installed'. this theoretically could be determined based on
+# dist.location (py27:`sysconfig.get_paths()['stdlib']`,
+# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may
+# make this ineffective, so hard-coding
+stdlib_pkgs = {"python", "wsgiref", "argparse"}
+
+
+# windows detection, covers cpython and ironpython
+WINDOWS = (sys.platform.startswith("win") or
+ (sys.platform == 'cli' and os.name == 'nt'))
+
+
+def samefile(file1, file2):
+ """Provide an alternative for os.path.samefile on Windows/Python2"""
+ if hasattr(os.path, 'samefile'):
+ return os.path.samefile(file1, file2)
+ else:
+ path1 = os.path.normcase(os.path.abspath(file1))
+ path2 = os.path.normcase(os.path.abspath(file2))
+ return path1 == path2
+
+
+if hasattr(shutil, 'get_terminal_size'):
+ def get_terminal_size():
+ """
+ Returns a tuple (x, y) representing the width(x) and the height(y)
+ in characters of the terminal window.
+ """
+ return tuple(shutil.get_terminal_size())
+else:
+ def get_terminal_size():
+ """
+ Returns a tuple (x, y) representing the width(x) and the height(y)
+ in characters of the terminal window.
+ """
+ def ioctl_GWINSZ(fd):
+ try:
+ import fcntl
+ import termios
+ import struct
+ cr = struct.unpack_from(
+ 'hh',
+ fcntl.ioctl(fd, termios.TIOCGWINSZ, '12345678')
+ )
+ except:
+ return None
+ if cr == (0, 0):
+ return None
+ return cr
+ cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
+ if not cr:
+ try:
+ fd = os.open(os.ctermid(), os.O_RDONLY)
+ cr = ioctl_GWINSZ(fd)
+ os.close(fd)
+ except:
+ pass
+ if not cr:
+ cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
+ return int(cr[1]), int(cr[0])
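
The heart of console_to_str() is a decode with a backslash-escape fallback.
On Python 3.5+ the stdlib error handler covers the decode case directly, so
a reduced sketch looks like this (the byte string is a made-up example):

    import locale

    raw = b"subprocess output with a stray byte: \xff"
    encoding = locale.getpreferredencoding() or "utf-8"
    try:
        text = raw.decode(encoding)
    except UnicodeDecodeError:
        # keep going instead of crashing; \xff survives as a literal "\xff"
        text = raw.decode(encoding, errors="backslashreplace")
    print(text)
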
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/configuration.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/configuration.py
index b15e3d5..07af373 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/configuration.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/configuration.py
@@ -1,378 +1,378 @@
-"""Configuration management setup
-
-Some terminology:
-- name
- As written in config files.
-- value
- Value associated with a name
-- key
- Name combined with it's section (section.name)
-- variant
- A single word describing where the configuration key-value pair came from
-"""
-
-import locale
-import logging
-import os
-
-from pip._vendor import six
-from pip._vendor.six.moves import configparser
-
-from pip._internal.exceptions import ConfigurationError
-from pip._internal.locations import (
- legacy_config_file, new_config_file, running_under_virtualenv,
- site_config_files, venv_config_file,
-)
-from pip._internal.utils.misc import ensure_dir, enum
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import Any, Dict, Iterable, List, NewType, Optional, Tuple
-
- RawConfigParser = configparser.RawConfigParser # Shorthand
- Kind = NewType("Kind", str)
-
-logger = logging.getLogger(__name__)
-
-
-# NOTE: Maybe use the optionx attribute to normalize keynames.
-def _normalize_name(name):
- # type: (str) -> str
- """Make a name consistent regardless of source (environment or file)
- """
- name = name.lower().replace('_', '-')
- if name.startswith('--'):
- name = name[2:] # only prefer long opts
- return name
-
-
-def _disassemble_key(name):
- # type: (str) -> List[str]
- return name.split(".", 1)
-
-
-# The kinds of configurations there are.
-kinds = enum(
- USER="user", # User Specific
- GLOBAL="global", # System Wide
- VENV="venv", # Virtual Environment Specific
- ENV="env", # from PIP_CONFIG_FILE
- ENV_VAR="env-var", # from Environment Variables
-)
-
-
-class Configuration(object):
- """Handles management of configuration.
-
- Provides an interface to accessing and managing configuration files.
-
- This class converts provides an API that takes "section.key-name" style
- keys and stores the value associated with it as "key-name" under the
- section "section".
-
- This allows for a clean interface wherein the both the section and the
- key-name are preserved in an easy to manage form in the configuration files
- and the data stored is also nice.
- """
-
- def __init__(self, isolated, load_only=None):
- # type: (bool, Kind) -> None
- super(Configuration, self).__init__()
-
- _valid_load_only = [kinds.USER, kinds.GLOBAL, kinds.VENV, None]
- if load_only not in _valid_load_only:
- raise ConfigurationError(
- "Got invalid value for load_only - should be one of {}".format(
- ", ".join(map(repr, _valid_load_only[:-1]))
- )
- )
- self.isolated = isolated # type: bool
- self.load_only = load_only # type: Optional[Kind]
-
- # The order here determines the override order.
- self._override_order = [
- kinds.GLOBAL, kinds.USER, kinds.VENV, kinds.ENV, kinds.ENV_VAR
- ]
-
- self._ignore_env_names = ["version", "help"]
-
- # Because we keep track of where we got the data from
- self._parsers = {
- variant: [] for variant in self._override_order
- } # type: Dict[Kind, List[Tuple[str, RawConfigParser]]]
- self._config = {
- variant: {} for variant in self._override_order
- } # type: Dict[Kind, Dict[str, Any]]
- self._modified_parsers = [] # type: List[Tuple[str, RawConfigParser]]
-
- def load(self):
- # type: () -> None
- """Loads configuration from configuration files and environment
- """
- self._load_config_files()
- if not self.isolated:
- self._load_environment_vars()
-
- def get_file_to_edit(self):
- # type: () -> Optional[str]
- """Returns the file with highest priority in configuration
- """
- assert self.load_only is not None, \
- "Need to be specified a file to be editing"
-
- try:
- return self._get_parser_to_modify()[0]
- except IndexError:
- return None
-
- def items(self):
- # type: () -> Iterable[Tuple[str, Any]]
- """Returns key-value pairs like dict.items() representing the loaded
- configuration
- """
- return self._dictionary.items()
-
- def get_value(self, key):
- # type: (str) -> Any
- """Get a value from the configuration.
- """
- try:
- return self._dictionary[key]
- except KeyError:
- raise ConfigurationError("No such key - {}".format(key))
-
- def set_value(self, key, value):
- # type: (str, Any) -> None
- """Modify a value in the configuration.
- """
- self._ensure_have_load_only()
-
- fname, parser = self._get_parser_to_modify()
-
- if parser is not None:
- section, name = _disassemble_key(key)
-
- # Modify the parser and the configuration
- if not parser.has_section(section):
- parser.add_section(section)
- parser.set(section, name, value)
-
- self._config[self.load_only][key] = value
- self._mark_as_modified(fname, parser)
-
- def unset_value(self, key):
- # type: (str) -> None
- """Unset a value in the configuration.
- """
- self._ensure_have_load_only()
-
- if key not in self._config[self.load_only]:
- raise ConfigurationError("No such key - {}".format(key))
-
- fname, parser = self._get_parser_to_modify()
-
- if parser is not None:
- section, name = _disassemble_key(key)
-
- # Remove the key in the parser
- modified_something = False
- if parser.has_section(section):
- # Returns whether the option was removed or not
- modified_something = parser.remove_option(section, name)
-
- if modified_something:
- # name removed from parser, section may now be empty
- section_iter = iter(parser.items(section))
- try:
- val = six.next(section_iter)
- except StopIteration:
- val = None
-
- if val is None:
- parser.remove_section(section)
-
- self._mark_as_modified(fname, parser)
- else:
- raise ConfigurationError(
- "Fatal Internal error [id=1]. Please report as a bug."
- )
-
- del self._config[self.load_only][key]
-
- def save(self):
- # type: () -> None
- """Save the currentin-memory state.
- """
- self._ensure_have_load_only()
-
- for fname, parser in self._modified_parsers:
- logger.info("Writing to %s", fname)
-
- # Ensure directory exists.
- ensure_dir(os.path.dirname(fname))
-
- with open(fname, "w") as f:
- parser.write(f) # type: ignore
-
- #
- # Private routines
- #
-
- def _ensure_have_load_only(self):
- # type: () -> None
- if self.load_only is None:
- raise ConfigurationError("Needed a specific file to be modifying.")
- logger.debug("Will be working with %s variant only", self.load_only)
-
- @property
- def _dictionary(self):
- # type: () -> Dict[str, Any]
- """A dictionary representing the loaded configuration.
- """
- # NOTE: Dictionaries are not populated if not loaded. So, conditionals
- # are not needed here.
- retval = {}
-
- for variant in self._override_order:
- retval.update(self._config[variant])
-
- return retval
-
- def _load_config_files(self):
- # type: () -> None
- """Loads configuration from configuration files
- """
- config_files = dict(self._iter_config_files())
- if config_files[kinds.ENV][0:1] == [os.devnull]:
- logger.debug(
- "Skipping loading configuration files due to "
- "environment's PIP_CONFIG_FILE being os.devnull"
- )
- return
-
- for variant, files in config_files.items():
- for fname in files:
- # If there's specific variant set in `load_only`, load only
- # that variant, not the others.
- if self.load_only is not None and variant != self.load_only:
- logger.debug(
- "Skipping file '%s' (variant: %s)", fname, variant
- )
- continue
-
- parser = self._load_file(variant, fname)
-
- # Keeping track of the parsers used
- self._parsers[variant].append((fname, parser))
-
- def _load_file(self, variant, fname):
- # type: (Kind, str) -> RawConfigParser
- logger.debug("For variant '%s', will try loading '%s'", variant, fname)
- parser = self._construct_parser(fname)
-
- for section in parser.sections():
- items = parser.items(section)
- self._config[variant].update(self._normalized_keys(section, items))
-
- return parser
-
- def _construct_parser(self, fname):
- # type: (str) -> RawConfigParser
- parser = configparser.RawConfigParser()
- # If there is no such file, don't bother reading it but create the
- # parser anyway, to hold the data.
- # Doing this is useful when modifying and saving files, where we don't
- # need to construct a parser.
- if os.path.exists(fname):
- try:
- parser.read(fname)
- except UnicodeDecodeError:
- raise ConfigurationError((
- "ERROR: "
- "Configuration file contains invalid %s characters.\n"
- "Please fix your configuration, located at %s\n"
- ) % (locale.getpreferredencoding(False), fname))
- return parser
-
- def _load_environment_vars(self):
- # type: () -> None
- """Loads configuration from environment variables
- """
- self._config[kinds.ENV_VAR].update(
- self._normalized_keys(":env:", self._get_environ_vars())
- )
-
- def _normalized_keys(self, section, items):
- # type: (str, Iterable[Tuple[str, Any]]) -> Dict[str, Any]
- """Normalizes items to construct a dictionary with normalized keys.
-
- This routine is where the names become keys and are made the same
- regardless of source - configuration files or environment.
- """
- normalized = {}
- for name, val in items:
- key = section + "." + _normalize_name(name)
- normalized[key] = val
- return normalized
-
- def _get_environ_vars(self):
- # type: () -> Iterable[Tuple[str, str]]
- """Returns a generator with all environmental vars with prefix PIP_"""
- for key, val in os.environ.items():
- should_be_yielded = (
- key.startswith("PIP_") and
- key[4:].lower() not in self._ignore_env_names
- )
- if should_be_yielded:
- yield key[4:].lower(), val
-
- # XXX: This is patched in the tests.
- def _iter_config_files(self):
- # type: () -> Iterable[Tuple[Kind, List[str]]]
- """Yields variant and configuration files associated with it.
-
- This should be treated like items of a dictionary.
- """
- # SMELL: Move the conditions out of this function
-
- # environment variables have the lowest priority
- config_file = os.environ.get('PIP_CONFIG_FILE', None)
- if config_file is not None:
- yield kinds.ENV, [config_file]
- else:
- yield kinds.ENV, []
-
- # at the base we have any global configuration
- yield kinds.GLOBAL, list(site_config_files)
-
- # per-user configuration next
- should_load_user_config = not self.isolated and not (
- config_file and os.path.exists(config_file)
- )
- if should_load_user_config:
- # The legacy config file is overridden by the new config file
- yield kinds.USER, [legacy_config_file, new_config_file]
-
- # finally virtualenv configuration first trumping others
- if running_under_virtualenv():
- yield kinds.VENV, [venv_config_file]
-
- def _get_parser_to_modify(self):
- # type: () -> Tuple[str, RawConfigParser]
- # Determine which parser to modify
- parsers = self._parsers[self.load_only]
- if not parsers:
- # This should not happen if everything works correctly.
- raise ConfigurationError(
- "Fatal Internal error [id=2]. Please report as a bug."
- )
-
- # Use the highest priority parser.
- return parsers[-1]
-
- # XXX: This is patched in the tests.
- def _mark_as_modified(self, fname, parser):
- # type: (str, RawConfigParser) -> None
- file_parser_tuple = (fname, parser)
- if file_parser_tuple not in self._modified_parsers:
- self._modified_parsers.append(file_parser_tuple)
+"""Configuration management setup
+
+Some terminology:
+- name
+ As written in config files.
+- value
+ Value associated with a name
+- key
+  Name combined with its section (section.name)
+- variant
+ A single word describing where the configuration key-value pair came from
+"""
+
+import locale
+import logging
+import os
+
+from pip._vendor import six
+from pip._vendor.six.moves import configparser
+
+from pip._internal.exceptions import ConfigurationError
+from pip._internal.locations import (
+ legacy_config_file, new_config_file, running_under_virtualenv,
+ site_config_files, venv_config_file,
+)
+from pip._internal.utils.misc import ensure_dir, enum
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Any, Dict, Iterable, List, NewType, Optional, Tuple
+
+ RawConfigParser = configparser.RawConfigParser # Shorthand
+ Kind = NewType("Kind", str)
+
+logger = logging.getLogger(__name__)
+
+
+# NOTE: Maybe use the optionxform attribute to normalize key names.
+def _normalize_name(name):
+ # type: (str) -> str
+ """Make a name consistent regardless of source (environment or file)
+ """
+ name = name.lower().replace('_', '-')
+ if name.startswith('--'):
+ name = name[2:] # only prefer long opts
+ return name
+
+
+def _disassemble_key(name):
+ # type: (str) -> List[str]
+ return name.split(".", 1)
+
+
+# The kinds of configurations there are.
+kinds = enum(
+ USER="user", # User Specific
+ GLOBAL="global", # System Wide
+ VENV="venv", # Virtual Environment Specific
+ ENV="env", # from PIP_CONFIG_FILE
+ ENV_VAR="env-var", # from Environment Variables
+)
+
+
+class Configuration(object):
+ """Handles management of configuration.
+
+ Provides an interface to accessing and managing configuration files.
+
+    This class provides an API that takes "section.key-name" style
+ keys and stores the value associated with it as "key-name" under the
+ section "section".
+
+    This allows for a clean interface wherein both the section and the
+    key-name are preserved in an easy-to-manage form in the configuration
+    files, and the stored values stay readable.
+ """
+
+ def __init__(self, isolated, load_only=None):
+ # type: (bool, Kind) -> None
+ super(Configuration, self).__init__()
+
+ _valid_load_only = [kinds.USER, kinds.GLOBAL, kinds.VENV, None]
+ if load_only not in _valid_load_only:
+ raise ConfigurationError(
+ "Got invalid value for load_only - should be one of {}".format(
+ ", ".join(map(repr, _valid_load_only[:-1]))
+ )
+ )
+ self.isolated = isolated # type: bool
+ self.load_only = load_only # type: Optional[Kind]
+
+ # The order here determines the override order.
+ self._override_order = [
+ kinds.GLOBAL, kinds.USER, kinds.VENV, kinds.ENV, kinds.ENV_VAR
+ ]
+
+ self._ignore_env_names = ["version", "help"]
+
+ # Because we keep track of where we got the data from
+ self._parsers = {
+ variant: [] for variant in self._override_order
+ } # type: Dict[Kind, List[Tuple[str, RawConfigParser]]]
+ self._config = {
+ variant: {} for variant in self._override_order
+ } # type: Dict[Kind, Dict[str, Any]]
+ self._modified_parsers = [] # type: List[Tuple[str, RawConfigParser]]
+
+ def load(self):
+ # type: () -> None
+ """Loads configuration from configuration files and environment
+ """
+ self._load_config_files()
+ if not self.isolated:
+ self._load_environment_vars()
+
+ def get_file_to_edit(self):
+ # type: () -> Optional[str]
+ """Returns the file with highest priority in configuration
+ """
+ assert self.load_only is not None, \
+ "Need to be specified a file to be editing"
+
+ try:
+ return self._get_parser_to_modify()[0]
+ except IndexError:
+ return None
+
+ def items(self):
+ # type: () -> Iterable[Tuple[str, Any]]
+ """Returns key-value pairs like dict.items() representing the loaded
+ configuration
+ """
+ return self._dictionary.items()
+
+ def get_value(self, key):
+ # type: (str) -> Any
+ """Get a value from the configuration.
+ """
+ try:
+ return self._dictionary[key]
+ except KeyError:
+ raise ConfigurationError("No such key - {}".format(key))
+
+ def set_value(self, key, value):
+ # type: (str, Any) -> None
+ """Modify a value in the configuration.
+ """
+ self._ensure_have_load_only()
+
+ fname, parser = self._get_parser_to_modify()
+
+ if parser is not None:
+ section, name = _disassemble_key(key)
+
+ # Modify the parser and the configuration
+ if not parser.has_section(section):
+ parser.add_section(section)
+ parser.set(section, name, value)
+
+ self._config[self.load_only][key] = value
+ self._mark_as_modified(fname, parser)
+
+ def unset_value(self, key):
+ # type: (str) -> None
+ """Unset a value in the configuration.
+ """
+ self._ensure_have_load_only()
+
+ if key not in self._config[self.load_only]:
+ raise ConfigurationError("No such key - {}".format(key))
+
+ fname, parser = self._get_parser_to_modify()
+
+ if parser is not None:
+ section, name = _disassemble_key(key)
+
+ # Remove the key in the parser
+ modified_something = False
+ if parser.has_section(section):
+ # Returns whether the option was removed or not
+ modified_something = parser.remove_option(section, name)
+
+ if modified_something:
+ # name removed from parser, section may now be empty
+ section_iter = iter(parser.items(section))
+ try:
+ val = six.next(section_iter)
+ except StopIteration:
+ val = None
+
+ if val is None:
+ parser.remove_section(section)
+
+ self._mark_as_modified(fname, parser)
+ else:
+ raise ConfigurationError(
+ "Fatal Internal error [id=1]. Please report as a bug."
+ )
+
+ del self._config[self.load_only][key]
+
+ def save(self):
+ # type: () -> None
+ """Save the currentin-memory state.
+ """
+ self._ensure_have_load_only()
+
+ for fname, parser in self._modified_parsers:
+ logger.info("Writing to %s", fname)
+
+ # Ensure directory exists.
+ ensure_dir(os.path.dirname(fname))
+
+ with open(fname, "w") as f:
+ parser.write(f) # type: ignore
+
+ #
+ # Private routines
+ #
+
+ def _ensure_have_load_only(self):
+ # type: () -> None
+ if self.load_only is None:
+ raise ConfigurationError("Needed a specific file to be modifying.")
+ logger.debug("Will be working with %s variant only", self.load_only)
+
+ @property
+ def _dictionary(self):
+ # type: () -> Dict[str, Any]
+ """A dictionary representing the loaded configuration.
+ """
+ # NOTE: Dictionaries are not populated if not loaded. So, conditionals
+ # are not needed here.
+ retval = {}
+
+ for variant in self._override_order:
+ retval.update(self._config[variant])
+
+ return retval
+
+ def _load_config_files(self):
+ # type: () -> None
+ """Loads configuration from configuration files
+ """
+ config_files = dict(self._iter_config_files())
+ if config_files[kinds.ENV][0:1] == [os.devnull]:
+ logger.debug(
+ "Skipping loading configuration files due to "
+ "environment's PIP_CONFIG_FILE being os.devnull"
+ )
+ return
+
+ for variant, files in config_files.items():
+ for fname in files:
+                # If there's a specific variant set in `load_only`, load only
+ # that variant, not the others.
+ if self.load_only is not None and variant != self.load_only:
+ logger.debug(
+ "Skipping file '%s' (variant: %s)", fname, variant
+ )
+ continue
+
+ parser = self._load_file(variant, fname)
+
+ # Keeping track of the parsers used
+ self._parsers[variant].append((fname, parser))
+
+ def _load_file(self, variant, fname):
+ # type: (Kind, str) -> RawConfigParser
+ logger.debug("For variant '%s', will try loading '%s'", variant, fname)
+ parser = self._construct_parser(fname)
+
+ for section in parser.sections():
+ items = parser.items(section)
+ self._config[variant].update(self._normalized_keys(section, items))
+
+ return parser
+
+ def _construct_parser(self, fname):
+ # type: (str) -> RawConfigParser
+ parser = configparser.RawConfigParser()
+ # If there is no such file, don't bother reading it but create the
+ # parser anyway, to hold the data.
+ # Doing this is useful when modifying and saving files, where we don't
+ # need to construct a parser.
+ if os.path.exists(fname):
+ try:
+ parser.read(fname)
+ except UnicodeDecodeError:
+ raise ConfigurationError((
+ "ERROR: "
+ "Configuration file contains invalid %s characters.\n"
+ "Please fix your configuration, located at %s\n"
+ ) % (locale.getpreferredencoding(False), fname))
+ return parser
+
+ def _load_environment_vars(self):
+ # type: () -> None
+ """Loads configuration from environment variables
+ """
+ self._config[kinds.ENV_VAR].update(
+ self._normalized_keys(":env:", self._get_environ_vars())
+ )
+
+ def _normalized_keys(self, section, items):
+ # type: (str, Iterable[Tuple[str, Any]]) -> Dict[str, Any]
+ """Normalizes items to construct a dictionary with normalized keys.
+
+ This routine is where the names become keys and are made the same
+ regardless of source - configuration files or environment.
+ """
+ normalized = {}
+ for name, val in items:
+ key = section + "." + _normalize_name(name)
+ normalized[key] = val
+ return normalized
+
+ def _get_environ_vars(self):
+ # type: () -> Iterable[Tuple[str, str]]
+ """Returns a generator with all environmental vars with prefix PIP_"""
+ for key, val in os.environ.items():
+ should_be_yielded = (
+ key.startswith("PIP_") and
+ key[4:].lower() not in self._ignore_env_names
+ )
+ if should_be_yielded:
+ yield key[4:].lower(), val
+
+ # XXX: This is patched in the tests.
+ def _iter_config_files(self):
+ # type: () -> Iterable[Tuple[Kind, List[str]]]
+ """Yields variant and configuration files associated with it.
+
+ This should be treated like items of a dictionary.
+ """
+ # SMELL: Move the conditions out of this function
+
+ # environment variables have the lowest priority
+ config_file = os.environ.get('PIP_CONFIG_FILE', None)
+ if config_file is not None:
+ yield kinds.ENV, [config_file]
+ else:
+ yield kinds.ENV, []
+
+ # at the base we have any global configuration
+ yield kinds.GLOBAL, list(site_config_files)
+
+ # per-user configuration next
+ should_load_user_config = not self.isolated and not (
+ config_file and os.path.exists(config_file)
+ )
+ if should_load_user_config:
+ # The legacy config file is overridden by the new config file
+ yield kinds.USER, [legacy_config_file, new_config_file]
+
+        # finally, virtualenv configuration, which trumps the others
+ if running_under_virtualenv():
+ yield kinds.VENV, [venv_config_file]
+
+ def _get_parser_to_modify(self):
+ # type: () -> Tuple[str, RawConfigParser]
+ # Determine which parser to modify
+ parsers = self._parsers[self.load_only]
+ if not parsers:
+ # This should not happen if everything works correctly.
+ raise ConfigurationError(
+ "Fatal Internal error [id=2]. Please report as a bug."
+ )
+
+ # Use the highest priority parser.
+ return parsers[-1]
+
+ # XXX: This is patched in the tests.
+ def _mark_as_modified(self, fname, parser):
+ # type: (str, RawConfigParser) -> None
+ file_parser_tuple = (fname, parser)
+ if file_parser_tuple not in self._modified_parsers:
+ self._modified_parsers.append(file_parser_tuple)
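
End to end, the environment-variable path above means that setting
PIP_DEFAULT_TIMEOUT=60 surfaces as the key ":env:.default-timeout". A
self-contained sketch of that normalization (the variable and its value are
illustrative):

    import os

    def normalize_name(name):
        # lower-case, underscores to dashes, drop a leading "--"
        name = name.lower().replace("_", "-")
        return name[2:] if name.startswith("--") else name

    os.environ["PIP_DEFAULT_TIMEOUT"] = "60"
    env_config = {
        ":env:." + normalize_name(key[4:]): value
        for key, value in os.environ.items()
        if key.startswith("PIP_")
        and key[4:].lower() not in ("version", "help")
    }
    print(env_config[":env:.default-timeout"])  # 60
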
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/download.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/download.py
index 06d7201..e0e2d24 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/download.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/download.py
@@ -1,922 +1,922 @@
-from __future__ import absolute_import
-
-import cgi
-import email.utils
-import getpass
-import json
-import logging
-import mimetypes
-import os
-import platform
-import re
-import shutil
-import sys
-
-from pip._vendor import requests, six, urllib3
-from pip._vendor.cachecontrol import CacheControlAdapter
-from pip._vendor.cachecontrol.caches import FileCache
-from pip._vendor.lockfile import LockError
-from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
-from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
-from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
-from pip._vendor.requests.structures import CaseInsensitiveDict
-from pip._vendor.requests.utils import get_netrc_auth
-# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is
-# why we ignore the type on this import
-from pip._vendor.six.moves import xmlrpc_client # type: ignore
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-from pip._vendor.six.moves.urllib import request as urllib_request
-from pip._vendor.six.moves.urllib.parse import unquote as urllib_unquote
-from pip._vendor.urllib3.util import IS_PYOPENSSL
-
-import pip
-from pip._internal.compat import WINDOWS
-from pip._internal.exceptions import HashMismatch, InstallationError
-from pip._internal.locations import write_delete_marker_file
-from pip._internal.models import PyPI
-from pip._internal.utils.encoding import auto_decode
-from pip._internal.utils.filesystem import check_path_owner
-from pip._internal.utils.glibc import libc_ver
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import (
- ARCHIVE_EXTENSIONS, ask_path_exists, backup_dir, call_subprocess, consume,
- display_path, format_size, get_installed_version, rmtree, splitext,
- unpack_file,
-)
-from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM
-from pip._internal.utils.temp_dir import TempDirectory
+from __future__ import absolute_import
+
+import cgi
+import email.utils
+import getpass
+import json
+import logging
+import mimetypes
+import os
+import platform
+import re
+import shutil
+import sys
+
+from pip._vendor import requests, six, urllib3
+from pip._vendor.cachecontrol import CacheControlAdapter
+from pip._vendor.cachecontrol.caches import FileCache
+from pip._vendor.lockfile import LockError
+from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
+from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
+from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
+from pip._vendor.requests.structures import CaseInsensitiveDict
+from pip._vendor.requests.utils import get_netrc_auth
+# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is
+# why we ignore the type on this import
+from pip._vendor.six.moves import xmlrpc_client # type: ignore
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+from pip._vendor.six.moves.urllib import request as urllib_request
+from pip._vendor.six.moves.urllib.parse import unquote as urllib_unquote
+from pip._vendor.urllib3.util import IS_PYOPENSSL
+
+import pip
+from pip._internal.compat import WINDOWS
+from pip._internal.exceptions import HashMismatch, InstallationError
+from pip._internal.locations import write_delete_marker_file
+from pip._internal.models import PyPI
+from pip._internal.utils.encoding import auto_decode
+from pip._internal.utils.filesystem import check_path_owner
+from pip._internal.utils.glibc import libc_ver
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import (
+ ARCHIVE_EXTENSIONS, ask_path_exists, backup_dir, call_subprocess, consume,
+ display_path, format_size, get_installed_version, rmtree, splitext,
+ unpack_file,
+)
+from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.ui import DownloadProgressProvider
+from pip._internal.vcs import vcs
+
+try:
+ import ssl # noqa
+except ImportError:
+ ssl = None
+
+HAS_TLS = (ssl is not None) or IS_PYOPENSSL
+
+__all__ = ['get_file_content',
+ 'is_url', 'url_to_path', 'path_to_url',
+ 'is_archive_file', 'unpack_vcs_link',
+ 'unpack_file_url', 'is_vcs_url', 'is_file_url',
+ 'unpack_http_url', 'unpack_url']
+
+
+logger = logging.getLogger(__name__)
+
+
+def user_agent():
+ """
+ Return a string representing the user agent.
+ """
+ data = {
+ "installer": {"name": "pip", "version": pip.__version__},
+ "python": platform.python_version(),
+ "implementation": {
+ "name": platform.python_implementation(),
+ },
+ }
+
+ if data["implementation"]["name"] == 'CPython':
+ data["implementation"]["version"] = platform.python_version()
+ elif data["implementation"]["name"] == 'PyPy':
+ if sys.pypy_version_info.releaselevel == 'final':
+ pypy_version_info = sys.pypy_version_info[:3]
+ else:
+ pypy_version_info = sys.pypy_version_info
+ data["implementation"]["version"] = ".".join(
+ [str(x) for x in pypy_version_info]
+ )
+ elif data["implementation"]["name"] == 'Jython':
+ # Complete Guess
+ data["implementation"]["version"] = platform.python_version()
+ elif data["implementation"]["name"] == 'IronPython':
+ # Complete Guess
+ data["implementation"]["version"] = platform.python_version()
+
+ if sys.platform.startswith("linux"):
+ from pip._vendor import distro
+ distro_infos = dict(filter(
+ lambda x: x[1],
+ zip(["name", "version", "id"], distro.linux_distribution()),
+ ))
+ libc = dict(filter(
+ lambda x: x[1],
+ zip(["lib", "version"], libc_ver()),
+ ))
+ if libc:
+ distro_infos["libc"] = libc
+ if distro_infos:
+ data["distro"] = distro_infos
+
+ if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
+ data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]}
+
+ if platform.system():
+ data.setdefault("system", {})["name"] = platform.system()
+
+ if platform.release():
+ data.setdefault("system", {})["release"] = platform.release()
+
+ if platform.machine():
+ data["cpu"] = platform.machine()
+
+ if HAS_TLS:
+ data["openssl_version"] = ssl.OPENSSL_VERSION
+
+ setuptools_version = get_installed_version("setuptools")
+ if setuptools_version is not None:
+ data["setuptools_version"] = setuptools_version
+
+ return "{data[installer][name]}/{data[installer][version]} {json}".format(
+ data=data,
+ json=json.dumps(data, separators=(",", ":"), sort_keys=True),
+ )
+
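For illustration, the string returned above is "pip/<version>" followed by a compact JSON blob. A minimal stdlib sketch of the same format (the version and fields here are assumed values, not what a real install would report):

import json
import platform

data = {
    "installer": {"name": "pip", "version": "10.0.1"},  # assumed version
    "python": platform.python_version(),
}
user_agent = "{name}/{version} {blob}".format(
    name=data["installer"]["name"],
    version=data["installer"]["version"],
    blob=json.dumps(data, separators=(",", ":"), sort_keys=True),
)
# e.g. pip/10.0.1 {"installer":{"name":"pip","version":"10.0.1"},"python":"3.6.5"}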
+
+class MultiDomainBasicAuth(AuthBase):
+
+ def __init__(self, prompting=True):
+ self.prompting = prompting
+ self.passwords = {}
+
+ def __call__(self, req):
+ parsed = urllib_parse.urlparse(req.url)
+
+ # Get the netloc without any embedded credentials
+ netloc = parsed.netloc.rsplit("@", 1)[-1]
+
+ # Set the url of the request to the url without any credentials
+ req.url = urllib_parse.urlunparse(parsed[:1] + (netloc,) + parsed[2:])
+
+ # Use any stored credentials that we have for this netloc
+ username, password = self.passwords.get(netloc, (None, None))
+
+ # Extract credentials embedded in the url if we have none stored
+ if username is None:
+ username, password = self.parse_credentials(parsed.netloc)
+
+ # Get creds from netrc if we still don't have them
+ if username is None and password is None:
+ netrc_auth = get_netrc_auth(req.url)
+ username, password = netrc_auth if netrc_auth else (None, None)
+
+ if username or password:
+ # Store the username and password
+ self.passwords[netloc] = (username, password)
+
+ # Send the basic auth with this request
+ req = HTTPBasicAuth(username or "", password or "")(req)
+
+ # Attach a hook to handle 401 responses
+ req.register_hook("response", self.handle_401)
+
+ return req
+
+ def handle_401(self, resp, **kwargs):
+        # We only care about 401 responses; anything else we simply
+        # pass through unchanged.
+ if resp.status_code != 401:
+ return resp
+
+ # We are not able to prompt the user so simply return the response
+ if not self.prompting:
+ return resp
+
+ parsed = urllib_parse.urlparse(resp.url)
+
+ # Prompt the user for a new username and password
+ username = six.moves.input("User for %s: " % parsed.netloc)
+ password = getpass.getpass("Password: ")
+
+ # Store the new username and password to use for future requests
+ if username or password:
+ self.passwords[parsed.netloc] = (username, password)
+
+ # Consume content and release the original connection to allow our new
+ # request to reuse the same one.
+ resp.content
+ resp.raw.release_conn()
+
+ # Add our new username and password to the request
+ req = HTTPBasicAuth(username or "", password or "")(resp.request)
+
+ # Send our new request
+ new_resp = resp.connection.send(req, **kwargs)
+ new_resp.history.append(resp)
+
+ return new_resp
+
+ def parse_credentials(self, netloc):
+ if "@" in netloc:
+ userinfo = netloc.rsplit("@", 1)[0]
+ if ":" in userinfo:
+ user, pwd = userinfo.split(":", 1)
+ return (urllib_unquote(user), urllib_unquote(pwd))
+ return urllib_unquote(userinfo), None
+ return None, None
+
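The credential extraction in parse_credentials above is plain string handling plus percent-decoding; a standalone sketch of the same logic using the Python 3 stdlib equivalent of urllib_unquote:

from urllib.parse import unquote

def split_netloc_credentials(netloc):
    # Same splitting rules as MultiDomainBasicAuth.parse_credentials above.
    if "@" in netloc:
        userinfo = netloc.rsplit("@", 1)[0]
        if ":" in userinfo:
            user, pwd = userinfo.split(":", 1)
            return unquote(user), unquote(pwd)
        return unquote(userinfo), None
    return None, None

assert split_netloc_credentials("user:pa%40ss@pypi.example.com") == ("user", "pa@ss")
assert split_netloc_credentials("pypi.example.com") == (None, None)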
+
+class LocalFSAdapter(BaseAdapter):
+
+ def send(self, request, stream=None, timeout=None, verify=None, cert=None,
+ proxies=None):
+ pathname = url_to_path(request.url)
+
+ resp = Response()
+ resp.status_code = 200
+ resp.url = request.url
+
+ try:
+ stats = os.stat(pathname)
+ except OSError as exc:
+ resp.status_code = 404
+ resp.raw = exc
+ else:
+ modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
+ content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
+ resp.headers = CaseInsensitiveDict({
+ "Content-Type": content_type,
+ "Content-Length": stats.st_size,
+ "Last-Modified": modified,
+ })
+
+ resp.raw = open(pathname, "rb")
+ resp.close = resp.raw.close
+
+ return resp
+
+ def close(self):
+ pass
+
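A hedged usage sketch: mounted on a plain requests.Session, the adapter above serves local files as ordinary HTTP-style responses (the file path is illustrative and must exist, otherwise a 404 response comes back):

import requests
from pip._internal.download import LocalFSAdapter, path_to_url

session = requests.Session()
session.mount("file://", LocalFSAdapter())
resp = session.get(path_to_url("/tmp/example.txt"))  # illustrative path
print(resp.status_code, resp.headers.get("Content-Type"))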
+
+class SafeFileCache(FileCache):
+ """
+ A file based cache which is safe to use even when the target directory may
+ not be accessible or writable.
+ """
+
+ def __init__(self, *args, **kwargs):
+ super(SafeFileCache, self).__init__(*args, **kwargs)
+
+ # Check to ensure that the directory containing our cache directory
+        # is owned by the user currently executing pip. If it does not exist
+ # we will check the parent directory until we find one that does exist.
+ # If it is not owned by the user executing pip then we will disable
+ # the cache and log a warning.
+ if not check_path_owner(self.directory):
+ logger.warning(
+ "The directory '%s' or its parent directory is not owned by "
+ "the current user and the cache has been disabled. Please "
+ "check the permissions and owner of that directory. If "
+ "executing pip with sudo, you may want sudo's -H flag.",
+ self.directory,
+ )
+
+ # Set our directory to None to disable the Cache
+ self.directory = None
+
+ def get(self, *args, **kwargs):
+ # If we don't have a directory, then the cache should be a no-op.
+ if self.directory is None:
+ return
+
+ try:
+ return super(SafeFileCache, self).get(*args, **kwargs)
+ except (LockError, OSError, IOError):
+ # We intentionally silence this error, if we can't access the cache
+ # then we can just skip caching and process the request as if
+ # caching wasn't enabled.
+ pass
+
+ def set(self, *args, **kwargs):
+ # If we don't have a directory, then the cache should be a no-op.
+ if self.directory is None:
+ return
+
+ try:
+ return super(SafeFileCache, self).set(*args, **kwargs)
+ except (LockError, OSError, IOError):
+ # We intentionally silence this error, if we can't access the cache
+ # then we can just skip caching and process the request as if
+ # caching wasn't enabled.
+ pass
+
+ def delete(self, *args, **kwargs):
+ # If we don't have a directory, then the cache should be a no-op.
+ if self.directory is None:
+ return
+
+ try:
+ return super(SafeFileCache, self).delete(*args, **kwargs)
+ except (LockError, OSError, IOError):
+ # We intentionally silence this error, if we can't access the cache
+ # then we can just skip caching and process the request as if
+ # caching wasn't enabled.
+ pass
+
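The fail-open behaviour above, where any cache problem is treated as a cache miss, is a pattern worth isolating; a minimal sketch of it independent of pip's vendored FileCache:

import os

class FailOpenCache:
    def __init__(self, directory):
        self.directory = directory  # set to None to disable the cache entirely

    def get(self, key):
        if self.directory is None:
            return None
        try:
            with open(os.path.join(self.directory, key), "rb") as f:
                return f.read()
        except OSError:
            return None  # an unreadable cache is just a miss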
+
+class InsecureHTTPAdapter(HTTPAdapter):
+
+ def cert_verify(self, conn, url, verify, cert):
+ conn.cert_reqs = 'CERT_NONE'
+ conn.ca_certs = None
+
+
+class PipSession(requests.Session):
+
+ timeout = None
+
+ def __init__(self, *args, **kwargs):
+ retries = kwargs.pop("retries", 0)
+ cache = kwargs.pop("cache", None)
+ insecure_hosts = kwargs.pop("insecure_hosts", [])
+
+ super(PipSession, self).__init__(*args, **kwargs)
+
+ # Attach our User Agent to the request
+ self.headers["User-Agent"] = user_agent()
+
+ # Attach our Authentication handler to the session
+ self.auth = MultiDomainBasicAuth()
+
+ # Create our urllib3.Retry instance which will allow us to customize
+ # how we handle retries.
+ retries = urllib3.Retry(
+ # Set the total number of retries that a particular request can
+ # have.
+ total=retries,
+
+ # A 503 error from PyPI typically means that the Fastly -> Origin
+ # connection got interrupted in some way. A 503 error in general
+ # is typically considered a transient error so we'll go ahead and
+ # retry it.
+            # A 500 may indicate a transient error in Amazon S3.
+            # A 520 or 527 may indicate a transient error in CloudFlare.
+ status_forcelist=[500, 503, 520, 527],
+
+ # Add a small amount of back off between failed requests in
+ # order to prevent hammering the service.
+ backoff_factor=0.25,
+ )
+
+ # We want to _only_ cache responses on securely fetched origins. We do
+ # this because we can't validate the response of an insecurely fetched
+ # origin, and we don't want someone to be able to poison the cache and
+ # require manual eviction from the cache to fix it.
+ if cache:
+ secure_adapter = CacheControlAdapter(
+ cache=SafeFileCache(cache, use_dir_lock=True),
+ max_retries=retries,
+ )
+ else:
+ secure_adapter = HTTPAdapter(max_retries=retries)
+
+ # Our Insecure HTTPAdapter disables HTTPS validation. It does not
+ # support caching (see above) so we'll use it for all http:// URLs as
+ # well as any https:// host that we've marked as ignoring TLS errors
+ # for.
+ insecure_adapter = InsecureHTTPAdapter(max_retries=retries)
+
+ self.mount("https://", secure_adapter)
+ self.mount("http://", insecure_adapter)
+
+ # Enable file:// urls
+ self.mount("file://", LocalFSAdapter())
+
+ # We want to use a non-validating adapter for any requests which are
+ # deemed insecure.
+ for host in insecure_hosts:
+ self.mount("https://{}/".format(host), insecure_adapter)
+
+ def request(self, method, url, *args, **kwargs):
+ # Allow setting a default timeout on a session
+ kwargs.setdefault("timeout", self.timeout)
+
+ # Dispatch the actual request
+ return super(PipSession, self).request(method, url, *args, **kwargs)
+
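A sketch of constructing the session with the keyword arguments consumed above (cache path and internal host are illustrative):

from pip._internal.download import PipSession

session = PipSession(
    retries=3,                                 # becomes urllib3.Retry(total=3, ...)
    cache="/tmp/pip-http-cache",               # enables the caching https:// adapter
    insecure_hosts=["pypi.internal.example"],  # TLS errors ignored for this host
)
session.timeout = 15  # default applied by PipSession.request()
resp = session.get("https://pypi.org/simple/")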
+
+def get_file_content(url, comes_from=None, session=None):
+ """Gets the content of a file; it may be a filename, file: URL, or
+ http: URL. Returns (location, content). Content is unicode.
+
+ :param url: File path or url.
+ :param comes_from: Origin description of requirements.
+ :param session: Instance of pip.download.PipSession.
+ """
+ if session is None:
+ raise TypeError(
+ "get_file_content() missing 1 required keyword argument: 'session'"
+ )
+
+ match = _scheme_re.search(url)
+ if match:
+ scheme = match.group(1).lower()
+ if (scheme == 'file' and comes_from and
+ comes_from.startswith('http')):
+ raise InstallationError(
+ 'Requirements file %s references URL %s, which is local'
+ % (comes_from, url))
+ if scheme == 'file':
+ path = url.split(':', 1)[1]
+ path = path.replace('\\', '/')
+ match = _url_slash_drive_re.match(path)
+ if match:
+ path = match.group(1) + ':' + path.split('|', 1)[1]
+ path = urllib_parse.unquote(path)
+ if path.startswith('/'):
+ path = '/' + path.lstrip('/')
+ url = path
+ else:
+ # FIXME: catch some errors
+ resp = session.get(url)
+ resp.raise_for_status()
+ return resp.url, resp.text
+ try:
+ with open(url, 'rb') as f:
+ content = auto_decode(f.read())
+ except IOError as exc:
+ raise InstallationError(
+ 'Could not open requirements file: %s' % str(exc)
+ )
+ return url, content
+
+
+_scheme_re = re.compile(r'^(http|https|file):', re.I)
+_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)
+
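Usage sketch for the helper above; the session argument is mandatory, and the file name and URL are illustrative:

from pip._internal.download import PipSession, get_file_content

session = PipSession()
location, text = get_file_content("requirements.txt", session=session)
location, text = get_file_content(
    "https://example.com/requirements.txt",
    comes_from="command line",
    session=session,
)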
+
+def is_url(name):
+ """Returns true if the name looks like a URL"""
+ if ':' not in name:
+ return False
+ scheme = name.split(':', 1)[0].lower()
+ return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes
+
+
+def url_to_path(url):
+ """
+ Convert a file: URL to a path.
+ """
+ assert url.startswith('file:'), (
+ "You can only turn file: urls into filenames (not %r)" % url)
+
+ _, netloc, path, _, _ = urllib_parse.urlsplit(url)
+
+ # if we have a UNC path, prepend UNC share notation
+ if netloc:
+ netloc = '\\\\' + netloc
+
+ path = urllib_request.url2pathname(netloc + path)
+ return path
+
+
+def path_to_url(path):
+ """
+ Convert a path to a file: URL. The path will be made absolute and have
+ quoted path parts.
+ """
+ path = os.path.normpath(os.path.abspath(path))
+ url = urllib_parse.urljoin('file:', urllib_request.pathname2url(path))
+ return url
+
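On a POSIX system the two helpers above round-trip, with spaces percent-quoted in the URL form:

from pip._internal.download import path_to_url, url_to_path

url = path_to_url("/tmp/some package/pkg-1.0.tar.gz")
# 'file:///tmp/some%20package/pkg-1.0.tar.gz'
assert url_to_path(url) == "/tmp/some package/pkg-1.0.tar.gz"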
+
+def is_archive_file(name):
+ """Return True if `name` is a considered as an archive file."""
+ ext = splitext(name)[1].lower()
+ if ext in ARCHIVE_EXTENSIONS:
+ return True
+ return False
+
+
+def unpack_vcs_link(link, location):
+ vcs_backend = _get_used_vcs_backend(link)
+ vcs_backend.unpack(location)
+
+
+def _get_used_vcs_backend(link):
+ for backend in vcs.backends:
+ if link.scheme in backend.schemes:
+ vcs_backend = backend(link.url)
+ return vcs_backend
+
+
+def is_vcs_url(link):
+ return bool(_get_used_vcs_backend(link))
+
+
+def is_file_url(link):
+ return link.url.lower().startswith('file:')
+
+
+def is_dir_url(link):
+ """Return whether a file:// Link points to a directory.
+
+ ``link`` must not have any other scheme but file://. Call is_file_url()
+ first.
+
+ """
+ link_path = url_to_path(link.url_without_fragment)
+ return os.path.isdir(link_path)
+
+
+def _progress_indicator(iterable, *args, **kwargs):
+ return iterable
+
+
+def _download_url(resp, link, content_file, hashes, progress_bar):
+ try:
+ total_length = int(resp.headers['content-length'])
+ except (ValueError, KeyError, TypeError):
+ total_length = 0
+
+ cached_resp = getattr(resp, "from_cache", False)
+ if logger.getEffectiveLevel() > logging.INFO:
+ show_progress = False
+ elif cached_resp:
+ show_progress = False
+ elif total_length > (40 * 1000):
+ show_progress = True
+ elif not total_length:
+ show_progress = True
+ else:
+ show_progress = False
+
+ show_url = link.show_url
+
+ def resp_read(chunk_size):
+ try:
+ # Special case for urllib3.
+ for chunk in resp.raw.stream(
+ chunk_size,
+ # We use decode_content=False here because we don't
+ # want urllib3 to mess with the raw bytes we get
+ # from the server. If we decompress inside of
+ # urllib3 then we cannot verify the checksum
+ # because the checksum will be of the compressed
+ # file. This breakage will only occur if the
+ # server adds a Content-Encoding header, which
+ # depends on how the server was configured:
+ # - Some servers will notice that the file isn't a
+ # compressible file and will leave the file alone
+ # and with an empty Content-Encoding
+ # - Some servers will notice that the file is
+ # already compressed and will leave the file
+ # alone and will add a Content-Encoding: gzip
+ # header
+ # - Some servers won't notice anything at all and
+ # will take a file that's already been compressed
+ # and compress it again and set the
+ # Content-Encoding: gzip header
+ #
+ # By setting this not to decode automatically we
+ # hope to eliminate problems with the second case.
+ decode_content=False):
+ yield chunk
+ except AttributeError:
+ # Standard file-like object.
+ while True:
+ chunk = resp.raw.read(chunk_size)
+ if not chunk:
+ break
+ yield chunk
+
+ def written_chunks(chunks):
+ for chunk in chunks:
+ content_file.write(chunk)
+ yield chunk
+
+ progress_indicator = _progress_indicator
+
+ if link.netloc == PyPI.netloc:
+ url = show_url
+ else:
+ url = link.url_without_fragment
+
+ if show_progress: # We don't show progress on cached responses
+ progress_indicator = DownloadProgressProvider(progress_bar,
+ max=total_length)
+ if total_length:
+ logger.info("Downloading %s (%s)", url, format_size(total_length))
+ else:
+ logger.info("Downloading %s", url)
+ elif cached_resp:
+ logger.info("Using cached %s", url)
+ else:
+ logger.info("Downloading %s", url)
+
+ logger.debug('Downloading from URL %s', link)
+
+ downloaded_chunks = written_chunks(
+ progress_indicator(
+ resp_read(CONTENT_CHUNK_SIZE),
+ CONTENT_CHUNK_SIZE
+ )
+ )
+ if hashes:
+ hashes.check_against_chunks(downloaded_chunks)
+ else:
+ consume(downloaded_chunks)
+
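The long comment inside resp_read boils down to: hash exactly the bytes that reach disk, never a transparently decompressed copy. A stdlib sketch of checking a single expected sha256 over streamed chunks, standing in for hashes.check_against_chunks:

import hashlib

def check_sha256_chunks(chunks, expected_hexdigest):
    h = hashlib.sha256()
    for chunk in chunks:
        h.update(chunk)  # raw bytes exactly as written to the file
    if h.hexdigest() != expected_hexdigest:
        raise ValueError("sha256 mismatch: got %s" % h.hexdigest())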
+
+def _copy_file(filename, location, link):
+ copy = True
+ download_location = os.path.join(location, link.filename)
+ if os.path.exists(download_location):
+ response = ask_path_exists(
+            'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort' %
+ display_path(download_location), ('i', 'w', 'b', 'a'))
+ if response == 'i':
+ copy = False
+ elif response == 'w':
+ logger.warning('Deleting %s', display_path(download_location))
+ os.remove(download_location)
+ elif response == 'b':
+ dest_file = backup_dir(download_location)
+ logger.warning(
+ 'Backing up %s to %s',
+ display_path(download_location),
+ display_path(dest_file),
+ )
+ shutil.move(download_location, dest_file)
+ elif response == 'a':
+ sys.exit(-1)
+ if copy:
+ shutil.copy(filename, download_location)
+ logger.info('Saved %s', display_path(download_location))
+
+
+def unpack_http_url(link, location, download_dir=None,
+ session=None, hashes=None, progress_bar="on"):
+ if session is None:
+ raise TypeError(
+ "unpack_http_url() missing 1 required keyword argument: 'session'"
+ )
+
+ with TempDirectory(kind="unpack") as temp_dir:
+ # If a download dir is specified, is the file already downloaded there?
+ already_downloaded_path = None
+ if download_dir:
+ already_downloaded_path = _check_download_dir(link,
+ download_dir,
+ hashes)
+
+ if already_downloaded_path:
+ from_path = already_downloaded_path
+ content_type = mimetypes.guess_type(from_path)[0]
+ else:
+ # let's download to a tmp dir
+ from_path, content_type = _download_http_url(link,
+ session,
+ temp_dir.path,
+ hashes,
+ progress_bar)
+
+ # unpack the archive to the build dir location. even when only
+ # downloading archives, they have to be unpacked to parse dependencies
+ unpack_file(from_path, location, content_type, link)
+
+ # a download dir is specified; let's copy the archive there
+ if download_dir and not already_downloaded_path:
+ _copy_file(from_path, download_dir, link)
+
+ if not already_downloaded_path:
+ os.unlink(from_path)
+
+
+def unpack_file_url(link, location, download_dir=None, hashes=None):
+ """Unpack link into location.
+
+ If download_dir is provided and link points to a file, make a copy
+ of the link file inside download_dir.
+ """
+ link_path = url_to_path(link.url_without_fragment)
+
+ # If it's a url to a local directory
+ if is_dir_url(link):
+ if os.path.isdir(location):
+ rmtree(location)
+ shutil.copytree(link_path, location, symlinks=True)
+ if download_dir:
+ logger.info('Link is a directory, ignoring download_dir')
+ return
+
+ # If --require-hashes is off, `hashes` is either empty, the
+ # link's embedded hash, or MissingHashes; it is required to
+ # match. If --require-hashes is on, we are satisfied by any
+ # hash in `hashes` matching: a URL-based or an option-based
+ # one; no internet-sourced hash will be in `hashes`.
+ if hashes:
+ hashes.check_against_path(link_path)
+
+ # If a download dir is specified, is the file already there and valid?
+ already_downloaded_path = None
+ if download_dir:
+ already_downloaded_path = _check_download_dir(link,
+ download_dir,
+ hashes)
+
+ if already_downloaded_path:
+ from_path = already_downloaded_path
+ else:
+ from_path = link_path
+
+ content_type = mimetypes.guess_type(from_path)[0]
+
+ # unpack the archive to the build dir location. even when only downloading
+ # archives, they have to be unpacked to parse dependencies
+ unpack_file(from_path, location, content_type, link)
+
+ # a download dir is specified and not already downloaded
+ if download_dir and not already_downloaded_path:
+ _copy_file(from_path, download_dir, link)
+
+
+def _copy_dist_from_dir(link_path, location):
+ """Copy distribution files in `link_path` to `location`.
+
+ Invoked when user requests to install a local directory. E.g.:
+
+ pip install .
+ pip install ~/dev/git-repos/python-prompt-toolkit
+
+ """
+
+ # Note: This is currently VERY SLOW if you have a lot of data in the
+ # directory, because it copies everything with `shutil.copytree`.
+ # What it should really do is build an sdist and install that.
+ # See https://github.com/pypa/pip/issues/2195
+
+ if os.path.isdir(location):
+ rmtree(location)
+
+ # build an sdist
+ setup_py = 'setup.py'
+ sdist_args = [sys.executable]
+ sdist_args.append('-c')
+ sdist_args.append(SETUPTOOLS_SHIM % setup_py)
+ sdist_args.append('sdist')
+ sdist_args += ['--dist-dir', location]
+ logger.info('Running setup.py sdist for %s', link_path)
+
+ with indent_log():
+ call_subprocess(sdist_args, cwd=link_path, show_stdout=False)
+
+ # unpack sdist into `location`
+ sdist = os.path.join(location, os.listdir(location)[0])
+ logger.info('Unpacking sdist %s into %s', sdist, location)
+ unpack_file(sdist, location, content_type=None, link=None)
+
+
+class PipXmlrpcTransport(xmlrpc_client.Transport):
+ """Provide a `xmlrpclib.Transport` implementation via a `PipSession`
+ object.
+ """
+
+ def __init__(self, index_url, session, use_datetime=False):
+ xmlrpc_client.Transport.__init__(self, use_datetime)
+ index_parts = urllib_parse.urlparse(index_url)
+ self._scheme = index_parts.scheme
+ self._session = session
+
+ def request(self, host, handler, request_body, verbose=False):
+ parts = (self._scheme, host, handler, None, None, None)
+ url = urllib_parse.urlunparse(parts)
+ try:
+ headers = {'Content-Type': 'text/xml'}
+ response = self._session.post(url, data=request_body,
+ headers=headers, stream=True)
+ response.raise_for_status()
+ self.verbose = verbose
+ return self.parse_response(response.raw)
+ except requests.HTTPError as exc:
+ logger.critical(
+ "HTTP error %s while getting %s",
+ exc.response.status_code, url,
+ )
+ raise
+
+
+def unpack_url(link, location, download_dir=None,
+ only_download=False, session=None, hashes=None,
+ progress_bar="on"):
+ """Unpack link.
+ If link is a VCS link:
+ if only_download, export into download_dir and ignore location
+ else unpack into location
+ for other types of link:
+ - unpack into location
+ - if download_dir, copy the file into download_dir
+ - if only_download, mark location for deletion
+
+ :param hashes: A Hashes object, one of whose embedded hashes must match,
+ or HashMismatch will be raised. If the Hashes is empty, no matches are
+ required, and unhashable types of requirements (like VCS ones, which
+ would ordinarily raise HashUnsupported) are allowed.
+ """
+ # non-editable vcs urls
+ if is_vcs_url(link):
+ unpack_vcs_link(link, location)
+
+ # file urls
+ elif is_file_url(link):
+ unpack_file_url(link, location, download_dir, hashes=hashes)
+
+ # http urls
+ else:
+ if session is None:
+ session = PipSession()
+
+ unpack_http_url(
+ link,
+ location,
+ download_dir,
+ session,
+ hashes=hashes,
+ progress_bar=progress_bar
+ )
+ if only_download:
+ write_delete_marker_file(location)
+
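A hedged dispatch example for the entry point above; the Link class is assumed to come from pip._internal.index in this pip release, and the URL and paths are illustrative:

from pip._internal.download import PipSession, unpack_url
from pip._internal.index import Link  # assumed location of Link in pip 10

link = Link("https://example.com/packages/demo-1.0.tar.gz")  # illustrative URL
unpack_url(
    link,
    location="/tmp/build/demo",
    download_dir="/tmp/downloads",
    session=PipSession(),
)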
+
+def _download_http_url(link, session, temp_dir, hashes, progress_bar):
+ """Download link url into temp_dir using provided session"""
+ target_url = link.url.split('#', 1)[0]
+ try:
+ resp = session.get(
+ target_url,
+ # We use Accept-Encoding: identity here because requests
+ # defaults to accepting compressed responses. This breaks in
+ # a variety of ways depending on how the server is configured.
+ # - Some servers will notice that the file isn't a compressible
+ # file and will leave the file alone and with an empty
+ # Content-Encoding
+ # - Some servers will notice that the file is already
+ # compressed and will leave the file alone and will add a
+ # Content-Encoding: gzip header
+ # - Some servers won't notice anything at all and will take
+ # a file that's already been compressed and compress it again
+ # and set the Content-Encoding: gzip header
+            # By setting this to request only the identity encoding, we're
+ # hoping to eliminate the third case. Hopefully there does not
+ # exist a server which when given a file will notice it is
+ # already compressed and that you're not asking for a
+ # compressed file and will then decompress it before sending
+ # because if that's the case I don't think it'll ever be
+ # possible to make this work.
+ headers={"Accept-Encoding": "identity"},
+ stream=True,
+ )
+ resp.raise_for_status()
+ except requests.HTTPError as exc:
+ logger.critical(
+ "HTTP error %s while getting %s", exc.response.status_code, link,
+ )
+ raise
+
+ content_type = resp.headers.get('content-type', '')
+ filename = link.filename # fallback
+ # Have a look at the Content-Disposition header for a better guess
+ content_disposition = resp.headers.get('content-disposition')
+ if content_disposition:
+        disp_type, params = cgi.parse_header(content_disposition)
+ # We use ``or`` here because we don't want to use an "empty" value
+ # from the filename param.
+ filename = params.get('filename') or filename
+ ext = splitext(filename)[1]
+ if not ext:
+ ext = mimetypes.guess_extension(content_type)
+ if ext:
+ filename += ext
+ if not ext and link.url != resp.url:
+ ext = os.path.splitext(resp.url)[1]
+ if ext:
+ filename += ext
+ file_path = os.path.join(temp_dir, filename)
+ with open(file_path, 'wb') as content_file:
+ _download_url(resp, link, content_file, hashes, progress_bar)
+ return file_path, content_type
+
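The Content-Disposition handling above relies on the stdlib cgi.parse_header, which splits the header value from its parameters:

import cgi

disp_type, params = cgi.parse_header('attachment; filename="demo-1.0.tar.gz"')
print(disp_type)               # attachment
print(params.get("filename"))  # demo-1.0.tar.gz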
+
+def _check_download_dir(link, download_dir, hashes):
+ """ Check download_dir for previously downloaded file with correct hash
+ If a correct file is found return its path else None
+ """
+ download_path = os.path.join(download_dir, link.filename)
+ if os.path.exists(download_path):
+ # If already downloaded, does its hash match?
+ logger.info('File was already downloaded %s', download_path)
+ if hashes:
+ try:
+ hashes.check_against_path(download_path)
+ except HashMismatch:
+ logger.warning(
+ 'Previously-downloaded file %s has bad hash. '
+ 'Re-downloading.',
+ download_path
+ )
+ os.unlink(download_path)
+ return None
+ return download_path
+ return None
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/exceptions.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/exceptions.py
index ad6f412..28705c8 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/exceptions.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/exceptions.py
@@ -1,249 +1,249 @@
-"""Exceptions used throughout package"""
-from __future__ import absolute_import
-
-from itertools import chain, groupby, repeat
-
-from pip._vendor.six import iteritems
-
-
-class PipError(Exception):
- """Base pip exception"""
-
-
-class ConfigurationError(PipError):
- """General exception in configuration"""
-
-
-class InstallationError(PipError):
- """General exception during installation"""
-
-
-class UninstallationError(PipError):
- """General exception during uninstallation"""
-
-
-class DistributionNotFound(InstallationError):
- """Raised when a distribution cannot be found to satisfy a requirement"""
-
-
-class RequirementsFileParseError(InstallationError):
- """Raised when a general error occurs parsing a requirements file line."""
-
-
-class BestVersionAlreadyInstalled(PipError):
- """Raised when the most up-to-date version of a package is already
- installed."""
-
-
-class BadCommand(PipError):
- """Raised when virtualenv or a command is not found"""
-
-
-class CommandError(PipError):
- """Raised when there is an error in command-line arguments"""
-
-
-class PreviousBuildDirError(PipError):
- """Raised when there's a previous conflicting build directory"""
-
-
-class InvalidWheelFilename(InstallationError):
- """Invalid wheel filename."""
-
-
-class UnsupportedWheel(InstallationError):
- """Unsupported wheel."""
-
-
-class HashErrors(InstallationError):
- """Multiple HashError instances rolled into one for reporting"""
-
- def __init__(self):
- self.errors = []
-
- def append(self, error):
- self.errors.append(error)
-
- def __str__(self):
- lines = []
- self.errors.sort(key=lambda e: e.order)
- for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
- lines.append(cls.head)
- lines.extend(e.body() for e in errors_of_cls)
- if lines:
- return '\n'.join(lines)
-
- def __nonzero__(self):
- return bool(self.errors)
-
- def __bool__(self):
- return self.__nonzero__()
-
-
-class HashError(InstallationError):
- """
- A failure to verify a package against known-good hashes
-
- :cvar order: An int sorting hash exception classes by difficulty of
- recovery (lower being harder), so the user doesn't bother fretting
- about unpinned packages when he has deeper issues, like VCS
- dependencies, to deal with. Also keeps error reports in a
- deterministic order.
- :cvar head: A section heading for display above potentially many
- exceptions of this kind
- :ivar req: The InstallRequirement that triggered this error. This is
- pasted on after the exception is instantiated, because it's not
- typically available earlier.
-
- """
- req = None
- head = ''
-
- def body(self):
- """Return a summary of me for display under the heading.
-
- This default implementation simply prints a description of the
- triggering requirement.
-
- :param req: The InstallRequirement that provoked this error, with
- populate_link() having already been called
-
- """
- return ' %s' % self._requirement_name()
-
- def __str__(self):
- return '%s\n%s' % (self.head, self.body())
-
- def _requirement_name(self):
- """Return a description of the requirement that triggered me.
-
- This default implementation returns long description of the req, with
- line numbers
-
- """
- return str(self.req) if self.req else 'unknown package'
-
-
-class VcsHashUnsupported(HashError):
- """A hash was provided for a version-control-system-based requirement, but
- we don't have a method for hashing those."""
-
- order = 0
- head = ("Can't verify hashes for these requirements because we don't "
- "have a way to hash version control repositories:")
-
-
-class DirectoryUrlHashUnsupported(HashError):
- """A hash was provided for a version-control-system-based requirement, but
- we don't have a method for hashing those."""
-
- order = 1
- head = ("Can't verify hashes for these file:// requirements because they "
- "point to directories:")
-
-
-class HashMissing(HashError):
- """A hash was needed for a requirement but is absent."""
-
- order = 2
- head = ('Hashes are required in --require-hashes mode, but they are '
- 'missing from some requirements. Here is a list of those '
- 'requirements along with the hashes their downloaded archives '
- 'actually had. Add lines like these to your requirements files to '
- 'prevent tampering. (If you did not enable --require-hashes '
- 'manually, note that it turns on automatically when any package '
- 'has a hash.)')
-
- def __init__(self, gotten_hash):
- """
- :param gotten_hash: The hash of the (possibly malicious) archive we
- just downloaded
- """
- self.gotten_hash = gotten_hash
-
- def body(self):
- # Dodge circular import.
- from pip._internal.utils.hashes import FAVORITE_HASH
-
- package = None
- if self.req:
- # In the case of URL-based requirements, display the original URL
- # seen in the requirements file rather than the package name,
- # so the output can be directly copied into the requirements file.
- package = (self.req.original_link if self.req.original_link
- # In case someone feeds something downright stupid
- # to InstallRequirement's constructor.
- else getattr(self.req, 'req', None))
- return ' %s --hash=%s:%s' % (package or 'unknown package',
- FAVORITE_HASH,
- self.gotten_hash)
-
-
-class HashUnpinned(HashError):
- """A requirement had a hash specified but was not pinned to a specific
- version."""
-
- order = 3
- head = ('In --require-hashes mode, all requirements must have their '
- 'versions pinned with ==. These do not:')
-
-
-class HashMismatch(HashError):
- """
- Distribution file hash values don't match.
-
- :ivar package_name: The name of the package that triggered the hash
- mismatch. Feel free to write to this after the exception is raised to
- improve its error message.
-
- """
- order = 4
- head = ('THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS '
- 'FILE. If you have updated the package versions, please update '
- 'the hashes. Otherwise, examine the package contents carefully; '
- 'someone may have tampered with them.')
-
- def __init__(self, allowed, gots):
- """
- :param allowed: A dict of algorithm names pointing to lists of allowed
- hex digests
- :param gots: A dict of algorithm names pointing to hashes we
- actually got from the files under suspicion
- """
- self.allowed = allowed
- self.gots = gots
-
- def body(self):
- return ' %s:\n%s' % (self._requirement_name(),
- self._hash_comparison())
-
- def _hash_comparison(self):
- """
- Return a comparison of actual and expected hash values.
-
- Example::
-
- Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
- or 123451234512345123451234512345123451234512345
- Got bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef
-
- """
- def hash_then_or(hash_name):
- # For now, all the decent hashes have 6-char names, so we can get
- # away with hard-coding space literals.
- return chain([hash_name], repeat(' or'))
-
- lines = []
- for hash_name, expecteds in iteritems(self.allowed):
- prefix = hash_then_or(hash_name)
- lines.extend((' Expected %s %s' % (next(prefix), e))
- for e in expecteds)
- lines.append(' Got %s\n' %
- self.gots[hash_name].hexdigest())
- prefix = ' or'
- return '\n'.join(lines)
-
-
-class UnsupportedPythonVersion(InstallationError):
- """Unsupported python version according to Requires-Python package
- metadata."""
+"""Exceptions used throughout package"""
+from __future__ import absolute_import
+
+from itertools import chain, groupby, repeat
+
+from pip._vendor.six import iteritems
+
+
+class PipError(Exception):
+ """Base pip exception"""
+
+
+class ConfigurationError(PipError):
+ """General exception in configuration"""
+
+
+class InstallationError(PipError):
+ """General exception during installation"""
+
+
+class UninstallationError(PipError):
+ """General exception during uninstallation"""
+
+
+class DistributionNotFound(InstallationError):
+ """Raised when a distribution cannot be found to satisfy a requirement"""
+
+
+class RequirementsFileParseError(InstallationError):
+ """Raised when a general error occurs parsing a requirements file line."""
+
+
+class BestVersionAlreadyInstalled(PipError):
+ """Raised when the most up-to-date version of a package is already
+ installed."""
+
+
+class BadCommand(PipError):
+ """Raised when virtualenv or a command is not found"""
+
+
+class CommandError(PipError):
+ """Raised when there is an error in command-line arguments"""
+
+
+class PreviousBuildDirError(PipError):
+ """Raised when there's a previous conflicting build directory"""
+
+
+class InvalidWheelFilename(InstallationError):
+ """Invalid wheel filename."""
+
+
+class UnsupportedWheel(InstallationError):
+ """Unsupported wheel."""
+
+
+class HashErrors(InstallationError):
+ """Multiple HashError instances rolled into one for reporting"""
+
+ def __init__(self):
+ self.errors = []
+
+ def append(self, error):
+ self.errors.append(error)
+
+ def __str__(self):
+ lines = []
+ self.errors.sort(key=lambda e: e.order)
+ for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
+ lines.append(cls.head)
+ lines.extend(e.body() for e in errors_of_cls)
+ if lines:
+ return '\n'.join(lines)
+
+ def __nonzero__(self):
+ return bool(self.errors)
+
+ def __bool__(self):
+ return self.__nonzero__()
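+
+ # Illustrative usage sketch (a simplified assumption, not pip's exact call
+ # sequence): HashErrors is accumulated while requirements are processed and
+ # rendered once at the end:
+ #
+ # errors = HashErrors()
+ # errors.append(VcsHashUnsupported()) # order 0
+ # errors.append(HashUnpinned()) # order 3
+ # if errors: # truthy only when non-empty, via __bool__
+ # report = str(errors) # sorted by .order, grouped under each class's .head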
+
+
+class HashError(InstallationError):
+ """
+ A failure to verify a package against known-good hashes
+
+ :cvar order: An int sorting hash exception classes by difficulty of
+ recovery (lower being harder), so the user doesn't bother fretting
+ about unpinned packages when he has deeper issues, like VCS
+ dependencies, to deal with. Also keeps error reports in a
+ deterministic order.
+ :cvar head: A section heading for display above potentially many
+ exceptions of this kind
+ :ivar req: The InstallRequirement that triggered this error. This is
+ pasted on after the exception is instantiated, because it's not
+ typically available earlier.
+
+ """
+ req = None
+ head = ''
+
+ def body(self):
+ """Return a summary of me for display under the heading.
+
+ This default implementation simply prints a description of the
+ triggering requirement.
+
+ :param req: The InstallRequirement that provoked this error, with
+ populate_link() having already been called
+
+ """
+ return ' %s' % self._requirement_name()
+
+ def __str__(self):
+ return '%s\n%s' % (self.head, self.body())
+
+ def _requirement_name(self):
+ """Return a description of the requirement that triggered me.
+
+ This default implementation returns the long description of the req,
+ with line numbers.
+
+ """
+ return str(self.req) if self.req else 'unknown package'
+
+
+class VcsHashUnsupported(HashError):
+ """A hash was provided for a version-control-system-based requirement, but
+ we don't have a method for hashing those."""
+
+ order = 0
+ head = ("Can't verify hashes for these requirements because we don't "
+ "have a way to hash version control repositories:")
+
+
+class DirectoryUrlHashUnsupported(HashError):
+ """A hash was provided for a version-control-system-based requirement, but
+ we don't have a method for hashing those."""
+
+ order = 1
+ head = ("Can't verify hashes for these file:// requirements because they "
+ "point to directories:")
+
+
+class HashMissing(HashError):
+ """A hash was needed for a requirement but is absent."""
+
+ order = 2
+ head = ('Hashes are required in --require-hashes mode, but they are '
+ 'missing from some requirements. Here is a list of those '
+ 'requirements along with the hashes their downloaded archives '
+ 'actually had. Add lines like these to your requirements files to '
+ 'prevent tampering. (If you did not enable --require-hashes '
+ 'manually, note that it turns on automatically when any package '
+ 'has a hash.)')
+
+ def __init__(self, gotten_hash):
+ """
+ :param gotten_hash: The hash of the (possibly malicious) archive we
+ just downloaded
+ """
+ self.gotten_hash = gotten_hash
+
+ def body(self):
+ # Dodge circular import.
+ from pip._internal.utils.hashes import FAVORITE_HASH
+
+ package = None
+ if self.req:
+ # In the case of URL-based requirements, display the original URL
+ # seen in the requirements file rather than the package name,
+ # so the output can be directly copied into the requirements file.
+ package = (self.req.original_link if self.req.original_link
+ # In case someone feeds something downright stupid
+ # to InstallRequirement's constructor.
+ else getattr(self.req, 'req', None))
+ return ' %s --hash=%s:%s' % (package or 'unknown package',
+ FAVORITE_HASH,
+ self.gotten_hash)
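+
+ # For illustration: with FAVORITE_HASH == 'sha256' (pip's default favorite
+ # hash) and a made-up digest, body() yields a line ready to paste into a
+ # requirements file, e.g. 'foo==1.0 --hash=sha256:9f86d081...'.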
+
+
+class HashUnpinned(HashError):
+ """A requirement had a hash specified but was not pinned to a specific
+ version."""
+
+ order = 3
+ head = ('In --require-hashes mode, all requirements must have their '
+ 'versions pinned with ==. These do not:')
+
+
+class HashMismatch(HashError):
+ """
+ Distribution file hash values don't match.
+
+ :ivar package_name: The name of the package that triggered the hash
+ mismatch. Feel free to write to this after the exception is raised to
+ improve its error message.
+
+ """
+ order = 4
+ head = ('THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS '
+ 'FILE. If you have updated the package versions, please update '
+ 'the hashes. Otherwise, examine the package contents carefully; '
+ 'someone may have tampered with them.')
+
+ def __init__(self, allowed, gots):
+ """
+ :param allowed: A dict of algorithm names pointing to lists of allowed
+ hex digests
+ :param gots: A dict of algorithm names pointing to hashes we
+ actually got from the files under suspicion
+ """
+ self.allowed = allowed
+ self.gots = gots
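+ # Shape note (illustrative values): allowed is e.g.
+ # {'sha256': ['deadbeef...', 'cafef00d...']}, plain hex strings, while
+ # gots maps the same algorithm names to hashlib objects, which is why
+ # _hash_comparison() below calls .hexdigest() on them.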
+
+ def body(self):
+ return ' %s:\n%s' % (self._requirement_name(),
+ self._hash_comparison())
+
+ def _hash_comparison(self):
+ """
+ Return a comparison of actual and expected hash values.
+
+ Example::
+
+ Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
+ or 123451234512345123451234512345123451234512345
+ Got bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef
+
+ """
+ def hash_then_or(hash_name):
+ # For now, all the decent hashes have 6-char names, so we can get
+ # away with hard-coding space literals.
+ return chain([hash_name], repeat(' or'))
+
+ lines = []
+ for hash_name, expecteds in iteritems(self.allowed):
+ prefix = hash_then_or(hash_name)
+ lines.extend((' Expected %s %s' % (next(prefix), e))
+ for e in expecteds)
+ lines.append(' Got %s\n' %
+ self.gots[hash_name].hexdigest())
+ prefix = ' or'
+ return '\n'.join(lines)
+
+
+class UnsupportedPythonVersion(InstallationError):
+ """Unsupported python version according to Requires-Python package
+ metadata."""
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/index.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/index.py
index 3c3a92b..15e0bf3 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/index.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/index.py
@@ -1,1117 +1,1117 @@
-"""Routines related to PyPI, indexes"""
-from __future__ import absolute_import
-
-import cgi
-import itertools
-import logging
-import mimetypes
-import os
-import posixpath
-import re
-import sys
-import warnings
-from collections import namedtuple
-
-from pip._vendor import html5lib, requests, six
-from pip._vendor.distlib.compat import unescape
-from pip._vendor.packaging import specifiers
-from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.packaging.version import parse as parse_version
-from pip._vendor.requests.exceptions import SSLError
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-from pip._vendor.six.moves.urllib import request as urllib_request
-
-from pip._internal.compat import ipaddress
-from pip._internal.download import HAS_TLS, is_url, path_to_url, url_to_path
-from pip._internal.exceptions import (
- BestVersionAlreadyInstalled, DistributionNotFound, InvalidWheelFilename,
- UnsupportedWheel,
-)
-from pip._internal.models import PyPI
-from pip._internal.pep425tags import get_supported
-from pip._internal.utils.deprecation import RemovedInPip11Warning
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import (
- ARCHIVE_EXTENSIONS, SUPPORTED_EXTENSIONS, cached_property, normalize_path,
- splitext,
-)
-from pip._internal.utils.packaging import check_requires_python
-from pip._internal.wheel import Wheel, wheel_ext
-
-__all__ = ['FormatControl', 'fmt_ctl_handle_mutual_exclude', 'PackageFinder']
-
-
-SECURE_ORIGINS = [
- # protocol, hostname, port
- # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC)
- ("https", "*", "*"),
- ("*", "localhost", "*"),
- ("*", "127.0.0.0/8", "*"),
- ("*", "::1/128", "*"),
- ("file", "*", None),
- # ssh is always secure.
- ("ssh", "*", "*"),
-]
-
-
-logger = logging.getLogger(__name__)
-
-
-class InstallationCandidate(object):
-
- def __init__(self, project, version, location):
- self.project = project
- self.version = parse_version(version)
- self.location = location
- self._key = (self.project, self.version, self.location)
-
- def __repr__(self):
- return "".format(
- self.project, self.version, self.location,
- )
-
- def __hash__(self):
- return hash(self._key)
-
- def __lt__(self, other):
- return self._compare(other, lambda s, o: s < o)
-
- def __le__(self, other):
- return self._compare(other, lambda s, o: s <= o)
-
- def __eq__(self, other):
- return self._compare(other, lambda s, o: s == o)
-
- def __ge__(self, other):
- return self._compare(other, lambda s, o: s >= o)
-
- def __gt__(self, other):
- return self._compare(other, lambda s, o: s > o)
-
- def __ne__(self, other):
- return self._compare(other, lambda s, o: s != o)
-
- def _compare(self, other, method):
- if not isinstance(other, InstallationCandidate):
- return NotImplemented
-
- return method(self._key, other._key)
-
-
-class PackageFinder(object):
- """This finds packages.
-
- This is meant to match easy_install's technique for looking for
- packages, by reading pages and looking for appropriate links.
- """
-
- def __init__(self, find_links, index_urls, allow_all_prereleases=False,
- trusted_hosts=None, process_dependency_links=False,
- session=None, format_control=None, platform=None,
- versions=None, abi=None, implementation=None):
- """Create a PackageFinder.
-
- :param format_control: A FormatControl object or None. Used to control
- the selection of source packages / binary packages when consulting
- the index and links.
- :param platform: A string or None. If None, searches for packages
- that are supported by the current system. Otherwise, will find
- packages that can be built on the platform passed in. These
- packages will only be downloaded for distribution: they will
- not be built locally.
- :param versions: A list of strings or None. This is passed directly
- to pep425tags.py in the get_supported() method.
- :param abi: A string or None. This is passed directly
- to pep425tags.py in the get_supported() method.
- :param implementation: A string or None. This is passed directly
- to pep425tags.py in the get_supported() method.
- """
- if session is None:
- raise TypeError(
- "PackageFinder() missing 1 required keyword argument: "
- "'session'"
- )
-
- # Build find_links. If an argument starts with ~, it may be
- # a local file relative to a home directory. So try normalizing
- # it and if it exists, use the normalized version.
- # This is deliberately conservative - it might be fine just to
- # blindly normalize anything starting with a ~...
- self.find_links = []
- for link in find_links:
- if link.startswith('~'):
- new_link = normalize_path(link)
- if os.path.exists(new_link):
- link = new_link
- self.find_links.append(link)
-
- self.index_urls = index_urls
- self.dependency_links = []
-
- # These are boring links that have already been logged somehow:
- self.logged_links = set()
-
- self.format_control = format_control or FormatControl(set(), set())
-
- # Domains that we won't emit warnings for when not using HTTPS
- self.secure_origins = [
- ("*", host, "*")
- for host in (trusted_hosts if trusted_hosts else [])
- ]
-
- # Do we want to allow _all_ pre-releases?
- self.allow_all_prereleases = allow_all_prereleases
-
- # Do we process dependency links?
- self.process_dependency_links = process_dependency_links
-
- # The Session we'll use to make requests
- self.session = session
-
- # The valid tags to check potential found wheel candidates against
- self.valid_tags = get_supported(
- versions=versions,
- platform=platform,
- abi=abi,
- impl=implementation,
- )
-
- # If we don't have TLS enabled, then WARN if anyplace we're looking
- # relies on TLS.
- if not HAS_TLS:
- for link in itertools.chain(self.index_urls, self.find_links):
- parsed = urllib_parse.urlparse(link)
- if parsed.scheme == "https":
- logger.warning(
- "pip is configured with locations that require "
- "TLS/SSL, however the ssl module in Python is not "
- "available."
- )
- break
-
- def get_formatted_locations(self):
- lines = []
- if self.index_urls and self.index_urls != [PyPI.simple_url]:
- lines.append(
- "Looking in indexes: {}".format(", ".join(self.index_urls))
- )
- if self.find_links:
- lines.append(
- "Looking in links: {}".format(", ".join(self.find_links))
- )
- return "\n".join(lines)
-
- def add_dependency_links(self, links):
- # # FIXME: this shouldn't be a global list; it should only
- # # apply to requirements of the package that specifies the
- # # dependency_links value
- # # FIXME: also, we should track comes_from (i.e., use Link)
- if self.process_dependency_links:
- warnings.warn(
- "Dependency Links processing has been deprecated and will be "
- "removed in a future release.",
- RemovedInPip11Warning,
- )
- self.dependency_links.extend(links)
-
- @staticmethod
- def _sort_locations(locations, expand_dir=False):
- """
- Sort locations into "files" (archives) and "urls", and return
- a pair of lists (files, urls)
- """
- files = []
- urls = []
-
- # puts the url for the given file path into the appropriate list
- def sort_path(path):
- url = path_to_url(path)
- if mimetypes.guess_type(url, strict=False)[0] == 'text/html':
- urls.append(url)
- else:
- files.append(url)
-
- for url in locations:
-
- is_local_path = os.path.exists(url)
- is_file_url = url.startswith('file:')
-
- if is_local_path or is_file_url:
- if is_local_path:
- path = url
- else:
- path = url_to_path(url)
- if os.path.isdir(path):
- if expand_dir:
- path = os.path.realpath(path)
- for item in os.listdir(path):
- sort_path(os.path.join(path, item))
- elif is_file_url:
- urls.append(url)
- elif os.path.isfile(path):
- sort_path(path)
- else:
- logger.warning(
- "Url '%s' is ignored: it is neither a file "
- "nor a directory.", url,
- )
- elif is_url(url):
- # Only add url with clear scheme
- urls.append(url)
- else:
- logger.warning(
- "Url '%s' is ignored. It is either a non-existing "
- "path or lacks a specific scheme.", url,
- )
-
- return files, urls
-
- def _candidate_sort_key(self, candidate):
- """
- Function used to generate link sort key for link tuples.
- The greater the return value, the more preferred it is.
- If not finding wheels, then sorted by version only.
- If finding wheels, then the sort order is by version, then:
- 1. existing installs
- 2. wheels ordered via Wheel.support_index_min(self.valid_tags)
- 3. source archives
- Note: it was considered to embed this logic into the Link
- comparison operators, but then different sdist links
- with the same version, would have to be considered equal
- """
- support_num = len(self.valid_tags)
- build_tag = tuple()
- if candidate.location.is_wheel:
- # can raise InvalidWheelFilename
- wheel = Wheel(candidate.location.filename)
- if not wheel.supported(self.valid_tags):
- raise UnsupportedWheel(
- "%s is not a supported wheel for this platform. It "
- "can't be sorted." % wheel.filename
- )
- pri = -(wheel.support_index_min(self.valid_tags))
- if wheel.build_tag is not None:
- match = re.match(r'^(\d+)(.*)$', wheel.build_tag)
- build_tag_groups = match.groups()
- build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
- else: # sdist
- pri = -(support_num)
- return (candidate.version, build_tag, pri)
-
- def _validate_secure_origin(self, logger, location):
- # Determine if this url used a secure transport mechanism
- parsed = urllib_parse.urlparse(str(location))
- origin = (parsed.scheme, parsed.hostname, parsed.port)
-
- # The protocol to use to see if the protocol matches.
- # Don't count the repository type as part of the protocol: in
- # cases such as "git+ssh", only use "ssh". (I.e., Only verify against
- # the last scheme.)
- protocol = origin[0].rsplit('+', 1)[-1]
-
- # Determine if our origin is a secure origin by looking through our
- # hardcoded list of secure origins, as well as any additional ones
- # configured on this PackageFinder instance.
- for secure_origin in (SECURE_ORIGINS + self.secure_origins):
- if protocol != secure_origin[0] and secure_origin[0] != "*":
- continue
-
- try:
- # We need to do this decode dance to ensure that we have a
- # unicode object, even on Python 2.x.
- addr = ipaddress.ip_address(
- origin[1]
- if (
- isinstance(origin[1], six.text_type) or
- origin[1] is None
- )
- else origin[1].decode("utf8")
- )
- network = ipaddress.ip_network(
- secure_origin[1]
- if isinstance(secure_origin[1], six.text_type)
- else secure_origin[1].decode("utf8")
- )
- except ValueError:
- # We don't have both a valid address and a valid network, so
- # we'll check this origin against hostnames.
- if (origin[1] and
- origin[1].lower() != secure_origin[1].lower() and
- secure_origin[1] != "*"):
- continue
- else:
- # We have a valid address and network, so see if the address
- # is contained within the network.
- if addr not in network:
- continue
-
- # Check to see if the port matches
- if (origin[2] != secure_origin[2] and
- secure_origin[2] != "*" and
- secure_origin[2] is not None):
- continue
-
- # If we've gotten here, then this origin matches the current
- # secure origin and we should return True
- return True
-
- # If we've gotten to this point, then the origin isn't secure and we
- # will not accept it as a valid location to search. We will however
- # log a warning that we are ignoring it.
- logger.warning(
- "The repository located at %s is not a trusted or secure host and "
- "is being ignored. If this repository is available via HTTPS we "
- "recommend you use HTTPS instead, otherwise you may silence "
- "this warning and allow it anyway with '--trusted-host %s'.",
- parsed.hostname,
- parsed.hostname,
- )
-
- return False
-
- def _get_index_urls_locations(self, project_name):
- """Returns the locations found via self.index_urls
-
- Checks the url_name on the main (first in the list) index and
- uses this url_name to produce all locations
- """
-
- def mkurl_pypi_url(url):
- loc = posixpath.join(
- url,
- urllib_parse.quote(canonicalize_name(project_name)))
- # For maximum compatibility with easy_install, ensure the path
- # ends in a trailing slash. Although this isn't in the spec
- # (and PyPI can handle it without the slash) some other index
- # implementations might break if they relied on easy_install's
- # behavior.
- if not loc.endswith('/'):
- loc = loc + '/'
- return loc
-
- return [mkurl_pypi_url(url) for url in self.index_urls]
-
- def find_all_candidates(self, project_name):
- """Find all available InstallationCandidate for project_name
-
- This checks index_urls, find_links and dependency_links.
- All versions found are returned as an InstallationCandidate list.
-
- See _link_package_versions for details on which files are accepted
- """
- index_locations = self._get_index_urls_locations(project_name)
- index_file_loc, index_url_loc = self._sort_locations(index_locations)
- fl_file_loc, fl_url_loc = self._sort_locations(
- self.find_links, expand_dir=True,
- )
- dep_file_loc, dep_url_loc = self._sort_locations(self.dependency_links)
-
- file_locations = (Link(url) for url in itertools.chain(
- index_file_loc, fl_file_loc, dep_file_loc,
- ))
-
- # We trust every url that the user has given us whether it was given
- # via --index-url or --find-links
- # We explicitly do not trust links that came from dependency_links
- # We want to filter out anything which does not have a secure origin.
- url_locations = [
- link for link in itertools.chain(
- (Link(url) for url in index_url_loc),
- (Link(url) for url in fl_url_loc),
- (Link(url) for url in dep_url_loc),
- )
- if self._validate_secure_origin(logger, link)
- ]
-
- logger.debug('%d location(s) to search for versions of %s:',
- len(url_locations), project_name)
-
- for location in url_locations:
- logger.debug('* %s', location)
-
- canonical_name = canonicalize_name(project_name)
- formats = fmt_ctl_formats(self.format_control, canonical_name)
- search = Search(project_name, canonical_name, formats)
- find_links_versions = self._package_versions(
- # We trust every directly linked archive in find_links
- (Link(url, '-f') for url in self.find_links),
- search
- )
-
- page_versions = []
- for page in self._get_pages(url_locations, project_name):
- logger.debug('Analyzing links from page %s', page.url)
- with indent_log():
- page_versions.extend(
- self._package_versions(page.links, search)
- )
-
- dependency_versions = self._package_versions(
- (Link(url) for url in self.dependency_links), search
- )
- if dependency_versions:
- logger.debug(
- 'dependency_links found: %s',
- ', '.join([
- version.location.url for version in dependency_versions
- ])
- )
-
- file_versions = self._package_versions(file_locations, search)
- if file_versions:
- file_versions.sort(reverse=True)
- logger.debug(
- 'Local files found: %s',
- ', '.join([
- url_to_path(candidate.location.url)
- for candidate in file_versions
- ])
- )
-
- # This is an intentional priority ordering
- return (
- file_versions + find_links_versions + page_versions +
- dependency_versions
- )
-
- def find_requirement(self, req, upgrade):
- """Try to find a Link matching req
-
- Expects req, an InstallRequirement and upgrade, a boolean
- Returns a Link if found,
- Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise
- """
- all_candidates = self.find_all_candidates(req.name)
-
- # Filter out anything which doesn't match our specifier
- compatible_versions = set(
- req.specifier.filter(
- # We turn the version object into a str here because otherwise
- # when we're debundled but setuptools isn't, Python will see
- # packaging.version.Version and
- # pkg_resources._vendor.packaging.version.Version as different
- # types. This way we'll use a str as a common data interchange
- # format. If we stop using the pkg_resources provided specifier
- # and start using our own, we can drop the cast to str().
- [str(c.version) for c in all_candidates],
- prereleases=(
- self.allow_all_prereleases
- if self.allow_all_prereleases else None
- ),
- )
- )
- applicable_candidates = [
- # Again, converting to str to deal with debundling.
- c for c in all_candidates if str(c.version) in compatible_versions
- ]
-
- if applicable_candidates:
- best_candidate = max(applicable_candidates,
- key=self._candidate_sort_key)
- else:
- best_candidate = None
-
- if req.satisfied_by is not None:
- installed_version = parse_version(req.satisfied_by.version)
- else:
- installed_version = None
-
- if installed_version is None and best_candidate is None:
- logger.critical(
- 'Could not find a version that satisfies the requirement %s '
- '(from versions: %s)',
- req,
- ', '.join(
- sorted(
- {str(c.version) for c in all_candidates},
- key=parse_version,
- )
- )
- )
-
- raise DistributionNotFound(
- 'No matching distribution found for %s' % req
- )
-
- best_installed = False
- if installed_version and (
- best_candidate is None or
- best_candidate.version <= installed_version):
- best_installed = True
-
- if not upgrade and installed_version is not None:
- if best_installed:
- logger.debug(
- 'Existing installed version (%s) is most up-to-date and '
- 'satisfies requirement',
- installed_version,
- )
- else:
- logger.debug(
- 'Existing installed version (%s) satisfies requirement '
- '(most up-to-date version is %s)',
- installed_version,
- best_candidate.version,
- )
- return None
-
- if best_installed:
- # We have an existing version, and it's the best version
- logger.debug(
- 'Installed version (%s) is most up-to-date (past versions: '
- '%s)',
- installed_version,
- ', '.join(sorted(compatible_versions, key=parse_version)) or
- "none",
- )
- raise BestVersionAlreadyInstalled
-
- logger.debug(
- 'Using version %s (newest of versions: %s)',
- best_candidate.version,
- ', '.join(sorted(compatible_versions, key=parse_version))
- )
- return best_candidate.location
-
- def _get_pages(self, locations, project_name):
- """
- Yields HTMLPage objects from the given locations, skipping
- locations that have errors.
- """
- seen = set()
- for location in locations:
- if location in seen:
- continue
- seen.add(location)
-
- page = self._get_page(location)
- if page is None:
- continue
-
- yield page
-
- _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$')
-
- def _sort_links(self, links):
- """
- Returns elements of links in order, non-egg links first, egg links
- second, while eliminating duplicates
- """
- eggs, no_eggs = [], []
- seen = set()
- for link in links:
- if link not in seen:
- seen.add(link)
- if link.egg_fragment:
- eggs.append(link)
- else:
- no_eggs.append(link)
- return no_eggs + eggs
-
- def _package_versions(self, links, search):
- result = []
- for link in self._sort_links(links):
- v = self._link_package_versions(link, search)
- if v is not None:
- result.append(v)
- return result
-
- def _log_skipped_link(self, link, reason):
- if link not in self.logged_links:
- logger.debug('Skipping link %s; %s', link, reason)
- self.logged_links.add(link)
-
- def _link_package_versions(self, link, search):
- """Return an InstallationCandidate or None"""
- version = None
- if link.egg_fragment:
- egg_info = link.egg_fragment
- ext = link.ext
- else:
- egg_info, ext = link.splitext()
- if not ext:
- self._log_skipped_link(link, 'not a file')
- return
- if ext not in SUPPORTED_EXTENSIONS:
- self._log_skipped_link(
- link, 'unsupported archive format: %s' % ext,
- )
- return
- if "binary" not in search.formats and ext == wheel_ext:
- self._log_skipped_link(
- link, 'No binaries permitted for %s' % search.supplied,
- )
- return
- if "macosx10" in link.path and ext == '.zip':
- self._log_skipped_link(link, 'macosx10 one')
- return
- if ext == wheel_ext:
- try:
- wheel = Wheel(link.filename)
- except InvalidWheelFilename:
- self._log_skipped_link(link, 'invalid wheel filename')
- return
- if canonicalize_name(wheel.name) != search.canonical:
- self._log_skipped_link(
- link, 'wrong project name (not %s)' % search.supplied)
- return
-
- if not wheel.supported(self.valid_tags):
- self._log_skipped_link(
- link, 'it is not compatible with this Python')
- return
-
- version = wheel.version
-
- # This should be up by the search.ok_binary check, but see issue 2700.
- if "source" not in search.formats and ext != wheel_ext:
- self._log_skipped_link(
- link, 'No sources permitted for %s' % search.supplied,
- )
- return
-
- if not version:
- version = egg_info_matches(egg_info, search.supplied, link)
- if version is None:
- self._log_skipped_link(
- link, 'wrong project name (not %s)' % search.supplied)
- return
-
- match = self._py_version_re.search(version)
- if match:
- version = version[:match.start()]
- py_version = match.group(1)
- if py_version != sys.version[:3]:
- self._log_skipped_link(
- link, 'Python version is incorrect')
- return
- try:
- support_this_python = check_requires_python(link.requires_python)
- except specifiers.InvalidSpecifier:
- logger.debug("Package %s has an invalid Requires-Python entry: %s",
- link.filename, link.requires_python)
- support_this_python = True
-
- if not support_this_python:
- logger.debug("The package %s is incompatible with the python"
- "version in use. Acceptable python versions are:%s",
- link, link.requires_python)
- return
- logger.debug('Found link %s, version: %s', link, version)
-
- return InstallationCandidate(search.supplied, version, link)
-
- def _get_page(self, link):
- return HTMLPage.get_page(link, session=self.session)
-
-
-def egg_info_matches(
- egg_info, search_name, link,
- _egg_info_re=re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)):
- """Pull the version part out of a string.
-
- :param egg_info: The string to parse. E.g. foo-2.1
- :param search_name: The name of the package this belongs to. None to
- infer the name. Note that this cannot unambiguously parse strings
- like foo-2-2 which might be foo, 2-2 or foo-2, 2.
- :param link: The link the string came from, for logging on failure.
- """
- match = _egg_info_re.search(egg_info)
- if not match:
- logger.debug('Could not parse version from link: %s', link)
- return None
- if search_name is None:
- full_match = match.group(0)
- return full_match[full_match.index('-'):]
- name = match.group(0).lower()
- # To match the "safe" name that pkg_resources creates:
- name = name.replace('_', '-')
- # project name and version must be separated by a dash
- look_for = search_name.lower() + "-"
- if name.startswith(look_for):
- return match.group(0)[len(look_for):]
- else:
- return None
-
-
-class HTMLPage(object):
- """Represents one page, along with its URL"""
-
- def __init__(self, content, url, headers=None):
- # Determine if we have any encoding information in our headers
- encoding = None
- if headers and "Content-Type" in headers:
- content_type, params = cgi.parse_header(headers["Content-Type"])
-
- if "charset" in params:
- encoding = params['charset']
-
- self.content = content
- self.parsed = html5lib.parse(
- self.content,
- transport_encoding=encoding,
- namespaceHTMLElements=False,
- )
- self.url = url
- self.headers = headers
-
- def __str__(self):
- return self.url
-
- @classmethod
- def get_page(cls, link, skip_archives=True, session=None):
- if session is None:
- raise TypeError(
- "get_page() missing 1 required keyword argument: 'session'"
- )
-
- url = link.url
- url = url.split('#', 1)[0]
-
- # Check for VCS schemes that do not support lookup as web pages.
- from pip._internal.vcs import VcsSupport
- for scheme in VcsSupport.schemes:
- if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
- logger.debug('Cannot look at %s URL %s', scheme, link)
- return None
-
- try:
- if skip_archives:
- filename = link.filename
- for bad_ext in ARCHIVE_EXTENSIONS:
- if filename.endswith(bad_ext):
- content_type = cls._get_content_type(
- url, session=session,
- )
- if content_type.lower().startswith('text/html'):
- break
- else:
- logger.debug(
- 'Skipping page %s because of Content-Type: %s',
- link,
- content_type,
- )
- return
-
- logger.debug('Getting page %s', url)
-
- # Tack index.html onto file:// URLs that point to directories
- (scheme, netloc, path, params, query, fragment) = \
- urllib_parse.urlparse(url)
- if (scheme == 'file' and
- os.path.isdir(urllib_request.url2pathname(path))):
- # add trailing slash if not present so urljoin doesn't trim
- # final segment
- if not url.endswith('/'):
- url += '/'
- url = urllib_parse.urljoin(url, 'index.html')
- logger.debug(' file: URL is directory, getting %s', url)
-
- resp = session.get(
- url,
- headers={
- "Accept": "text/html",
- "Cache-Control": "max-age=600",
- },
- )
- resp.raise_for_status()
-
- # The check for archives above only works if the url ends with
- # something that looks like an archive. However that is not a
- # requirement of a url. Unless we issue a HEAD request on every
- # url we cannot know ahead of time for sure if something is HTML
- # or not. However we can check after we've downloaded it.
- content_type = resp.headers.get('Content-Type', 'unknown')
- if not content_type.lower().startswith("text/html"):
- logger.debug(
- 'Skipping page %s because of Content-Type: %s',
- link,
- content_type,
- )
- return
-
- inst = cls(resp.content, resp.url, resp.headers)
- except requests.HTTPError as exc:
- cls._handle_fail(link, exc, url)
- except SSLError as exc:
- reason = "There was a problem confirming the ssl certificate: "
- reason += str(exc)
- cls._handle_fail(link, reason, url, meth=logger.info)
- except requests.ConnectionError as exc:
- cls._handle_fail(link, "connection error: %s" % exc, url)
- except requests.Timeout:
- cls._handle_fail(link, "timed out", url)
- else:
- return inst
-
- @staticmethod
- def _handle_fail(link, reason, url, meth=None):
- if meth is None:
- meth = logger.debug
-
- meth("Could not fetch URL %s: %s - skipping", link, reason)
-
- @staticmethod
- def _get_content_type(url, session):
- """Get the Content-Type of the given url, using a HEAD request"""
- scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url)
- if scheme not in {'http', 'https'}:
- # FIXME: some warning or something?
- # assertion error?
- return ''
-
- resp = session.head(url, allow_redirects=True)
- resp.raise_for_status()
-
- return resp.headers.get("Content-Type", "")
-
- @cached_property
- def base_url(self):
- bases = [
- x for x in self.parsed.findall(".//base")
- if x.get("href") is not None
- ]
- if bases and bases[0].get("href"):
- return bases[0].get("href")
- else:
- return self.url
-
- @property
- def links(self):
- """Yields all links in the page"""
- for anchor in self.parsed.findall(".//a"):
- if anchor.get("href"):
- href = anchor.get("href")
- url = self.clean_link(
- urllib_parse.urljoin(self.base_url, href)
- )
- pyrequire = anchor.get('data-requires-python')
- pyrequire = unescape(pyrequire) if pyrequire else None
- yield Link(url, self, requires_python=pyrequire)
-
- _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)
-
- def clean_link(self, url):
- """Makes sure a link is fully encoded. That is, if a ' ' shows up in
- the link, it will be rewritten to %20 (while not over-quoting
- % or other characters)."""
- return self._clean_re.sub(
- lambda match: '%%%2x' % ord(match.group(0)), url)
-
-
-class Link(object):
-
- def __init__(self, url, comes_from=None, requires_python=None):
- """
- Object representing a parsed link from https://pypi.org/simple/*
-
- url:
- url of the resource pointed to (href of the link)
- comes_from:
- instance of HTMLPage where the link was found, or string.
- requires_python:
- String containing the `Requires-Python` metadata field, specified
- in PEP 345. This may be specified by a data-requires-python
- attribute in the HTML link tag, as described in PEP 503.
- """
-
- # url can be a UNC windows share
- if url.startswith('\\\\'):
- url = path_to_url(url)
-
- self.url = url
- self.comes_from = comes_from
- self.requires_python = requires_python if requires_python else None
-
- def __str__(self):
- if self.requires_python:
- rp = ' (requires-python:%s)' % self.requires_python
- else:
- rp = ''
- if self.comes_from:
- return '%s (from %s)%s' % (self.url, self.comes_from, rp)
- else:
- return str(self.url)
-
- def __repr__(self):
- return '<Link %s>' % self
-
- def __eq__(self, other):
- if not isinstance(other, Link):
- return NotImplemented
- return self.url == other.url
-
- def __ne__(self, other):
- if not isinstance(other, Link):
- return NotImplemented
- return self.url != other.url
-
- def __lt__(self, other):
- if not isinstance(other, Link):
- return NotImplemented
- return self.url < other.url
-
- def __le__(self, other):
- if not isinstance(other, Link):
- return NotImplemented
- return self.url <= other.url
-
- def __gt__(self, other):
- if not isinstance(other, Link):
- return NotImplemented
- return self.url > other.url
-
- def __ge__(self, other):
- if not isinstance(other, Link):
- return NotImplemented
- return self.url >= other.url
-
- def __hash__(self):
- return hash(self.url)
-
- @property
- def filename(self):
- _, netloc, path, _, _ = urllib_parse.urlsplit(self.url)
- name = posixpath.basename(path.rstrip('/')) or netloc
- name = urllib_parse.unquote(name)
- assert name, ('URL %r produced no filename' % self.url)
- return name
-
- @property
- def scheme(self):
- return urllib_parse.urlsplit(self.url)[0]
-
- @property
- def netloc(self):
- return urllib_parse.urlsplit(self.url)[1]
-
- @property
- def path(self):
- return urllib_parse.unquote(urllib_parse.urlsplit(self.url)[2])
-
- def splitext(self):
- return splitext(posixpath.basename(self.path.rstrip('/')))
-
- @property
- def ext(self):
- return self.splitext()[1]
-
- @property
- def url_without_fragment(self):
- scheme, netloc, path, query, fragment = urllib_parse.urlsplit(self.url)
- return urllib_parse.urlunsplit((scheme, netloc, path, query, None))
-
- _egg_fragment_re = re.compile(r'[#&]egg=([^&]*)')
-
- @property
- def egg_fragment(self):
- match = self._egg_fragment_re.search(self.url)
- if not match:
- return None
- return match.group(1)
-
- _subdirectory_fragment_re = re.compile(r'[#&]subdirectory=([^&]*)')
-
- @property
- def subdirectory_fragment(self):
- match = self._subdirectory_fragment_re.search(self.url)
- if not match:
- return None
- return match.group(1)
-
- _hash_re = re.compile(
- r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)'
- )
-
- @property
- def hash(self):
- match = self._hash_re.search(self.url)
- if match:
- return match.group(2)
- return None
-
- @property
- def hash_name(self):
- match = self._hash_re.search(self.url)
- if match:
- return match.group(1)
- return None
-
- @property
- def show_url(self):
- return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0])
-
- @property
- def is_wheel(self):
- return self.ext == wheel_ext
-
- @property
- def is_artifact(self):
- """
- Determines if this points to an actual artifact (e.g. a tarball) or if
- it points to an "abstract" thing like a path or a VCS location.
- """
- from pip._internal.vcs import vcs
-
- if self.scheme in vcs.all_schemes:
- return False
-
- return True
-
-
-FormatControl = namedtuple('FormatControl', 'no_binary only_binary')
-"""This object has two fields, no_binary and only_binary.
-
-If a field is falsy, it isn't set. If it is {':all:'}, it should match all
-packages except those listed in the other field. Only one field can be set
-to {':all:'} at a time. The rest of the time exact package name matches
-are listed, with any given package only showing up in one field at a time.
-"""
-
-
-def fmt_ctl_handle_mutual_exclude(value, target, other):
- new = value.split(',')
- while ':all:' in new:
- other.clear()
- target.clear()
- target.add(':all:')
- del new[:new.index(':all:') + 1]
- if ':none:' not in new:
- # Without a none, we want to discard everything as :all: covers it
- return
- for name in new:
- if name == ':none:':
- target.clear()
- continue
- name = canonicalize_name(name)
- other.discard(name)
- target.add(name)
-
-
-def fmt_ctl_formats(fmt_ctl, canonical_name):
- result = {"binary", "source"}
- if canonical_name in fmt_ctl.only_binary:
- result.discard('source')
- elif canonical_name in fmt_ctl.no_binary:
- result.discard('binary')
- elif ':all:' in fmt_ctl.only_binary:
- result.discard('source')
- elif ':all:' in fmt_ctl.no_binary:
- result.discard('binary')
- return frozenset(result)
-
-
-def fmt_ctl_no_binary(fmt_ctl):
- fmt_ctl_handle_mutual_exclude(
- ':all:', fmt_ctl.no_binary, fmt_ctl.only_binary,
- )
-
-
-Search = namedtuple('Search', 'supplied canonical formats')
-"""Capture key aspects of a search.
-
-:attribute supplied: The user supplied package.
-:attribute canonical: The canonical package name.
-:attribute formats: The formats allowed for this package. Should be a set
- with 'binary' or 'source' or both in it.
-"""
+"""Routines related to PyPI, indexes"""
+from __future__ import absolute_import
+
+import cgi
+import itertools
+import logging
+import mimetypes
+import os
+import posixpath
+import re
+import sys
+import warnings
+from collections import namedtuple
+
+from pip._vendor import html5lib, requests, six
+from pip._vendor.distlib.compat import unescape
+from pip._vendor.packaging import specifiers
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.packaging.version import parse as parse_version
+from pip._vendor.requests.exceptions import SSLError
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+from pip._vendor.six.moves.urllib import request as urllib_request
+
+from pip._internal.compat import ipaddress
+from pip._internal.download import HAS_TLS, is_url, path_to_url, url_to_path
+from pip._internal.exceptions import (
+ BestVersionAlreadyInstalled, DistributionNotFound, InvalidWheelFilename,
+ UnsupportedWheel,
+)
+from pip._internal.models import PyPI
+from pip._internal.pep425tags import get_supported
+from pip._internal.utils.deprecation import RemovedInPip11Warning
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import (
+ ARCHIVE_EXTENSIONS, SUPPORTED_EXTENSIONS, cached_property, normalize_path,
+ splitext,
+)
+from pip._internal.utils.packaging import check_requires_python
+from pip._internal.wheel import Wheel, wheel_ext
+
+__all__ = ['FormatControl', 'fmt_ctl_handle_mutual_exclude', 'PackageFinder']
+
+
+SECURE_ORIGINS = [
+ # protocol, hostname, port
+ # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC)
+ ("https", "*", "*"),
+ ("*", "localhost", "*"),
+ ("*", "127.0.0.0/8", "*"),
+ ("*", "::1/128", "*"),
+ ("file", "*", None),
+ # ssh is always secure.
+ ("ssh", "*", "*"),
+]
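+
+# Informal gloss (not from pip's docs): each tuple is matched field by field
+# in PackageFinder._validate_secure_origin below, where "*" accepts anything
+# and a None port matches any port; e.g. ("*", "127.0.0.0/8", "*") trusts
+# loopback addresses on any scheme and port, and ("file", "*", None) trusts
+# file:// URLs regardless of port.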
+
+
+logger = logging.getLogger(__name__)
+
+
+class InstallationCandidate(object):
+
+ def __init__(self, project, version, location):
+ self.project = project
+ self.version = parse_version(version)
+ self.location = location
+ self._key = (self.project, self.version, self.location)
+
+ def __repr__(self):
+ return "".format(
+ self.project, self.version, self.location,
+ )
+
+ def __hash__(self):
+ return hash(self._key)
+
+ def __lt__(self, other):
+ return self._compare(other, lambda s, o: s < o)
+
+ def __le__(self, other):
+ return self._compare(other, lambda s, o: s <= o)
+
+ def __eq__(self, other):
+ return self._compare(other, lambda s, o: s == o)
+
+ def __ge__(self, other):
+ return self._compare(other, lambda s, o: s >= o)
+
+ def __gt__(self, other):
+ return self._compare(other, lambda s, o: s > o)
+
+ def __ne__(self, other):
+ return self._compare(other, lambda s, o: s != o)
+
+ def _compare(self, other, method):
+ if not isinstance(other, InstallationCandidate):
+ return NotImplemented
+
+ return method(self._key, other._key)
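+
+ # Illustrative sketch (URLs are made up): because _key contains the parsed
+ # version, ordering is semantic rather than lexicographic:
+ #
+ # c1 = InstallationCandidate('foo', '1.2', 'https://x/foo-1.2.tar.gz')
+ # c2 = InstallationCandidate('foo', '1.10', 'https://x/foo-1.10.tar.gz')
+ # assert c1 < c2 # parse_version('1.10') is the later release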
+
+
+class PackageFinder(object):
+ """This finds packages.
+
+ This is meant to match easy_install's technique for looking for
+ packages, by reading pages and looking for appropriate links.
+ """
+
+ def __init__(self, find_links, index_urls, allow_all_prereleases=False,
+ trusted_hosts=None, process_dependency_links=False,
+ session=None, format_control=None, platform=None,
+ versions=None, abi=None, implementation=None):
+ """Create a PackageFinder.
+
+ :param format_control: A FormatControl object or None. Used to control
+ the selection of source packages / binary packages when consulting
+ the index and links.
+ :param platform: A string or None. If None, searches for packages
+ that are supported by the current system. Otherwise, will find
+ packages that can be built on the platform passed in. These
+ packages will only be downloaded for distribution: they will
+ not be built locally.
+ :param versions: A list of strings or None. This is passed directly
+ to pep425tags.py in the get_supported() method.
+ :param abi: A string or None. This is passed directly
+ to pep425tags.py in the get_supported() method.
+ :param implementation: A string or None. This is passed directly
+ to pep425tags.py in the get_supported() method.
+ """
+ if session is None:
+ raise TypeError(
+ "PackageFinder() missing 1 required keyword argument: "
+ "'session'"
+ )
+
+ # Build find_links. If an argument starts with ~, it may be
+ # a local file relative to a home directory. So try normalizing
+ # it and if it exists, use the normalized version.
+ # This is deliberately conservative - it might be fine just to
+ # blindly normalize anything starting with a ~...
+ self.find_links = []
+ for link in find_links:
+ if link.startswith('~'):
+ new_link = normalize_path(link)
+ if os.path.exists(new_link):
+ link = new_link
+ self.find_links.append(link)
+
+ self.index_urls = index_urls
+ self.dependency_links = []
+
+ # These are boring links that have already been logged somehow:
+ self.logged_links = set()
+
+ self.format_control = format_control or FormatControl(set(), set())
+
+ # Domains that we won't emit warnings for when not using HTTPS
+ self.secure_origins = [
+ ("*", host, "*")
+ for host in (trusted_hosts if trusted_hosts else [])
+ ]
+
+ # Do we want to allow _all_ pre-releases?
+ self.allow_all_prereleases = allow_all_prereleases
+
+ # Do we process dependency links?
+ self.process_dependency_links = process_dependency_links
+
+ # The Session we'll use to make requests
+ self.session = session
+
+ # The valid tags to check potential found wheel candidates against
+ self.valid_tags = get_supported(
+ versions=versions,
+ platform=platform,
+ abi=abi,
+ impl=implementation,
+ )
+
+ # If we don't have TLS enabled, then WARN if anyplace we're looking
+ # relies on TLS.
+ if not HAS_TLS:
+ for link in itertools.chain(self.index_urls, self.find_links):
+ parsed = urllib_parse.urlparse(link)
+ if parsed.scheme == "https":
+ logger.warning(
+ "pip is configured with locations that require "
+ "TLS/SSL, however the ssl module in Python is not "
+ "available."
+ )
+ break
+
+ def get_formatted_locations(self):
+ lines = []
+ if self.index_urls and self.index_urls != [PyPI.simple_url]:
+ lines.append(
+ "Looking in indexes: {}".format(", ".join(self.index_urls))
+ )
+ if self.find_links:
+ lines.append(
+ "Looking in links: {}".format(", ".join(self.find_links))
+ )
+ return "\n".join(lines)
+
+ def add_dependency_links(self, links):
+ # # FIXME: this shouldn't be a global list; it should only
+ # # apply to requirements of the package that specifies the
+ # # dependency_links value
+ # # FIXME: also, we should track comes_from (i.e., use Link)
+ if self.process_dependency_links:
+ warnings.warn(
+ "Dependency Links processing has been deprecated and will be "
+ "removed in a future release.",
+ RemovedInPip11Warning,
+ )
+ self.dependency_links.extend(links)
+
+ @staticmethod
+ def _sort_locations(locations, expand_dir=False):
+ """
+ Sort locations into "files" (archives) and "urls", and return
+ a pair of lists (files, urls)
+ """
+ files = []
+ urls = []
+
+ # puts the url for the given file path into the appropriate list
+ def sort_path(path):
+ url = path_to_url(path)
+ if mimetypes.guess_type(url, strict=False)[0] == 'text/html':
+ urls.append(url)
+ else:
+ files.append(url)
+
+ for url in locations:
+
+ is_local_path = os.path.exists(url)
+ is_file_url = url.startswith('file:')
+
+ if is_local_path or is_file_url:
+ if is_local_path:
+ path = url
+ else:
+ path = url_to_path(url)
+ if os.path.isdir(path):
+ if expand_dir:
+ path = os.path.realpath(path)
+ for item in os.listdir(path):
+ sort_path(os.path.join(path, item))
+ elif is_file_url:
+ urls.append(url)
+ elif os.path.isfile(path):
+ sort_path(path)
+ else:
+ logger.warning(
+ "Url '%s' is ignored: it is neither a file "
+ "nor a directory.", url,
+ )
+ elif is_url(url):
+ # Only add url with clear scheme
+ urls.append(url)
+ else:
+ logger.warning(
+ "Url '%s' is ignored. It is either a non-existing "
+ "path or lacks a specific scheme.", url,
+ )
+
+ return files, urls
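+
+ # For example (hypothetical inputs): given a local sdist archive that
+ # exists on disk and an index page URL, the archive path is converted to a
+ # file:// URL and lands in files (its guessed MIME type is not text/html),
+ # while 'https://example.com/simple/foo/' is kept in urls because it is a
+ # URL with a clear scheme rather than a local path.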
+
+ def _candidate_sort_key(self, candidate):
+ """
+ Function used to generate link sort key for link tuples.
+ The greater the return value, the more preferred it is.
+ If not finding wheels, then sorted by version only.
+ If finding wheels, then the sort order is by version, then:
+ 1. existing installs
+ 2. wheels ordered via Wheel.support_index_min(self.valid_tags)
+ 3. source archives
+ Note: it was considered to embed this logic into the Link
+ comparison operators, but then different sdist links
+ with the same version, would have to be considered equal
+ """
+ support_num = len(self.valid_tags)
+ build_tag = tuple()
+ if candidate.location.is_wheel:
+ # can raise InvalidWheelFilename
+ wheel = Wheel(candidate.location.filename)
+ if not wheel.supported(self.valid_tags):
+ raise UnsupportedWheel(
+ "%s is not a supported wheel for this platform. It "
+ "can't be sorted." % wheel.filename
+ )
+ pri = -(wheel.support_index_min(self.valid_tags))
+ if wheel.build_tag is not None:
+ match = re.match(r'^(\d+)(.*)$', wheel.build_tag)
+ build_tag_groups = match.groups()
+ build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
+ else: # sdist
+ pri = -(support_num)
+ return (candidate.version, build_tag, pri)
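+
+ # Worked example of the key (values illustrative): for two candidates of
+ # the same version, a wheel whose best tag match has support index 0 gets
+ # pri == 0, a worse-matching wheel gets a more negative pri, and an sdist
+ # gets pri == -len(self.valid_tags), so max() prefers newer versions
+ # first, then better-matching wheels, then sdists.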
+
+ def _validate_secure_origin(self, logger, location):
+ # Determine if this url used a secure transport mechanism
+ parsed = urllib_parse.urlparse(str(location))
+ origin = (parsed.scheme, parsed.hostname, parsed.port)
+
+ # The protocol to use to see if the protocol matches.
+ # Don't count the repository type as part of the protocol: in
+ # cases such as "git+ssh", only use "ssh". (I.e., Only verify against
+ # the last scheme.)
+ protocol = origin[0].rsplit('+', 1)[-1]
+
+ # Determine if our origin is a secure origin by looking through our
+ # hardcoded list of secure origins, as well as any additional ones
+ # configured on this PackageFinder instance.
+ for secure_origin in (SECURE_ORIGINS + self.secure_origins):
+ if protocol != secure_origin[0] and secure_origin[0] != "*":
+ continue
+
+ try:
+ # We need to do this decode dance to ensure that we have a
+ # unicode object, even on Python 2.x.
+ addr = ipaddress.ip_address(
+ origin[1]
+ if (
+ isinstance(origin[1], six.text_type) or
+ origin[1] is None
+ )
+ else origin[1].decode("utf8")
+ )
+ network = ipaddress.ip_network(
+ secure_origin[1]
+ if isinstance(secure_origin[1], six.text_type)
+ else secure_origin[1].decode("utf8")
+ )
+ except ValueError:
+ # We don't have both a valid address and a valid network, so
+ # we'll check this origin against hostnames.
+ if (origin[1] and
+ origin[1].lower() != secure_origin[1].lower() and
+ secure_origin[1] != "*"):
+ continue
+ else:
+ # We have a valid address and network, so see if the address
+ # is contained within the network.
+ if addr not in network:
+ continue
+
+ # Check to see if the port matches
+ if (origin[2] != secure_origin[2] and
+ secure_origin[2] != "*" and
+ secure_origin[2] is not None):
+ continue
+
+ # If we've gotten here, then this origin matches the current
+ # secure origin and we should return True
+ return True
+
+ # If we've gotten to this point, then the origin isn't secure and we
+ # will not accept it as a valid location to search. We will however
+ # log a warning that we are ignoring it.
+ logger.warning(
+ "The repository located at %s is not a trusted or secure host and "
+ "is being ignored. If this repository is available via HTTPS we "
+ "recommend you use HTTPS instead, otherwise you may silence "
+ "this warning and allow it anyway with '--trusted-host %s'.",
+ parsed.hostname,
+ parsed.hostname,
+ )
+
+ return False
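+
+ # For example (hypothetical host): running pip with
+ # --trusted-host my.index.local adds ("*", "my.index.local", "*") to
+ # self.secure_origins, so plain http://my.index.local/simple/ links pass
+ # this check without triggering the warning above.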
+
+ def _get_index_urls_locations(self, project_name):
+ """Returns the locations found via self.index_urls
+
+ Checks the url_name on the main (first in the list) index and
+ uses this url_name to produce all locations
+ """
+
+ def mkurl_pypi_url(url):
+ loc = posixpath.join(
+ url,
+ urllib_parse.quote(canonicalize_name(project_name)))
+ # For maximum compatibility with easy_install, ensure the path
+ # ends in a trailing slash. Although this isn't in the spec
+ # (and PyPI can handle it without the slash) some other index
+ # implementations might break if they relied on easy_install's
+ # behavior.
+ if not loc.endswith('/'):
+ loc = loc + '/'
+ return loc
+
+ return [mkurl_pypi_url(url) for url in self.index_urls]
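+
+ # For example: mkurl_pypi_url('https://pypi.org/simple') with project_name
+ # 'My.Package' canonicalizes the name to 'my-package' and returns
+ # 'https://pypi.org/simple/my-package/'.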
+
+ def find_all_candidates(self, project_name):
+ """Find all available InstallationCandidate for project_name
+
+ This checks index_urls, find_links and dependency_links.
+ All versions found are returned as an InstallationCandidate list.
+
+ See _link_package_versions for details on which files are accepted
+ """
+ index_locations = self._get_index_urls_locations(project_name)
+ index_file_loc, index_url_loc = self._sort_locations(index_locations)
+ fl_file_loc, fl_url_loc = self._sort_locations(
+ self.find_links, expand_dir=True,
+ )
+ dep_file_loc, dep_url_loc = self._sort_locations(self.dependency_links)
+
+ file_locations = (Link(url) for url in itertools.chain(
+ index_file_loc, fl_file_loc, dep_file_loc,
+ ))
+
+ # We trust every url that the user has given us whether it was given
+ # via --index-url or --find-links
+ # We explicitly do not trust links that came from dependency_links
+ # We want to filter out anything which does not have a secure origin.
+ url_locations = [
+ link for link in itertools.chain(
+ (Link(url) for url in index_url_loc),
+ (Link(url) for url in fl_url_loc),
+ (Link(url) for url in dep_url_loc),
+ )
+ if self._validate_secure_origin(logger, link)
+ ]
+
+ logger.debug('%d location(s) to search for versions of %s:',
+ len(url_locations), project_name)
+
+ for location in url_locations:
+ logger.debug('* %s', location)
+
+ canonical_name = canonicalize_name(project_name)
+ formats = fmt_ctl_formats(self.format_control, canonical_name)
+ search = Search(project_name, canonical_name, formats)
+ find_links_versions = self._package_versions(
+ # We trust every directly linked archive in find_links
+ (Link(url, '-f') for url in self.find_links),
+ search
+ )
+
+ page_versions = []
+ for page in self._get_pages(url_locations, project_name):
+ logger.debug('Analyzing links from page %s', page.url)
+ with indent_log():
+ page_versions.extend(
+ self._package_versions(page.links, search)
+ )
+
+ dependency_versions = self._package_versions(
+ (Link(url) for url in self.dependency_links), search
+ )
+ if dependency_versions:
+ logger.debug(
+ 'dependency_links found: %s',
+ ', '.join([
+ version.location.url for version in dependency_versions
+ ])
+ )
+
+ file_versions = self._package_versions(file_locations, search)
+ if file_versions:
+ file_versions.sort(reverse=True)
+ logger.debug(
+ 'Local files found: %s',
+ ', '.join([
+ url_to_path(candidate.location.url)
+ for candidate in file_versions
+ ])
+ )
+
+ # This is an intentional priority ordering
+ return (
+ file_versions + find_links_versions + page_versions +
+ dependency_versions
+ )
+
+ def find_requirement(self, req, upgrade):
+ """Try to find a Link matching req
+
+ Expects req, an InstallRequirement and upgrade, a boolean
+ Returns a Link if found,
+ Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise
+ """
+ all_candidates = self.find_all_candidates(req.name)
+
+ # Filter out anything which doesn't match our specifier
+ compatible_versions = set(
+ req.specifier.filter(
+ # We turn the version object into a str here because otherwise
+ # when we're debundled but setuptools isn't, Python will see
+ # packaging.version.Version and
+ # pkg_resources._vendor.packaging.version.Version as different
+ # types. This way we'll use a str as a common data interchange
+ # format. If we stop using the pkg_resources provided specifier
+ # and start using our own, we can drop the cast to str().
+ [str(c.version) for c in all_candidates],
+ prereleases=(
+ self.allow_all_prereleases
+ if self.allow_all_prereleases else None
+ ),
+ )
+ )
+ applicable_candidates = [
+ # Again, converting to str to deal with debundling.
+ c for c in all_candidates if str(c.version) in compatible_versions
+ ]
+
+ if applicable_candidates:
+ best_candidate = max(applicable_candidates,
+ key=self._candidate_sort_key)
+ else:
+ best_candidate = None
+
+ if req.satisfied_by is not None:
+ installed_version = parse_version(req.satisfied_by.version)
+ else:
+ installed_version = None
+
+ if installed_version is None and best_candidate is None:
+ logger.critical(
+ 'Could not find a version that satisfies the requirement %s '
+ '(from versions: %s)',
+ req,
+ ', '.join(
+ sorted(
+ {str(c.version) for c in all_candidates},
+ key=parse_version,
+ )
+ )
+ )
+
+ raise DistributionNotFound(
+ 'No matching distribution found for %s' % req
+ )
+
+ best_installed = False
+ if installed_version and (
+ best_candidate is None or
+ best_candidate.version <= installed_version):
+ best_installed = True
+
+ if not upgrade and installed_version is not None:
+ if best_installed:
+ logger.debug(
+ 'Existing installed version (%s) is most up-to-date and '
+ 'satisfies requirement',
+ installed_version,
+ )
+ else:
+ logger.debug(
+ 'Existing installed version (%s) satisfies requirement '
+ '(most up-to-date version is %s)',
+ installed_version,
+ best_candidate.version,
+ )
+ return None
+
+ if best_installed:
+            # We have an existing version, and it's the best version
+ logger.debug(
+ 'Installed version (%s) is most up-to-date (past versions: '
+ '%s)',
+ installed_version,
+ ', '.join(sorted(compatible_versions, key=parse_version)) or
+ "none",
+ )
+ raise BestVersionAlreadyInstalled
+
+ logger.debug(
+ 'Using version %s (newest of versions: %s)',
+ best_candidate.version,
+ ', '.join(sorted(compatible_versions, key=parse_version))
+ )
+ return best_candidate.location
+
+ def _get_pages(self, locations, project_name):
+ """
+        Yields HTMLPage objects from the given locations, skipping
+ locations that have errors.
+ """
+ seen = set()
+ for location in locations:
+ if location in seen:
+ continue
+ seen.add(location)
+
+ page = self._get_page(location)
+ if page is None:
+ continue
+
+ yield page
+
+ _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$')
+
+ def _sort_links(self, links):
+ """
+ Returns elements of links in order, non-egg links first, egg links
+ second, while eliminating duplicates
+ """
+ eggs, no_eggs = [], []
+ seen = set()
+ for link in links:
+ if link not in seen:
+ seen.add(link)
+ if link.egg_fragment:
+ eggs.append(link)
+ else:
+ no_eggs.append(link)
+ return no_eggs + eggs
+
+ def _package_versions(self, links, search):
+ result = []
+ for link in self._sort_links(links):
+ v = self._link_package_versions(link, search)
+ if v is not None:
+ result.append(v)
+ return result
+
+ def _log_skipped_link(self, link, reason):
+ if link not in self.logged_links:
+ logger.debug('Skipping link %s; %s', link, reason)
+ self.logged_links.add(link)
+
+ def _link_package_versions(self, link, search):
+ """Return an InstallationCandidate or None"""
+ version = None
+ if link.egg_fragment:
+ egg_info = link.egg_fragment
+ ext = link.ext
+ else:
+ egg_info, ext = link.splitext()
+ if not ext:
+ self._log_skipped_link(link, 'not a file')
+ return
+ if ext not in SUPPORTED_EXTENSIONS:
+ self._log_skipped_link(
+ link, 'unsupported archive format: %s' % ext,
+ )
+ return
+ if "binary" not in search.formats and ext == wheel_ext:
+ self._log_skipped_link(
+ link, 'No binaries permitted for %s' % search.supplied,
+ )
+ return
+ if "macosx10" in link.path and ext == '.zip':
+ self._log_skipped_link(link, 'macosx10 one')
+ return
+ if ext == wheel_ext:
+ try:
+ wheel = Wheel(link.filename)
+ except InvalidWheelFilename:
+ self._log_skipped_link(link, 'invalid wheel filename')
+ return
+ if canonicalize_name(wheel.name) != search.canonical:
+ self._log_skipped_link(
+ link, 'wrong project name (not %s)' % search.supplied)
+ return
+
+ if not wheel.supported(self.valid_tags):
+ self._log_skipped_link(
+ link, 'it is not compatible with this Python')
+ return
+
+ version = wheel.version
+
+ # This should be up by the search.ok_binary check, but see issue 2700.
+ if "source" not in search.formats and ext != wheel_ext:
+ self._log_skipped_link(
+ link, 'No sources permitted for %s' % search.supplied,
+ )
+ return
+
+ if not version:
+ version = egg_info_matches(egg_info, search.supplied, link)
+ if version is None:
+ self._log_skipped_link(
+ link, 'wrong project name (not %s)' % search.supplied)
+ return
+
+ match = self._py_version_re.search(version)
+ if match:
+ version = version[:match.start()]
+ py_version = match.group(1)
+ if py_version != sys.version[:3]:
+ self._log_skipped_link(
+ link, 'Python version is incorrect')
+ return
+ try:
+ support_this_python = check_requires_python(link.requires_python)
+ except specifiers.InvalidSpecifier:
+ logger.debug("Package %s has an invalid Requires-Python entry: %s",
+ link.filename, link.requires_python)
+ support_this_python = True
+
+ if not support_this_python:
+            logger.debug("The package %s is incompatible with the python "
+                         "version in use. Acceptable python versions are: %s",
+                         link, link.requires_python)
+ return
+ logger.debug('Found link %s, version: %s', link, version)
+
+ return InstallationCandidate(search.supplied, version, link)
+
+ def _get_page(self, link):
+ return HTMLPage.get_page(link, session=self.session)
+
+
+def egg_info_matches(
+ egg_info, search_name, link,
+ _egg_info_re=re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)):
+ """Pull the version part out of a string.
+
+ :param egg_info: The string to parse. E.g. foo-2.1
+ :param search_name: The name of the package this belongs to. None to
+ infer the name. Note that this cannot unambiguously parse strings
+ like foo-2-2 which might be foo, 2-2 or foo-2, 2.
+ :param link: The link the string came from, for logging on failure.
+ """
+ match = _egg_info_re.search(egg_info)
+ if not match:
+ logger.debug('Could not parse version from link: %s', link)
+ return None
+ if search_name is None:
+ full_match = match.group(0)
+ return full_match[full_match.index('-'):]
+ name = match.group(0).lower()
+ # To match the "safe" name that pkg_resources creates:
+ name = name.replace('_', '-')
+ # project name and version must be separated by a dash
+ look_for = search_name.lower() + "-"
+ if name.startswith(look_for):
+ return match.group(0)[len(look_for):]
+ else:
+ return None
+
+
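
As a rough usage sketch (assuming the pip 10.0.1 module above is importable as pip._internal.index; the filename stems and project names are invented), egg_info_matches can be exercised directly:

    from pip._internal.index import egg_info_matches

    # link is only used when logging a parse failure, so None is fine here
    print(egg_info_matches('foo-2.1', 'foo', link=None))          # 2.1
    print(egg_info_matches('foo_bar-0.3', 'foo-bar', link=None))  # 0.3
    print(egg_info_matches('unrelated-1.0', 'foo', link=None))    # None

The second call works because the function normalizes '_' to '-' before comparing against the "safe" name, matching what pkg_resources does.
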
+class HTMLPage(object):
+ """Represents one page, along with its URL"""
+
+ def __init__(self, content, url, headers=None):
+ # Determine if we have any encoding information in our headers
+ encoding = None
+ if headers and "Content-Type" in headers:
+ content_type, params = cgi.parse_header(headers["Content-Type"])
+
+ if "charset" in params:
+ encoding = params['charset']
+
+ self.content = content
+ self.parsed = html5lib.parse(
+ self.content,
+ transport_encoding=encoding,
+ namespaceHTMLElements=False,
+ )
+ self.url = url
+ self.headers = headers
+
+ def __str__(self):
+ return self.url
+
+ @classmethod
+ def get_page(cls, link, skip_archives=True, session=None):
+ if session is None:
+ raise TypeError(
+ "get_page() missing 1 required keyword argument: 'session'"
+ )
+
+ url = link.url
+ url = url.split('#', 1)[0]
+
+ # Check for VCS schemes that do not support lookup as web pages.
+ from pip._internal.vcs import VcsSupport
+ for scheme in VcsSupport.schemes:
+ if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
+ logger.debug('Cannot look at %s URL %s', scheme, link)
+ return None
+
+ try:
+ if skip_archives:
+ filename = link.filename
+ for bad_ext in ARCHIVE_EXTENSIONS:
+ if filename.endswith(bad_ext):
+ content_type = cls._get_content_type(
+ url, session=session,
+ )
+ if content_type.lower().startswith('text/html'):
+ break
+ else:
+ logger.debug(
+ 'Skipping page %s because of Content-Type: %s',
+ link,
+ content_type,
+ )
+ return
+
+ logger.debug('Getting page %s', url)
+
+ # Tack index.html onto file:// URLs that point to directories
+ (scheme, netloc, path, params, query, fragment) = \
+ urllib_parse.urlparse(url)
+ if (scheme == 'file' and
+ os.path.isdir(urllib_request.url2pathname(path))):
+ # add trailing slash if not present so urljoin doesn't trim
+ # final segment
+ if not url.endswith('/'):
+ url += '/'
+ url = urllib_parse.urljoin(url, 'index.html')
+ logger.debug(' file: URL is directory, getting %s', url)
+
+ resp = session.get(
+ url,
+ headers={
+ "Accept": "text/html",
+ "Cache-Control": "max-age=600",
+ },
+ )
+ resp.raise_for_status()
+
+ # The check for archives above only works if the url ends with
+ # something that looks like an archive. However that is not a
+            # requirement of a url. Unless we issue a HEAD request on every
+ # url we cannot know ahead of time for sure if something is HTML
+ # or not. However we can check after we've downloaded it.
+ content_type = resp.headers.get('Content-Type', 'unknown')
+ if not content_type.lower().startswith("text/html"):
+ logger.debug(
+ 'Skipping page %s because of Content-Type: %s',
+ link,
+ content_type,
+ )
+ return
+
+ inst = cls(resp.content, resp.url, resp.headers)
+ except requests.HTTPError as exc:
+ cls._handle_fail(link, exc, url)
+ except SSLError as exc:
+ reason = "There was a problem confirming the ssl certificate: "
+ reason += str(exc)
+ cls._handle_fail(link, reason, url, meth=logger.info)
+ except requests.ConnectionError as exc:
+ cls._handle_fail(link, "connection error: %s" % exc, url)
+ except requests.Timeout:
+ cls._handle_fail(link, "timed out", url)
+ else:
+ return inst
+
+ @staticmethod
+ def _handle_fail(link, reason, url, meth=None):
+ if meth is None:
+ meth = logger.debug
+
+ meth("Could not fetch URL %s: %s - skipping", link, reason)
+
+ @staticmethod
+ def _get_content_type(url, session):
+ """Get the Content-Type of the given url, using a HEAD request"""
+ scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url)
+ if scheme not in {'http', 'https'}:
+ # FIXME: some warning or something?
+ # assertion error?
+ return ''
+
+ resp = session.head(url, allow_redirects=True)
+ resp.raise_for_status()
+
+ return resp.headers.get("Content-Type", "")
+
+ @cached_property
+ def base_url(self):
+ bases = [
+ x for x in self.parsed.findall(".//base")
+ if x.get("href") is not None
+ ]
+ if bases and bases[0].get("href"):
+ return bases[0].get("href")
+ else:
+ return self.url
+
+ @property
+ def links(self):
+ """Yields all links in the page"""
+ for anchor in self.parsed.findall(".//a"):
+ if anchor.get("href"):
+ href = anchor.get("href")
+ url = self.clean_link(
+ urllib_parse.urljoin(self.base_url, href)
+ )
+ pyrequire = anchor.get('data-requires-python')
+ pyrequire = unescape(pyrequire) if pyrequire else None
+ yield Link(url, self, requires_python=pyrequire)
+
+ _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)
+
+ def clean_link(self, url):
+ """Makes sure a link is fully encoded. That is, if a ' ' shows up in
+ the link, it will be rewritten to %20 (while not over-quoting
+ % or other characters)."""
+ return self._clean_re.sub(
+ lambda match: '%%%2x' % ord(match.group(0)), url)
+
+
+class Link(object):
+
+ def __init__(self, url, comes_from=None, requires_python=None):
+ """
+ Object representing a parsed link from https://pypi.org/simple/*
+
+ url:
+ url of the resource pointed to (href of the link)
+ comes_from:
+ instance of HTMLPage where the link was found, or string.
+ requires_python:
+ String containing the `Requires-Python` metadata field, specified
+ in PEP 345. This may be specified by a data-requires-python
+ attribute in the HTML link tag, as described in PEP 503.
+ """
+
+ # url can be a UNC windows share
+ if url.startswith('\\\\'):
+ url = path_to_url(url)
+
+ self.url = url
+ self.comes_from = comes_from
+ self.requires_python = requires_python if requires_python else None
+
+ def __str__(self):
+ if self.requires_python:
+ rp = ' (requires-python:%s)' % self.requires_python
+ else:
+ rp = ''
+ if self.comes_from:
+ return '%s (from %s)%s' % (self.url, self.comes_from, rp)
+ else:
+ return str(self.url)
+
+ def __repr__(self):
+        return '<Link %s>' % self
+
+ def __eq__(self, other):
+ if not isinstance(other, Link):
+ return NotImplemented
+ return self.url == other.url
+
+ def __ne__(self, other):
+ if not isinstance(other, Link):
+ return NotImplemented
+ return self.url != other.url
+
+ def __lt__(self, other):
+ if not isinstance(other, Link):
+ return NotImplemented
+ return self.url < other.url
+
+ def __le__(self, other):
+ if not isinstance(other, Link):
+ return NotImplemented
+ return self.url <= other.url
+
+ def __gt__(self, other):
+ if not isinstance(other, Link):
+ return NotImplemented
+ return self.url > other.url
+
+ def __ge__(self, other):
+ if not isinstance(other, Link):
+ return NotImplemented
+ return self.url >= other.url
+
+ def __hash__(self):
+ return hash(self.url)
+
+ @property
+ def filename(self):
+ _, netloc, path, _, _ = urllib_parse.urlsplit(self.url)
+ name = posixpath.basename(path.rstrip('/')) or netloc
+ name = urllib_parse.unquote(name)
+ assert name, ('URL %r produced no filename' % self.url)
+ return name
+
+ @property
+ def scheme(self):
+ return urllib_parse.urlsplit(self.url)[0]
+
+ @property
+ def netloc(self):
+ return urllib_parse.urlsplit(self.url)[1]
+
+ @property
+ def path(self):
+ return urllib_parse.unquote(urllib_parse.urlsplit(self.url)[2])
+
+ def splitext(self):
+ return splitext(posixpath.basename(self.path.rstrip('/')))
+
+ @property
+ def ext(self):
+ return self.splitext()[1]
+
+ @property
+ def url_without_fragment(self):
+ scheme, netloc, path, query, fragment = urllib_parse.urlsplit(self.url)
+ return urllib_parse.urlunsplit((scheme, netloc, path, query, None))
+
+ _egg_fragment_re = re.compile(r'[#&]egg=([^&]*)')
+
+ @property
+ def egg_fragment(self):
+ match = self._egg_fragment_re.search(self.url)
+ if not match:
+ return None
+ return match.group(1)
+
+ _subdirectory_fragment_re = re.compile(r'[#&]subdirectory=([^&]*)')
+
+ @property
+ def subdirectory_fragment(self):
+ match = self._subdirectory_fragment_re.search(self.url)
+ if not match:
+ return None
+ return match.group(1)
+
+ _hash_re = re.compile(
+ r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)'
+ )
+
+ @property
+ def hash(self):
+ match = self._hash_re.search(self.url)
+ if match:
+ return match.group(2)
+ return None
+
+ @property
+ def hash_name(self):
+ match = self._hash_re.search(self.url)
+ if match:
+ return match.group(1)
+ return None
+
+ @property
+ def show_url(self):
+ return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0])
+
+ @property
+ def is_wheel(self):
+ return self.ext == wheel_ext
+
+ @property
+ def is_artifact(self):
+ """
+ Determines if this points to an actual artifact (e.g. a tarball) or if
+ it points to an "abstract" thing like a path or a VCS location.
+ """
+ from pip._internal.vcs import vcs
+
+ if self.scheme in vcs.all_schemes:
+ return False
+
+ return True
+
+
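
A minimal sketch of the Link model above (again assuming the pip 10.0.1 import path; the URL is invented), showing how the fragment-parsing properties pick a link apart:

    from pip._internal.index import Link

    link = Link('https://files.example.org/foo-1.0.tar.gz#egg=foo&md5=0123abcd')
    print(link.filename)              # foo-1.0.tar.gz
    print(link.ext)                   # .tar.gz (pip's splitext keeps .tar.gz whole)
    print(link.egg_fragment)          # foo
    print(link.hash_name, link.hash)  # md5 0123abcd
    print(link.url_without_fragment)  # https://files.example.org/foo-1.0.tar.gz
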
+FormatControl = namedtuple('FormatControl', 'no_binary only_binary')
+"""This object has two fields, no_binary and only_binary.
+
+If a field is falsy, it isn't set. If it is {':all:'}, it should match all
+packages except those listed in the other field. Only one field can be set
+to {':all:'} at a time. The rest of the time exact package name matches
+are listed, with any given package only showing up in one field at a time.
+"""
+
+
+def fmt_ctl_handle_mutual_exclude(value, target, other):
+ new = value.split(',')
+ while ':all:' in new:
+ other.clear()
+ target.clear()
+ target.add(':all:')
+ del new[:new.index(':all:') + 1]
+ if ':none:' not in new:
+ # Without a none, we want to discard everything as :all: covers it
+ return
+ for name in new:
+ if name == ':none:':
+ target.clear()
+ continue
+ name = canonicalize_name(name)
+ other.discard(name)
+ target.add(name)
+
+
+def fmt_ctl_formats(fmt_ctl, canonical_name):
+ result = {"binary", "source"}
+ if canonical_name in fmt_ctl.only_binary:
+ result.discard('source')
+ elif canonical_name in fmt_ctl.no_binary:
+ result.discard('binary')
+ elif ':all:' in fmt_ctl.only_binary:
+ result.discard('source')
+ elif ':all:' in fmt_ctl.no_binary:
+ result.discard('binary')
+ return frozenset(result)
+
+
+def fmt_ctl_no_binary(fmt_ctl):
+ fmt_ctl_handle_mutual_exclude(
+ ':all:', fmt_ctl.no_binary, fmt_ctl.only_binary,
+ )
+
+
+Search = namedtuple('Search', 'supplied canonical formats')
+"""Capture key aspects of a search.
+
+:attribute supplied: The user supplied package.
+:attribute canonical: The canonical package name.
+:attribute formats: The formats allowed for this package. Should be a set
+ with 'binary' or 'source' or both in it.
+"""
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/locations.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/locations.py
index 5a20c92..ce8f7e9 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/locations.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/locations.py
@@ -1,194 +1,194 @@
-"""Locations where we look for configs, install stuff, etc"""
-from __future__ import absolute_import
-
-import os
-import os.path
-import platform
-import site
-import sys
-import sysconfig
-from distutils import sysconfig as distutils_sysconfig
-from distutils.command.install import SCHEME_KEYS, install # type: ignore
-
-from pip._internal.compat import WINDOWS, expanduser
-from pip._internal.utils import appdirs
-
-# Application Directories
-USER_CACHE_DIR = appdirs.user_cache_dir("pip")
-
-
-DELETE_MARKER_MESSAGE = '''\
-This file is placed here by pip to indicate the source was put
-here by pip.
-
-Once this package is successfully installed this source code will be
-deleted (unless you remove this file).
-'''
-PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt'
-
-
-def write_delete_marker_file(directory):
- """
- Write the pip delete marker file into this directory.
- """
- filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME)
- with open(filepath, 'w') as marker_fp:
- marker_fp.write(DELETE_MARKER_MESSAGE)
-
-
-def running_under_virtualenv():
- """
- Return True if we're running inside a virtualenv, False otherwise.
-
- """
- if hasattr(sys, 'real_prefix'):
- return True
- elif sys.prefix != getattr(sys, "base_prefix", sys.prefix):
- return True
-
- return False
-
-
-def virtualenv_no_global():
- """
- Return True if in a venv and no system site packages.
- """
- # this mirrors the logic in virtualenv.py for locating the
- # no-global-site-packages.txt file
- site_mod_dir = os.path.dirname(os.path.abspath(site.__file__))
- no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt')
- if running_under_virtualenv() and os.path.isfile(no_global_file):
- return True
-
-
-if running_under_virtualenv():
- src_prefix = os.path.join(sys.prefix, 'src')
-else:
- # FIXME: keep src in cwd for now (it is not a temporary folder)
- try:
- src_prefix = os.path.join(os.getcwd(), 'src')
- except OSError:
- # In case the current working directory has been renamed or deleted
- sys.exit(
- "The folder you are executing pip from can no longer be found."
- )
-
-# under macOS + virtualenv sys.prefix is not properly resolved
-# it is something like /path/to/python/bin/..
-# Note: using realpath due to tmp dirs on OSX being symlinks
-src_prefix = os.path.abspath(src_prefix)
-
-# FIXME doesn't account for venv linked to global site-packages
-
-site_packages = sysconfig.get_path("purelib")
-# This is because of a bug in PyPy's sysconfig module, see
-# https://bitbucket.org/pypy/pypy/issues/2506/sysconfig-returns-incorrect-paths
-# for more information.
-if platform.python_implementation().lower() == "pypy":
- site_packages = distutils_sysconfig.get_python_lib()
-try:
- # Use getusersitepackages if this is present, as it ensures that the
- # value is initialised properly.
- user_site = site.getusersitepackages()
-except AttributeError:
- user_site = site.USER_SITE
-user_dir = expanduser('~')
-if WINDOWS:
- bin_py = os.path.join(sys.prefix, 'Scripts')
- bin_user = os.path.join(user_site, 'Scripts')
- # buildout uses 'bin' on Windows too?
- if not os.path.exists(bin_py):
- bin_py = os.path.join(sys.prefix, 'bin')
- bin_user = os.path.join(user_site, 'bin')
-
- config_basename = 'pip.ini'
-
- legacy_storage_dir = os.path.join(user_dir, 'pip')
- legacy_config_file = os.path.join(
- legacy_storage_dir,
- config_basename,
- )
-else:
- bin_py = os.path.join(sys.prefix, 'bin')
- bin_user = os.path.join(user_site, 'bin')
-
- config_basename = 'pip.conf'
-
- legacy_storage_dir = os.path.join(user_dir, '.pip')
- legacy_config_file = os.path.join(
- legacy_storage_dir,
- config_basename,
- )
- # Forcing to use /usr/local/bin for standard macOS framework installs
- # Also log to ~/Library/Logs/ for use with the Console.app log viewer
- if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
- bin_py = '/usr/local/bin'
-
-site_config_files = [
- os.path.join(path, config_basename)
- for path in appdirs.site_config_dirs('pip')
-]
-
-venv_config_file = os.path.join(sys.prefix, config_basename)
-new_config_file = os.path.join(appdirs.user_config_dir("pip"), config_basename)
-
-
-def distutils_scheme(dist_name, user=False, home=None, root=None,
- isolated=False, prefix=None):
- """
- Return a distutils install scheme
- """
- from distutils.dist import Distribution
-
- scheme = {}
-
- if isolated:
- extra_dist_args = {"script_args": ["--no-user-cfg"]}
- else:
- extra_dist_args = {}
- dist_args = {'name': dist_name}
- dist_args.update(extra_dist_args)
-
- d = Distribution(dist_args)
- d.parse_config_files()
- i = d.get_command_obj('install', create=True)
- # NOTE: setting user or home has the side-effect of creating the home dir
- # or user base for installations during finalize_options()
- # ideally, we'd prefer a scheme class that has no side-effects.
- assert not (user and prefix), "user={} prefix={}".format(user, prefix)
- i.user = user or i.user
- if user:
- i.prefix = ""
- i.prefix = prefix or i.prefix
- i.home = home or i.home
- i.root = root or i.root
- i.finalize_options()
- for key in SCHEME_KEYS:
- scheme[key] = getattr(i, 'install_' + key)
-
- # install_lib specified in setup.cfg should install *everything*
- # into there (i.e. it takes precedence over both purelib and
- # platlib). Note, i.install_lib is *always* set after
- # finalize_options(); we only want to override here if the user
- # has explicitly requested it hence going back to the config
- if 'install_lib' in d.get_option_dict('install'):
- scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))
-
- if running_under_virtualenv():
- scheme['headers'] = os.path.join(
- sys.prefix,
- 'include',
- 'site',
- 'python' + sys.version[:3],
- dist_name,
- )
-
- if root is not None:
- path_no_drive = os.path.splitdrive(
- os.path.abspath(scheme["headers"]))[1]
- scheme["headers"] = os.path.join(
- root,
- path_no_drive[1:],
- )
-
- return scheme
+"""Locations where we look for configs, install stuff, etc"""
+from __future__ import absolute_import
+
+import os
+import os.path
+import platform
+import site
+import sys
+import sysconfig
+from distutils import sysconfig as distutils_sysconfig
+from distutils.command.install import SCHEME_KEYS, install # type: ignore
+
+from pip._internal.compat import WINDOWS, expanduser
+from pip._internal.utils import appdirs
+
+# Application Directories
+USER_CACHE_DIR = appdirs.user_cache_dir("pip")
+
+
+DELETE_MARKER_MESSAGE = '''\
+This file is placed here by pip to indicate the source was put
+here by pip.
+
+Once this package is successfully installed this source code will be
+deleted (unless you remove this file).
+'''
+PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt'
+
+
+def write_delete_marker_file(directory):
+ """
+ Write the pip delete marker file into this directory.
+ """
+ filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME)
+ with open(filepath, 'w') as marker_fp:
+ marker_fp.write(DELETE_MARKER_MESSAGE)
+
+
+def running_under_virtualenv():
+ """
+ Return True if we're running inside a virtualenv, False otherwise.
+
+ """
+ if hasattr(sys, 'real_prefix'):
+ return True
+ elif sys.prefix != getattr(sys, "base_prefix", sys.prefix):
+ return True
+
+ return False
+
+
+def virtualenv_no_global():
+ """
+ Return True if in a venv and no system site packages.
+ """
+ # this mirrors the logic in virtualenv.py for locating the
+ # no-global-site-packages.txt file
+ site_mod_dir = os.path.dirname(os.path.abspath(site.__file__))
+ no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt')
+ if running_under_virtualenv() and os.path.isfile(no_global_file):
+ return True
+
+
+if running_under_virtualenv():
+ src_prefix = os.path.join(sys.prefix, 'src')
+else:
+ # FIXME: keep src in cwd for now (it is not a temporary folder)
+ try:
+ src_prefix = os.path.join(os.getcwd(), 'src')
+ except OSError:
+ # In case the current working directory has been renamed or deleted
+ sys.exit(
+ "The folder you are executing pip from can no longer be found."
+ )
+
+# under macOS + virtualenv sys.prefix is not properly resolved
+# it is something like /path/to/python/bin/..
+# Note: using realpath due to tmp dirs on OSX being symlinks
+src_prefix = os.path.abspath(src_prefix)
+
+# FIXME doesn't account for venv linked to global site-packages
+
+site_packages = sysconfig.get_path("purelib")
+# This is because of a bug in PyPy's sysconfig module, see
+# https://bitbucket.org/pypy/pypy/issues/2506/sysconfig-returns-incorrect-paths
+# for more information.
+if platform.python_implementation().lower() == "pypy":
+ site_packages = distutils_sysconfig.get_python_lib()
+try:
+ # Use getusersitepackages if this is present, as it ensures that the
+ # value is initialised properly.
+ user_site = site.getusersitepackages()
+except AttributeError:
+ user_site = site.USER_SITE
+user_dir = expanduser('~')
+if WINDOWS:
+ bin_py = os.path.join(sys.prefix, 'Scripts')
+ bin_user = os.path.join(user_site, 'Scripts')
+ # buildout uses 'bin' on Windows too?
+ if not os.path.exists(bin_py):
+ bin_py = os.path.join(sys.prefix, 'bin')
+ bin_user = os.path.join(user_site, 'bin')
+
+ config_basename = 'pip.ini'
+
+ legacy_storage_dir = os.path.join(user_dir, 'pip')
+ legacy_config_file = os.path.join(
+ legacy_storage_dir,
+ config_basename,
+ )
+else:
+ bin_py = os.path.join(sys.prefix, 'bin')
+ bin_user = os.path.join(user_site, 'bin')
+
+ config_basename = 'pip.conf'
+
+ legacy_storage_dir = os.path.join(user_dir, '.pip')
+ legacy_config_file = os.path.join(
+ legacy_storage_dir,
+ config_basename,
+ )
+ # Forcing to use /usr/local/bin for standard macOS framework installs
+ # Also log to ~/Library/Logs/ for use with the Console.app log viewer
+ if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
+ bin_py = '/usr/local/bin'
+
+site_config_files = [
+ os.path.join(path, config_basename)
+ for path in appdirs.site_config_dirs('pip')
+]
+
+venv_config_file = os.path.join(sys.prefix, config_basename)
+new_config_file = os.path.join(appdirs.user_config_dir("pip"), config_basename)
+
+
+def distutils_scheme(dist_name, user=False, home=None, root=None,
+ isolated=False, prefix=None):
+ """
+ Return a distutils install scheme
+ """
+ from distutils.dist import Distribution
+
+ scheme = {}
+
+ if isolated:
+ extra_dist_args = {"script_args": ["--no-user-cfg"]}
+ else:
+ extra_dist_args = {}
+ dist_args = {'name': dist_name}
+ dist_args.update(extra_dist_args)
+
+ d = Distribution(dist_args)
+ d.parse_config_files()
+ i = d.get_command_obj('install', create=True)
+ # NOTE: setting user or home has the side-effect of creating the home dir
+ # or user base for installations during finalize_options()
+ # ideally, we'd prefer a scheme class that has no side-effects.
+ assert not (user and prefix), "user={} prefix={}".format(user, prefix)
+ i.user = user or i.user
+ if user:
+ i.prefix = ""
+ i.prefix = prefix or i.prefix
+ i.home = home or i.home
+ i.root = root or i.root
+ i.finalize_options()
+ for key in SCHEME_KEYS:
+ scheme[key] = getattr(i, 'install_' + key)
+
+ # install_lib specified in setup.cfg should install *everything*
+ # into there (i.e. it takes precedence over both purelib and
+ # platlib). Note, i.install_lib is *always* set after
+ # finalize_options(); we only want to override here if the user
+ # has explicitly requested it hence going back to the config
+ if 'install_lib' in d.get_option_dict('install'):
+ scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))
+
+ if running_under_virtualenv():
+ scheme['headers'] = os.path.join(
+ sys.prefix,
+ 'include',
+ 'site',
+ 'python' + sys.version[:3],
+ dist_name,
+ )
+
+ if root is not None:
+ path_no_drive = os.path.splitdrive(
+ os.path.abspath(scheme["headers"]))[1]
+ scheme["headers"] = os.path.join(
+ root,
+ path_no_drive[1:],
+ )
+
+ return scheme
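
As a hedged sketch of the module above in use (assuming pip 10.0.1 is importable; the project name is invented), distutils_scheme returns one path per SCHEME_KEYS entry (purelib, platlib, headers, scripts, data):

    from pip._internal.locations import distutils_scheme, running_under_virtualenv

    print(running_under_virtualenv())
    for key, path in sorted(distutils_scheme('example-project').items()):
        print('%s -> %s' % (key, path))
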
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/models/__init__.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/models/__init__.py
index 505d92c..2d080a4 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/models/__init__.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/models/__init__.py
@@ -1,4 +1,4 @@
-from pip._internal.models.index import Index, PyPI
-
-
-__all__ = ["Index", "PyPI"]
+from pip._internal.models.index import Index, PyPI
+
+
+__all__ = ["Index", "PyPI"]
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/models/index.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/models/index.py
index a7f10c8..161de50 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/models/index.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/models/index.py
@@ -1,15 +1,15 @@
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-
-
-class Index(object):
- def __init__(self, url):
- self.url = url
- self.netloc = urllib_parse.urlsplit(url).netloc
- self.simple_url = self.url_to_path('simple')
- self.pypi_url = self.url_to_path('pypi')
-
- def url_to_path(self, path):
- return urllib_parse.urljoin(self.url, path)
-
-
-PyPI = Index('https://pypi.org/')
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+
+
+class Index(object):
+ def __init__(self, url):
+ self.url = url
+ self.netloc = urllib_parse.urlsplit(url).netloc
+ self.simple_url = self.url_to_path('simple')
+ self.pypi_url = self.url_to_path('pypi')
+
+ def url_to_path(self, path):
+ return urllib_parse.urljoin(self.url, path)
+
+
+PyPI = Index('https://pypi.org/')
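
A minimal usage sketch of the Index model above (pip 10.0.1 import path assumed; the mirror URL is invented):

    from pip._internal.models.index import Index, PyPI

    print(PyPI.netloc)      # pypi.org
    print(PyPI.simple_url)  # https://pypi.org/simple
    mirror = Index('https://mirror.example.org/')
    print(mirror.pypi_url)  # https://mirror.example.org/pypi
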
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/operations/check.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/operations/check.py
index b1ad5b6..bab6b9f 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/operations/check.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/operations/check.py
@@ -1,106 +1,106 @@
-"""Validation of dependencies of packages
-"""
-
-from collections import namedtuple
-
-from pip._vendor.packaging.utils import canonicalize_name
-
-from pip._internal.operations.prepare import make_abstract_dist
-
-from pip._internal.utils.misc import get_installed_distributions
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from pip._internal.req.req_install import InstallRequirement
- from typing import Any, Dict, Iterator, Set, Tuple, List
-
- # Shorthands
- PackageSet = Dict[str, 'PackageDetails']
- Missing = Tuple[str, Any]
- Conflicting = Tuple[str, str, Any]
-
- MissingDict = Dict[str, List[Missing]]
- ConflictingDict = Dict[str, List[Conflicting]]
- CheckResult = Tuple[MissingDict, ConflictingDict]
-
-PackageDetails = namedtuple('PackageDetails', ['version', 'requires'])
-
-
-def create_package_set_from_installed(**kwargs):
- # type: (**Any) -> PackageSet
- """Converts a list of distributions into a PackageSet.
- """
- # Default to using all packages installed on the system
- if kwargs == {}:
- kwargs = {"local_only": False, "skip": ()}
- retval = {}
- for dist in get_installed_distributions(**kwargs):
- name = canonicalize_name(dist.project_name)
- retval[name] = PackageDetails(dist.version, dist.requires())
- return retval
-
-
-def check_package_set(package_set):
- # type: (PackageSet) -> CheckResult
- """Check if a package set is consistent
- """
- missing = dict()
- conflicting = dict()
-
- for package_name in package_set:
- # Info about dependencies of package_name
- missing_deps = set() # type: Set[Missing]
- conflicting_deps = set() # type: Set[Conflicting]
-
- for req in package_set[package_name].requires:
- name = canonicalize_name(req.project_name) # type: str
-
- # Check if it's missing
- if name not in package_set:
- missed = True
- if req.marker is not None:
- missed = req.marker.evaluate()
- if missed:
- missing_deps.add((name, req))
- continue
-
- # Check if there's a conflict
- version = package_set[name].version # type: str
- if not req.specifier.contains(version, prereleases=True):
- conflicting_deps.add((name, version, req))
-
- def str_key(x):
- return str(x)
-
- if missing_deps:
- missing[package_name] = sorted(missing_deps, key=str_key)
- if conflicting_deps:
- conflicting[package_name] = sorted(conflicting_deps, key=str_key)
-
- return missing, conflicting
-
-
-def check_install_conflicts(to_install):
- # type: (List[InstallRequirement]) -> Tuple[PackageSet, CheckResult]
- """For checking if the dependency graph would be consistent after \
- installing given requirements
- """
- # Start from the current state
- state = create_package_set_from_installed()
- _simulate_installation_of(to_install, state)
- return state, check_package_set(state)
-
-
-# NOTE from @pradyunsg
-# This required a minor update in dependency link handling logic over at
-# operations.prepare.IsSDist.dist() to get it working
-def _simulate_installation_of(to_install, state):
- # type: (List[InstallRequirement], PackageSet) -> None
- """Computes the version of packages after installing to_install.
- """
-
- # Modify it as installing requirement_set would (assuming no errors)
- for inst_req in to_install:
- dist = make_abstract_dist(inst_req).dist(finder=None)
- name = canonicalize_name(dist.key)
- state[name] = PackageDetails(dist.version, dist.requires())
+"""Validation of dependencies of packages
+"""
+
+from collections import namedtuple
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.operations.prepare import make_abstract_dist
+
+from pip._internal.utils.misc import get_installed_distributions
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from pip._internal.req.req_install import InstallRequirement
+ from typing import Any, Dict, Iterator, Set, Tuple, List
+
+ # Shorthands
+ PackageSet = Dict[str, 'PackageDetails']
+ Missing = Tuple[str, Any]
+ Conflicting = Tuple[str, str, Any]
+
+ MissingDict = Dict[str, List[Missing]]
+ ConflictingDict = Dict[str, List[Conflicting]]
+ CheckResult = Tuple[MissingDict, ConflictingDict]
+
+PackageDetails = namedtuple('PackageDetails', ['version', 'requires'])
+
+
+def create_package_set_from_installed(**kwargs):
+ # type: (**Any) -> PackageSet
+ """Converts a list of distributions into a PackageSet.
+ """
+ # Default to using all packages installed on the system
+ if kwargs == {}:
+ kwargs = {"local_only": False, "skip": ()}
+ retval = {}
+ for dist in get_installed_distributions(**kwargs):
+ name = canonicalize_name(dist.project_name)
+ retval[name] = PackageDetails(dist.version, dist.requires())
+ return retval
+
+
+def check_package_set(package_set):
+ # type: (PackageSet) -> CheckResult
+ """Check if a package set is consistent
+ """
+ missing = dict()
+ conflicting = dict()
+
+ for package_name in package_set:
+ # Info about dependencies of package_name
+ missing_deps = set() # type: Set[Missing]
+ conflicting_deps = set() # type: Set[Conflicting]
+
+ for req in package_set[package_name].requires:
+ name = canonicalize_name(req.project_name) # type: str
+
+ # Check if it's missing
+ if name not in package_set:
+ missed = True
+ if req.marker is not None:
+ missed = req.marker.evaluate()
+ if missed:
+ missing_deps.add((name, req))
+ continue
+
+ # Check if there's a conflict
+ version = package_set[name].version # type: str
+ if not req.specifier.contains(version, prereleases=True):
+ conflicting_deps.add((name, version, req))
+
+ def str_key(x):
+ return str(x)
+
+ if missing_deps:
+ missing[package_name] = sorted(missing_deps, key=str_key)
+ if conflicting_deps:
+ conflicting[package_name] = sorted(conflicting_deps, key=str_key)
+
+ return missing, conflicting
+
+
+def check_install_conflicts(to_install):
+ # type: (List[InstallRequirement]) -> Tuple[PackageSet, CheckResult]
+ """For checking if the dependency graph would be consistent after \
+ installing given requirements
+ """
+ # Start from the current state
+ state = create_package_set_from_installed()
+ _simulate_installation_of(to_install, state)
+ return state, check_package_set(state)
+
+
+# NOTE from @pradyunsg
+# This required a minor update in dependency link handling logic over at
+# operations.prepare.IsSDist.dist() to get it working
+def _simulate_installation_of(to_install, state):
+ # type: (List[InstallRequirement], PackageSet) -> None
+ """Computes the version of packages after installing to_install.
+ """
+
+ # Modify it as installing requirement_set would (assuming no errors)
+ for inst_req in to_install:
+ dist = make_abstract_dist(inst_req).dist(finder=None)
+ name = canonicalize_name(dist.key)
+ state[name] = PackageDetails(dist.version, dist.requires())
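
Because check_package_set above operates on any PackageSet-shaped mapping, it can be fed hand-built data instead of the installed environment. A hedged sketch (pip 10.0.1 import path assumed; the names and versions are invented):

    from pkg_resources import Requirement
    from pip._internal.operations.check import PackageDetails, check_package_set

    package_set = {
        'flask': PackageDetails('1.0', [Requirement.parse('werkzeug>=0.14')]),
        'werkzeug': PackageDetails('0.11', []),
    }
    missing, conflicting = check_package_set(package_set)
    print(missing)      # {} -- werkzeug is present
    print(conflicting)  # flask conflicts: 0.11 does not satisfy werkzeug>=0.14
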
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/operations/freeze.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/operations/freeze.py
index b6821c0..000102d 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/operations/freeze.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/operations/freeze.py
@@ -1,252 +1,252 @@
-from __future__ import absolute_import
-
-import collections
-import logging
-import os
-import re
-import warnings
-
-from pip._vendor import pkg_resources, six
-from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.pkg_resources import RequirementParseError
-
-from pip._internal.exceptions import InstallationError
-from pip._internal.req import InstallRequirement
-from pip._internal.req.req_file import COMMENT_RE
-from pip._internal.utils.deprecation import RemovedInPip11Warning
-from pip._internal.utils.misc import (
- dist_is_editable, get_installed_distributions,
-)
-
-logger = logging.getLogger(__name__)
-
-
-def freeze(
- requirement=None,
- find_links=None, local_only=None, user_only=None, skip_regex=None,
- isolated=False,
- wheel_cache=None,
- exclude_editable=False,
- skip=()):
- find_links = find_links or []
- skip_match = None
-
- if skip_regex:
- skip_match = re.compile(skip_regex).search
-
- dependency_links = []
-
- for dist in pkg_resources.working_set:
- if dist.has_metadata('dependency_links.txt'):
- dependency_links.extend(
- dist.get_metadata_lines('dependency_links.txt')
- )
- for link in find_links:
- if '#egg=' in link:
- dependency_links.append(link)
- for link in find_links:
- yield '-f %s' % link
- installations = {}
- for dist in get_installed_distributions(local_only=local_only,
- skip=(),
- user_only=user_only):
- try:
- req = FrozenRequirement.from_dist(
- dist,
- dependency_links
- )
- except RequirementParseError:
- logger.warning(
- "Could not parse requirement: %s",
- dist.project_name
- )
- continue
- if exclude_editable and req.editable:
- continue
- installations[req.name] = req
-
- if requirement:
- # the options that don't get turned into an InstallRequirement
- # should only be emitted once, even if the same option is in multiple
- # requirements files, so we need to keep track of what has been emitted
- # so that we don't emit it again if it's seen again
- emitted_options = set()
- # keep track of which files a requirement is in so that we can
- # give an accurate warning if a requirement appears multiple times.
- req_files = collections.defaultdict(list)
- for req_file_path in requirement:
- with open(req_file_path) as req_file:
- for line in req_file:
- if (not line.strip() or
- line.strip().startswith('#') or
- (skip_match and skip_match(line)) or
- line.startswith((
- '-r', '--requirement',
- '-Z', '--always-unzip',
- '-f', '--find-links',
- '-i', '--index-url',
- '--pre',
- '--trusted-host',
- '--process-dependency-links',
- '--extra-index-url'))):
- line = line.rstrip()
- if line not in emitted_options:
- emitted_options.add(line)
- yield line
- continue
-
- if line.startswith('-e') or line.startswith('--editable'):
- if line.startswith('-e'):
- line = line[2:].strip()
- else:
- line = line[len('--editable'):].strip().lstrip('=')
- line_req = InstallRequirement.from_editable(
- line,
- isolated=isolated,
- wheel_cache=wheel_cache,
- )
- else:
- line_req = InstallRequirement.from_line(
- COMMENT_RE.sub('', line).strip(),
- isolated=isolated,
- wheel_cache=wheel_cache,
- )
-
- if not line_req.name:
- logger.info(
- "Skipping line in requirement file [%s] because "
- "it's not clear what it would install: %s",
- req_file_path, line.strip(),
- )
- logger.info(
- " (add #egg=PackageName to the URL to avoid"
- " this warning)"
- )
- elif line_req.name not in installations:
- # either it's not installed, or it is installed
- # but has been processed already
- if not req_files[line_req.name]:
- logger.warning(
- "Requirement file [%s] contains %s, but that "
- "package is not installed",
- req_file_path,
- COMMENT_RE.sub('', line).strip(),
- )
- else:
- req_files[line_req.name].append(req_file_path)
- else:
- yield str(installations[line_req.name]).rstrip()
- del installations[line_req.name]
- req_files[line_req.name].append(req_file_path)
-
- # Warn about requirements that were included multiple times (in a
- # single requirements file or in different requirements files).
- for name, files in six.iteritems(req_files):
- if len(files) > 1:
- logger.warning("Requirement %s included multiple times [%s]",
- name, ', '.join(sorted(set(files))))
-
- yield(
- '## The following requirements were added by '
- 'pip freeze:'
- )
- for installation in sorted(
- installations.values(), key=lambda x: x.name.lower()):
- if canonicalize_name(installation.name) not in skip:
- yield str(installation).rstrip()
-
-
-class FrozenRequirement(object):
- def __init__(self, name, req, editable, comments=()):
- self.name = name
- self.req = req
- self.editable = editable
- self.comments = comments
-
- _rev_re = re.compile(r'-r(\d+)$')
- _date_re = re.compile(r'-(20\d\d\d\d\d\d)$')
-
- @classmethod
- def from_dist(cls, dist, dependency_links):
- location = os.path.normcase(os.path.abspath(dist.location))
- comments = []
- from pip._internal.vcs import vcs, get_src_requirement
- if dist_is_editable(dist) and vcs.get_backend_name(location):
- editable = True
- try:
- req = get_src_requirement(dist, location)
- except InstallationError as exc:
- logger.warning(
- "Error when trying to get requirement for VCS system %s, "
- "falling back to uneditable format", exc
- )
- req = None
- if req is None:
- logger.warning(
- 'Could not determine repository location of %s', location
- )
- comments.append(
- '## !! Could not determine repository location'
- )
- req = dist.as_requirement()
- editable = False
- else:
- editable = False
- req = dist.as_requirement()
- specs = req.specs
- assert len(specs) == 1 and specs[0][0] in ["==", "==="], \
- 'Expected 1 spec with == or ===; specs = %r; dist = %r' % \
- (specs, dist)
- version = specs[0][1]
- ver_match = cls._rev_re.search(version)
- date_match = cls._date_re.search(version)
- if ver_match or date_match:
- svn_backend = vcs.get_backend('svn')
- if svn_backend:
- svn_location = svn_backend().get_location(
- dist,
- dependency_links,
- )
- if not svn_location:
- logger.warning(
- 'Warning: cannot find svn location for %s', req,
- )
- comments.append(
- '## FIXME: could not find svn URL in dependency_links '
- 'for this package:'
- )
- else:
- warnings.warn(
- "SVN editable detection based on dependency links "
- "will be dropped in the future.",
- RemovedInPip11Warning,
- )
- comments.append(
- '# Installing as editable to satisfy requirement %s:' %
- req
- )
- if ver_match:
- rev = ver_match.group(1)
- else:
- rev = '{%s}' % date_match.group(1)
- editable = True
- req = '%s@%s#egg=%s' % (
- svn_location,
- rev,
- cls.egg_name(dist)
- )
- return cls(dist.project_name, req, editable, comments)
-
- @staticmethod
- def egg_name(dist):
- name = dist.egg_name()
- match = re.search(r'-py\d\.\d$', name)
- if match:
- name = name[:match.start()]
- return name
-
- def __str__(self):
- req = self.req
- if self.editable:
- req = '-e %s' % req
- return '\n'.join(list(self.comments) + [str(req)]) + '\n'
+from __future__ import absolute_import
+
+import collections
+import logging
+import os
+import re
+import warnings
+
+from pip._vendor import pkg_resources, six
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.pkg_resources import RequirementParseError
+
+from pip._internal.exceptions import InstallationError
+from pip._internal.req import InstallRequirement
+from pip._internal.req.req_file import COMMENT_RE
+from pip._internal.utils.deprecation import RemovedInPip11Warning
+from pip._internal.utils.misc import (
+ dist_is_editable, get_installed_distributions,
+)
+
+logger = logging.getLogger(__name__)
+
+
+def freeze(
+ requirement=None,
+ find_links=None, local_only=None, user_only=None, skip_regex=None,
+ isolated=False,
+ wheel_cache=None,
+ exclude_editable=False,
+ skip=()):
+ find_links = find_links or []
+ skip_match = None
+
+ if skip_regex:
+ skip_match = re.compile(skip_regex).search
+
+ dependency_links = []
+
+ for dist in pkg_resources.working_set:
+ if dist.has_metadata('dependency_links.txt'):
+ dependency_links.extend(
+ dist.get_metadata_lines('dependency_links.txt')
+ )
+ for link in find_links:
+ if '#egg=' in link:
+ dependency_links.append(link)
+ for link in find_links:
+ yield '-f %s' % link
+ installations = {}
+ for dist in get_installed_distributions(local_only=local_only,
+ skip=(),
+ user_only=user_only):
+ try:
+ req = FrozenRequirement.from_dist(
+ dist,
+ dependency_links
+ )
+ except RequirementParseError:
+ logger.warning(
+ "Could not parse requirement: %s",
+ dist.project_name
+ )
+ continue
+ if exclude_editable and req.editable:
+ continue
+ installations[req.name] = req
+
+ if requirement:
+ # the options that don't get turned into an InstallRequirement
+ # should only be emitted once, even if the same option is in multiple
+ # requirements files, so we need to keep track of what has been emitted
+ # so that we don't emit it again if it's seen again
+ emitted_options = set()
+ # keep track of which files a requirement is in so that we can
+ # give an accurate warning if a requirement appears multiple times.
+ req_files = collections.defaultdict(list)
+ for req_file_path in requirement:
+ with open(req_file_path) as req_file:
+ for line in req_file:
+ if (not line.strip() or
+ line.strip().startswith('#') or
+ (skip_match and skip_match(line)) or
+ line.startswith((
+ '-r', '--requirement',
+ '-Z', '--always-unzip',
+ '-f', '--find-links',
+ '-i', '--index-url',
+ '--pre',
+ '--trusted-host',
+ '--process-dependency-links',
+ '--extra-index-url'))):
+ line = line.rstrip()
+ if line not in emitted_options:
+ emitted_options.add(line)
+ yield line
+ continue
+
+ if line.startswith('-e') or line.startswith('--editable'):
+ if line.startswith('-e'):
+ line = line[2:].strip()
+ else:
+ line = line[len('--editable'):].strip().lstrip('=')
+ line_req = InstallRequirement.from_editable(
+ line,
+ isolated=isolated,
+ wheel_cache=wheel_cache,
+ )
+ else:
+ line_req = InstallRequirement.from_line(
+ COMMENT_RE.sub('', line).strip(),
+ isolated=isolated,
+ wheel_cache=wheel_cache,
+ )
+
+ if not line_req.name:
+ logger.info(
+ "Skipping line in requirement file [%s] because "
+ "it's not clear what it would install: %s",
+ req_file_path, line.strip(),
+ )
+ logger.info(
+ " (add #egg=PackageName to the URL to avoid"
+ " this warning)"
+ )
+ elif line_req.name not in installations:
+ # either it's not installed, or it is installed
+ # but has been processed already
+ if not req_files[line_req.name]:
+ logger.warning(
+ "Requirement file [%s] contains %s, but that "
+ "package is not installed",
+ req_file_path,
+ COMMENT_RE.sub('', line).strip(),
+ )
+ else:
+ req_files[line_req.name].append(req_file_path)
+ else:
+ yield str(installations[line_req.name]).rstrip()
+ del installations[line_req.name]
+ req_files[line_req.name].append(req_file_path)
+
+ # Warn about requirements that were included multiple times (in a
+ # single requirements file or in different requirements files).
+ for name, files in six.iteritems(req_files):
+ if len(files) > 1:
+ logger.warning("Requirement %s included multiple times [%s]",
+ name, ', '.join(sorted(set(files))))
+
+ yield(
+ '## The following requirements were added by '
+ 'pip freeze:'
+ )
+ for installation in sorted(
+ installations.values(), key=lambda x: x.name.lower()):
+ if canonicalize_name(installation.name) not in skip:
+ yield str(installation).rstrip()
+
+
+class FrozenRequirement(object):
+ def __init__(self, name, req, editable, comments=()):
+ self.name = name
+ self.req = req
+ self.editable = editable
+ self.comments = comments
+
+ _rev_re = re.compile(r'-r(\d+)$')
+ _date_re = re.compile(r'-(20\d\d\d\d\d\d)$')
+
+ @classmethod
+ def from_dist(cls, dist, dependency_links):
+ location = os.path.normcase(os.path.abspath(dist.location))
+ comments = []
+ from pip._internal.vcs import vcs, get_src_requirement
+ if dist_is_editable(dist) and vcs.get_backend_name(location):
+ editable = True
+ try:
+ req = get_src_requirement(dist, location)
+ except InstallationError as exc:
+ logger.warning(
+ "Error when trying to get requirement for VCS system %s, "
+ "falling back to uneditable format", exc
+ )
+ req = None
+ if req is None:
+ logger.warning(
+ 'Could not determine repository location of %s', location
+ )
+ comments.append(
+ '## !! Could not determine repository location'
+ )
+ req = dist.as_requirement()
+ editable = False
+ else:
+ editable = False
+ req = dist.as_requirement()
+ specs = req.specs
+ assert len(specs) == 1 and specs[0][0] in ["==", "==="], \
+ 'Expected 1 spec with == or ===; specs = %r; dist = %r' % \
+ (specs, dist)
+ version = specs[0][1]
+ ver_match = cls._rev_re.search(version)
+ date_match = cls._date_re.search(version)
+ if ver_match or date_match:
+ svn_backend = vcs.get_backend('svn')
+ if svn_backend:
+ svn_location = svn_backend().get_location(
+ dist,
+ dependency_links,
+ )
+ if not svn_location:
+ logger.warning(
+ 'Warning: cannot find svn location for %s', req,
+ )
+ comments.append(
+ '## FIXME: could not find svn URL in dependency_links '
+ 'for this package:'
+ )
+ else:
+ warnings.warn(
+ "SVN editable detection based on dependency links "
+ "will be dropped in the future.",
+ RemovedInPip11Warning,
+ )
+ comments.append(
+ '# Installing as editable to satisfy requirement %s:' %
+ req
+ )
+ if ver_match:
+ rev = ver_match.group(1)
+ else:
+ rev = '{%s}' % date_match.group(1)
+ editable = True
+ req = '%s@%s#egg=%s' % (
+ svn_location,
+ rev,
+ cls.egg_name(dist)
+ )
+ return cls(dist.project_name, req, editable, comments)
+
+ @staticmethod
+ def egg_name(dist):
+ name = dist.egg_name()
+ match = re.search(r'-py\d\.\d$', name)
+ if match:
+ name = name[:match.start()]
+ return name
+
+ def __str__(self):
+ req = self.req
+ if self.editable:
+ req = '-e %s' % req
+ return '\n'.join(list(self.comments) + [str(req)]) + '\n'
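
Usage sketch: freeze() above is a generator of requirement lines, so a caller simply iterates it (pip 10.0.1 import path assumed; the output depends on the local environment):

    from pip._internal.operations.freeze import freeze

    for line in freeze(local_only=True, skip=('pip', 'setuptools', 'wheel')):
        print(line)
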
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/operations/prepare.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/operations/prepare.py
index 27e3a5d..c1e8158 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/operations/prepare.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/operations/prepare.py
@@ -1,380 +1,380 @@
-"""Prepares a distribution for installation
-"""
-
-import itertools
-import logging
-import os
-import sys
-from copy import copy
-
-from pip._vendor import pkg_resources, requests
-
-from pip._internal.build_env import NoOpBuildEnvironment
-from pip._internal.compat import expanduser
-from pip._internal.download import (
- is_dir_url, is_file_url, is_vcs_url, unpack_url, url_to_path,
-)
-from pip._internal.exceptions import (
- DirectoryUrlHashUnsupported, HashUnpinned, InstallationError,
- PreviousBuildDirError, VcsHashUnsupported,
-)
-from pip._internal.index import FormatControl
-from pip._internal.req.req_install import InstallRequirement
-from pip._internal.utils.hashes import MissingHashes
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import (
- call_subprocess, display_path, normalize_path,
-)
-from pip._internal.utils.ui import open_spinner
-from pip._internal.vcs import vcs
-
-logger = logging.getLogger(__name__)
-
-
-def make_abstract_dist(req):
- """Factory to make an abstract dist object.
-
- Preconditions: Either an editable req with a source_dir, or satisfied_by or
- a wheel link, or a non-editable req with a source_dir.
-
- :return: A concrete DistAbstraction.
- """
- if req.editable:
- return IsSDist(req)
- elif req.link and req.link.is_wheel:
- return IsWheel(req)
- else:
- return IsSDist(req)
-
-
-def _install_build_reqs(finder, prefix, build_requirements):
- # NOTE: What follows is not a very good thing.
- # Eventually, this should move into the BuildEnvironment class and
- # that should handle all the isolation and sub-process invocation.
- finder = copy(finder)
- finder.format_control = FormatControl(set(), set([":all:"]))
- urls = [
- finder.find_requirement(
- InstallRequirement.from_line(r), upgrade=False).url
- for r in build_requirements
- ]
- args = [
- sys.executable, '-m', 'pip', 'install', '--ignore-installed',
- '--no-user', '--prefix', prefix,
- ] + list(urls)
-
- with open_spinner("Installing build dependencies") as spinner:
- call_subprocess(args, show_stdout=False, spinner=spinner)
-
-
-class DistAbstraction(object):
- """Abstracts out the wheel vs non-wheel Resolver.resolve() logic.
-
- The requirements for anything installable are as follows:
- - we must be able to determine the requirement name
- (or we can't correctly handle the non-upgrade case).
- - we must be able to generate a list of run-time dependencies
- without installing any additional packages (or we would
- have to either burn time by doing temporary isolated installs
- or alternatively violate pips 'don't start installing unless
- all requirements are available' rule - neither of which are
- desirable).
- - for packages with setup requirements, we must also be able
- to determine their requirements without installing additional
- packages (for the same reason as run-time dependencies)
- - we must be able to create a Distribution object exposing the
- above metadata.
- """
-
- def __init__(self, req):
- self.req = req
-
- def dist(self, finder):
- """Return a setuptools Dist object."""
- raise NotImplementedError(self.dist)
-
- def prep_for_dist(self, finder):
- """Ensure that we can get a Dist for this requirement."""
- raise NotImplementedError(self.dist)
-
-
-class IsWheel(DistAbstraction):
-
- def dist(self, finder):
- return list(pkg_resources.find_distributions(
- self.req.source_dir))[0]
-
- def prep_for_dist(self, finder, build_isolation):
- # FIXME:https://github.com/pypa/pip/issues/1112
- pass
-
-
-class IsSDist(DistAbstraction):
-
- def dist(self, finder):
- dist = self.req.get_dist()
- # FIXME: shouldn't be globally added.
- if finder and dist.has_metadata('dependency_links.txt'):
- finder.add_dependency_links(
- dist.get_metadata_lines('dependency_links.txt')
- )
- return dist
-
- def prep_for_dist(self, finder, build_isolation):
- # Before calling "setup.py egg_info", we need to set-up the build
- # environment.
- build_requirements, isolate = self.req.get_pep_518_info()
- should_isolate = build_isolation and isolate
-
- minimum_requirements = ('setuptools', 'wheel')
- missing_requirements = set(minimum_requirements) - set(
- pkg_resources.Requirement(r).key
- for r in build_requirements
- )
- if missing_requirements:
- def format_reqs(rs):
- return ' and '.join(map(repr, sorted(rs)))
- logger.warning(
- "Missing build time requirements in pyproject.toml for %s: "
- "%s.", self.req, format_reqs(missing_requirements)
- )
- logger.warning(
- "This version of pip does not implement PEP 517 so it cannot "
- "build a wheel without %s.", format_reqs(minimum_requirements)
- )
-
- if should_isolate:
- with self.req.build_env:
- pass
- _install_build_reqs(finder, self.req.build_env.path,
- build_requirements)
- else:
- self.req.build_env = NoOpBuildEnvironment(no_clean=False)
-
- self.req.run_egg_info()
- self.req.assert_source_matches_version()
-
-
-class Installed(DistAbstraction):
-
- def dist(self, finder):
- return self.req.satisfied_by
-
- def prep_for_dist(self, finder):
- pass
-
-
-class RequirementPreparer(object):
- """Prepares a Requirement
- """
-
- def __init__(self, build_dir, download_dir, src_dir, wheel_download_dir,
- progress_bar, build_isolation):
- super(RequirementPreparer, self).__init__()
-
- self.src_dir = src_dir
- self.build_dir = build_dir
-
- # Where still packed archives should be written to. If None, they are
- # not saved, and are deleted immediately after unpacking.
- self.download_dir = download_dir
-
- # Where still-packed .whl files should be written to. If None, they are
- # written to the download_dir parameter. Separate to download_dir to
- # permit only keeping wheel archives for pip wheel.
- if wheel_download_dir:
- wheel_download_dir = normalize_path(wheel_download_dir)
- self.wheel_download_dir = wheel_download_dir
-
- # NOTE
- # download_dir and wheel_download_dir overlap semantically and may
- # be combined if we're willing to have non-wheel archives present in
- # the wheelhouse output by 'pip wheel'.
-
- self.progress_bar = progress_bar
-
- # Is build isolation allowed?
- self.build_isolation = build_isolation
-
- @property
- def _download_should_save(self):
- # TODO: Modify to reduce indentation needed
- if self.download_dir:
- self.download_dir = expanduser(self.download_dir)
- if os.path.exists(self.download_dir):
- return True
- else:
- logger.critical('Could not find download directory')
- raise InstallationError(
- "Could not find or access download directory '%s'"
- % display_path(self.download_dir))
- return False
-
- def prepare_linked_requirement(self, req, session, finder,
- upgrade_allowed, require_hashes):
- """Prepare a requirement that would be obtained from req.link
- """
- # TODO: Breakup into smaller functions
- if req.link and req.link.scheme == 'file':
- path = url_to_path(req.link.url)
- logger.info('Processing %s', display_path(path))
- else:
- logger.info('Collecting %s', req)
-
- with indent_log():
- # @@ if filesystem packages are not marked
- # editable in a req, a non deterministic error
- # occurs when the script attempts to unpack the
- # build directory
- req.ensure_has_source_dir(self.build_dir)
- # If a checkout exists, it's unwise to keep going. version
- # inconsistencies are logged later, but do not fail the
- # installation.
- # FIXME: this won't upgrade when there's an existing
- # package unpacked in `req.source_dir`
- if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
- raise PreviousBuildDirError(
- "pip can't proceed with requirements '%s' due to a"
- " pre-existing build directory (%s). This is "
- "likely due to a previous installation that failed"
- ". pip is being responsible and not assuming it "
- "can delete this. Please delete it and try again."
- % (req, req.source_dir)
- )
- req.populate_link(finder, upgrade_allowed, require_hashes)
-
- # We can't hit this spot and have populate_link return None.
- # req.satisfied_by is None here (because we're
- # guarded) and upgrade has no impact except when satisfied_by
- # is not None.
- # Then inside find_requirement existing_applicable -> False
- # If no new versions are found, DistributionNotFound is raised,
- # otherwise a result is guaranteed.
- assert req.link
- link = req.link
-
- # Now that we have the real link, we can tell what kind of
- # requirements we have and raise some more informative errors
- # than otherwise. (For example, we can raise VcsHashUnsupported
- # for a VCS URL rather than HashMissing.)
- if require_hashes:
- # We could check these first 2 conditions inside
- # unpack_url and save repetition of conditions, but then
- # we would report less-useful error messages for
- # unhashable requirements, complaining that there's no
- # hash provided.
- if is_vcs_url(link):
- raise VcsHashUnsupported()
- elif is_file_url(link) and is_dir_url(link):
- raise DirectoryUrlHashUnsupported()
- if not req.original_link and not req.is_pinned:
- # Unpinned packages are asking for trouble when a new
- # version is uploaded. This isn't a security check, but
- # it saves users a surprising hash mismatch in the
- # future.
- #
- # file:/// URLs aren't pinnable, so don't complain
- # about them not being pinned.
- raise HashUnpinned()
-
- hashes = req.hashes(trust_internet=not require_hashes)
- if require_hashes and not hashes:
- # Known-good hashes are missing for this requirement, so
- # shim it with a facade object that will provoke hash
- # computation and then raise a HashMissing exception
- # showing the user what the hash should be.
- hashes = MissingHashes()
-
- try:
- download_dir = self.download_dir
- # We always delete unpacked sdists after pip ran.
- autodelete_unpacked = True
- if req.link.is_wheel and self.wheel_download_dir:
- # when doing 'pip wheel` we download wheels to a
- # dedicated dir.
- download_dir = self.wheel_download_dir
- if req.link.is_wheel:
- if download_dir:
- # When downloading, we only unpack wheels to get
- # metadata.
- autodelete_unpacked = True
- else:
- # When installing a wheel, we use the unpacked
- # wheel.
- autodelete_unpacked = False
- unpack_url(
- req.link, req.source_dir,
- download_dir, autodelete_unpacked,
- session=session, hashes=hashes,
- progress_bar=self.progress_bar
- )
- except requests.HTTPError as exc:
- logger.critical(
- 'Could not install requirement %s because of error %s',
- req,
- exc,
- )
- raise InstallationError(
- 'Could not install requirement %s because of HTTP '
- 'error %s for URL %s' %
- (req, exc, req.link)
- )
- abstract_dist = make_abstract_dist(req)
- abstract_dist.prep_for_dist(finder, self.build_isolation)
- if self._download_should_save:
- # Make a .zip of the source_dir we already created.
- if req.link.scheme in vcs.all_schemes:
- req.archive(self.download_dir)
- return abstract_dist
-
- def prepare_editable_requirement(self, req, require_hashes, use_user_site,
- finder):
- """Prepare an editable requirement
- """
- assert req.editable, "cannot prepare a non-editable req as editable"
-
- logger.info('Obtaining %s', req)
-
- with indent_log():
- if require_hashes:
- raise InstallationError(
- 'The editable requirement %s cannot be installed when '
- 'requiring hashes, because there is no single file to '
- 'hash.' % req
- )
- req.ensure_has_source_dir(self.src_dir)
- req.update_editable(not self._download_should_save)
-
- abstract_dist = make_abstract_dist(req)
- abstract_dist.prep_for_dist(finder, self.build_isolation)
-
- if self._download_should_save:
- req.archive(self.download_dir)
- req.check_if_exists(use_user_site)
-
- return abstract_dist
-
- def prepare_installed_requirement(self, req, require_hashes, skip_reason):
- """Prepare an already-installed requirement
- """
- assert req.satisfied_by, "req should have been satisfied but isn't"
- assert skip_reason is not None, (
- "did not get skip reason skipped but req.satisfied_by "
- "is set to %r" % (req.satisfied_by,)
- )
- logger.info(
- 'Requirement %s: %s (%s)',
- skip_reason, req, req.satisfied_by.version
- )
- with indent_log():
- if require_hashes:
- logger.debug(
- 'Since it is already installed, we are trusting this '
- 'package without checking its hash. To ensure a '
- 'completely repeatable environment, install into an '
- 'empty virtualenv.'
- )
- abstract_dist = Installed(req)
-
- return abstract_dist
+"""Prepares a distribution for installation
+"""
+
+import itertools
+import logging
+import os
+import sys
+from copy import copy
+
+from pip._vendor import pkg_resources, requests
+
+from pip._internal.build_env import NoOpBuildEnvironment
+from pip._internal.compat import expanduser
+from pip._internal.download import (
+ is_dir_url, is_file_url, is_vcs_url, unpack_url, url_to_path,
+)
+from pip._internal.exceptions import (
+ DirectoryUrlHashUnsupported, HashUnpinned, InstallationError,
+ PreviousBuildDirError, VcsHashUnsupported,
+)
+from pip._internal.index import FormatControl
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.utils.hashes import MissingHashes
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import (
+ call_subprocess, display_path, normalize_path,
+)
+from pip._internal.utils.ui import open_spinner
+from pip._internal.vcs import vcs
+
+logger = logging.getLogger(__name__)
+
+
+def make_abstract_dist(req):
+ """Factory to make an abstract dist object.
+
+ Preconditions: Either an editable req with a source_dir, or satisfied_by or
+ a wheel link, or a non-editable req with a source_dir.
+
+ :return: A concrete DistAbstraction.
+ """
+ if req.editable:
+ return IsSDist(req)
+ elif req.link and req.link.is_wheel:
+ return IsWheel(req)
+ else:
+ return IsSDist(req)
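+
+# Editor's note -- a minimal, self-contained sketch of the dispatch rule
+# above, using stand-in namedtuples rather than pip's real requirement and
+# link classes (Req and Link here are hypothetical):
+#
+#     >>> from collections import namedtuple
+#     >>> Link = namedtuple('Link', 'is_wheel')
+#     >>> Req = namedtuple('Req', 'editable link')
+#     >>> def kind(req):
+#     ...     if req.editable:
+#     ...         return 'sdist'
+#     ...     return 'wheel' if req.link and req.link.is_wheel else 'sdist'
+#     >>> kind(Req(editable=False, link=Link(is_wheel=True)))
+#     'wheel'
+#     >>> kind(Req(editable=True, link=None))
+#     'sdist'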
+
+
+def _install_build_reqs(finder, prefix, build_requirements):
+ # NOTE: What follows is not a very good thing.
+ # Eventually, this should move into the BuildEnvironment class and
+ # that should handle all the isolation and sub-process invocation.
+ finder = copy(finder)
+ finder.format_control = FormatControl(set(), set([":all:"]))
+ urls = [
+ finder.find_requirement(
+ InstallRequirement.from_line(r), upgrade=False).url
+ for r in build_requirements
+ ]
+ args = [
+ sys.executable, '-m', 'pip', 'install', '--ignore-installed',
+ '--no-user', '--prefix', prefix,
+ ] + list(urls)
+
+ with open_spinner("Installing build dependencies") as spinner:
+ call_subprocess(args, show_stdout=False, spinner=spinner)
+
+
+class DistAbstraction(object):
+ """Abstracts out the wheel vs non-wheel Resolver.resolve() logic.
+
+ The requirements for anything installable are as follows:
+ - we must be able to determine the requirement name
+ (or we can't correctly handle the non-upgrade case).
+ - we must be able to generate a list of run-time dependencies
+ without installing any additional packages (or we would
+ have to either burn time by doing temporary isolated installs
+       or alternatively violate pip's 'don't start installing unless
+ all requirements are available' rule - neither of which are
+ desirable).
+ - for packages with setup requirements, we must also be able
+ to determine their requirements without installing additional
+ packages (for the same reason as run-time dependencies)
+ - we must be able to create a Distribution object exposing the
+ above metadata.
+ """
+
+ def __init__(self, req):
+ self.req = req
+
+ def dist(self, finder):
+ """Return a setuptools Dist object."""
+ raise NotImplementedError(self.dist)
+
+ def prep_for_dist(self, finder):
+ """Ensure that we can get a Dist for this requirement."""
+        raise NotImplementedError(self.prep_for_dist)
+
+
+class IsWheel(DistAbstraction):
+
+ def dist(self, finder):
+ return list(pkg_resources.find_distributions(
+ self.req.source_dir))[0]
+
+ def prep_for_dist(self, finder, build_isolation):
+ # FIXME:https://github.com/pypa/pip/issues/1112
+ pass
+
+
+class IsSDist(DistAbstraction):
+
+ def dist(self, finder):
+ dist = self.req.get_dist()
+ # FIXME: shouldn't be globally added.
+ if finder and dist.has_metadata('dependency_links.txt'):
+ finder.add_dependency_links(
+ dist.get_metadata_lines('dependency_links.txt')
+ )
+ return dist
+
+ def prep_for_dist(self, finder, build_isolation):
+ # Before calling "setup.py egg_info", we need to set-up the build
+ # environment.
+ build_requirements, isolate = self.req.get_pep_518_info()
+ should_isolate = build_isolation and isolate
+
+ minimum_requirements = ('setuptools', 'wheel')
+ missing_requirements = set(minimum_requirements) - set(
+ pkg_resources.Requirement(r).key
+ for r in build_requirements
+ )
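+        # Editor's note: plain set difference -- e.g. if pyproject.toml only
+        # declares 'setuptools', this leaves {'wheel'} and the warnings
+        # below fire.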
+ if missing_requirements:
+ def format_reqs(rs):
+ return ' and '.join(map(repr, sorted(rs)))
+ logger.warning(
+ "Missing build time requirements in pyproject.toml for %s: "
+ "%s.", self.req, format_reqs(missing_requirements)
+ )
+ logger.warning(
+ "This version of pip does not implement PEP 517 so it cannot "
+ "build a wheel without %s.", format_reqs(minimum_requirements)
+ )
+
+ if should_isolate:
+ with self.req.build_env:
+ pass
+ _install_build_reqs(finder, self.req.build_env.path,
+ build_requirements)
+ else:
+ self.req.build_env = NoOpBuildEnvironment(no_clean=False)
+
+ self.req.run_egg_info()
+ self.req.assert_source_matches_version()
+
+
+class Installed(DistAbstraction):
+
+ def dist(self, finder):
+ return self.req.satisfied_by
+
+ def prep_for_dist(self, finder):
+ pass
+
+
+class RequirementPreparer(object):
+ """Prepares a Requirement
+ """
+
+ def __init__(self, build_dir, download_dir, src_dir, wheel_download_dir,
+ progress_bar, build_isolation):
+ super(RequirementPreparer, self).__init__()
+
+ self.src_dir = src_dir
+ self.build_dir = build_dir
+
+        # Where still-packed archives should be written to. If None, they are
+ # not saved, and are deleted immediately after unpacking.
+ self.download_dir = download_dir
+
+ # Where still-packed .whl files should be written to. If None, they are
+ # written to the download_dir parameter. Separate to download_dir to
+ # permit only keeping wheel archives for pip wheel.
+ if wheel_download_dir:
+ wheel_download_dir = normalize_path(wheel_download_dir)
+ self.wheel_download_dir = wheel_download_dir
+
+ # NOTE
+ # download_dir and wheel_download_dir overlap semantically and may
+ # be combined if we're willing to have non-wheel archives present in
+ # the wheelhouse output by 'pip wheel'.
+
+ self.progress_bar = progress_bar
+
+ # Is build isolation allowed?
+ self.build_isolation = build_isolation
+
+ @property
+ def _download_should_save(self):
+ # TODO: Modify to reduce indentation needed
+ if self.download_dir:
+ self.download_dir = expanduser(self.download_dir)
+ if os.path.exists(self.download_dir):
+ return True
+ else:
+ logger.critical('Could not find download directory')
+ raise InstallationError(
+ "Could not find or access download directory '%s'"
+ % display_path(self.download_dir))
+ return False
+
+ def prepare_linked_requirement(self, req, session, finder,
+ upgrade_allowed, require_hashes):
+ """Prepare a requirement that would be obtained from req.link
+ """
+        # TODO: Break up into smaller functions
+ if req.link and req.link.scheme == 'file':
+ path = url_to_path(req.link.url)
+ logger.info('Processing %s', display_path(path))
+ else:
+ logger.info('Collecting %s', req)
+
+ with indent_log():
+ # @@ if filesystem packages are not marked
+            # editable in a req, a non-deterministic error
+ # occurs when the script attempts to unpack the
+ # build directory
+ req.ensure_has_source_dir(self.build_dir)
+            # If a checkout exists, it's unwise to keep going. Version
+            # inconsistencies are logged later, but do not fail the
+            # installation.
+            # FIXME: this won't upgrade when there's an existing
+            # package unpacked in `req.source_dir`
+ if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
+ raise PreviousBuildDirError(
+ "pip can't proceed with requirements '%s' due to a"
+ " pre-existing build directory (%s). This is "
+ "likely due to a previous installation that failed"
+ ". pip is being responsible and not assuming it "
+ "can delete this. Please delete it and try again."
+ % (req, req.source_dir)
+ )
+ req.populate_link(finder, upgrade_allowed, require_hashes)
+
+ # We can't hit this spot and have populate_link return None.
+ # req.satisfied_by is None here (because we're
+ # guarded) and upgrade has no impact except when satisfied_by
+ # is not None.
+ # Then inside find_requirement existing_applicable -> False
+ # If no new versions are found, DistributionNotFound is raised,
+ # otherwise a result is guaranteed.
+ assert req.link
+ link = req.link
+
+ # Now that we have the real link, we can tell what kind of
+ # requirements we have and raise some more informative errors
+ # than otherwise. (For example, we can raise VcsHashUnsupported
+ # for a VCS URL rather than HashMissing.)
+ if require_hashes:
+ # We could check these first 2 conditions inside
+ # unpack_url and save repetition of conditions, but then
+ # we would report less-useful error messages for
+ # unhashable requirements, complaining that there's no
+ # hash provided.
+ if is_vcs_url(link):
+ raise VcsHashUnsupported()
+ elif is_file_url(link) and is_dir_url(link):
+ raise DirectoryUrlHashUnsupported()
+ if not req.original_link and not req.is_pinned:
+ # Unpinned packages are asking for trouble when a new
+ # version is uploaded. This isn't a security check, but
+ # it saves users a surprising hash mismatch in the
+ # future.
+ #
+ # file:/// URLs aren't pinnable, so don't complain
+ # about them not being pinned.
+ raise HashUnpinned()
+
+ hashes = req.hashes(trust_internet=not require_hashes)
+ if require_hashes and not hashes:
+ # Known-good hashes are missing for this requirement, so
+ # shim it with a facade object that will provoke hash
+ # computation and then raise a HashMissing exception
+ # showing the user what the hash should be.
+ hashes = MissingHashes()
+
+ try:
+ download_dir = self.download_dir
+                # We always delete unpacked sdists after pip has run.
+ autodelete_unpacked = True
+ if req.link.is_wheel and self.wheel_download_dir:
+                    # When doing 'pip wheel' we download wheels to a
+ # dedicated dir.
+ download_dir = self.wheel_download_dir
+ if req.link.is_wheel:
+ if download_dir:
+ # When downloading, we only unpack wheels to get
+ # metadata.
+ autodelete_unpacked = True
+ else:
+ # When installing a wheel, we use the unpacked
+ # wheel.
+ autodelete_unpacked = False
+ unpack_url(
+ req.link, req.source_dir,
+ download_dir, autodelete_unpacked,
+ session=session, hashes=hashes,
+ progress_bar=self.progress_bar
+ )
+ except requests.HTTPError as exc:
+ logger.critical(
+ 'Could not install requirement %s because of error %s',
+ req,
+ exc,
+ )
+ raise InstallationError(
+ 'Could not install requirement %s because of HTTP '
+ 'error %s for URL %s' %
+ (req, exc, req.link)
+ )
+ abstract_dist = make_abstract_dist(req)
+ abstract_dist.prep_for_dist(finder, self.build_isolation)
+ if self._download_should_save:
+ # Make a .zip of the source_dir we already created.
+ if req.link.scheme in vcs.all_schemes:
+ req.archive(self.download_dir)
+ return abstract_dist
+
+ def prepare_editable_requirement(self, req, require_hashes, use_user_site,
+ finder):
+ """Prepare an editable requirement
+ """
+ assert req.editable, "cannot prepare a non-editable req as editable"
+
+ logger.info('Obtaining %s', req)
+
+ with indent_log():
+ if require_hashes:
+ raise InstallationError(
+ 'The editable requirement %s cannot be installed when '
+ 'requiring hashes, because there is no single file to '
+ 'hash.' % req
+ )
+ req.ensure_has_source_dir(self.src_dir)
+ req.update_editable(not self._download_should_save)
+
+ abstract_dist = make_abstract_dist(req)
+ abstract_dist.prep_for_dist(finder, self.build_isolation)
+
+ if self._download_should_save:
+ req.archive(self.download_dir)
+ req.check_if_exists(use_user_site)
+
+ return abstract_dist
+
+ def prepare_installed_requirement(self, req, require_hashes, skip_reason):
+ """Prepare an already-installed requirement
+ """
+ assert req.satisfied_by, "req should have been satisfied but isn't"
+ assert skip_reason is not None, (
+ "did not get skip reason skipped but req.satisfied_by "
+ "is set to %r" % (req.satisfied_by,)
+ )
+ logger.info(
+ 'Requirement %s: %s (%s)',
+ skip_reason, req, req.satisfied_by.version
+ )
+ with indent_log():
+ if require_hashes:
+ logger.debug(
+ 'Since it is already installed, we are trusting this '
+ 'package without checking its hash. To ensure a '
+ 'completely repeatable environment, install into an '
+ 'empty virtualenv.'
+ )
+ abstract_dist = Installed(req)
+
+ return abstract_dist
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/pep425tags.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/pep425tags.py
index 0b5c783..5d31310 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/pep425tags.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/pep425tags.py
@@ -1,317 +1,317 @@
-"""Generate and work with PEP 425 Compatibility Tags."""
-from __future__ import absolute_import
-
-import distutils.util
-import logging
-import platform
-import re
-import sys
-import sysconfig
-import warnings
-from collections import OrderedDict
-
-import pip._internal.utils.glibc
-
-logger = logging.getLogger(__name__)
-
-_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)')
-
-
-def get_config_var(var):
- try:
- return sysconfig.get_config_var(var)
- except IOError as e: # Issue #1074
- warnings.warn("{}".format(e), RuntimeWarning)
- return None
-
-
-def get_abbr_impl():
- """Return abbreviated implementation name."""
- if hasattr(sys, 'pypy_version_info'):
- pyimpl = 'pp'
- elif sys.platform.startswith('java'):
- pyimpl = 'jy'
- elif sys.platform == 'cli':
- pyimpl = 'ip'
- else:
- pyimpl = 'cp'
- return pyimpl
-
-
-def get_impl_ver():
- """Return implementation version."""
- impl_ver = get_config_var("py_version_nodot")
- if not impl_ver or get_abbr_impl() == 'pp':
- impl_ver = ''.join(map(str, get_impl_version_info()))
- return impl_ver
-
-
-def get_impl_version_info():
- """Return sys.version_info-like tuple for use in decrementing the minor
- version."""
- if get_abbr_impl() == 'pp':
- # as per https://github.com/pypa/pip/issues/2882
- return (sys.version_info[0], sys.pypy_version_info.major,
- sys.pypy_version_info.minor)
- else:
- return sys.version_info[0], sys.version_info[1]
-
-
-def get_impl_tag():
- """
- Returns the Tag for this specific implementation.
- """
- return "{}{}".format(get_abbr_impl(), get_impl_ver())
-
-
-def get_flag(var, fallback, expected=True, warn=True):
- """Use a fallback method for determining SOABI flags if the needed config
- var is unset or unavailable."""
- val = get_config_var(var)
- if val is None:
- if warn:
- logger.debug("Config variable '%s' is unset, Python ABI tag may "
- "be incorrect", var)
- return fallback()
- return val == expected
-
-
-def get_abi_tag():
- """Return the ABI tag based on SOABI (if available) or emulate SOABI
- (CPython 2, PyPy)."""
- soabi = get_config_var('SOABI')
- impl = get_abbr_impl()
- if not soabi and impl in {'cp', 'pp'} and hasattr(sys, 'maxunicode'):
- d = ''
- m = ''
- u = ''
- if get_flag('Py_DEBUG',
- lambda: hasattr(sys, 'gettotalrefcount'),
- warn=(impl == 'cp')):
- d = 'd'
- if get_flag('WITH_PYMALLOC',
- lambda: impl == 'cp',
- warn=(impl == 'cp')):
- m = 'm'
- if get_flag('Py_UNICODE_SIZE',
- lambda: sys.maxunicode == 0x10ffff,
- expected=4,
- warn=(impl == 'cp' and
- sys.version_info < (3, 3))) \
- and sys.version_info < (3, 3):
- u = 'u'
- abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u)
- elif soabi and soabi.startswith('cpython-'):
- abi = 'cp' + soabi.split('-')[1]
- elif soabi:
- abi = soabi.replace('.', '_').replace('-', '_')
- else:
- abi = None
- return abi
-
-
-def _is_running_32bit():
- return sys.maxsize == 2147483647
-
-
-def get_platform():
- """Return our platform name 'win32', 'linux_x86_64'"""
- if sys.platform == 'darwin':
- # distutils.util.get_platform() returns the release based on the value
- # of MACOSX_DEPLOYMENT_TARGET on which Python was built, which may
- # be significantly older than the user's current machine.
- release, _, machine = platform.mac_ver()
- split_ver = release.split('.')
-
- if machine == "x86_64" and _is_running_32bit():
- machine = "i386"
- elif machine == "ppc64" and _is_running_32bit():
- machine = "ppc"
-
- return 'macosx_{}_{}_{}'.format(split_ver[0], split_ver[1], machine)
-
- # XXX remove distutils dependency
- result = distutils.util.get_platform().replace('.', '_').replace('-', '_')
- if result == "linux_x86_64" and _is_running_32bit():
- # 32 bit Python program (running on a 64 bit Linux): pip should only
- # install and run 32 bit compiled extensions in that case.
- result = "linux_i686"
-
- return result
-
-
-def is_manylinux1_compatible():
- # Only Linux, and only x86-64 / i686
- if get_platform() not in {"linux_x86_64", "linux_i686"}:
- return False
-
- # Check for presence of _manylinux module
- try:
- import _manylinux
- return bool(_manylinux.manylinux1_compatible)
- except (ImportError, AttributeError):
- # Fall through to heuristic check below
- pass
-
- # Check glibc version. CentOS 5 uses glibc 2.5.
- return pip._internal.utils.glibc.have_compatible_glibc(2, 5)
-
-
-def get_darwin_arches(major, minor, machine):
- """Return a list of supported arches (including group arches) for
- the given major, minor and machine architecture of an macOS machine.
- """
- arches = []
-
- def _supports_arch(major, minor, arch):
- # Looking at the application support for macOS versions in the chart
- # provided by https://en.wikipedia.org/wiki/OS_X#Versions it appears
- # our timeline looks roughly like:
- #
- # 10.0 - Introduces ppc support.
- # 10.4 - Introduces ppc64, i386, and x86_64 support, however the ppc64
- # and x86_64 support is CLI only, and cannot be used for GUI
- # applications.
- # 10.5 - Extends ppc64 and x86_64 support to cover GUI applications.
- # 10.6 - Drops support for ppc64
- # 10.7 - Drops support for ppc
- #
- # Given that we do not know if we're installing a CLI or a GUI
- # application, we must be conservative and assume it might be a GUI
- # application and behave as if ppc64 and x86_64 support did not occur
- # until 10.5.
- #
- # Note: The above information is taken from the "Application support"
- # column in the chart not the "Processor support" since I believe
- # that we care about what instruction sets an application can use
- # not which processors the OS supports.
- if arch == 'ppc':
- return (major, minor) <= (10, 5)
- if arch == 'ppc64':
- return (major, minor) == (10, 5)
- if arch == 'i386':
- return (major, minor) >= (10, 4)
- if arch == 'x86_64':
- return (major, minor) >= (10, 5)
- if arch in groups:
- for garch in groups[arch]:
- if _supports_arch(major, minor, garch):
- return True
- return False
-
- groups = OrderedDict([
- ("fat", ("i386", "ppc")),
- ("intel", ("x86_64", "i386")),
- ("fat64", ("x86_64", "ppc64")),
- ("fat32", ("x86_64", "i386", "ppc")),
- ])
-
- if _supports_arch(major, minor, machine):
- arches.append(machine)
-
- for garch in groups:
- if machine in groups[garch] and _supports_arch(major, minor, garch):
- arches.append(garch)
-
- arches.append('universal')
-
- return arches
-
-
-def get_supported(versions=None, noarch=False, platform=None,
- impl=None, abi=None):
- """Return a list of supported tags for each version specified in
- `versions`.
-
- :param versions: a list of string versions, of the form ["33", "32"],
- or None. The first version will be assumed to support our ABI.
- :param platform: specify the exact platform you want valid
- tags for, or None. If None, use the local system platform.
- :param impl: specify the exact implementation you want valid
- tags for, or None. If None, use the local interpreter impl.
- :param abi: specify the exact abi you want valid
- tags for, or None. If None, use the local interpreter abi.
- """
- supported = []
-
- # Versions must be given with respect to the preference
- if versions is None:
- versions = []
- version_info = get_impl_version_info()
- major = version_info[:-1]
- # Support all previous minor Python versions.
- for minor in range(version_info[-1], -1, -1):
- versions.append(''.join(map(str, major + (minor,))))
-
- impl = impl or get_abbr_impl()
-
- abis = []
-
- abi = abi or get_abi_tag()
- if abi:
- abis[0:0] = [abi]
-
- abi3s = set()
- import imp
- for suffix in imp.get_suffixes():
- if suffix[0].startswith('.abi'):
- abi3s.add(suffix[0].split('.', 2)[1])
-
- abis.extend(sorted(list(abi3s)))
-
- abis.append('none')
-
- if not noarch:
- arch = platform or get_platform()
- if arch.startswith('macosx'):
- # support macosx-10.6-intel on macosx-10.9-x86_64
- match = _osx_arch_pat.match(arch)
- if match:
- name, major, minor, actual_arch = match.groups()
- tpl = '{}_{}_%i_%s'.format(name, major)
- arches = []
- for m in reversed(range(int(minor) + 1)):
- for a in get_darwin_arches(int(major), m, actual_arch):
- arches.append(tpl % (m, a))
- else:
- # arch pattern didn't match (?!)
- arches = [arch]
- elif platform is None and is_manylinux1_compatible():
- arches = [arch.replace('linux', 'manylinux1'), arch]
- else:
- arches = [arch]
-
- # Current version, current API (built specifically for our Python):
- for abi in abis:
- for arch in arches:
- supported.append(('%s%s' % (impl, versions[0]), abi, arch))
-
- # abi3 modules compatible with older version of Python
- for version in versions[1:]:
- # abi3 was introduced in Python 3.2
- if version in {'31', '30'}:
- break
- for abi in abi3s: # empty set if not Python 3
- for arch in arches:
- supported.append(("%s%s" % (impl, version), abi, arch))
-
- # Has binaries, does not use the Python API:
- for arch in arches:
- supported.append(('py%s' % (versions[0][0]), 'none', arch))
-
- # No abi / arch, but requires our implementation:
- supported.append(('%s%s' % (impl, versions[0]), 'none', 'any'))
- # Tagged specifically as being cross-version compatible
- # (with just the major version specified)
- supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))
-
- # No abi / arch, generic Python
- for i, version in enumerate(versions):
- supported.append(('py%s' % (version,), 'none', 'any'))
- if i == 0:
- supported.append(('py%s' % (version[0]), 'none', 'any'))
-
- return supported
-
-
-implementation_tag = get_impl_tag()
+"""Generate and work with PEP 425 Compatibility Tags."""
+from __future__ import absolute_import
+
+import distutils.util
+import logging
+import platform
+import re
+import sys
+import sysconfig
+import warnings
+from collections import OrderedDict
+
+import pip._internal.utils.glibc
+
+logger = logging.getLogger(__name__)
+
+_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)')
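+
+# Editor's note: this pattern splits a macOS platform tag into name, major
+# and minor version, and architecture -- e.g. (illustrative only):
+#
+#     >>> _osx_arch_pat.match('macosx_10_13_x86_64').groups()
+#     ('macosx', '10', '13', 'x86_64')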
+
+
+def get_config_var(var):
+ try:
+ return sysconfig.get_config_var(var)
+ except IOError as e: # Issue #1074
+ warnings.warn("{}".format(e), RuntimeWarning)
+ return None
+
+
+def get_abbr_impl():
+ """Return abbreviated implementation name."""
+ if hasattr(sys, 'pypy_version_info'):
+ pyimpl = 'pp'
+ elif sys.platform.startswith('java'):
+ pyimpl = 'jy'
+ elif sys.platform == 'cli':
+ pyimpl = 'ip'
+ else:
+ pyimpl = 'cp'
+ return pyimpl
+
+
+def get_impl_ver():
+ """Return implementation version."""
+ impl_ver = get_config_var("py_version_nodot")
+ if not impl_ver or get_abbr_impl() == 'pp':
+ impl_ver = ''.join(map(str, get_impl_version_info()))
+ return impl_ver
+
+
+def get_impl_version_info():
+ """Return sys.version_info-like tuple for use in decrementing the minor
+ version."""
+ if get_abbr_impl() == 'pp':
+ # as per https://github.com/pypa/pip/issues/2882
+ return (sys.version_info[0], sys.pypy_version_info.major,
+ sys.pypy_version_info.minor)
+ else:
+ return sys.version_info[0], sys.version_info[1]
+
+
+def get_impl_tag():
+ """
+ Returns the Tag for this specific implementation.
+ """
+ return "{}{}".format(get_abbr_impl(), get_impl_ver())
+
+
+def get_flag(var, fallback, expected=True, warn=True):
+ """Use a fallback method for determining SOABI flags if the needed config
+ var is unset or unavailable."""
+ val = get_config_var(var)
+ if val is None:
+ if warn:
+ logger.debug("Config variable '%s' is unset, Python ABI tag may "
+ "be incorrect", var)
+ return fallback()
+ return val == expected
+
+
+def get_abi_tag():
+ """Return the ABI tag based on SOABI (if available) or emulate SOABI
+ (CPython 2, PyPy)."""
+ soabi = get_config_var('SOABI')
+ impl = get_abbr_impl()
+ if not soabi and impl in {'cp', 'pp'} and hasattr(sys, 'maxunicode'):
+ d = ''
+ m = ''
+ u = ''
+ if get_flag('Py_DEBUG',
+ lambda: hasattr(sys, 'gettotalrefcount'),
+ warn=(impl == 'cp')):
+ d = 'd'
+ if get_flag('WITH_PYMALLOC',
+ lambda: impl == 'cp',
+ warn=(impl == 'cp')):
+ m = 'm'
+ if get_flag('Py_UNICODE_SIZE',
+ lambda: sys.maxunicode == 0x10ffff,
+ expected=4,
+ warn=(impl == 'cp' and
+ sys.version_info < (3, 3))) \
+ and sys.version_info < (3, 3):
+ u = 'u'
+ abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u)
+ elif soabi and soabi.startswith('cpython-'):
+ abi = 'cp' + soabi.split('-')[1]
+ elif soabi:
+ abi = soabi.replace('.', '_').replace('-', '_')
+ else:
+ abi = None
+ return abi
+
+
+def _is_running_32bit():
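+    # 2147483647 == 2**31 - 1, the value of sys.maxsize on 32-bit CPython
+    # builds (64-bit builds report 2**63 - 1).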
+ return sys.maxsize == 2147483647
+
+
+def get_platform():
+ """Return our platform name 'win32', 'linux_x86_64'"""
+ if sys.platform == 'darwin':
+ # distutils.util.get_platform() returns the release based on the value
+ # of MACOSX_DEPLOYMENT_TARGET on which Python was built, which may
+ # be significantly older than the user's current machine.
+ release, _, machine = platform.mac_ver()
+ split_ver = release.split('.')
+
+ if machine == "x86_64" and _is_running_32bit():
+ machine = "i386"
+ elif machine == "ppc64" and _is_running_32bit():
+ machine = "ppc"
+
+ return 'macosx_{}_{}_{}'.format(split_ver[0], split_ver[1], machine)
+
+ # XXX remove distutils dependency
+ result = distutils.util.get_platform().replace('.', '_').replace('-', '_')
+ if result == "linux_x86_64" and _is_running_32bit():
+ # 32 bit Python program (running on a 64 bit Linux): pip should only
+ # install and run 32 bit compiled extensions in that case.
+ result = "linux_i686"
+
+ return result
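+
+# Editor's sketch (illustrative only): how the macosx branch above formats a
+# platform tag, with hard-coded stand-ins for platform.mac_ver() output:
+#
+#     >>> release, machine = '10.13.6', 'x86_64'
+#     >>> split_ver = release.split('.')
+#     >>> 'macosx_{}_{}_{}'.format(split_ver[0], split_ver[1], machine)
+#     'macosx_10_13_x86_64'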
+
+
+def is_manylinux1_compatible():
+ # Only Linux, and only x86-64 / i686
+ if get_platform() not in {"linux_x86_64", "linux_i686"}:
+ return False
+
+ # Check for presence of _manylinux module
+ try:
+ import _manylinux
+ return bool(_manylinux.manylinux1_compatible)
+ except (ImportError, AttributeError):
+ # Fall through to heuristic check below
+ pass
+
+ # Check glibc version. CentOS 5 uses glibc 2.5.
+ return pip._internal.utils.glibc.have_compatible_glibc(2, 5)
+
+
+def get_darwin_arches(major, minor, machine):
+ """Return a list of supported arches (including group arches) for
+    the given major, minor and machine architecture of a macOS machine.
+ """
+ arches = []
+
+ def _supports_arch(major, minor, arch):
+ # Looking at the application support for macOS versions in the chart
+ # provided by https://en.wikipedia.org/wiki/OS_X#Versions it appears
+ # our timeline looks roughly like:
+ #
+ # 10.0 - Introduces ppc support.
+ # 10.4 - Introduces ppc64, i386, and x86_64 support, however the ppc64
+ # and x86_64 support is CLI only, and cannot be used for GUI
+ # applications.
+ # 10.5 - Extends ppc64 and x86_64 support to cover GUI applications.
+ # 10.6 - Drops support for ppc64
+ # 10.7 - Drops support for ppc
+ #
+ # Given that we do not know if we're installing a CLI or a GUI
+ # application, we must be conservative and assume it might be a GUI
+ # application and behave as if ppc64 and x86_64 support did not occur
+ # until 10.5.
+ #
+ # Note: The above information is taken from the "Application support"
+        # column in the chart, not the "Processor support", since I believe
+ # that we care about what instruction sets an application can use
+ # not which processors the OS supports.
+ if arch == 'ppc':
+ return (major, minor) <= (10, 5)
+ if arch == 'ppc64':
+ return (major, minor) == (10, 5)
+ if arch == 'i386':
+ return (major, minor) >= (10, 4)
+ if arch == 'x86_64':
+ return (major, minor) >= (10, 5)
+ if arch in groups:
+ for garch in groups[arch]:
+ if _supports_arch(major, minor, garch):
+ return True
+ return False
+
+ groups = OrderedDict([
+ ("fat", ("i386", "ppc")),
+ ("intel", ("x86_64", "i386")),
+ ("fat64", ("x86_64", "ppc64")),
+ ("fat32", ("x86_64", "i386", "ppc")),
+ ])
+
+ if _supports_arch(major, minor, machine):
+ arches.append(machine)
+
+ for garch in groups:
+ if machine in groups[garch] and _supports_arch(major, minor, garch):
+ arches.append(garch)
+
+ arches.append('universal')
+
+ return arches
+
+
+def get_supported(versions=None, noarch=False, platform=None,
+ impl=None, abi=None):
+ """Return a list of supported tags for each version specified in
+ `versions`.
+
+ :param versions: a list of string versions, of the form ["33", "32"],
+ or None. The first version will be assumed to support our ABI.
+ :param platform: specify the exact platform you want valid
+ tags for, or None. If None, use the local system platform.
+ :param impl: specify the exact implementation you want valid
+ tags for, or None. If None, use the local interpreter impl.
+ :param abi: specify the exact abi you want valid
+ tags for, or None. If None, use the local interpreter abi.
+ """
+ supported = []
+
+ # Versions must be given with respect to the preference
+ if versions is None:
+ versions = []
+ version_info = get_impl_version_info()
+ major = version_info[:-1]
+ # Support all previous minor Python versions.
+ for minor in range(version_info[-1], -1, -1):
+ versions.append(''.join(map(str, major + (minor,))))
+
+ impl = impl or get_abbr_impl()
+
+ abis = []
+
+ abi = abi or get_abi_tag()
+ if abi:
+ abis[0:0] = [abi]
+
+ abi3s = set()
+ import imp
+ for suffix in imp.get_suffixes():
+ if suffix[0].startswith('.abi'):
+ abi3s.add(suffix[0].split('.', 2)[1])
+
+ abis.extend(sorted(list(abi3s)))
+
+ abis.append('none')
+
+ if not noarch:
+ arch = platform or get_platform()
+ if arch.startswith('macosx'):
+ # support macosx-10.6-intel on macosx-10.9-x86_64
+ match = _osx_arch_pat.match(arch)
+ if match:
+ name, major, minor, actual_arch = match.groups()
+ tpl = '{}_{}_%i_%s'.format(name, major)
+ arches = []
+ for m in reversed(range(int(minor) + 1)):
+ for a in get_darwin_arches(int(major), m, actual_arch):
+ arches.append(tpl % (m, a))
+ else:
+ # arch pattern didn't match (?!)
+ arches = [arch]
+ elif platform is None and is_manylinux1_compatible():
+ arches = [arch.replace('linux', 'manylinux1'), arch]
+ else:
+ arches = [arch]
+
+ # Current version, current API (built specifically for our Python):
+ for abi in abis:
+ for arch in arches:
+ supported.append(('%s%s' % (impl, versions[0]), abi, arch))
+
+    # abi3 modules compatible with older versions of Python
+ for version in versions[1:]:
+ # abi3 was introduced in Python 3.2
+ if version in {'31', '30'}:
+ break
+ for abi in abi3s: # empty set if not Python 3
+ for arch in arches:
+ supported.append(("%s%s" % (impl, version), abi, arch))
+
+ # Has binaries, does not use the Python API:
+ for arch in arches:
+ supported.append(('py%s' % (versions[0][0]), 'none', arch))
+
+ # No abi / arch, but requires our implementation:
+ supported.append(('%s%s' % (impl, versions[0]), 'none', 'any'))
+ # Tagged specifically as being cross-version compatible
+ # (with just the major version specified)
+ supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))
+
+ # No abi / arch, generic Python
+ for i, version in enumerate(versions):
+ supported.append(('py%s' % (version,), 'none', 'any'))
+ if i == 0:
+ supported.append(('py%s' % (version[0]), 'none', 'any'))
+
+ return supported
+
+
+implementation_tag = get_impl_tag()
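+
+# Editor's usage sketch: the tag list is ordered most- to least-specific, so
+# a wheel matcher can take the first hit. Output is platform-dependent; the
+# value shown is only what a CPython 3.6 manylinux build might print:
+#
+#     >>> get_supported()[0]            # doctest: +SKIP
+#     ('cp36', 'cp36m', 'manylinux1_x86_64')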
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/req/__init__.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/req/__init__.py
index c9b4c3c..07ae607 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/req/__init__.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/req/__init__.py
@@ -1,69 +1,69 @@
-from __future__ import absolute_import
-
-import logging
-
-from .req_install import InstallRequirement
-from .req_set import RequirementSet
-from .req_file import parse_requirements
-from pip._internal.utils.logging import indent_log
-
-
-__all__ = [
- "RequirementSet", "InstallRequirement",
- "parse_requirements", "install_given_reqs",
-]
-
-logger = logging.getLogger(__name__)
-
-
-def install_given_reqs(to_install, install_options, global_options=(),
- *args, **kwargs):
- """
- Install everything in the given list.
-
- (to be called after having downloaded and unpacked the packages)
- """
-
- if to_install:
- logger.info(
- 'Installing collected packages: %s',
- ', '.join([req.name for req in to_install]),
- )
-
- with indent_log():
- for requirement in to_install:
- if requirement.conflicts_with:
- logger.info(
- 'Found existing installation: %s',
- requirement.conflicts_with,
- )
- with indent_log():
- uninstalled_pathset = requirement.uninstall(
- auto_confirm=True
- )
- try:
- requirement.install(
- install_options,
- global_options,
- *args,
- **kwargs
- )
- except:
- should_rollback = (
- requirement.conflicts_with and
- not requirement.install_succeeded
- )
- # if install did not succeed, rollback previous uninstall
- if should_rollback:
- uninstalled_pathset.rollback()
- raise
- else:
- should_commit = (
- requirement.conflicts_with and
- requirement.install_succeeded
- )
- if should_commit:
- uninstalled_pathset.commit()
- requirement.remove_temporary_source()
-
- return to_install
+from __future__ import absolute_import
+
+import logging
+
+from .req_install import InstallRequirement
+from .req_set import RequirementSet
+from .req_file import parse_requirements
+from pip._internal.utils.logging import indent_log
+
+
+__all__ = [
+ "RequirementSet", "InstallRequirement",
+ "parse_requirements", "install_given_reqs",
+]
+
+logger = logging.getLogger(__name__)
+
+
+def install_given_reqs(to_install, install_options, global_options=(),
+ *args, **kwargs):
+ """
+ Install everything in the given list.
+
+ (to be called after having downloaded and unpacked the packages)
+ """
+
+ if to_install:
+ logger.info(
+ 'Installing collected packages: %s',
+ ', '.join([req.name for req in to_install]),
+ )
+
+ with indent_log():
+ for requirement in to_install:
+ if requirement.conflicts_with:
+ logger.info(
+ 'Found existing installation: %s',
+ requirement.conflicts_with,
+ )
+ with indent_log():
+ uninstalled_pathset = requirement.uninstall(
+ auto_confirm=True
+ )
+ try:
+ requirement.install(
+ install_options,
+ global_options,
+ *args,
+ **kwargs
+ )
+ except:
+ should_rollback = (
+ requirement.conflicts_with and
+ not requirement.install_succeeded
+ )
+                    # if install did not succeed, roll back the previous uninstall
+ if should_rollback:
+ uninstalled_pathset.rollback()
+ raise
+ else:
+ should_commit = (
+ requirement.conflicts_with and
+ requirement.install_succeeded
+ )
+ if should_commit:
+ uninstalled_pathset.commit()
+ requirement.remove_temporary_source()
+
+ return to_install
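+
+# Editor's note: the try/except/else in install_given_reqs is a small
+# transaction pattern -- roll back the earlier uninstall if the replacement
+# install fails, commit it once the install succeeds. A generic sketch:
+#
+#     >>> def transact(do, rollback, commit):
+#     ...     try:
+#     ...         do()
+#     ...     except Exception:
+#     ...         rollback()
+#     ...         raise
+#     ...     else:
+#     ...         commit()
+#     >>> transact(lambda: None, lambda: None, lambda: print('committed'))
+#     committed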
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/req/req_file.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/req/req_file.py
index f868497..9e6ef41 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/req/req_file.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/req/req_file.py
@@ -1,338 +1,338 @@
-"""
-Requirements file parsing
-"""
-
-from __future__ import absolute_import
-
-import optparse
-import os
-import re
-import shlex
-import sys
-
-from pip._vendor.six.moves import filterfalse
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-
-from pip._internal import cmdoptions
-from pip._internal.download import get_file_content
-from pip._internal.exceptions import RequirementsFileParseError
-from pip._internal.req.req_install import InstallRequirement
-
-__all__ = ['parse_requirements']
-
-SCHEME_RE = re.compile(r'^(http|https|file):', re.I)
-COMMENT_RE = re.compile(r'(^|\s)+#.*$')
-
-# Matches environment variable-style values in '${MY_VARIABLE_1}' with the
-# variable name consisting of only uppercase letters, digits or the '_'
-# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1,
-# 2013 Edition.
-ENV_VAR_RE = re.compile(r'(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})')
-
-SUPPORTED_OPTIONS = [
- cmdoptions.constraints,
- cmdoptions.editable,
- cmdoptions.requirements,
- cmdoptions.no_index,
- cmdoptions.index_url,
- cmdoptions.find_links,
- cmdoptions.extra_index_url,
- cmdoptions.always_unzip,
- cmdoptions.no_binary,
- cmdoptions.only_binary,
- cmdoptions.pre,
- cmdoptions.process_dependency_links,
- cmdoptions.trusted_host,
- cmdoptions.require_hashes,
-]
-
-# options to be passed to requirements
-SUPPORTED_OPTIONS_REQ = [
- cmdoptions.install_options,
- cmdoptions.global_options,
- cmdoptions.hash,
-]
-
-# the 'dest' string values
-SUPPORTED_OPTIONS_REQ_DEST = [o().dest for o in SUPPORTED_OPTIONS_REQ]
-
-
-def parse_requirements(filename, finder=None, comes_from=None, options=None,
- session=None, constraint=False, wheel_cache=None):
- """Parse a requirements file and yield InstallRequirement instances.
-
- :param filename: Path or url of requirements file.
- :param finder: Instance of pip.index.PackageFinder.
- :param comes_from: Origin description of requirements.
- :param options: cli options.
- :param session: Instance of pip.download.PipSession.
- :param constraint: If true, parsing a constraint file rather than
- requirements file.
- :param wheel_cache: Instance of pip.wheel.WheelCache
- """
- if session is None:
- raise TypeError(
- "parse_requirements() missing 1 required keyword argument: "
- "'session'"
- )
-
- _, content = get_file_content(
- filename, comes_from=comes_from, session=session
- )
-
- lines_enum = preprocess(content, options)
-
- for line_number, line in lines_enum:
- req_iter = process_line(line, filename, line_number, finder,
- comes_from, options, session, wheel_cache,
- constraint=constraint)
- for req in req_iter:
- yield req
-
-
-def preprocess(content, options):
- """Split, filter, and join lines, and return a line iterator
-
- :param content: the content of the requirements file
- :param options: cli options
- """
- lines_enum = enumerate(content.splitlines(), start=1)
- lines_enum = join_lines(lines_enum)
- lines_enum = ignore_comments(lines_enum)
- lines_enum = skip_regex(lines_enum, options)
- lines_enum = expand_env_variables(lines_enum)
- return lines_enum
-
-
-def process_line(line, filename, line_number, finder=None, comes_from=None,
- options=None, session=None, wheel_cache=None,
- constraint=False):
- """Process a single requirements line; This can result in creating/yielding
- requirements, or updating the finder.
-
- For lines that contain requirements, the only options that have an effect
- are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
- requirement. Other options from SUPPORTED_OPTIONS may be present, but are
- ignored.
-
- For lines that do not contain requirements, the only options that have an
- effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
- be present, but are ignored. These lines may contain multiple options
- (although our docs imply only one is supported), and all our parsed and
- affect the finder.
-
- :param constraint: If True, parsing a constraints file.
- :param options: OptionParser options that we may update
- """
- parser = build_parser(line)
- defaults = parser.get_default_values()
- defaults.index_url = None
- if finder:
- # `finder.format_control` will be updated during parsing
- defaults.format_control = finder.format_control
- args_str, options_str = break_args_options(line)
- if sys.version_info < (2, 7, 3):
- # Prior to 2.7.3, shlex cannot deal with unicode entries
- options_str = options_str.encode('utf8')
- opts, _ = parser.parse_args(shlex.split(options_str), defaults)
-
- # preserve for the nested code path
- line_comes_from = '%s %s (line %s)' % (
- '-c' if constraint else '-r', filename, line_number,
- )
-
- # yield a line requirement
- if args_str:
- isolated = options.isolated_mode if options else False
- if options:
- cmdoptions.check_install_build_global(options, opts)
- # get the options that apply to requirements
- req_options = {}
- for dest in SUPPORTED_OPTIONS_REQ_DEST:
- if dest in opts.__dict__ and opts.__dict__[dest]:
- req_options[dest] = opts.__dict__[dest]
- yield InstallRequirement.from_line(
- args_str, line_comes_from, constraint=constraint,
- isolated=isolated, options=req_options, wheel_cache=wheel_cache
- )
-
- # yield an editable requirement
- elif opts.editables:
- isolated = options.isolated_mode if options else False
- yield InstallRequirement.from_editable(
- opts.editables[0], comes_from=line_comes_from,
- constraint=constraint, isolated=isolated, wheel_cache=wheel_cache
- )
-
- # parse a nested requirements file
- elif opts.requirements or opts.constraints:
- if opts.requirements:
- req_path = opts.requirements[0]
- nested_constraint = False
- else:
- req_path = opts.constraints[0]
- nested_constraint = True
- # original file is over http
- if SCHEME_RE.search(filename):
- # do a url join so relative paths work
- req_path = urllib_parse.urljoin(filename, req_path)
- # original file and nested file are paths
- elif not SCHEME_RE.search(req_path):
- # do a join so relative paths work
- req_path = os.path.join(os.path.dirname(filename), req_path)
- # TODO: Why not use `comes_from='-r {} (line {})'` here as well?
- parser = parse_requirements(
- req_path, finder, comes_from, options, session,
- constraint=nested_constraint, wheel_cache=wheel_cache
- )
- for req in parser:
- yield req
-
- # percolate hash-checking option upward
- elif opts.require_hashes:
- options.require_hashes = opts.require_hashes
-
- # set finder options
- elif finder:
- if opts.index_url:
- finder.index_urls = [opts.index_url]
- if opts.no_index is True:
- finder.index_urls = []
- if opts.extra_index_urls:
- finder.index_urls.extend(opts.extra_index_urls)
- if opts.find_links:
- # FIXME: it would be nice to keep track of the source
- # of the find_links: support a find-links local path
- # relative to a requirements file.
- value = opts.find_links[0]
- req_dir = os.path.dirname(os.path.abspath(filename))
- relative_to_reqs_file = os.path.join(req_dir, value)
- if os.path.exists(relative_to_reqs_file):
- value = relative_to_reqs_file
- finder.find_links.append(value)
- if opts.pre:
- finder.allow_all_prereleases = True
- if opts.process_dependency_links:
- finder.process_dependency_links = True
- if opts.trusted_hosts:
- finder.secure_origins.extend(
- ("*", host, "*") for host in opts.trusted_hosts)
-
-
-def break_args_options(line):
- """Break up the line into an args and options string. We only want to shlex
- (and then optparse) the options, not the args. args can contain markers
- which are corrupted by shlex.
- """
- tokens = line.split(' ')
- args = []
- options = tokens[:]
- for token in tokens:
- if token.startswith('-') or token.startswith('--'):
- break
- else:
- args.append(token)
- options.pop(0)
- return ' '.join(args), ' '.join(options)
-
-
-def build_parser(line):
- """
- Return a parser for parsing requirement lines
- """
- parser = optparse.OptionParser(add_help_option=False)
-
- option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ
- for option_factory in option_factories:
- option = option_factory()
- parser.add_option(option)
-
- # By default optparse sys.exits on parsing errors. We want to wrap
- # that in our own exception.
- def parser_exit(self, msg):
- # add offending line
- msg = 'Invalid requirement: %s\n%s' % (line, msg)
- raise RequirementsFileParseError(msg)
- parser.exit = parser_exit
-
- return parser
-
-
-def join_lines(lines_enum):
- """Joins a line ending in '\' with the previous line (except when following
- comments). The joined line takes on the index of the first line.
- """
- primary_line_number = None
- new_line = []
- for line_number, line in lines_enum:
- if not line.endswith('\\') or COMMENT_RE.match(line):
- if COMMENT_RE.match(line):
- # this ensures comments are always matched later
- line = ' ' + line
- if new_line:
- new_line.append(line)
- yield primary_line_number, ''.join(new_line)
- new_line = []
- else:
- yield line_number, line
- else:
- if not new_line:
- primary_line_number = line_number
- new_line.append(line.strip('\\'))
-
- # last line contains \
- if new_line:
- yield primary_line_number, ''.join(new_line)
-
- # TODO: handle space after '\'.
-
-
-def ignore_comments(lines_enum):
- """
- Strips comments and filter empty lines.
- """
- for line_number, line in lines_enum:
- line = COMMENT_RE.sub('', line)
- line = line.strip()
- if line:
- yield line_number, line
-
-
-def skip_regex(lines_enum, options):
- """
- Skip lines that match '--skip-requirements-regex' pattern
-
- Note: the regex pattern is only built once
- """
- skip_regex = options.skip_requirements_regex if options else None
- if skip_regex:
- pattern = re.compile(skip_regex)
- lines_enum = filterfalse(lambda e: pattern.search(e[1]), lines_enum)
- return lines_enum
-
-
-def expand_env_variables(lines_enum):
- """Replace all environment variables that can be retrieved via `os.getenv`.
-
- The only allowed format for environment variables defined in the
- requirement file is `${MY_VARIABLE_1}` to ensure two things:
-
- 1. Strings that contain a `$` aren't accidentally (partially) expanded.
- 2. Ensure consistency across platforms for requirement files.
-
-    These points are the result of a discussion on the `github pull
-    request #3514 <https://github.com/pypa/pip/pull/3514>`_.
-
-    Valid characters in variable names follow the `POSIX standard
-    <http://pubs.opengroup.org/onlinepubs/9699919799/>`_ and are limited
- to uppercase letter, digits and the `_` (underscore).
- """
- for line_number, line in lines_enum:
- for env_var, var_name in ENV_VAR_RE.findall(line):
- value = os.getenv(var_name)
- if not value:
- continue
-
- line = line.replace(env_var, value)
-
- yield line_number, line
+"""
+Requirements file parsing
+"""
+
+from __future__ import absolute_import
+
+import optparse
+import os
+import re
+import shlex
+import sys
+
+from pip._vendor.six.moves import filterfalse
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+
+from pip._internal import cmdoptions
+from pip._internal.download import get_file_content
+from pip._internal.exceptions import RequirementsFileParseError
+from pip._internal.req.req_install import InstallRequirement
+
+__all__ = ['parse_requirements']
+
+SCHEME_RE = re.compile(r'^(http|https|file):', re.I)
+COMMENT_RE = re.compile(r'(^|\s)+#.*$')
+
+# Matches environment variable-style values in '${MY_VARIABLE_1}' with the
+# variable name consisting of only uppercase letters, digits or the '_'
+# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1,
+# 2013 Edition.
+ENV_VAR_RE = re.compile(r'(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})')
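+
+# Editor's note: findall() on this pattern yields (reference, name) pairs --
+# e.g. (illustrative only):
+#
+#     >>> ENV_VAR_RE.findall('pkg @ ${INDEX_URL}/pkg.tar.gz')
+#     [('${INDEX_URL}', 'INDEX_URL')]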
+
+SUPPORTED_OPTIONS = [
+ cmdoptions.constraints,
+ cmdoptions.editable,
+ cmdoptions.requirements,
+ cmdoptions.no_index,
+ cmdoptions.index_url,
+ cmdoptions.find_links,
+ cmdoptions.extra_index_url,
+ cmdoptions.always_unzip,
+ cmdoptions.no_binary,
+ cmdoptions.only_binary,
+ cmdoptions.pre,
+ cmdoptions.process_dependency_links,
+ cmdoptions.trusted_host,
+ cmdoptions.require_hashes,
+]
+
+# options to be passed to requirements
+SUPPORTED_OPTIONS_REQ = [
+ cmdoptions.install_options,
+ cmdoptions.global_options,
+ cmdoptions.hash,
+]
+
+# the 'dest' string values
+SUPPORTED_OPTIONS_REQ_DEST = [o().dest for o in SUPPORTED_OPTIONS_REQ]
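+# (for the options above these are 'install_options', 'global_options'
+# and 'hashes')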
+
+
+def parse_requirements(filename, finder=None, comes_from=None, options=None,
+ session=None, constraint=False, wheel_cache=None):
+ """Parse a requirements file and yield InstallRequirement instances.
+
+ :param filename: Path or url of requirements file.
+ :param finder: Instance of pip.index.PackageFinder.
+ :param comes_from: Origin description of requirements.
+ :param options: cli options.
+ :param session: Instance of pip.download.PipSession.
+ :param constraint: If true, parsing a constraint file rather than
+ requirements file.
+ :param wheel_cache: Instance of pip.wheel.WheelCache
+ """
+ if session is None:
+ raise TypeError(
+ "parse_requirements() missing 1 required keyword argument: "
+ "'session'"
+ )
+
+ _, content = get_file_content(
+ filename, comes_from=comes_from, session=session
+ )
+
+ lines_enum = preprocess(content, options)
+
+ for line_number, line in lines_enum:
+ req_iter = process_line(line, filename, line_number, finder,
+ comes_from, options, session, wheel_cache,
+ constraint=constraint)
+ for req in req_iter:
+ yield req
+
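+# A minimal usage sketch (the file name is hypothetical; a session is
+# mandatory, as enforced above):
+#
+#     from pip._internal.download import PipSession
+#     for req in parse_requirements('requirements.txt', session=PipSession()):
+#         print(req.name)
+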
+
+def preprocess(content, options):
+ """Split, filter, and join lines, and return a line iterator
+
+ :param content: the content of the requirements file
+ :param options: cli options
+ """
+ lines_enum = enumerate(content.splitlines(), start=1)
+ lines_enum = join_lines(lines_enum)
+ lines_enum = ignore_comments(lines_enum)
+ lines_enum = skip_regex(lines_enum, options)
+ lines_enum = expand_env_variables(lines_enum)
+ return lines_enum
+
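+# For example (hypothetical content): the two physical lines
+#     'INITools \'  and  '==0.2  # pinned'
+# are joined, stripped of the comment, and come back as the single logical
+# line (1, 'INITools ==0.2').
+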
+
+def process_line(line, filename, line_number, finder=None, comes_from=None,
+ options=None, session=None, wheel_cache=None,
+ constraint=False):
+    """Process a single requirements line; this can result in creating/yielding
+ requirements, or updating the finder.
+
+ For lines that contain requirements, the only options that have an effect
+ are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
+ requirement. Other options from SUPPORTED_OPTIONS may be present, but are
+ ignored.
+
+ For lines that do not contain requirements, the only options that have an
+ effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
+ be present, but are ignored. These lines may contain multiple options
+    (although our docs imply only one is supported), and all are parsed and
+ affect the finder.
+
+ :param constraint: If True, parsing a constraints file.
+ :param options: OptionParser options that we may update
+ """
+ parser = build_parser(line)
+ defaults = parser.get_default_values()
+ defaults.index_url = None
+ if finder:
+ # `finder.format_control` will be updated during parsing
+ defaults.format_control = finder.format_control
+ args_str, options_str = break_args_options(line)
+ if sys.version_info < (2, 7, 3):
+ # Prior to 2.7.3, shlex cannot deal with unicode entries
+ options_str = options_str.encode('utf8')
+ opts, _ = parser.parse_args(shlex.split(options_str), defaults)
+
+ # preserve for the nested code path
+ line_comes_from = '%s %s (line %s)' % (
+ '-c' if constraint else '-r', filename, line_number,
+ )
+
+ # yield a line requirement
+ if args_str:
+ isolated = options.isolated_mode if options else False
+ if options:
+ cmdoptions.check_install_build_global(options, opts)
+ # get the options that apply to requirements
+ req_options = {}
+ for dest in SUPPORTED_OPTIONS_REQ_DEST:
+ if dest in opts.__dict__ and opts.__dict__[dest]:
+ req_options[dest] = opts.__dict__[dest]
+ yield InstallRequirement.from_line(
+ args_str, line_comes_from, constraint=constraint,
+ isolated=isolated, options=req_options, wheel_cache=wheel_cache
+ )
+
+ # yield an editable requirement
+ elif opts.editables:
+ isolated = options.isolated_mode if options else False
+ yield InstallRequirement.from_editable(
+ opts.editables[0], comes_from=line_comes_from,
+ constraint=constraint, isolated=isolated, wheel_cache=wheel_cache
+ )
+
+ # parse a nested requirements file
+ elif opts.requirements or opts.constraints:
+ if opts.requirements:
+ req_path = opts.requirements[0]
+ nested_constraint = False
+ else:
+ req_path = opts.constraints[0]
+ nested_constraint = True
+ # original file is over http
+ if SCHEME_RE.search(filename):
+ # do a url join so relative paths work
+ req_path = urllib_parse.urljoin(filename, req_path)
+ # original file and nested file are paths
+ elif not SCHEME_RE.search(req_path):
+ # do a join so relative paths work
+ req_path = os.path.join(os.path.dirname(filename), req_path)
+ # TODO: Why not use `comes_from='-r {} (line {})'` here as well?
+ parser = parse_requirements(
+ req_path, finder, comes_from, options, session,
+ constraint=nested_constraint, wheel_cache=wheel_cache
+ )
+ for req in parser:
+ yield req
+
+ # percolate hash-checking option upward
+ elif opts.require_hashes:
+ options.require_hashes = opts.require_hashes
+
+ # set finder options
+ elif finder:
+ if opts.index_url:
+ finder.index_urls = [opts.index_url]
+ if opts.no_index is True:
+ finder.index_urls = []
+ if opts.extra_index_urls:
+ finder.index_urls.extend(opts.extra_index_urls)
+ if opts.find_links:
+ # FIXME: it would be nice to keep track of the source
+ # of the find_links: support a find-links local path
+ # relative to a requirements file.
+ value = opts.find_links[0]
+ req_dir = os.path.dirname(os.path.abspath(filename))
+ relative_to_reqs_file = os.path.join(req_dir, value)
+ if os.path.exists(relative_to_reqs_file):
+ value = relative_to_reqs_file
+ finder.find_links.append(value)
+ if opts.pre:
+ finder.allow_all_prereleases = True
+ if opts.process_dependency_links:
+ finder.process_dependency_links = True
+ if opts.trusted_hosts:
+ finder.secure_origins.extend(
+ ("*", host, "*") for host in opts.trusted_hosts)
+
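+# For example (illustrative lines): '--extra-index-url https://example.com/simple'
+# only updates the finder and yields nothing, while 'INITools==0.2 --hash=sha256:...'
+# yields one InstallRequirement carrying that hash as a per-requirement option.
+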
+
+def break_args_options(line):
+ """Break up the line into an args and options string. We only want to shlex
+ (and then optparse) the options, not the args. args can contain markers
+ which are corrupted by shlex.
+ """
+ tokens = line.split(' ')
+ args = []
+ options = tokens[:]
+ for token in tokens:
+ if token.startswith('-') or token.startswith('--'):
+ break
+ else:
+ args.append(token)
+ options.pop(0)
+ return ' '.join(args), ' '.join(options)
+
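+# For example: break_args_options('INITools==0.2 --hash=sha256:abcd')
+# returns ('INITools==0.2', '--hash=sha256:abcd').
+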
+
+def build_parser(line):
+ """
+ Return a parser for parsing requirement lines
+ """
+ parser = optparse.OptionParser(add_help_option=False)
+
+ option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ
+ for option_factory in option_factories:
+ option = option_factory()
+ parser.add_option(option)
+
+ # By default optparse sys.exits on parsing errors. We want to wrap
+ # that in our own exception.
+ def parser_exit(self, msg):
+ # add offending line
+ msg = 'Invalid requirement: %s\n%s' % (line, msg)
+ raise RequirementsFileParseError(msg)
+ parser.exit = parser_exit
+
+ return parser
+
+
+def join_lines(lines_enum):
+    """Joins a line ending in '\' with the line that follows it (a comment
+    ending in '\' is not joined). The joined line takes on the index of the
+    first line.
+ """
+ primary_line_number = None
+ new_line = []
+ for line_number, line in lines_enum:
+ if not line.endswith('\\') or COMMENT_RE.match(line):
+ if COMMENT_RE.match(line):
+ # this ensures comments are always matched later
+ line = ' ' + line
+ if new_line:
+ new_line.append(line)
+ yield primary_line_number, ''.join(new_line)
+ new_line = []
+ else:
+ yield line_number, line
+ else:
+ if not new_line:
+ primary_line_number = line_number
+ new_line.append(line.strip('\\'))
+
+ # last line contains \
+ if new_line:
+ yield primary_line_number, ''.join(new_line)
+
+ # TODO: handle space after '\'.
+
+
+def ignore_comments(lines_enum):
+ """
+    Strips comments and filters out empty lines.
+ """
+ for line_number, line in lines_enum:
+ line = COMMENT_RE.sub('', line)
+ line = line.strip()
+ if line:
+ yield line_number, line
+
+
+def skip_regex(lines_enum, options):
+ """
+ Skip lines that match '--skip-requirements-regex' pattern
+
+ Note: the regex pattern is only built once
+ """
+ skip_regex = options.skip_requirements_regex if options else None
+ if skip_regex:
+ pattern = re.compile(skip_regex)
+ lines_enum = filterfalse(lambda e: pattern.search(e[1]), lines_enum)
+ return lines_enum
+
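+# For example (hypothetical pattern): with --skip-requirements-regex='^-e',
+# every editable line such as '-e ./local/pkg' is filtered out of the stream.
+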
+
+def expand_env_variables(lines_enum):
+ """Replace all environment variables that can be retrieved via `os.getenv`.
+
+ The only allowed format for environment variables defined in the
+ requirement file is `${MY_VARIABLE_1}` to ensure two things:
+
+ 1. Strings that contain a `$` aren't accidentally (partially) expanded.
+ 2. Ensure consistency across platforms for requirement files.
+
+    These points are the result of a discussion on the `github pull
+    request #3514 <https://github.com/pypa/pip/pull/3514>`_.
+
+    Valid characters in variable names follow the `POSIX standard
+    <http://pubs.opengroup.org/onlinepubs/9699919799/>`_ and are limited
+    to uppercase letters, digits and the `_` (underscore).
+ """
+ for line_number, line in lines_enum:
+ for env_var, var_name in ENV_VAR_RE.findall(line):
+ value = os.getenv(var_name)
+ if not value:
+ continue
+
+ line = line.replace(env_var, value)
+
+ yield line_number, line
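+
+# For example (illustrative variable): with MY_TOKEN=s3cret in the environment,
+# 'https://${MY_TOKEN}@pypi.example.com/simple' becomes
+# 'https://s3cret@pypi.example.com/simple'; variables that are unset (or empty)
+# are left in place verbatim.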
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/req/req_install.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/req/req_install.py
index ddd167c..9dd1523 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/req/req_install.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/req/req_install.py
@@ -1,1115 +1,1115 @@
-from __future__ import absolute_import
-
-import logging
-import os
-import re
-import shutil
-import sys
-import sysconfig
-import traceback
-import warnings
-import zipfile
-from distutils.util import change_root
-from email.parser import FeedParser # type: ignore
-
-from pip._vendor import pkg_resources, pytoml, six
-from pip._vendor.packaging import specifiers
-from pip._vendor.packaging.markers import Marker
-from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
-from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.packaging.version import parse as parse_version
-from pip._vendor.packaging.version import Version
-from pip._vendor.pkg_resources import RequirementParseError, parse_requirements
-
-from pip._internal import wheel
-from pip._internal.build_env import BuildEnvironment
-from pip._internal.compat import native_str
-from pip._internal.download import (
- is_archive_file, is_url, path_to_url, url_to_path,
-)
-from pip._internal.exceptions import InstallationError, UninstallationError
-from pip._internal.locations import (
- PIP_DELETE_MARKER_FILENAME, running_under_virtualenv,
-)
-from pip._internal.req.req_uninstall import UninstallPathSet
-from pip._internal.utils.deprecation import RemovedInPip11Warning
-from pip._internal.utils.hashes import Hashes
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import (
- _make_build_dir, ask_path_exists, backup_dir, call_subprocess,
- display_path, dist_in_site_packages, dist_in_usersite, ensure_dir,
- get_installed_version, is_installable_dir, read_text_file, rmtree,
-)
-from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.utils.ui import open_spinner
-from pip._internal.vcs import vcs
-from pip._internal.wheel import Wheel, move_wheel_files
-
-logger = logging.getLogger(__name__)
-
-operators = specifiers.Specifier._operators.keys()
-
-
-def _strip_extras(path):
- m = re.match(r'^(.+)(\[[^\]]+\])$', path)
- extras = None
- if m:
- path_no_extras = m.group(1)
- extras = m.group(2)
- else:
- path_no_extras = path
-
- return path_no_extras, extras
-
-
-class InstallRequirement(object):
- """
-    Represents something that may be installed later on, may have information
-    about where to fetch the relevant requirement, and also contains logic for
-    installing said requirement.
- """
-
- def __init__(self, req, comes_from, source_dir=None, editable=False,
- link=None, update=True, markers=None,
- isolated=False, options=None, wheel_cache=None,
- constraint=False, extras=()):
- assert req is None or isinstance(req, Requirement), req
- self.req = req
- self.comes_from = comes_from
- self.constraint = constraint
- if source_dir is not None:
- self.source_dir = os.path.normpath(os.path.abspath(source_dir))
- else:
- self.source_dir = None
- self.editable = editable
-
- self._wheel_cache = wheel_cache
- if link is not None:
- self.link = self.original_link = link
- else:
- from pip._internal.index import Link
- self.link = self.original_link = req and req.url and Link(req.url)
-
- if extras:
- self.extras = extras
- elif req:
- self.extras = {
- pkg_resources.safe_extra(extra) for extra in req.extras
- }
- else:
- self.extras = set()
- if markers is not None:
- self.markers = markers
- else:
- self.markers = req and req.marker
- self._egg_info_path = None
- # This holds the pkg_resources.Distribution object if this requirement
- # is already available:
- self.satisfied_by = None
-        # This holds the pkg_resources.Distribution object if this requirement
- # conflicts with another installed distribution:
- self.conflicts_with = None
- # Temporary build location
- self._temp_build_dir = TempDirectory(kind="req-build")
- # Used to store the global directory where the _temp_build_dir should
- # have been created. Cf _correct_build_location method.
- self._ideal_build_dir = None
- # True if the editable should be updated:
- self.update = update
- # Set to True after successful installation
- self.install_succeeded = None
- # UninstallPathSet of uninstalled distribution (for possible rollback)
- self.uninstalled_pathset = None
- self.options = options if options else {}
- # Set to True after successful preparation of this requirement
- self.prepared = False
- self.is_direct = False
-
- self.isolated = isolated
- self.build_env = BuildEnvironment(no_clean=True)
-
- @classmethod
- def from_editable(cls, editable_req, comes_from=None, isolated=False,
- options=None, wheel_cache=None, constraint=False):
- from pip._internal.index import Link
-
- name, url, extras_override = parse_editable(editable_req)
- if url.startswith('file:'):
- source_dir = url_to_path(url)
- else:
- source_dir = None
-
- if name is not None:
- try:
- req = Requirement(name)
- except InvalidRequirement:
- raise InstallationError("Invalid requirement: '%s'" % name)
- else:
- req = None
- return cls(
- req, comes_from, source_dir=source_dir,
- editable=True,
- link=Link(url),
- constraint=constraint,
- isolated=isolated,
- options=options if options else {},
- wheel_cache=wheel_cache,
- extras=extras_override or (),
- )
-
- @classmethod
- def from_req(cls, req, comes_from=None, isolated=False, wheel_cache=None):
- try:
- req = Requirement(req)
- except InvalidRequirement:
- raise InstallationError("Invalid requirement: '%s'" % req)
- if req.url:
- raise InstallationError(
-                "Direct url requirements (like %s) are not allowed for "
- "dependencies" % req
- )
- return cls(req, comes_from, isolated=isolated, wheel_cache=wheel_cache)
-
- @classmethod
- def from_line(
- cls, name, comes_from=None, isolated=False, options=None,
- wheel_cache=None, constraint=False):
- """Creates an InstallRequirement from a name, which might be a
- requirement, directory containing 'setup.py', filename, or URL.
- """
- from pip._internal.index import Link
-
- if is_url(name):
- marker_sep = '; '
- else:
- marker_sep = ';'
- if marker_sep in name:
- name, markers = name.split(marker_sep, 1)
- markers = markers.strip()
- if not markers:
- markers = None
- else:
- markers = Marker(markers)
- else:
- markers = None
- name = name.strip()
- req = None
- path = os.path.normpath(os.path.abspath(name))
- link = None
- extras = None
-
- if is_url(name):
- link = Link(name)
- else:
- p, extras = _strip_extras(path)
- looks_like_dir = os.path.isdir(p) and (
- os.path.sep in name or
- (os.path.altsep is not None and os.path.altsep in name) or
- name.startswith('.')
- )
- if looks_like_dir:
- if not is_installable_dir(p):
- raise InstallationError(
- "Directory %r is not installable. File 'setup.py' "
- "not found." % name
- )
- link = Link(path_to_url(p))
- elif is_archive_file(p):
- if not os.path.isfile(p):
- logger.warning(
- 'Requirement %r looks like a filename, but the '
- 'file does not exist',
- name
- )
- link = Link(path_to_url(p))
-
- # it's a local file, dir, or url
- if link:
- # Handle relative file URLs
- if link.scheme == 'file' and re.search(r'\.\./', link.url):
- link = Link(
- path_to_url(os.path.normpath(os.path.abspath(link.path))))
- # wheel file
- if link.is_wheel:
- wheel = Wheel(link.filename) # can raise InvalidWheelFilename
- req = "%s==%s" % (wheel.name, wheel.version)
- else:
- # set the req to the egg fragment. when it's not there, this
- # will become an 'unnamed' requirement
- req = link.egg_fragment
-
- # a requirement specifier
- else:
- req = name
-
- if extras:
- extras = Requirement("placeholder" + extras.lower()).extras
- else:
- extras = ()
- if req is not None:
- try:
- req = Requirement(req)
- except InvalidRequirement:
- if os.path.sep in req:
- add_msg = "It looks like a path."
- add_msg += deduce_helpful_msg(req)
- elif '=' in req and not any(op in req for op in operators):
- add_msg = "= is not a valid operator. Did you mean == ?"
- else:
- add_msg = traceback.format_exc()
- raise InstallationError(
- "Invalid requirement: '%s'\n%s" % (req, add_msg))
- return cls(
- req, comes_from, link=link, markers=markers,
- isolated=isolated,
- options=options if options else {},
- wheel_cache=wheel_cache,
- constraint=constraint,
- extras=extras,
- )
-
- def __str__(self):
- if self.req:
- s = str(self.req)
- if self.link:
- s += ' from %s' % self.link.url
- else:
- s = self.link.url if self.link else None
- if self.satisfied_by is not None:
- s += ' in %s' % display_path(self.satisfied_by.location)
- if self.comes_from:
- if isinstance(self.comes_from, six.string_types):
- comes_from = self.comes_from
- else:
- comes_from = self.comes_from.from_path()
- if comes_from:
- s += ' (from %s)' % comes_from
- return s
-
- def __repr__(self):
- return '<%s object: %s editable=%r>' % (
- self.__class__.__name__, str(self), self.editable)
-
- def populate_link(self, finder, upgrade, require_hashes):
- """Ensure that if a link can be found for this, that it is found.
-
-        Note that self.link may still be None - if upgrade is False and the
- requirement is already installed.
-
- If require_hashes is True, don't use the wheel cache, because cached
- wheels, always built locally, have different hashes than the files
- downloaded from the index server and thus throw false hash mismatches.
-        Furthermore, cached wheels at present have nondeterministic contents due
- to file modification times.
- """
- if self.link is None:
- self.link = finder.find_requirement(self, upgrade)
- if self._wheel_cache is not None and not require_hashes:
- old_link = self.link
- self.link = self._wheel_cache.get(self.link, self.name)
- if old_link != self.link:
- logger.debug('Using cached wheel link: %s', self.link)
-
- @property
- def specifier(self):
- return self.req.specifier
-
- @property
- def is_pinned(self):
- """Return whether I am pinned to an exact version.
-
- For example, some-package==1.2 is pinned; some-package>1.2 is not.
- """
- specifiers = self.specifier
- return (len(specifiers) == 1 and
- next(iter(specifiers)).operator in {'==', '==='})
-
- def from_path(self):
- if self.req is None:
- return None
- s = str(self.req)
- if self.comes_from:
- if isinstance(self.comes_from, six.string_types):
- comes_from = self.comes_from
- else:
- comes_from = self.comes_from.from_path()
- if comes_from:
- s += '->' + comes_from
- return s
-
- def build_location(self, build_dir):
- assert build_dir is not None
- if self._temp_build_dir.path is not None:
- return self._temp_build_dir.path
- if self.req is None:
- # for requirement via a path to a directory: the name of the
- # package is not available yet so we create a temp directory
-            # Once run_egg_info has run, we'll be able
- # to fix it via _correct_build_location
- # Some systems have /tmp as a symlink which confuses custom
- # builds (such as numpy). Thus, we ensure that the real path
- # is returned.
- self._temp_build_dir.create()
- self._ideal_build_dir = build_dir
-
- return self._temp_build_dir.path
- if self.editable:
- name = self.name.lower()
- else:
- name = self.name
- # FIXME: Is there a better place to create the build_dir? (hg and bzr
- # need this)
- if not os.path.exists(build_dir):
- logger.debug('Creating directory %s', build_dir)
- _make_build_dir(build_dir)
- return os.path.join(build_dir, name)
-
- def _correct_build_location(self):
- """Move self._temp_build_dir to self._ideal_build_dir/self.req.name
-
- For some requirements (e.g. a path to a directory), the name of the
- package is not available until we run egg_info, so the build_location
- will return a temporary directory and store the _ideal_build_dir.
-
- This is only called by self.egg_info_path to fix the temporary build
- directory.
- """
- if self.source_dir is not None:
- return
- assert self.req is not None
- assert self._temp_build_dir.path
- assert self._ideal_build_dir.path
- old_location = self._temp_build_dir.path
- self._temp_build_dir.path = None
-
- new_location = self.build_location(self._ideal_build_dir)
- if os.path.exists(new_location):
- raise InstallationError(
- 'A package already exists in %s; please remove it to continue'
- % display_path(new_location))
- logger.debug(
- 'Moving package %s from %s to new location %s',
- self, display_path(old_location), display_path(new_location),
- )
- shutil.move(old_location, new_location)
- self._temp_build_dir.path = new_location
- self._ideal_build_dir = None
- self.source_dir = os.path.normpath(os.path.abspath(new_location))
- self._egg_info_path = None
-
- @property
- def name(self):
- if self.req is None:
- return None
- return native_str(pkg_resources.safe_name(self.req.name))
-
- @property
- def setup_py_dir(self):
- return os.path.join(
- self.source_dir,
- self.link and self.link.subdirectory_fragment or '')
-
- @property
- def setup_py(self):
- assert self.source_dir, "No source dir for %s" % self
-
- setup_py = os.path.join(self.setup_py_dir, 'setup.py')
-
- # Python2 __file__ should not be unicode
- if six.PY2 and isinstance(setup_py, six.text_type):
- setup_py = setup_py.encode(sys.getfilesystemencoding())
-
- return setup_py
-
- @property
- def pyproject_toml(self):
- assert self.source_dir, "No source dir for %s" % self
-
- pp_toml = os.path.join(self.setup_py_dir, 'pyproject.toml')
-
- # Python2 __file__ should not be unicode
- if six.PY2 and isinstance(pp_toml, six.text_type):
- pp_toml = pp_toml.encode(sys.getfilesystemencoding())
-
- return pp_toml
-
- def get_pep_518_info(self):
- """Get a list of the packages required to build the project, if any,
- and a flag indicating whether pyproject.toml is present, indicating
- that the build should be isolated.
-
- Build requirements can be specified in a pyproject.toml, as described
- in PEP 518. If this file exists but doesn't specify build
- requirements, pip will default to installing setuptools and wheel.
- """
- if os.path.isfile(self.pyproject_toml):
- with open(self.pyproject_toml) as f:
- pp_toml = pytoml.load(f)
- build_sys = pp_toml.get('build-system', {})
- return (build_sys.get('requires', ['setuptools', 'wheel']), True)
- return (['setuptools', 'wheel'], False)
-
- def run_egg_info(self):
- assert self.source_dir
- if self.name:
- logger.debug(
- 'Running setup.py (path:%s) egg_info for package %s',
- self.setup_py, self.name,
- )
- else:
- logger.debug(
- 'Running setup.py (path:%s) egg_info for package from %s',
- self.setup_py, self.link,
- )
-
- with indent_log():
- script = SETUPTOOLS_SHIM % self.setup_py
- base_cmd = [sys.executable, '-c', script]
- if self.isolated:
- base_cmd += ["--no-user-cfg"]
- egg_info_cmd = base_cmd + ['egg_info']
- # We can't put the .egg-info files at the root, because then the
- # source code will be mistaken for an installed egg, causing
- # problems
- if self.editable:
- egg_base_option = []
- else:
- egg_info_dir = os.path.join(self.setup_py_dir, 'pip-egg-info')
- ensure_dir(egg_info_dir)
- egg_base_option = ['--egg-base', 'pip-egg-info']
- with self.build_env:
- call_subprocess(
- egg_info_cmd + egg_base_option,
- cwd=self.setup_py_dir,
- show_stdout=False,
- command_desc='python setup.py egg_info')
-
- if not self.req:
- if isinstance(parse_version(self.pkg_info()["Version"]), Version):
- op = "=="
- else:
- op = "==="
- self.req = Requirement(
- "".join([
- self.pkg_info()["Name"],
- op,
- self.pkg_info()["Version"],
- ])
- )
- self._correct_build_location()
- else:
- metadata_name = canonicalize_name(self.pkg_info()["Name"])
- if canonicalize_name(self.req.name) != metadata_name:
- logger.warning(
- 'Running setup.py (path:%s) egg_info for package %s '
- 'produced metadata for project name %s. Fix your '
- '#egg=%s fragments.',
- self.setup_py, self.name, metadata_name, self.name
- )
- self.req = Requirement(metadata_name)
-
- def egg_info_data(self, filename):
- if self.satisfied_by is not None:
- if not self.satisfied_by.has_metadata(filename):
- return None
- return self.satisfied_by.get_metadata(filename)
- assert self.source_dir
- filename = self.egg_info_path(filename)
- if not os.path.exists(filename):
- return None
- data = read_text_file(filename)
- return data
-
- def egg_info_path(self, filename):
- if self._egg_info_path is None:
- if self.editable:
- base = self.source_dir
- else:
- base = os.path.join(self.setup_py_dir, 'pip-egg-info')
- filenames = os.listdir(base)
- if self.editable:
- filenames = []
- for root, dirs, files in os.walk(base):
- for dir in vcs.dirnames:
- if dir in dirs:
- dirs.remove(dir)
- # Iterate over a copy of ``dirs``, since mutating
- # a list while iterating over it can cause trouble.
- # (See https://github.com/pypa/pip/pull/462.)
- for dir in list(dirs):
- # Don't search in anything that looks like a virtualenv
- # environment
- if (
- os.path.lexists(
- os.path.join(root, dir, 'bin', 'python')
- ) or
- os.path.exists(
- os.path.join(
- root, dir, 'Scripts', 'Python.exe'
- )
- )):
- dirs.remove(dir)
- # Also don't search through tests
- elif dir == 'test' or dir == 'tests':
- dirs.remove(dir)
- filenames.extend([os.path.join(root, dir)
- for dir in dirs])
- filenames = [f for f in filenames if f.endswith('.egg-info')]
-
- if not filenames:
- raise InstallationError(
- 'No files/directories in %s (from %s)' % (base, filename)
- )
- assert filenames, \
- "No files/directories in %s (from %s)" % (base, filename)
-
- # if we have more than one match, we pick the toplevel one. This
- # can easily be the case if there is a dist folder which contains
- # an extracted tarball for testing purposes.
- if len(filenames) > 1:
- filenames.sort(
- key=lambda x: x.count(os.path.sep) +
- (os.path.altsep and x.count(os.path.altsep) or 0)
- )
- self._egg_info_path = os.path.join(base, filenames[0])
- return os.path.join(self._egg_info_path, filename)
-
- def pkg_info(self):
- p = FeedParser()
- data = self.egg_info_data('PKG-INFO')
- if not data:
- logger.warning(
- 'No PKG-INFO file found in %s',
- display_path(self.egg_info_path('PKG-INFO')),
- )
- p.feed(data or '')
- return p.close()
-
- _requirements_section_re = re.compile(r'\[(.*?)\]')
-
- @property
- def installed_version(self):
- return get_installed_version(self.name)
-
- def assert_source_matches_version(self):
- assert self.source_dir
- version = self.pkg_info()['version']
- if self.req.specifier and version not in self.req.specifier:
- logger.warning(
- 'Requested %s, but installing version %s',
- self,
- version,
- )
- else:
- logger.debug(
- 'Source in %s has version %s, which satisfies requirement %s',
- display_path(self.source_dir),
- version,
- self,
- )
-
- def update_editable(self, obtain=True):
- if not self.link:
- logger.debug(
- "Cannot update repository at %s; repository location is "
- "unknown",
- self.source_dir,
- )
- return
- assert self.editable
- assert self.source_dir
- if self.link.scheme == 'file':
- # Static paths don't get updated
- return
- assert '+' in self.link.url, "bad url: %r" % self.link.url
- if not self.update:
- return
- vc_type, url = self.link.url.split('+', 1)
- backend = vcs.get_backend(vc_type)
- if backend:
- vcs_backend = backend(self.link.url)
- if obtain:
- vcs_backend.obtain(self.source_dir)
- else:
- vcs_backend.export(self.source_dir)
- else:
- assert 0, (
- 'Unexpected version control type (in %s): %s'
- % (self.link, vc_type))
-
- def uninstall(self, auto_confirm=False, verbose=False,
- use_user_site=False):
- """
- Uninstall the distribution currently satisfying this requirement.
-
- Prompts before removing or modifying files unless
- ``auto_confirm`` is True.
-
- Refuses to delete or modify files outside of ``sys.prefix`` -
- thus uninstallation within a virtual environment can only
- modify that virtual environment, even if the virtualenv is
- linked to global site-packages.
-
- """
- if not self.check_if_exists(use_user_site):
- logger.warning("Skipping %s as it is not installed.", self.name)
- return
- dist = self.satisfied_by or self.conflicts_with
-
- uninstalled_pathset = UninstallPathSet.from_dist(dist)
- uninstalled_pathset.remove(auto_confirm, verbose)
- return uninstalled_pathset
-
- def archive(self, build_dir):
- assert self.source_dir
- create_archive = True
- archive_name = '%s-%s.zip' % (self.name, self.pkg_info()["version"])
- archive_path = os.path.join(build_dir, archive_name)
- if os.path.exists(archive_path):
- response = ask_path_exists(
- 'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort ' %
- display_path(archive_path), ('i', 'w', 'b', 'a'))
- if response == 'i':
- create_archive = False
- elif response == 'w':
- logger.warning('Deleting %s', display_path(archive_path))
- os.remove(archive_path)
- elif response == 'b':
- dest_file = backup_dir(archive_path)
- logger.warning(
- 'Backing up %s to %s',
- display_path(archive_path),
- display_path(dest_file),
- )
- shutil.move(archive_path, dest_file)
- elif response == 'a':
- sys.exit(-1)
- if create_archive:
- zip = zipfile.ZipFile(
- archive_path, 'w', zipfile.ZIP_DEFLATED,
- allowZip64=True
- )
- dir = os.path.normcase(os.path.abspath(self.setup_py_dir))
- for dirpath, dirnames, filenames in os.walk(dir):
- if 'pip-egg-info' in dirnames:
- dirnames.remove('pip-egg-info')
- for dirname in dirnames:
- dirname = os.path.join(dirpath, dirname)
- name = self._clean_zip_name(dirname, dir)
- zipdir = zipfile.ZipInfo(self.name + '/' + name + '/')
- zipdir.external_attr = 0x1ED << 16 # 0o755
- zip.writestr(zipdir, '')
- for filename in filenames:
- if filename == PIP_DELETE_MARKER_FILENAME:
- continue
- filename = os.path.join(dirpath, filename)
- name = self._clean_zip_name(filename, dir)
- zip.write(filename, self.name + '/' + name)
- zip.close()
- logger.info('Saved %s', display_path(archive_path))
-
- def _clean_zip_name(self, name, prefix):
- assert name.startswith(prefix + os.path.sep), (
- "name %r doesn't start with prefix %r" % (name, prefix)
- )
- name = name[len(prefix) + 1:]
- name = name.replace(os.path.sep, '/')
- return name
-
- def match_markers(self, extras_requested=None):
- if not extras_requested:
- # Provide an extra to safely evaluate the markers
- # without matching any extra
- extras_requested = ('',)
- if self.markers is not None:
- return any(
- self.markers.evaluate({'extra': extra})
- for extra in extras_requested)
- else:
- return True
-
- def install(self, install_options, global_options=None, root=None,
- home=None, prefix=None, warn_script_location=True,
- use_user_site=False, pycompile=True):
- global_options = global_options if global_options is not None else []
- if self.editable:
- self.install_editable(
- install_options, global_options, prefix=prefix,
- )
- return
- if self.is_wheel:
- version = wheel.wheel_version(self.source_dir)
- wheel.check_compatibility(version, self.name)
-
- self.move_wheel_files(
- self.source_dir, root=root, prefix=prefix, home=home,
- warn_script_location=warn_script_location,
- use_user_site=use_user_site, pycompile=pycompile,
- )
- self.install_succeeded = True
- return
-
- # Extend the list of global and install options passed on to
- # the setup.py call with the ones from the requirements file.
- # Options specified in requirements file override those
- # specified on the command line, since the last option given
- # to setup.py is the one that is used.
- global_options = list(global_options) + \
- self.options.get('global_options', [])
- install_options = list(install_options) + \
- self.options.get('install_options', [])
-
- if self.isolated:
- global_options = global_options + ["--no-user-cfg"]
-
- with TempDirectory(kind="record") as temp_dir:
- record_filename = os.path.join(temp_dir.path, 'install-record.txt')
- install_args = self.get_install_args(
- global_options, record_filename, root, prefix, pycompile,
- )
- msg = 'Running setup.py install for %s' % (self.name,)
- with open_spinner(msg) as spinner:
- with indent_log():
- with self.build_env:
- call_subprocess(
- install_args + install_options,
- cwd=self.setup_py_dir,
- show_stdout=False,
- spinner=spinner,
- )
-
- if not os.path.exists(record_filename):
- logger.debug('Record file %s not found', record_filename)
- return
- self.install_succeeded = True
-
- def prepend_root(path):
- if root is None or not os.path.isabs(path):
- return path
- else:
- return change_root(root, path)
-
- with open(record_filename) as f:
- for line in f:
- directory = os.path.dirname(line)
- if directory.endswith('.egg-info'):
- egg_info_dir = prepend_root(directory)
- break
- else:
- logger.warning(
- 'Could not find .egg-info directory in install record'
- ' for %s',
- self,
- )
- # FIXME: put the record somewhere
- # FIXME: should this be an error?
- return
- new_lines = []
- with open(record_filename) as f:
- for line in f:
- filename = line.strip()
- if os.path.isdir(filename):
- filename += os.path.sep
- new_lines.append(
- os.path.relpath(prepend_root(filename), egg_info_dir)
- )
- new_lines.sort()
- ensure_dir(egg_info_dir)
- inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')
- with open(inst_files_path, 'w') as f:
- f.write('\n'.join(new_lines) + '\n')
-
- def ensure_has_source_dir(self, parent_dir):
- """Ensure that a source_dir is set.
-
- This will create a temporary build dir if the name of the requirement
- isn't known yet.
-
- :param parent_dir: The ideal pip parent_dir for the source_dir.
- Generally src_dir for editables and build_dir for sdists.
- :return: self.source_dir
- """
- if self.source_dir is None:
- self.source_dir = self.build_location(parent_dir)
- return self.source_dir
-
- def get_install_args(self, global_options, record_filename, root, prefix,
- pycompile):
- install_args = [sys.executable, "-u"]
- install_args.append('-c')
- install_args.append(SETUPTOOLS_SHIM % self.setup_py)
- install_args += list(global_options) + \
- ['install', '--record', record_filename]
- install_args += ['--single-version-externally-managed']
-
- if root is not None:
- install_args += ['--root', root]
- if prefix is not None:
- install_args += ['--prefix', prefix]
-
- if pycompile:
- install_args += ["--compile"]
- else:
- install_args += ["--no-compile"]
-
- if running_under_virtualenv():
- py_ver_str = 'python' + sysconfig.get_python_version()
- install_args += ['--install-headers',
- os.path.join(sys.prefix, 'include', 'site',
- py_ver_str, self.name)]
-
- return install_args
-
- def remove_temporary_source(self):
- """Remove the source files from this requirement, if they are marked
- for deletion"""
- if self.source_dir and os.path.exists(
- os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME)):
- logger.debug('Removing source in %s', self.source_dir)
- rmtree(self.source_dir)
- self.source_dir = None
- self._temp_build_dir.cleanup()
- self.build_env.cleanup()
-
- def install_editable(self, install_options,
- global_options=(), prefix=None):
- logger.info('Running setup.py develop for %s', self.name)
-
- if self.isolated:
- global_options = list(global_options) + ["--no-user-cfg"]
-
- if prefix:
- prefix_param = ['--prefix={}'.format(prefix)]
- install_options = list(install_options) + prefix_param
-
- with indent_log():
- # FIXME: should we do --install-headers here too?
- with self.build_env:
- call_subprocess(
- [
- sys.executable,
- '-c',
- SETUPTOOLS_SHIM % self.setup_py
- ] +
- list(global_options) +
- ['develop', '--no-deps'] +
- list(install_options),
-
- cwd=self.setup_py_dir,
- show_stdout=False,
- )
-
- self.install_succeeded = True
-
- def check_if_exists(self, use_user_site):
- """Find an installed distribution that satisfies or conflicts
- with this requirement, and set self.satisfied_by or
- self.conflicts_with appropriately.
- """
- if self.req is None:
- return False
- try:
- # get_distribution() will resolve the entire list of requirements
- # anyway, and we've already determined that we need the requirement
- # in question, so strip the marker so that we don't try to
- # evaluate it.
- no_marker = Requirement(str(self.req))
- no_marker.marker = None
- self.satisfied_by = pkg_resources.get_distribution(str(no_marker))
- if self.editable and self.satisfied_by:
- self.conflicts_with = self.satisfied_by
- # when installing editables, nothing pre-existing should ever
- # satisfy
- self.satisfied_by = None
- return True
- except pkg_resources.DistributionNotFound:
- return False
- except pkg_resources.VersionConflict:
- existing_dist = pkg_resources.get_distribution(
- self.req.name
- )
- if use_user_site:
- if dist_in_usersite(existing_dist):
- self.conflicts_with = existing_dist
- elif (running_under_virtualenv() and
- dist_in_site_packages(existing_dist)):
- raise InstallationError(
- "Will not install to the user site because it will "
- "lack sys.path precedence to %s in %s" %
- (existing_dist.project_name, existing_dist.location)
- )
- else:
- self.conflicts_with = existing_dist
- return True
-
- @property
- def is_wheel(self):
- return self.link and self.link.is_wheel
-
- def move_wheel_files(self, wheeldir, root=None, home=None, prefix=None,
- warn_script_location=True, use_user_site=False,
- pycompile=True):
- move_wheel_files(
- self.name, self.req, wheeldir,
- user=use_user_site,
- home=home,
- root=root,
- prefix=prefix,
- pycompile=pycompile,
- isolated=self.isolated,
- warn_script_location=warn_script_location,
- )
-
- def get_dist(self):
- """Return a pkg_resources.Distribution built from self.egg_info_path"""
- egg_info = self.egg_info_path('').rstrip(os.path.sep)
- base_dir = os.path.dirname(egg_info)
- metadata = pkg_resources.PathMetadata(base_dir, egg_info)
- dist_name = os.path.splitext(os.path.basename(egg_info))[0]
- return pkg_resources.Distribution(
- os.path.dirname(egg_info),
- project_name=dist_name,
- metadata=metadata,
- )
-
- @property
- def has_hash_options(self):
- """Return whether any known-good hashes are specified as options.
-
- These activate --require-hashes mode; hashes specified as part of a
- URL do not.
-
- """
- return bool(self.options.get('hashes', {}))
-
- def hashes(self, trust_internet=True):
- """Return a hash-comparer that considers my option- and URL-based
- hashes to be known-good.
-
- Hashes in URLs--ones embedded in the requirements file, not ones
- downloaded from an index server--are almost peers with ones from
- flags. They satisfy --require-hashes (whether it was implicitly or
- explicitly activated) but do not activate it. md5 and sha224 are not
- allowed in flags, which should nudge people toward good algos. We
- always OR all hashes together, even ones from URLs.
-
- :param trust_internet: Whether to trust URL-based (#md5=...) hashes
- downloaded from the internet, as by populate_link()
-
- """
- good_hashes = self.options.get('hashes', {}).copy()
- link = self.link if trust_internet else self.original_link
- if link and link.hash:
- good_hashes.setdefault(link.hash_name, []).append(link.hash)
- return Hashes(good_hashes)
-
-
-def _strip_postfix(req):
- """
-    Strip req postfix (-dev, -0.2, etc.)
- """
- # FIXME: use package_to_requirement?
- match = re.search(r'^(.*?)(?:-dev|-\d.*)$', req)
- if match:
- # Strip off -dev, -0.2, etc.
- warnings.warn(
- "#egg cleanup for editable urls will be dropped in the future",
- RemovedInPip11Warning,
- )
- req = match.group(1)
- return req
-
-
-def parse_editable(editable_req):
- """Parses an editable requirement into:
- - a requirement name
-        - a URL
- - extras
- - editable options
- Accepted requirements:
- svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
- .[some_extra]
- """
-
- from pip._internal.index import Link
-
- url = editable_req
-
- # If a file path is specified with extras, strip off the extras.
- url_no_extras, extras = _strip_extras(url)
-
- if os.path.isdir(url_no_extras):
- if not os.path.exists(os.path.join(url_no_extras, 'setup.py')):
- raise InstallationError(
- "Directory %r is not installable. File 'setup.py' not found." %
- url_no_extras
- )
- # Treating it as code that has already been checked out
- url_no_extras = path_to_url(url_no_extras)
-
- if url_no_extras.lower().startswith('file:'):
- package_name = Link(url_no_extras).egg_fragment
- if extras:
- return (
- package_name,
- url_no_extras,
- Requirement("placeholder" + extras.lower()).extras,
- )
- else:
- return package_name, url_no_extras, None
-
- for version_control in vcs:
- if url.lower().startswith('%s:' % version_control):
- url = '%s+%s' % (version_control, url)
- break
-
- if '+' not in url:
- raise InstallationError(
- '%s should either be a path to a local project or a VCS url '
- 'beginning with svn+, git+, hg+, or bzr+' %
- editable_req
- )
-
- vc_type = url.split('+', 1)[0].lower()
-
- if not vcs.get_backend(vc_type):
- error_message = 'For --editable=%s only ' % editable_req + \
- ', '.join([backend.name + '+URL' for backend in vcs.backends]) + \
- ' is currently supported'
- raise InstallationError(error_message)
-
- package_name = Link(url).egg_fragment
- if not package_name:
- raise InstallationError(
- "Could not detect requirement name for '%s', please specify one "
- "with #egg=your_package_name" % editable_req
- )
- return _strip_postfix(package_name), url, None
-
-
-def deduce_helpful_msg(req):
- """Returns helpful msg in case requirements file does not exist,
- or cannot be parsed.
-
- :params req: Requirements file path
- """
- msg = ""
- if os.path.exists(req):
- msg = " It does exist."
- # Try to parse and check if it is a requirements file.
- try:
- with open(req, 'r') as fp:
- # parse first line only
- next(parse_requirements(fp.read()))
- msg += " The argument you provided " + \
- "(%s) appears to be a" % (req) + \
- " requirements file. If that is the" + \
- " case, use the '-r' flag to install" + \
- " the packages specified within it."
- except RequirementParseError:
-            logger.debug(
-                "Cannot parse '%s' as requirements file", req, exc_info=1,
-            )
- else:
- msg += " File '%s' does not exist." % (req)
- return msg
+from __future__ import absolute_import
+
+import logging
+import os
+import re
+import shutil
+import sys
+import sysconfig
+import traceback
+import warnings
+import zipfile
+from distutils.util import change_root
+from email.parser import FeedParser # type: ignore
+
+from pip._vendor import pkg_resources, pytoml, six
+from pip._vendor.packaging import specifiers
+from pip._vendor.packaging.markers import Marker
+from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.packaging.version import parse as parse_version
+from pip._vendor.packaging.version import Version
+from pip._vendor.pkg_resources import RequirementParseError, parse_requirements
+
+from pip._internal import wheel
+from pip._internal.build_env import BuildEnvironment
+from pip._internal.compat import native_str
+from pip._internal.download import (
+ is_archive_file, is_url, path_to_url, url_to_path,
+)
+from pip._internal.exceptions import InstallationError, UninstallationError
+from pip._internal.locations import (
+ PIP_DELETE_MARKER_FILENAME, running_under_virtualenv,
+)
+from pip._internal.req.req_uninstall import UninstallPathSet
+from pip._internal.utils.deprecation import RemovedInPip11Warning
+from pip._internal.utils.hashes import Hashes
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import (
+ _make_build_dir, ask_path_exists, backup_dir, call_subprocess,
+ display_path, dist_in_site_packages, dist_in_usersite, ensure_dir,
+ get_installed_version, is_installable_dir, read_text_file, rmtree,
+)
+from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.ui import open_spinner
+from pip._internal.vcs import vcs
+from pip._internal.wheel import Wheel, move_wheel_files
+
+logger = logging.getLogger(__name__)
+
+operators = specifiers.Specifier._operators.keys()
+
+
+def _strip_extras(path):
+ m = re.match(r'^(.+)(\[[^\]]+\])$', path)
+ extras = None
+ if m:
+ path_no_extras = m.group(1)
+ extras = m.group(2)
+ else:
+ path_no_extras = path
+
+ return path_no_extras, extras
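+
+# For example: _strip_extras('pkg[security,tests]') returns
+# ('pkg', '[security,tests]'); a path without extras comes back unchanged,
+# with extras=None.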
+
+
+class InstallRequirement(object):
+ """
+    Represents something that may be installed later on, may have information
+    about where to fetch the relevant requirement, and also contains logic for
+    installing said requirement.
+ """
+
+ def __init__(self, req, comes_from, source_dir=None, editable=False,
+ link=None, update=True, markers=None,
+ isolated=False, options=None, wheel_cache=None,
+ constraint=False, extras=()):
+ assert req is None or isinstance(req, Requirement), req
+ self.req = req
+ self.comes_from = comes_from
+ self.constraint = constraint
+ if source_dir is not None:
+ self.source_dir = os.path.normpath(os.path.abspath(source_dir))
+ else:
+ self.source_dir = None
+ self.editable = editable
+
+ self._wheel_cache = wheel_cache
+ if link is not None:
+ self.link = self.original_link = link
+ else:
+ from pip._internal.index import Link
+ self.link = self.original_link = req and req.url and Link(req.url)
+
+ if extras:
+ self.extras = extras
+ elif req:
+ self.extras = {
+ pkg_resources.safe_extra(extra) for extra in req.extras
+ }
+ else:
+ self.extras = set()
+ if markers is not None:
+ self.markers = markers
+ else:
+ self.markers = req and req.marker
+ self._egg_info_path = None
+ # This holds the pkg_resources.Distribution object if this requirement
+ # is already available:
+ self.satisfied_by = None
+        # This holds the pkg_resources.Distribution object if this requirement
+ # conflicts with another installed distribution:
+ self.conflicts_with = None
+ # Temporary build location
+ self._temp_build_dir = TempDirectory(kind="req-build")
+ # Used to store the global directory where the _temp_build_dir should
+ # have been created. Cf _correct_build_location method.
+ self._ideal_build_dir = None
+ # True if the editable should be updated:
+ self.update = update
+ # Set to True after successful installation
+ self.install_succeeded = None
+ # UninstallPathSet of uninstalled distribution (for possible rollback)
+ self.uninstalled_pathset = None
+ self.options = options if options else {}
+ # Set to True after successful preparation of this requirement
+ self.prepared = False
+ self.is_direct = False
+
+ self.isolated = isolated
+ self.build_env = BuildEnvironment(no_clean=True)
+
+ @classmethod
+ def from_editable(cls, editable_req, comes_from=None, isolated=False,
+ options=None, wheel_cache=None, constraint=False):
+ from pip._internal.index import Link
+
+ name, url, extras_override = parse_editable(editable_req)
+ if url.startswith('file:'):
+ source_dir = url_to_path(url)
+ else:
+ source_dir = None
+
+ if name is not None:
+ try:
+ req = Requirement(name)
+ except InvalidRequirement:
+ raise InstallationError("Invalid requirement: '%s'" % name)
+ else:
+ req = None
+ return cls(
+ req, comes_from, source_dir=source_dir,
+ editable=True,
+ link=Link(url),
+ constraint=constraint,
+ isolated=isolated,
+ options=options if options else {},
+ wheel_cache=wheel_cache,
+ extras=extras_override or (),
+ )
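+        # For example (hypothetical URL):
+        #     InstallRequirement.from_editable(
+        #         'git+https://example.com/repo.git#egg=mypkg')
+        # yields an editable requirement named 'mypkg' whose link carries the
+        # VCS URL.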
+
+ @classmethod
+ def from_req(cls, req, comes_from=None, isolated=False, wheel_cache=None):
+ try:
+ req = Requirement(req)
+ except InvalidRequirement:
+ raise InstallationError("Invalid requirement: '%s'" % req)
+ if req.url:
+ raise InstallationError(
+            "Direct url requirements (like %s) are not allowed for "
+ "dependencies" % req
+ )
+ return cls(req, comes_from, isolated=isolated, wheel_cache=wheel_cache)
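+        # For example: from_req('requests>=2.0') builds a plain dependency
+        # requirement, whereas a direct reference such as
+        # 'mypkg @ https://example.com/mypkg.whl' raises InstallationError.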
+
+ @classmethod
+ def from_line(
+ cls, name, comes_from=None, isolated=False, options=None,
+ wheel_cache=None, constraint=False):
+ """Creates an InstallRequirement from a name, which might be a
+ requirement, directory containing 'setup.py', filename, or URL.
+ """
+ from pip._internal.index import Link
+
+ if is_url(name):
+ marker_sep = '; '
+ else:
+ marker_sep = ';'
+ if marker_sep in name:
+ name, markers = name.split(marker_sep, 1)
+ markers = markers.strip()
+ if not markers:
+ markers = None
+ else:
+ markers = Marker(markers)
+ else:
+ markers = None
+ name = name.strip()
+ req = None
+ path = os.path.normpath(os.path.abspath(name))
+ link = None
+ extras = None
+
+ if is_url(name):
+ link = Link(name)
+ else:
+ p, extras = _strip_extras(path)
+ looks_like_dir = os.path.isdir(p) and (
+ os.path.sep in name or
+ (os.path.altsep is not None and os.path.altsep in name) or
+ name.startswith('.')
+ )
+ if looks_like_dir:
+ if not is_installable_dir(p):
+ raise InstallationError(
+ "Directory %r is not installable. File 'setup.py' "
+ "not found." % name
+ )
+ link = Link(path_to_url(p))
+ elif is_archive_file(p):
+ if not os.path.isfile(p):
+ logger.warning(
+ 'Requirement %r looks like a filename, but the '
+ 'file does not exist',
+ name
+ )
+ link = Link(path_to_url(p))
+
+ # it's a local file, dir, or url
+ if link:
+ # Handle relative file URLs
+ if link.scheme == 'file' and re.search(r'\.\./', link.url):
+ link = Link(
+ path_to_url(os.path.normpath(os.path.abspath(link.path))))
+ # wheel file
+ if link.is_wheel:
+ wheel = Wheel(link.filename) # can raise InvalidWheelFilename
+ req = "%s==%s" % (wheel.name, wheel.version)
+ else:
+ # set the req to the egg fragment. when it's not there, this
+ # will become an 'unnamed' requirement
+ req = link.egg_fragment
+
+ # a requirement specifier
+ else:
+ req = name
+
+ if extras:
+ extras = Requirement("placeholder" + extras.lower()).extras
+ else:
+ extras = ()
+ if req is not None:
+ try:
+ req = Requirement(req)
+ except InvalidRequirement:
+ if os.path.sep in req:
+ add_msg = "It looks like a path."
+ add_msg += deduce_helpful_msg(req)
+ elif '=' in req and not any(op in req for op in operators):
+ add_msg = "= is not a valid operator. Did you mean == ?"
+ else:
+ add_msg = traceback.format_exc()
+ raise InstallationError(
+ "Invalid requirement: '%s'\n%s" % (req, add_msg))
+ return cls(
+ req, comes_from, link=link, markers=markers,
+ isolated=isolated,
+ options=options if options else {},
+ wheel_cache=wheel_cache,
+ constraint=constraint,
+ extras=extras,
+ )
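+        # A sketch of the inputs this accepts (hypothetical names): a plain
+        # specifier 'requests>=2.0', a local directory './mypkg' containing a
+        # setup.py, an archive './mypkg-1.0.tar.gz', and a URL
+        # 'https://example.com/mypkg-1.0.tar.gz#egg=mypkg'.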
+
+ def __str__(self):
+ if self.req:
+ s = str(self.req)
+ if self.link:
+ s += ' from %s' % self.link.url
+ else:
+ s = self.link.url if self.link else None
+ if self.satisfied_by is not None:
+ s += ' in %s' % display_path(self.satisfied_by.location)
+ if self.comes_from:
+ if isinstance(self.comes_from, six.string_types):
+ comes_from = self.comes_from
+ else:
+ comes_from = self.comes_from.from_path()
+ if comes_from:
+ s += ' (from %s)' % comes_from
+ return s
+
+ def __repr__(self):
+ return '<%s object: %s editable=%r>' % (
+ self.__class__.__name__, str(self), self.editable)
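+        # For example (illustrative): a requirement parsed from a requirements
+        # file might render via str() as
+        # 'INITools==0.2 (from -r requirements.txt (line 3))'.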
+
+ def populate_link(self, finder, upgrade, require_hashes):
+ """Ensure that if a link can be found for this, that it is found.
+
+        Note that self.link may still be None - if upgrade is False and the
+ requirement is already installed.
+
+ If require_hashes is True, don't use the wheel cache, because cached
+ wheels, always built locally, have different hashes than the files
+ downloaded from the index server and thus throw false hash mismatches.
+        Furthermore, cached wheels at present have nondeterministic contents due
+ to file modification times.
+ """
+ if self.link is None:
+ self.link = finder.find_requirement(self, upgrade)
+ if self._wheel_cache is not None and not require_hashes:
+ old_link = self.link
+ self.link = self._wheel_cache.get(self.link, self.name)
+ if old_link != self.link:
+ logger.debug('Using cached wheel link: %s', self.link)
+
+ @property
+ def specifier(self):
+ return self.req.specifier
+
+ @property
+ def is_pinned(self):
+ """Return whether I am pinned to an exact version.
+
+ For example, some-package==1.2 is pinned; some-package>1.2 is not.
+ """
+ specifiers = self.specifier
+ return (len(specifiers) == 1 and
+ next(iter(specifiers)).operator in {'==', '==='})
+
+ def from_path(self):
+ if self.req is None:
+ return None
+ s = str(self.req)
+ if self.comes_from:
+ if isinstance(self.comes_from, six.string_types):
+ comes_from = self.comes_from
+ else:
+ comes_from = self.comes_from.from_path()
+ if comes_from:
+ s += '->' + comes_from
+ return s
+
+ def build_location(self, build_dir):
+ assert build_dir is not None
+ if self._temp_build_dir.path is not None:
+ return self._temp_build_dir.path
+ if self.req is None:
+ # for requirement via a path to a directory: the name of the
+ # package is not available yet so we create a temp directory
+            # Once run_egg_info has run, we'll be able
+ # to fix it via _correct_build_location
+ # Some systems have /tmp as a symlink which confuses custom
+ # builds (such as numpy). Thus, we ensure that the real path
+ # is returned.
+ self._temp_build_dir.create()
+ self._ideal_build_dir = build_dir
+
+ return self._temp_build_dir.path
+ if self.editable:
+ name = self.name.lower()
+ else:
+ name = self.name
+ # FIXME: Is there a better place to create the build_dir? (hg and bzr
+ # need this)
+ if not os.path.exists(build_dir):
+ logger.debug('Creating directory %s', build_dir)
+ _make_build_dir(build_dir)
+ return os.path.join(build_dir, name)
+
+ def _correct_build_location(self):
+ """Move self._temp_build_dir to self._ideal_build_dir/self.req.name
+
+ For some requirements (e.g. a path to a directory), the name of the
+ package is not available until we run egg_info, so the build_location
+ will return a temporary directory and store the _ideal_build_dir.
+
+ This is only called by self.egg_info_path to fix the temporary build
+ directory.
+ """
+ if self.source_dir is not None:
+ return
+ assert self.req is not None
+ assert self._temp_build_dir.path
+ assert self._ideal_build_dir.path
+ old_location = self._temp_build_dir.path
+ self._temp_build_dir.path = None
+
+ new_location = self.build_location(self._ideal_build_dir)
+ if os.path.exists(new_location):
+ raise InstallationError(
+ 'A package already exists in %s; please remove it to continue'
+ % display_path(new_location))
+ logger.debug(
+ 'Moving package %s from %s to new location %s',
+ self, display_path(old_location), display_path(new_location),
+ )
+ shutil.move(old_location, new_location)
+ self._temp_build_dir.path = new_location
+ self._ideal_build_dir = None
+ self.source_dir = os.path.normpath(os.path.abspath(new_location))
+ self._egg_info_path = None
+
+ @property
+ def name(self):
+ if self.req is None:
+ return None
+ return native_str(pkg_resources.safe_name(self.req.name))
+
+ @property
+ def setup_py_dir(self):
+ return os.path.join(
+ self.source_dir,
+ self.link and self.link.subdirectory_fragment or '')
+
+ @property
+ def setup_py(self):
+ assert self.source_dir, "No source dir for %s" % self
+
+ setup_py = os.path.join(self.setup_py_dir, 'setup.py')
+
+ # Python2 __file__ should not be unicode
+ if six.PY2 and isinstance(setup_py, six.text_type):
+ setup_py = setup_py.encode(sys.getfilesystemencoding())
+
+ return setup_py
+
+ @property
+ def pyproject_toml(self):
+ assert self.source_dir, "No source dir for %s" % self
+
+ pp_toml = os.path.join(self.setup_py_dir, 'pyproject.toml')
+
+ # Python2 __file__ should not be unicode
+ if six.PY2 and isinstance(pp_toml, six.text_type):
+ pp_toml = pp_toml.encode(sys.getfilesystemencoding())
+
+ return pp_toml
+
+ def get_pep_518_info(self):
+ """Get a list of the packages required to build the project, if any,
+ and a flag indicating whether pyproject.toml is present, indicating
+ that the build should be isolated.
+
+ Build requirements can be specified in a pyproject.toml, as described
+ in PEP 518. If this file exists but doesn't specify build
+ requirements, pip will default to installing setuptools and wheel.
+ """
+ if os.path.isfile(self.pyproject_toml):
+ with open(self.pyproject_toml) as f:
+ pp_toml = pytoml.load(f)
+ build_sys = pp_toml.get('build-system', {})
+ return (build_sys.get('requires', ['setuptools', 'wheel']), True)
+ return (['setuptools', 'wheel'], False)
+
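A standalone sketch of the PEP 518 lookup above, assuming the `pytoml`
package (the TOML parser vendored by pip 10) is importable:

    import os
    import pytoml

    def get_build_requires(project_dir):
        path = os.path.join(project_dir, 'pyproject.toml')
        if os.path.isfile(path):
            with open(path) as f:
                build_sys = pytoml.load(f).get('build-system', {})
            # A pyproject.toml without 'requires' falls back to the
            # legacy default, but still triggers build isolation.
            return build_sys.get('requires', ['setuptools', 'wheel']), True
        return ['setuptools', 'wheel'], False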
+ def run_egg_info(self):
+ assert self.source_dir
+ if self.name:
+ logger.debug(
+ 'Running setup.py (path:%s) egg_info for package %s',
+ self.setup_py, self.name,
+ )
+ else:
+ logger.debug(
+ 'Running setup.py (path:%s) egg_info for package from %s',
+ self.setup_py, self.link,
+ )
+
+ with indent_log():
+ script = SETUPTOOLS_SHIM % self.setup_py
+ base_cmd = [sys.executable, '-c', script]
+ if self.isolated:
+ base_cmd += ["--no-user-cfg"]
+ egg_info_cmd = base_cmd + ['egg_info']
+ # We can't put the .egg-info files at the root, because then the
+ # source code will be mistaken for an installed egg, causing
+ # problems
+ if self.editable:
+ egg_base_option = []
+ else:
+ egg_info_dir = os.path.join(self.setup_py_dir, 'pip-egg-info')
+ ensure_dir(egg_info_dir)
+ egg_base_option = ['--egg-base', 'pip-egg-info']
+ with self.build_env:
+ call_subprocess(
+ egg_info_cmd + egg_base_option,
+ cwd=self.setup_py_dir,
+ show_stdout=False,
+ command_desc='python setup.py egg_info')
+
+ if not self.req:
+ if isinstance(parse_version(self.pkg_info()["Version"]), Version):
+ op = "=="
+ else:
+ op = "==="
+ self.req = Requirement(
+ "".join([
+ self.pkg_info()["Name"],
+ op,
+ self.pkg_info()["Version"],
+ ])
+ )
+ self._correct_build_location()
+ else:
+ metadata_name = canonicalize_name(self.pkg_info()["Name"])
+ if canonicalize_name(self.req.name) != metadata_name:
+ logger.warning(
+ 'Running setup.py (path:%s) egg_info for package %s '
+ 'produced metadata for project name %s. Fix your '
+ '#egg=%s fragments.',
+ self.setup_py, self.name, metadata_name, self.name
+ )
+ self.req = Requirement(metadata_name)
+
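The '==' vs '===' choice above hinges on whether the version string parses
as a PEP 440 version; a sketch using `packaging` (the helper name is
hypothetical):

    from packaging.version import InvalidVersion, Version

    def pin_operator(version_str):
        try:
            Version(version_str)
            return '=='   # valid PEP 440 version
        except InvalidVersion:
            return '==='  # legacy version: only identity matching works

    print('pkg' + pin_operator('1.0.3') + '1.0.3')    # pkg==1.0.3
    print('pkg' + pin_operator('not-a-version') + 'not-a-version')
    # pkg===not-a-version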
+ def egg_info_data(self, filename):
+ if self.satisfied_by is not None:
+ if not self.satisfied_by.has_metadata(filename):
+ return None
+ return self.satisfied_by.get_metadata(filename)
+ assert self.source_dir
+ filename = self.egg_info_path(filename)
+ if not os.path.exists(filename):
+ return None
+ data = read_text_file(filename)
+ return data
+
+ def egg_info_path(self, filename):
+ if self._egg_info_path is None:
+ if self.editable:
+ base = self.source_dir
+ else:
+ base = os.path.join(self.setup_py_dir, 'pip-egg-info')
+ filenames = os.listdir(base)
+ if self.editable:
+ filenames = []
+ for root, dirs, files in os.walk(base):
+ for dir in vcs.dirnames:
+ if dir in dirs:
+ dirs.remove(dir)
+ # Iterate over a copy of ``dirs``, since mutating
+ # a list while iterating over it can cause trouble.
+ # (See https://github.com/pypa/pip/pull/462.)
+ for dir in list(dirs):
+ # Don't search in anything that looks like a virtualenv
+ # environment
+ if (
+ os.path.lexists(
+ os.path.join(root, dir, 'bin', 'python')
+ ) or
+ os.path.exists(
+ os.path.join(
+ root, dir, 'Scripts', 'Python.exe'
+ )
+ )):
+ dirs.remove(dir)
+ # Also don't search through tests
+ elif dir == 'test' or dir == 'tests':
+ dirs.remove(dir)
+ filenames.extend([os.path.join(root, dir)
+ for dir in dirs])
+ filenames = [f for f in filenames if f.endswith('.egg-info')]
+
+ if not filenames:
+ raise InstallationError(
+ 'No files/directories in %s (from %s)' % (base, filename)
+ )
+
+ # if we have more than one match, we pick the toplevel one. This
+ # can easily be the case if there is a dist folder which contains
+ # an extracted tarball for testing purposes.
+ if len(filenames) > 1:
+ filenames.sort(
+ key=lambda x: x.count(os.path.sep) +
+ (os.path.altsep and x.count(os.path.altsep) or 0)
+ )
+ self._egg_info_path = os.path.join(base, filenames[0])
+ return os.path.join(self._egg_info_path, filename)
+
+ def pkg_info(self):
+ p = FeedParser()
+ data = self.egg_info_data('PKG-INFO')
+ if not data:
+ logger.warning(
+ 'No PKG-INFO file found in %s',
+ display_path(self.egg_info_path('PKG-INFO')),
+ )
+ p.feed(data or '')
+ return p.close()
+
+ _requirements_section_re = re.compile(r'\[(.*?)\]')
+
+ @property
+ def installed_version(self):
+ return get_installed_version(self.name)
+
+ def assert_source_matches_version(self):
+ assert self.source_dir
+        version = self.pkg_info()['Version']
+ if self.req.specifier and version not in self.req.specifier:
+ logger.warning(
+ 'Requested %s, but installing version %s',
+ self,
+ version,
+ )
+ else:
+ logger.debug(
+ 'Source in %s has version %s, which satisfies requirement %s',
+ display_path(self.source_dir),
+ version,
+ self,
+ )
+
+ def update_editable(self, obtain=True):
+ if not self.link:
+ logger.debug(
+ "Cannot update repository at %s; repository location is "
+ "unknown",
+ self.source_dir,
+ )
+ return
+ assert self.editable
+ assert self.source_dir
+ if self.link.scheme == 'file':
+ # Static paths don't get updated
+ return
+ assert '+' in self.link.url, "bad url: %r" % self.link.url
+ if not self.update:
+ return
+ vc_type, url = self.link.url.split('+', 1)
+ backend = vcs.get_backend(vc_type)
+ if backend:
+ vcs_backend = backend(self.link.url)
+ if obtain:
+ vcs_backend.obtain(self.source_dir)
+ else:
+ vcs_backend.export(self.source_dir)
+ else:
+ assert 0, (
+ 'Unexpected version control type (in %s): %s'
+ % (self.link, vc_type))
+
+ def uninstall(self, auto_confirm=False, verbose=False,
+ use_user_site=False):
+ """
+ Uninstall the distribution currently satisfying this requirement.
+
+ Prompts before removing or modifying files unless
+ ``auto_confirm`` is True.
+
+ Refuses to delete or modify files outside of ``sys.prefix`` -
+ thus uninstallation within a virtual environment can only
+ modify that virtual environment, even if the virtualenv is
+ linked to global site-packages.
+
+ """
+ if not self.check_if_exists(use_user_site):
+ logger.warning("Skipping %s as it is not installed.", self.name)
+ return
+ dist = self.satisfied_by or self.conflicts_with
+
+ uninstalled_pathset = UninstallPathSet.from_dist(dist)
+ uninstalled_pathset.remove(auto_confirm, verbose)
+ return uninstalled_pathset
+
+ def archive(self, build_dir):
+ assert self.source_dir
+ create_archive = True
+        archive_name = '%s-%s.zip' % (self.name, self.pkg_info()["Version"])
+ archive_path = os.path.join(build_dir, archive_name)
+ if os.path.exists(archive_path):
+ response = ask_path_exists(
+ 'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort ' %
+ display_path(archive_path), ('i', 'w', 'b', 'a'))
+ if response == 'i':
+ create_archive = False
+ elif response == 'w':
+ logger.warning('Deleting %s', display_path(archive_path))
+ os.remove(archive_path)
+ elif response == 'b':
+ dest_file = backup_dir(archive_path)
+ logger.warning(
+ 'Backing up %s to %s',
+ display_path(archive_path),
+ display_path(dest_file),
+ )
+ shutil.move(archive_path, dest_file)
+ elif response == 'a':
+ sys.exit(-1)
+ if create_archive:
+ zip = zipfile.ZipFile(
+ archive_path, 'w', zipfile.ZIP_DEFLATED,
+ allowZip64=True
+ )
+ dir = os.path.normcase(os.path.abspath(self.setup_py_dir))
+ for dirpath, dirnames, filenames in os.walk(dir):
+ if 'pip-egg-info' in dirnames:
+ dirnames.remove('pip-egg-info')
+ for dirname in dirnames:
+ dirname = os.path.join(dirpath, dirname)
+ name = self._clean_zip_name(dirname, dir)
+ zipdir = zipfile.ZipInfo(self.name + '/' + name + '/')
+ zipdir.external_attr = 0x1ED << 16 # 0o755
+ zip.writestr(zipdir, '')
+ for filename in filenames:
+ if filename == PIP_DELETE_MARKER_FILENAME:
+ continue
+ filename = os.path.join(dirpath, filename)
+ name = self._clean_zip_name(filename, dir)
+ zip.write(filename, self.name + '/' + name)
+ zip.close()
+ logger.info('Saved %s', display_path(archive_path))
+
+ def _clean_zip_name(self, name, prefix):
+ assert name.startswith(prefix + os.path.sep), (
+ "name %r doesn't start with prefix %r" % (name, prefix)
+ )
+ name = name[len(prefix) + 1:]
+ name = name.replace(os.path.sep, '/')
+ return name
+
+ def match_markers(self, extras_requested=None):
+ if not extras_requested:
+ # Provide an extra to safely evaluate the markers
+ # without matching any extra
+ extras_requested = ('',)
+ if self.markers is not None:
+ return any(
+ self.markers.evaluate({'extra': extra})
+ for extra in extras_requested)
+ else:
+ return True
+
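The marker check above can be reproduced with `packaging.markers`; passing
extra='' lets markers such as `extra == "test"` evaluate safely even when
no extras were requested:

    from packaging.markers import Marker

    m = Marker('python_version >= "3" and extra == "test"')
    print(m.evaluate({'extra': ''}))      # False: the extra doesn't match
    print(m.evaluate({'extra': 'test'}))  # True (on Python 3)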
+ def install(self, install_options, global_options=None, root=None,
+ home=None, prefix=None, warn_script_location=True,
+ use_user_site=False, pycompile=True):
+ global_options = global_options if global_options is not None else []
+ if self.editable:
+ self.install_editable(
+ install_options, global_options, prefix=prefix,
+ )
+ return
+ if self.is_wheel:
+ version = wheel.wheel_version(self.source_dir)
+ wheel.check_compatibility(version, self.name)
+
+ self.move_wheel_files(
+ self.source_dir, root=root, prefix=prefix, home=home,
+ warn_script_location=warn_script_location,
+ use_user_site=use_user_site, pycompile=pycompile,
+ )
+ self.install_succeeded = True
+ return
+
+ # Extend the list of global and install options passed on to
+ # the setup.py call with the ones from the requirements file.
+ # Options specified in requirements file override those
+ # specified on the command line, since the last option given
+ # to setup.py is the one that is used.
+ global_options = list(global_options) + \
+ self.options.get('global_options', [])
+ install_options = list(install_options) + \
+ self.options.get('install_options', [])
+
+ if self.isolated:
+ global_options = global_options + ["--no-user-cfg"]
+
+ with TempDirectory(kind="record") as temp_dir:
+ record_filename = os.path.join(temp_dir.path, 'install-record.txt')
+ install_args = self.get_install_args(
+ global_options, record_filename, root, prefix, pycompile,
+ )
+ msg = 'Running setup.py install for %s' % (self.name,)
+ with open_spinner(msg) as spinner:
+ with indent_log():
+ with self.build_env:
+ call_subprocess(
+ install_args + install_options,
+ cwd=self.setup_py_dir,
+ show_stdout=False,
+ spinner=spinner,
+ )
+
+ if not os.path.exists(record_filename):
+ logger.debug('Record file %s not found', record_filename)
+ return
+ self.install_succeeded = True
+
+ def prepend_root(path):
+ if root is None or not os.path.isabs(path):
+ return path
+ else:
+ return change_root(root, path)
+
+ with open(record_filename) as f:
+ for line in f:
+ directory = os.path.dirname(line)
+ if directory.endswith('.egg-info'):
+ egg_info_dir = prepend_root(directory)
+ break
+ else:
+ logger.warning(
+ 'Could not find .egg-info directory in install record'
+ ' for %s',
+ self,
+ )
+ # FIXME: put the record somewhere
+ # FIXME: should this be an error?
+ return
+ new_lines = []
+ with open(record_filename) as f:
+ for line in f:
+ filename = line.strip()
+ if os.path.isdir(filename):
+ filename += os.path.sep
+ new_lines.append(
+ os.path.relpath(prepend_root(filename), egg_info_dir)
+ )
+ new_lines.sort()
+ ensure_dir(egg_info_dir)
+ inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')
+ with open(inst_files_path, 'w') as f:
+ f.write('\n'.join(new_lines) + '\n')
+
+ def ensure_has_source_dir(self, parent_dir):
+ """Ensure that a source_dir is set.
+
+ This will create a temporary build dir if the name of the requirement
+ isn't known yet.
+
+ :param parent_dir: The ideal pip parent_dir for the source_dir.
+ Generally src_dir for editables and build_dir for sdists.
+ :return: self.source_dir
+ """
+ if self.source_dir is None:
+ self.source_dir = self.build_location(parent_dir)
+ return self.source_dir
+
+ def get_install_args(self, global_options, record_filename, root, prefix,
+ pycompile):
+ install_args = [sys.executable, "-u"]
+ install_args.append('-c')
+ install_args.append(SETUPTOOLS_SHIM % self.setup_py)
+ install_args += list(global_options) + \
+ ['install', '--record', record_filename]
+ install_args += ['--single-version-externally-managed']
+
+ if root is not None:
+ install_args += ['--root', root]
+ if prefix is not None:
+ install_args += ['--prefix', prefix]
+
+ if pycompile:
+ install_args += ["--compile"]
+ else:
+ install_args += ["--no-compile"]
+
+ if running_under_virtualenv():
+ py_ver_str = 'python' + sysconfig.get_python_version()
+ install_args += ['--install-headers',
+ os.path.join(sys.prefix, 'include', 'site',
+ py_ver_str, self.name)]
+
+ return install_args
+
+ def remove_temporary_source(self):
+ """Remove the source files from this requirement, if they are marked
+ for deletion"""
+ if self.source_dir and os.path.exists(
+ os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME)):
+ logger.debug('Removing source in %s', self.source_dir)
+ rmtree(self.source_dir)
+ self.source_dir = None
+ self._temp_build_dir.cleanup()
+ self.build_env.cleanup()
+
+ def install_editable(self, install_options,
+ global_options=(), prefix=None):
+ logger.info('Running setup.py develop for %s', self.name)
+
+ if self.isolated:
+ global_options = list(global_options) + ["--no-user-cfg"]
+
+ if prefix:
+ prefix_param = ['--prefix={}'.format(prefix)]
+ install_options = list(install_options) + prefix_param
+
+ with indent_log():
+ # FIXME: should we do --install-headers here too?
+ with self.build_env:
+ call_subprocess(
+ [
+ sys.executable,
+ '-c',
+ SETUPTOOLS_SHIM % self.setup_py
+ ] +
+ list(global_options) +
+ ['develop', '--no-deps'] +
+ list(install_options),
+
+ cwd=self.setup_py_dir,
+ show_stdout=False,
+ )
+
+ self.install_succeeded = True
+
+ def check_if_exists(self, use_user_site):
+ """Find an installed distribution that satisfies or conflicts
+ with this requirement, and set self.satisfied_by or
+ self.conflicts_with appropriately.
+ """
+ if self.req is None:
+ return False
+ try:
+ # get_distribution() will resolve the entire list of requirements
+ # anyway, and we've already determined that we need the requirement
+ # in question, so strip the marker so that we don't try to
+ # evaluate it.
+ no_marker = Requirement(str(self.req))
+ no_marker.marker = None
+ self.satisfied_by = pkg_resources.get_distribution(str(no_marker))
+ if self.editable and self.satisfied_by:
+ self.conflicts_with = self.satisfied_by
+ # when installing editables, nothing pre-existing should ever
+ # satisfy
+ self.satisfied_by = None
+ return True
+ except pkg_resources.DistributionNotFound:
+ return False
+ except pkg_resources.VersionConflict:
+ existing_dist = pkg_resources.get_distribution(
+ self.req.name
+ )
+ if use_user_site:
+ if dist_in_usersite(existing_dist):
+ self.conflicts_with = existing_dist
+ elif (running_under_virtualenv() and
+ dist_in_site_packages(existing_dist)):
+ raise InstallationError(
+ "Will not install to the user site because it will "
+ "lack sys.path precedence to %s in %s" %
+ (existing_dist.project_name, existing_dist.location)
+ )
+ else:
+ self.conflicts_with = existing_dist
+ return True
+
+ @property
+ def is_wheel(self):
+ return self.link and self.link.is_wheel
+
+ def move_wheel_files(self, wheeldir, root=None, home=None, prefix=None,
+ warn_script_location=True, use_user_site=False,
+ pycompile=True):
+ move_wheel_files(
+ self.name, self.req, wheeldir,
+ user=use_user_site,
+ home=home,
+ root=root,
+ prefix=prefix,
+ pycompile=pycompile,
+ isolated=self.isolated,
+ warn_script_location=warn_script_location,
+ )
+
+ def get_dist(self):
+ """Return a pkg_resources.Distribution built from self.egg_info_path"""
+ egg_info = self.egg_info_path('').rstrip(os.path.sep)
+ base_dir = os.path.dirname(egg_info)
+ metadata = pkg_resources.PathMetadata(base_dir, egg_info)
+ dist_name = os.path.splitext(os.path.basename(egg_info))[0]
+ return pkg_resources.Distribution(
+ os.path.dirname(egg_info),
+ project_name=dist_name,
+ metadata=metadata,
+ )
+
+ @property
+ def has_hash_options(self):
+ """Return whether any known-good hashes are specified as options.
+
+ These activate --require-hashes mode; hashes specified as part of a
+ URL do not.
+
+ """
+ return bool(self.options.get('hashes', {}))
+
+ def hashes(self, trust_internet=True):
+ """Return a hash-comparer that considers my option- and URL-based
+ hashes to be known-good.
+
+ Hashes in URLs--ones embedded in the requirements file, not ones
+ downloaded from an index server--are almost peers with ones from
+ flags. They satisfy --require-hashes (whether it was implicitly or
+ explicitly activated) but do not activate it. md5 and sha224 are not
+ allowed in flags, which should nudge people toward good algos. We
+ always OR all hashes together, even ones from URLs.
+
+ :param trust_internet: Whether to trust URL-based (#md5=...) hashes
+ downloaded from the internet, as by populate_link()
+
+ """
+ good_hashes = self.options.get('hashes', {}).copy()
+ link = self.link if trust_internet else self.original_link
+ if link and link.hash:
+ good_hashes.setdefault(link.hash_name, []).append(link.hash)
+ return Hashes(good_hashes)
+
+
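A rough standalone sketch of how option- and URL-based hashes are merged in
hashes() above (the function name and URL are illustrative, not pip API):

    from urllib.parse import urlparse

    def collect_hashes(option_hashes, link_url, trust_internet=True):
        # Start from the --hash options; copy so the originals stay intact.
        good = {name: list(values) for name, values in option_hashes.items()}
        fragment = urlparse(link_url).fragment if trust_internet else ''
        if '=' in fragment:
            name, value = fragment.split('=', 1)
            good.setdefault(name, []).append(value)
        return good

    print(collect_hashes({'sha256': ['aa...']},
                         'https://example.com/pkg.tar.gz#md5=deadbeef'))
    # {'sha256': ['aa...'], 'md5': ['deadbeef']}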
+def _strip_postfix(req):
+ """
+    Strip req postfix (-dev, -0.2, etc.)
+ """
+ # FIXME: use package_to_requirement?
+ match = re.search(r'^(.*?)(?:-dev|-\d.*)$', req)
+ if match:
+ # Strip off -dev, -0.2, etc.
+ warnings.warn(
+ "#egg cleanup for editable urls will be dropped in the future",
+ RemovedInPip11Warning,
+ )
+ req = match.group(1)
+ return req
+
+
+def parse_editable(editable_req):
+ """Parses an editable requirement into:
+ - a requirement name
+    - a URL
+ - extras
+ - editable options
+ Accepted requirements:
+ svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
+ .[some_extra]
+ """
+
+ from pip._internal.index import Link
+
+ url = editable_req
+
+ # If a file path is specified with extras, strip off the extras.
+ url_no_extras, extras = _strip_extras(url)
+
+ if os.path.isdir(url_no_extras):
+ if not os.path.exists(os.path.join(url_no_extras, 'setup.py')):
+ raise InstallationError(
+ "Directory %r is not installable. File 'setup.py' not found." %
+ url_no_extras
+ )
+ # Treating it as code that has already been checked out
+ url_no_extras = path_to_url(url_no_extras)
+
+ if url_no_extras.lower().startswith('file:'):
+ package_name = Link(url_no_extras).egg_fragment
+ if extras:
+ return (
+ package_name,
+ url_no_extras,
+ Requirement("placeholder" + extras.lower()).extras,
+ )
+ else:
+ return package_name, url_no_extras, None
+
+ for version_control in vcs:
+ if url.lower().startswith('%s:' % version_control):
+ url = '%s+%s' % (version_control, url)
+ break
+
+ if '+' not in url:
+ raise InstallationError(
+ '%s should either be a path to a local project or a VCS url '
+ 'beginning with svn+, git+, hg+, or bzr+' %
+ editable_req
+ )
+
+ vc_type = url.split('+', 1)[0].lower()
+
+ if not vcs.get_backend(vc_type):
+ error_message = 'For --editable=%s only ' % editable_req + \
+ ', '.join([backend.name + '+URL' for backend in vcs.backends]) + \
+ ' is currently supported'
+ raise InstallationError(error_message)
+
+ package_name = Link(url).egg_fragment
+ if not package_name:
+ raise InstallationError(
+ "Could not detect requirement name for '%s', please specify one "
+ "with #egg=your_package_name" % editable_req
+ )
+ return _strip_postfix(package_name), url, None
+
+
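The VCS handling in parse_editable() can be illustrated standalone; a
simplified sketch using only the stdlib (pip's real code goes through its
Link and vcs abstractions instead):

    import re

    VCS = ('git', 'hg', 'svn', 'bzr')

    def split_editable(url):
        # Bare VCS URLs such as 'git:...' get a 'git+' prefix first.
        for vc in VCS:
            if url.lower().startswith('%s:' % vc):
                url = '%s+%s' % (vc, url)
                break
        if '+' not in url:
            raise ValueError('%s is neither a local path nor a VCS url' % url)
        vc_type = url.split('+', 1)[0].lower()
        match = re.search(r'#egg=([^&]+)', url)
        return vc_type, url, match.group(1) if match else None

    print(split_editable('git+https://example.com/repo.git#egg=Foobar'))
    # ('git', 'git+https://example.com/repo.git#egg=Foobar', 'Foobar')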
+def deduce_helpful_msg(req):
+ """Returns helpful msg in case requirements file does not exist,
+ or cannot be parsed.
+
+ :params req: Requirements file path
+ """
+ msg = ""
+ if os.path.exists(req):
+ msg = " It does exist."
+ # Try to parse and check if it is a requirements file.
+ try:
+ with open(req, 'r') as fp:
+ # parse first line only
+ next(parse_requirements(fp.read()))
+ msg += " The argument you provided " + \
+ "(%s) appears to be a" % (req) + \
+ " requirements file. If that is the" + \
+ " case, use the '-r' flag to install" + \
+ " the packages specified within it."
+ except RequirementParseError:
+ logger.debug("Cannot parse '%s' as requirements \
+ file" % (req), exc_info=1)
+ else:
+ msg += " File '%s' does not exist." % (req)
+ return msg
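The "-r flag" hint above boils down to: if the argument names an existing
file whose first entry parses as a requirement, it is probably a requirements
file. A condensed sketch using pkg_resources, which exposes compatible
parse_requirements/RequirementParseError helpers:

    import os
    from pkg_resources import RequirementParseError, parse_requirements

    def looks_like_requirements_file(path):
        if not os.path.exists(path):
            return False
        try:
            with open(path) as fp:
                next(parse_requirements(fp.read()))  # first entry only
            return True
        except (RequirementParseError, StopIteration):
            return False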
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/req/req_set.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/req/req_set.py
index b2b55f8..78b7d32 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/req/req_set.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/req/req_set.py
@@ -1,164 +1,164 @@
-from __future__ import absolute_import
-
-import logging
-from collections import OrderedDict
-
-from pip._internal.exceptions import InstallationError
-from pip._internal.utils.logging import indent_log
-from pip._internal.wheel import Wheel
-
-logger = logging.getLogger(__name__)
-
-
-class RequirementSet(object):
-
- def __init__(self, require_hashes=False):
- """Create a RequirementSet.
-
- :param wheel_cache: The pip wheel cache, for passing to
- InstallRequirement.
- """
-
- self.requirements = OrderedDict()
- self.require_hashes = require_hashes
-
- # Mapping of alias: real_name
- self.requirement_aliases = {}
- self.unnamed_requirements = []
- self.successfully_downloaded = []
- self.reqs_to_cleanup = []
-
- def __str__(self):
- reqs = [req for req in self.requirements.values()
- if not req.comes_from]
- reqs.sort(key=lambda req: req.name.lower())
- return ' '.join([str(req.req) for req in reqs])
-
- def __repr__(self):
- reqs = [req for req in self.requirements.values()]
- reqs.sort(key=lambda req: req.name.lower())
- reqs_str = ', '.join([str(req.req) for req in reqs])
- return ('<%s object; %d requirement(s): %s>'
- % (self.__class__.__name__, len(reqs), reqs_str))
-
- def add_requirement(self, install_req, parent_req_name=None,
- extras_requested=None):
- """Add install_req as a requirement to install.
-
- :param parent_req_name: The name of the requirement that needed this
- added. The name is used because when multiple unnamed requirements
- resolve to the same name, we could otherwise end up with dependency
- links that point outside the Requirements set. parent_req must
- already be added. Note that None implies that this is a user
- supplied requirement, vs an inferred one.
- :param extras_requested: an iterable of extras used to evaluate the
- environment markers.
- :return: Additional requirements to scan. That is either [] if
- the requirement is not applicable, or [install_req] if the
- requirement is applicable and has just been added.
- """
- name = install_req.name
- if not install_req.match_markers(extras_requested):
- logger.info("Ignoring %s: markers '%s' don't match your "
- "environment", install_req.name,
- install_req.markers)
- return [], None
-
- # This check has to come after we filter requirements with the
- # environment markers.
- if install_req.link and install_req.link.is_wheel:
- wheel = Wheel(install_req.link.filename)
- if not wheel.supported():
- raise InstallationError(
- "%s is not a supported wheel on this platform." %
- wheel.filename
- )
-
- # This next bit is really a sanity check.
- assert install_req.is_direct == (parent_req_name is None), (
- "a direct req shouldn't have a parent and also, "
- "a non direct req should have a parent"
- )
-
- if not name:
- # url or path requirement w/o an egg fragment
- self.unnamed_requirements.append(install_req)
- return [install_req], None
- else:
- try:
- existing_req = self.get_requirement(name)
- except KeyError:
- existing_req = None
- if (parent_req_name is None and existing_req and not
- existing_req.constraint and
- existing_req.extras == install_req.extras and not
- existing_req.req.specifier == install_req.req.specifier):
- raise InstallationError(
- 'Double requirement given: %s (already in %s, name=%r)'
- % (install_req, existing_req, name))
- if not existing_req:
- # Add requirement
- self.requirements[name] = install_req
- # FIXME: what about other normalizations? E.g., _ vs. -?
- if name.lower() != name:
- self.requirement_aliases[name.lower()] = name
- result = [install_req]
- else:
- # Assume there's no need to scan, and that we've already
- # encountered this for scanning.
- result = []
- if not install_req.constraint and existing_req.constraint:
- if (install_req.link and not (existing_req.link and
- install_req.link.path == existing_req.link.path)):
- self.reqs_to_cleanup.append(install_req)
- raise InstallationError(
- "Could not satisfy constraints for '%s': "
- "installation from path or url cannot be "
- "constrained to a version" % name,
- )
- # If we're now installing a constraint, mark the existing
- # object for real installation.
- existing_req.constraint = False
- existing_req.extras = tuple(
- sorted(set(existing_req.extras).union(
- set(install_req.extras))))
- logger.debug("Setting %s extras to: %s",
- existing_req, existing_req.extras)
- # And now we need to scan this.
- result = [existing_req]
- # Canonicalise to the already-added object for the backref
- # check below.
- install_req = existing_req
-
- # We return install_req here to allow for the caller to add it to
- # the dependency information for the parent package.
- return result, install_req
-
- def has_requirement(self, project_name):
- name = project_name.lower()
- if (name in self.requirements and
- not self.requirements[name].constraint or
- name in self.requirement_aliases and
- not self.requirements[self.requirement_aliases[name]].constraint):
- return True
- return False
-
- @property
- def has_requirements(self):
- return list(req for req in self.requirements.values() if not
- req.constraint) or self.unnamed_requirements
-
- def get_requirement(self, project_name):
- for name in project_name, project_name.lower():
- if name in self.requirements:
- return self.requirements[name]
- if name in self.requirement_aliases:
- return self.requirements[self.requirement_aliases[name]]
- raise KeyError("No project with the name %r" % project_name)
-
- def cleanup_files(self):
- """Clean up files, remove builds."""
- logger.debug('Cleaning up...')
- with indent_log():
- for req in self.reqs_to_cleanup:
- req.remove_temporary_source()
+from __future__ import absolute_import
+
+import logging
+from collections import OrderedDict
+
+from pip._internal.exceptions import InstallationError
+from pip._internal.utils.logging import indent_log
+from pip._internal.wheel import Wheel
+
+logger = logging.getLogger(__name__)
+
+
+class RequirementSet(object):
+
+ def __init__(self, require_hashes=False):
+ """Create a RequirementSet.
+
+ :param wheel_cache: The pip wheel cache, for passing to
+ InstallRequirement.
+ """
+
+ self.requirements = OrderedDict()
+ self.require_hashes = require_hashes
+
+ # Mapping of alias: real_name
+ self.requirement_aliases = {}
+ self.unnamed_requirements = []
+ self.successfully_downloaded = []
+ self.reqs_to_cleanup = []
+
+ def __str__(self):
+ reqs = [req for req in self.requirements.values()
+ if not req.comes_from]
+ reqs.sort(key=lambda req: req.name.lower())
+ return ' '.join([str(req.req) for req in reqs])
+
+ def __repr__(self):
+ reqs = [req for req in self.requirements.values()]
+ reqs.sort(key=lambda req: req.name.lower())
+ reqs_str = ', '.join([str(req.req) for req in reqs])
+ return ('<%s object; %d requirement(s): %s>'
+ % (self.__class__.__name__, len(reqs), reqs_str))
+
+ def add_requirement(self, install_req, parent_req_name=None,
+ extras_requested=None):
+ """Add install_req as a requirement to install.
+
+ :param parent_req_name: The name of the requirement that needed this
+ added. The name is used because when multiple unnamed requirements
+ resolve to the same name, we could otherwise end up with dependency
+ links that point outside the Requirements set. parent_req must
+ already be added. Note that None implies that this is a user
+ supplied requirement, vs an inferred one.
+ :param extras_requested: an iterable of extras used to evaluate the
+ environment markers.
+ :return: Additional requirements to scan. That is either [] if
+ the requirement is not applicable, or [install_req] if the
+ requirement is applicable and has just been added.
+ """
+ name = install_req.name
+ if not install_req.match_markers(extras_requested):
+ logger.info("Ignoring %s: markers '%s' don't match your "
+ "environment", install_req.name,
+ install_req.markers)
+ return [], None
+
+ # This check has to come after we filter requirements with the
+ # environment markers.
+ if install_req.link and install_req.link.is_wheel:
+ wheel = Wheel(install_req.link.filename)
+ if not wheel.supported():
+ raise InstallationError(
+ "%s is not a supported wheel on this platform." %
+ wheel.filename
+ )
+
+ # This next bit is really a sanity check.
+ assert install_req.is_direct == (parent_req_name is None), (
+ "a direct req shouldn't have a parent and also, "
+ "a non direct req should have a parent"
+ )
+
+ if not name:
+ # url or path requirement w/o an egg fragment
+ self.unnamed_requirements.append(install_req)
+ return [install_req], None
+ else:
+ try:
+ existing_req = self.get_requirement(name)
+ except KeyError:
+ existing_req = None
+ if (parent_req_name is None and existing_req and not
+ existing_req.constraint and
+ existing_req.extras == install_req.extras and not
+ existing_req.req.specifier == install_req.req.specifier):
+ raise InstallationError(
+ 'Double requirement given: %s (already in %s, name=%r)'
+ % (install_req, existing_req, name))
+ if not existing_req:
+ # Add requirement
+ self.requirements[name] = install_req
+ # FIXME: what about other normalizations? E.g., _ vs. -?
+ if name.lower() != name:
+ self.requirement_aliases[name.lower()] = name
+ result = [install_req]
+ else:
+ # Assume there's no need to scan, and that we've already
+ # encountered this for scanning.
+ result = []
+ if not install_req.constraint and existing_req.constraint:
+ if (install_req.link and not (existing_req.link and
+ install_req.link.path == existing_req.link.path)):
+ self.reqs_to_cleanup.append(install_req)
+ raise InstallationError(
+ "Could not satisfy constraints for '%s': "
+ "installation from path or url cannot be "
+ "constrained to a version" % name,
+ )
+ # If we're now installing a constraint, mark the existing
+ # object for real installation.
+ existing_req.constraint = False
+ existing_req.extras = tuple(
+ sorted(set(existing_req.extras).union(
+ set(install_req.extras))))
+ logger.debug("Setting %s extras to: %s",
+ existing_req, existing_req.extras)
+ # And now we need to scan this.
+ result = [existing_req]
+ # Canonicalise to the already-added object for the backref
+ # check below.
+ install_req = existing_req
+
+ # We return install_req here to allow for the caller to add it to
+ # the dependency information for the parent package.
+ return result, install_req
+
+ def has_requirement(self, project_name):
+ name = project_name.lower()
+ if (name in self.requirements and
+ not self.requirements[name].constraint or
+ name in self.requirement_aliases and
+ not self.requirements[self.requirement_aliases[name]].constraint):
+ return True
+ return False
+
+ @property
+ def has_requirements(self):
+ return list(req for req in self.requirements.values() if not
+ req.constraint) or self.unnamed_requirements
+
+ def get_requirement(self, project_name):
+ for name in project_name, project_name.lower():
+ if name in self.requirements:
+ return self.requirements[name]
+ if name in self.requirement_aliases:
+ return self.requirements[self.requirement_aliases[name]]
+ raise KeyError("No project with the name %r" % project_name)
+
+ def cleanup_files(self):
+ """Clean up files, remove builds."""
+ logger.debug('Cleaning up...')
+ with indent_log():
+ for req in self.reqs_to_cleanup:
+ req.remove_temporary_source()
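A toy illustration of the add_requirement() bookkeeping above: named
requirements are stored once and deduplicated case-insensitively through an
alias map, mirroring requirement_aliases (the dicts below stand in for the
real RequirementSet state):

    reqs, aliases = {}, {}

    def add(name):
        if name in reqs:
            return 'already present'
        if name.lower() in aliases:
            return 'already present as %s' % aliases[name.lower()]
        reqs[name] = object()  # stand-in for the InstallRequirement
        if name.lower() != name:
            aliases[name.lower()] = name
        return 'added'

    print(add('Django'))  # added
    print(add('django'))  # already present as Django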
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/req/req_uninstall.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/req/req_uninstall.py
index a3cc7bf..a47520f 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/req/req_uninstall.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/req/req_uninstall.py
@@ -1,455 +1,455 @@
-from __future__ import absolute_import
-
-import csv
-import functools
-import logging
-import os
-import sys
-import sysconfig
-
-from pip._vendor import pkg_resources
-
-from pip._internal.compat import WINDOWS, cache_from_source, uses_pycache
-from pip._internal.exceptions import UninstallationError
-from pip._internal.locations import bin_py, bin_user
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import (
- FakeFile, ask, dist_in_usersite, dist_is_local, egg_link_path, is_local,
- normalize_path, renames,
-)
-from pip._internal.utils.temp_dir import TempDirectory
-
-logger = logging.getLogger(__name__)
-
-
-def _script_names(dist, script_name, is_gui):
- """Create the fully qualified name of the files created by
- {console,gui}_scripts for the given ``dist``.
- Returns the list of file names
- """
- if dist_in_usersite(dist):
- bin_dir = bin_user
- else:
- bin_dir = bin_py
- exe_name = os.path.join(bin_dir, script_name)
- paths_to_remove = [exe_name]
- if WINDOWS:
- paths_to_remove.append(exe_name + '.exe')
- paths_to_remove.append(exe_name + '.exe.manifest')
- if is_gui:
- paths_to_remove.append(exe_name + '-script.pyw')
- else:
- paths_to_remove.append(exe_name + '-script.py')
- return paths_to_remove
-
-
-def _unique(fn):
- @functools.wraps(fn)
- def unique(*args, **kw):
- seen = set()
- for item in fn(*args, **kw):
- if item not in seen:
- seen.add(item)
- yield item
- return unique
-
-
-@_unique
-def uninstallation_paths(dist):
- """
- Yield all the uninstallation paths for dist based on RECORD-without-.pyc
-
- Yield paths to all the files in RECORD. For each .py file in RECORD, add
- the .pyc in the same directory.
-
- UninstallPathSet.add() takes care of the __pycache__ .pyc.
- """
- r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))
- for row in r:
- path = os.path.join(dist.location, row[0])
- yield path
- if path.endswith('.py'):
- dn, fn = os.path.split(path)
- base = fn[:-3]
- path = os.path.join(dn, base + '.pyc')
- yield path
-
-
-def compact(paths):
- """Compact a path set to contain the minimal number of paths
- necessary to contain all paths in the set. If /a/path/ and
- /a/path/to/a/file.txt are both in the set, leave only the
- shorter path."""
-
- sep = os.path.sep
- short_paths = set()
- for path in sorted(paths, key=len):
- should_add = any(
- path.startswith(shortpath.rstrip("*")) and
- path[len(shortpath.rstrip("*").rstrip(sep))] == sep
- for shortpath in short_paths
- )
- if not should_add:
- short_paths.add(path)
- return short_paths
-
-
-def compress_for_output_listing(paths):
- """Returns a tuple of 2 sets of which paths to display to user
-
- The first set contains paths that would be deleted. Files of a package
- are not added and the top-level directory of the package has a '*' added
- at the end - to signify that all it's contents are removed.
-
- The second set contains files that would have been skipped in the above
- folders.
- """
-
- will_remove = list(paths)
- will_skip = set()
-
- # Determine folders and files
- folders = set()
- files = set()
- for path in will_remove:
- if path.endswith(".pyc"):
- continue
- if path.endswith("__init__.py") or ".dist-info" in path:
- folders.add(os.path.dirname(path))
- files.add(path)
-
- folders = compact(folders)
-
- # This walks the tree using os.walk to not miss extra folders
- # that might get added.
- for folder in folders:
- for dirpath, _, dirfiles in os.walk(folder):
- for fname in dirfiles:
- if fname.endswith(".pyc"):
- continue
-
- file_ = os.path.normcase(os.path.join(dirpath, fname))
- if os.path.isfile(file_) and file_ not in files:
- # We are skipping this file. Add it to the set.
- will_skip.add(file_)
-
- will_remove = files | {
- os.path.join(folder, "*") for folder in folders
- }
-
- return will_remove, will_skip
-
-
-class UninstallPathSet(object):
- """A set of file paths to be removed in the uninstallation of a
- requirement."""
- def __init__(self, dist):
- self.paths = set()
- self._refuse = set()
- self.pth = {}
- self.dist = dist
- self.save_dir = TempDirectory(kind="uninstall")
- self._moved_paths = []
-
- def _permitted(self, path):
- """
- Return True if the given path is one we are permitted to
- remove/modify, False otherwise.
-
- """
- return is_local(path)
-
- def add(self, path):
- head, tail = os.path.split(path)
-
- # we normalize the head to resolve parent directory symlinks, but not
- # the tail, since we only want to uninstall symlinks, not their targets
- path = os.path.join(normalize_path(head), os.path.normcase(tail))
-
- if not os.path.exists(path):
- return
- if self._permitted(path):
- self.paths.add(path)
- else:
- self._refuse.add(path)
-
- # __pycache__ files can show up after 'installed-files.txt' is created,
- # due to imports
- if os.path.splitext(path)[1] == '.py' and uses_pycache:
- self.add(cache_from_source(path))
-
- def add_pth(self, pth_file, entry):
- pth_file = normalize_path(pth_file)
- if self._permitted(pth_file):
- if pth_file not in self.pth:
- self.pth[pth_file] = UninstallPthEntries(pth_file)
- self.pth[pth_file].add(entry)
- else:
- self._refuse.add(pth_file)
-
- def _stash(self, path):
- return os.path.join(
- self.save_dir.path, os.path.splitdrive(path)[1].lstrip(os.path.sep)
- )
-
- def remove(self, auto_confirm=False, verbose=False):
- """Remove paths in ``self.paths`` with confirmation (unless
- ``auto_confirm`` is True)."""
-
- if not self.paths:
- logger.info(
- "Can't uninstall '%s'. No files were found to uninstall.",
- self.dist.project_name,
- )
- return
-
- dist_name_version = (
- self.dist.project_name + "-" + self.dist.version
- )
- logger.info('Uninstalling %s:', dist_name_version)
-
- with indent_log():
- if auto_confirm or self._allowed_to_proceed(verbose):
- self.save_dir.create()
-
- for path in sorted(compact(self.paths)):
- new_path = self._stash(path)
- logger.debug('Removing file or directory %s', path)
- self._moved_paths.append(path)
- renames(path, new_path)
- for pth in self.pth.values():
- pth.remove()
-
- logger.info('Successfully uninstalled %s', dist_name_version)
-
- def _allowed_to_proceed(self, verbose):
- """Display which files would be deleted and prompt for confirmation
- """
-
- def _display(msg, paths):
- if not paths:
- return
-
- logger.info(msg)
- with indent_log():
- for path in sorted(compact(paths)):
- logger.info(path)
-
- if not verbose:
- will_remove, will_skip = compress_for_output_listing(self.paths)
- else:
- # In verbose mode, display all the files that are going to be
- # deleted.
- will_remove = list(self.paths)
- will_skip = set()
-
- _display('Would remove:', will_remove)
- _display('Would not remove (might be manually added):', will_skip)
- _display('Would not remove (outside of prefix):', self._refuse)
-
- return ask('Proceed (y/n)? ', ('y', 'n')) == 'y'
-
- def rollback(self):
- """Rollback the changes previously made by remove()."""
- if self.save_dir.path is None:
- logger.error(
- "Can't roll back %s; was not uninstalled",
- self.dist.project_name,
- )
- return False
- logger.info('Rolling back uninstall of %s', self.dist.project_name)
- for path in self._moved_paths:
- tmp_path = self._stash(path)
- logger.debug('Replacing %s', path)
- renames(tmp_path, path)
- for pth in self.pth.values():
- pth.rollback()
-
- def commit(self):
- """Remove temporary save dir: rollback will no longer be possible."""
- self.save_dir.cleanup()
- self._moved_paths = []
-
- @classmethod
- def from_dist(cls, dist):
- dist_path = normalize_path(dist.location)
- if not dist_is_local(dist):
- logger.info(
- "Not uninstalling %s at %s, outside environment %s",
- dist.key,
- dist_path,
- sys.prefix,
- )
- return cls(dist)
-
- if dist_path in {p for p in {sysconfig.get_path("stdlib"),
- sysconfig.get_path("platstdlib")}
- if p}:
- logger.info(
- "Not uninstalling %s at %s, as it is in the standard library.",
- dist.key,
- dist_path,
- )
- return cls(dist)
-
- paths_to_remove = cls(dist)
- develop_egg_link = egg_link_path(dist)
- develop_egg_link_egg_info = '{}.egg-info'.format(
- pkg_resources.to_filename(dist.project_name))
- egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info)
- # Special case for distutils installed package
- distutils_egg_info = getattr(dist._provider, 'path', None)
-
-        # The order of the uninstall cases matters: with two installs of the
-        # same package, pip needs to uninstall the currently detected version
- if (egg_info_exists and dist.egg_info.endswith('.egg-info') and
- not dist.egg_info.endswith(develop_egg_link_egg_info)):
- # if dist.egg_info.endswith(develop_egg_link_egg_info), we
- # are in fact in the develop_egg_link case
- paths_to_remove.add(dist.egg_info)
- if dist.has_metadata('installed-files.txt'):
- for installed_file in dist.get_metadata(
- 'installed-files.txt').splitlines():
- path = os.path.normpath(
- os.path.join(dist.egg_info, installed_file)
- )
- paths_to_remove.add(path)
- # FIXME: need a test for this elif block
- # occurs with --single-version-externally-managed/--record outside
- # of pip
- elif dist.has_metadata('top_level.txt'):
- if dist.has_metadata('namespace_packages.txt'):
- namespaces = dist.get_metadata('namespace_packages.txt')
- else:
- namespaces = []
- for top_level_pkg in [
- p for p
- in dist.get_metadata('top_level.txt').splitlines()
- if p and p not in namespaces]:
- path = os.path.join(dist.location, top_level_pkg)
- paths_to_remove.add(path)
- paths_to_remove.add(path + '.py')
- paths_to_remove.add(path + '.pyc')
- paths_to_remove.add(path + '.pyo')
-
- elif distutils_egg_info:
- raise UninstallationError(
- "Cannot uninstall {!r}. It is a distutils installed project "
- "and thus we cannot accurately determine which files belong "
- "to it which would lead to only a partial uninstall.".format(
- dist.project_name,
- )
- )
-
- elif dist.location.endswith('.egg'):
- # package installed by easy_install
- # We cannot match on dist.egg_name because it can slightly vary
- # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
- paths_to_remove.add(dist.location)
- easy_install_egg = os.path.split(dist.location)[1]
- easy_install_pth = os.path.join(os.path.dirname(dist.location),
- 'easy-install.pth')
- paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)
-
- elif egg_info_exists and dist.egg_info.endswith('.dist-info'):
- for path in uninstallation_paths(dist):
- paths_to_remove.add(path)
-
- elif develop_egg_link:
- # develop egg
- with open(develop_egg_link, 'r') as fh:
- link_pointer = os.path.normcase(fh.readline().strip())
- assert (link_pointer == dist.location), (
- 'Egg-link %s does not match installed location of %s '
- '(at %s)' % (link_pointer, dist.project_name, dist.location)
- )
- paths_to_remove.add(develop_egg_link)
- easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
- 'easy-install.pth')
- paths_to_remove.add_pth(easy_install_pth, dist.location)
-
- else:
- logger.debug(
- 'Not sure how to uninstall: %s - Check: %s',
- dist, dist.location,
- )
-
- # find distutils scripts= scripts
- if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
- for script in dist.metadata_listdir('scripts'):
- if dist_in_usersite(dist):
- bin_dir = bin_user
- else:
- bin_dir = bin_py
- paths_to_remove.add(os.path.join(bin_dir, script))
- if WINDOWS:
- paths_to_remove.add(os.path.join(bin_dir, script) + '.bat')
-
- # find console_scripts
- _scripts_to_remove = []
- console_scripts = dist.get_entry_map(group='console_scripts')
- for name in console_scripts.keys():
- _scripts_to_remove.extend(_script_names(dist, name, False))
- # find gui_scripts
- gui_scripts = dist.get_entry_map(group='gui_scripts')
- for name in gui_scripts.keys():
- _scripts_to_remove.extend(_script_names(dist, name, True))
-
- for s in _scripts_to_remove:
- paths_to_remove.add(s)
-
- return paths_to_remove
-
-
-class UninstallPthEntries(object):
- def __init__(self, pth_file):
- if not os.path.isfile(pth_file):
- raise UninstallationError(
- "Cannot remove entries from nonexistent file %s" % pth_file
- )
- self.file = pth_file
- self.entries = set()
- self._saved_lines = None
-
- def add(self, entry):
- entry = os.path.normcase(entry)
- # On Windows, os.path.normcase converts the entry to use
- # backslashes. This is correct for entries that describe absolute
- # paths outside of site-packages, but all the others use forward
- # slashes.
- if WINDOWS and not os.path.splitdrive(entry)[0]:
- entry = entry.replace('\\', '/')
- self.entries.add(entry)
-
- def remove(self):
- logger.debug('Removing pth entries from %s:', self.file)
- with open(self.file, 'rb') as fh:
- # windows uses '\r\n' with py3k, but uses '\n' with py2.x
- lines = fh.readlines()
- self._saved_lines = lines
- if any(b'\r\n' in line for line in lines):
- endline = '\r\n'
- else:
- endline = '\n'
- # handle missing trailing newline
- if lines and not lines[-1].endswith(endline.encode("utf-8")):
- lines[-1] = lines[-1] + endline.encode("utf-8")
- for entry in self.entries:
- try:
- logger.debug('Removing entry: %s', entry)
- lines.remove((entry + endline).encode("utf-8"))
- except ValueError:
- pass
- with open(self.file, 'wb') as fh:
- fh.writelines(lines)
-
- def rollback(self):
- if self._saved_lines is None:
- logger.error(
- 'Cannot roll back changes to %s, none were made', self.file
- )
- return False
- logger.debug('Rolling %s back to previous state', self.file)
- with open(self.file, 'wb') as fh:
- fh.writelines(self._saved_lines)
- return True
+from __future__ import absolute_import
+
+import csv
+import functools
+import logging
+import os
+import sys
+import sysconfig
+
+from pip._vendor import pkg_resources
+
+from pip._internal.compat import WINDOWS, cache_from_source, uses_pycache
+from pip._internal.exceptions import UninstallationError
+from pip._internal.locations import bin_py, bin_user
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import (
+ FakeFile, ask, dist_in_usersite, dist_is_local, egg_link_path, is_local,
+ normalize_path, renames,
+)
+from pip._internal.utils.temp_dir import TempDirectory
+
+logger = logging.getLogger(__name__)
+
+
+def _script_names(dist, script_name, is_gui):
+ """Create the fully qualified name of the files created by
+ {console,gui}_scripts for the given ``dist``.
+ Returns the list of file names
+ """
+ if dist_in_usersite(dist):
+ bin_dir = bin_user
+ else:
+ bin_dir = bin_py
+ exe_name = os.path.join(bin_dir, script_name)
+ paths_to_remove = [exe_name]
+ if WINDOWS:
+ paths_to_remove.append(exe_name + '.exe')
+ paths_to_remove.append(exe_name + '.exe.manifest')
+ if is_gui:
+ paths_to_remove.append(exe_name + '-script.pyw')
+ else:
+ paths_to_remove.append(exe_name + '-script.py')
+ return paths_to_remove
+
+
+def _unique(fn):
+ @functools.wraps(fn)
+ def unique(*args, **kw):
+ seen = set()
+ for item in fn(*args, **kw):
+ if item not in seen:
+ seen.add(item)
+ yield item
+ return unique
+
+
+@_unique
+def uninstallation_paths(dist):
+ """
+ Yield all the uninstallation paths for dist based on RECORD-without-.pyc
+
+ Yield paths to all the files in RECORD. For each .py file in RECORD, add
+ the .pyc in the same directory.
+
+ UninstallPathSet.add() takes care of the __pycache__ .pyc.
+ """
+ r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))
+ for row in r:
+ path = os.path.join(dist.location, row[0])
+ yield path
+ if path.endswith('.py'):
+ dn, fn = os.path.split(path)
+ base = fn[:-3]
+ path = os.path.join(dn, base + '.pyc')
+ yield path
+
+
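A standalone sketch of the RECORD expansion above: each row names one
installed file, and every .py additionally yields its legacy same-directory
.pyc (the pre-__pycache__ layout). The RECORD content and install location
below are made up for the demo:

    import csv
    import io
    import os

    record = "pkg/__init__.py,sha256=...,123\npkg/data.txt,,\n"
    for row in csv.reader(io.StringIO(record)):
        path = os.path.join('/site-packages', row[0])
        print(path)
        if path.endswith('.py'):
            print(path[:-3] + '.pyc')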
+def compact(paths):
+ """Compact a path set to contain the minimal number of paths
+ necessary to contain all paths in the set. If /a/path/ and
+ /a/path/to/a/file.txt are both in the set, leave only the
+ shorter path."""
+
+ sep = os.path.sep
+ short_paths = set()
+ for path in sorted(paths, key=len):
+ should_add = any(
+ path.startswith(shortpath.rstrip("*")) and
+ path[len(shortpath.rstrip("*").rstrip(sep))] == sep
+ for shortpath in short_paths
+ )
+ if not should_add:
+ short_paths.add(path)
+ return short_paths
+
+
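What compact() does, on sample input: a simplified re-statement that ignores
the '*' wildcard handling and assumes POSIX-style paths:

    import os

    def compact(paths):
        sep = os.path.sep
        short_paths = set()
        for path in sorted(paths, key=len):
            # Covered only if a kept path is a true directory prefix.
            covered = any(
                path.startswith(kept) and path[len(kept.rstrip(sep))] == sep
                for kept in short_paths
            )
            if not covered:
                short_paths.add(path)
        return short_paths

    print(sorted(compact({'/a/path', '/a/path/to/a/file.txt',
                          '/a/pathology.txt'})))
    # ['/a/path', '/a/pathology.txt'] -- pathology.txt survives because
    # '/a/path' is not a directory prefix of it.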
+def compress_for_output_listing(paths):
+ """Returns a tuple of 2 sets of which paths to display to user
+
+ The first set contains paths that would be deleted. Files of a package
+ are not added and the top-level directory of the package has a '*' added
+ at the end - to signify that all it's contents are removed.
+
+ The second set contains files that would have been skipped in the above
+ folders.
+ """
+
+ will_remove = list(paths)
+ will_skip = set()
+
+ # Determine folders and files
+ folders = set()
+ files = set()
+ for path in will_remove:
+ if path.endswith(".pyc"):
+ continue
+ if path.endswith("__init__.py") or ".dist-info" in path:
+ folders.add(os.path.dirname(path))
+ files.add(path)
+
+ folders = compact(folders)
+
+ # This walks the tree using os.walk to not miss extra folders
+ # that might get added.
+ for folder in folders:
+ for dirpath, _, dirfiles in os.walk(folder):
+ for fname in dirfiles:
+ if fname.endswith(".pyc"):
+ continue
+
+ file_ = os.path.normcase(os.path.join(dirpath, fname))
+ if os.path.isfile(file_) and file_ not in files:
+ # We are skipping this file. Add it to the set.
+ will_skip.add(file_)
+
+ will_remove = files | {
+ os.path.join(folder, "*") for folder in folders
+ }
+
+ return will_remove, will_skip
+
+
+class UninstallPathSet(object):
+ """A set of file paths to be removed in the uninstallation of a
+ requirement."""
+ def __init__(self, dist):
+ self.paths = set()
+ self._refuse = set()
+ self.pth = {}
+ self.dist = dist
+ self.save_dir = TempDirectory(kind="uninstall")
+ self._moved_paths = []
+
+ def _permitted(self, path):
+ """
+ Return True if the given path is one we are permitted to
+ remove/modify, False otherwise.
+
+ """
+ return is_local(path)
+
+ def add(self, path):
+ head, tail = os.path.split(path)
+
+ # we normalize the head to resolve parent directory symlinks, but not
+ # the tail, since we only want to uninstall symlinks, not their targets
+ path = os.path.join(normalize_path(head), os.path.normcase(tail))
+
+ if not os.path.exists(path):
+ return
+ if self._permitted(path):
+ self.paths.add(path)
+ else:
+ self._refuse.add(path)
+
+ # __pycache__ files can show up after 'installed-files.txt' is created,
+ # due to imports
+ if os.path.splitext(path)[1] == '.py' and uses_pycache:
+ self.add(cache_from_source(path))
+
+ def add_pth(self, pth_file, entry):
+ pth_file = normalize_path(pth_file)
+ if self._permitted(pth_file):
+ if pth_file not in self.pth:
+ self.pth[pth_file] = UninstallPthEntries(pth_file)
+ self.pth[pth_file].add(entry)
+ else:
+ self._refuse.add(pth_file)
+
+ def _stash(self, path):
+ return os.path.join(
+ self.save_dir.path, os.path.splitdrive(path)[1].lstrip(os.path.sep)
+ )
+
+ def remove(self, auto_confirm=False, verbose=False):
+ """Remove paths in ``self.paths`` with confirmation (unless
+ ``auto_confirm`` is True)."""
+
+ if not self.paths:
+ logger.info(
+ "Can't uninstall '%s'. No files were found to uninstall.",
+ self.dist.project_name,
+ )
+ return
+
+ dist_name_version = (
+ self.dist.project_name + "-" + self.dist.version
+ )
+ logger.info('Uninstalling %s:', dist_name_version)
+
+ with indent_log():
+ if auto_confirm or self._allowed_to_proceed(verbose):
+ self.save_dir.create()
+
+ for path in sorted(compact(self.paths)):
+ new_path = self._stash(path)
+ logger.debug('Removing file or directory %s', path)
+ self._moved_paths.append(path)
+ renames(path, new_path)
+ for pth in self.pth.values():
+ pth.remove()
+
+ logger.info('Successfully uninstalled %s', dist_name_version)
+
+ def _allowed_to_proceed(self, verbose):
+ """Display which files would be deleted and prompt for confirmation
+ """
+
+ def _display(msg, paths):
+ if not paths:
+ return
+
+ logger.info(msg)
+ with indent_log():
+ for path in sorted(compact(paths)):
+ logger.info(path)
+
+ if not verbose:
+ will_remove, will_skip = compress_for_output_listing(self.paths)
+ else:
+ # In verbose mode, display all the files that are going to be
+ # deleted.
+ will_remove = list(self.paths)
+ will_skip = set()
+
+ _display('Would remove:', will_remove)
+ _display('Would not remove (might be manually added):', will_skip)
+ _display('Would not remove (outside of prefix):', self._refuse)
+
+ return ask('Proceed (y/n)? ', ('y', 'n')) == 'y'
+
+ def rollback(self):
+ """Rollback the changes previously made by remove()."""
+ if self.save_dir.path is None:
+ logger.error(
+ "Can't roll back %s; was not uninstalled",
+ self.dist.project_name,
+ )
+ return False
+ logger.info('Rolling back uninstall of %s', self.dist.project_name)
+ for path in self._moved_paths:
+ tmp_path = self._stash(path)
+ logger.debug('Replacing %s', path)
+ renames(tmp_path, path)
+ for pth in self.pth.values():
+ pth.rollback()
+
+ def commit(self):
+ """Remove temporary save dir: rollback will no longer be possible."""
+ self.save_dir.cleanup()
+ self._moved_paths = []
+
+ @classmethod
+ def from_dist(cls, dist):
+ dist_path = normalize_path(dist.location)
+ if not dist_is_local(dist):
+ logger.info(
+ "Not uninstalling %s at %s, outside environment %s",
+ dist.key,
+ dist_path,
+ sys.prefix,
+ )
+ return cls(dist)
+
+ if dist_path in {p for p in {sysconfig.get_path("stdlib"),
+ sysconfig.get_path("platstdlib")}
+ if p}:
+ logger.info(
+ "Not uninstalling %s at %s, as it is in the standard library.",
+ dist.key,
+ dist_path,
+ )
+ return cls(dist)
+
+ paths_to_remove = cls(dist)
+ develop_egg_link = egg_link_path(dist)
+ develop_egg_link_egg_info = '{}.egg-info'.format(
+ pkg_resources.to_filename(dist.project_name))
+ egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info)
+ # Special case for distutils installed package
+ distutils_egg_info = getattr(dist._provider, 'path', None)
+
+        # The order of the uninstall cases matters: with two installs of the
+        # same package, pip needs to uninstall the currently detected version
+ if (egg_info_exists and dist.egg_info.endswith('.egg-info') and
+ not dist.egg_info.endswith(develop_egg_link_egg_info)):
+ # if dist.egg_info.endswith(develop_egg_link_egg_info), we
+ # are in fact in the develop_egg_link case
+ paths_to_remove.add(dist.egg_info)
+ if dist.has_metadata('installed-files.txt'):
+ for installed_file in dist.get_metadata(
+ 'installed-files.txt').splitlines():
+ path = os.path.normpath(
+ os.path.join(dist.egg_info, installed_file)
+ )
+ paths_to_remove.add(path)
+ # FIXME: need a test for this elif block
+ # occurs with --single-version-externally-managed/--record outside
+ # of pip
+ elif dist.has_metadata('top_level.txt'):
+ if dist.has_metadata('namespace_packages.txt'):
+ namespaces = dist.get_metadata('namespace_packages.txt')
+ else:
+ namespaces = []
+ for top_level_pkg in [
+ p for p
+ in dist.get_metadata('top_level.txt').splitlines()
+ if p and p not in namespaces]:
+ path = os.path.join(dist.location, top_level_pkg)
+ paths_to_remove.add(path)
+ paths_to_remove.add(path + '.py')
+ paths_to_remove.add(path + '.pyc')
+ paths_to_remove.add(path + '.pyo')
+
+ elif distutils_egg_info:
+ raise UninstallationError(
+ "Cannot uninstall {!r}. It is a distutils installed project "
+ "and thus we cannot accurately determine which files belong "
+ "to it which would lead to only a partial uninstall.".format(
+ dist.project_name,
+ )
+ )
+
+ elif dist.location.endswith('.egg'):
+ # package installed by easy_install
+ # We cannot match on dist.egg_name because it can slightly vary
+ # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
+ paths_to_remove.add(dist.location)
+ easy_install_egg = os.path.split(dist.location)[1]
+ easy_install_pth = os.path.join(os.path.dirname(dist.location),
+ 'easy-install.pth')
+ paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)
+
+ elif egg_info_exists and dist.egg_info.endswith('.dist-info'):
+ for path in uninstallation_paths(dist):
+ paths_to_remove.add(path)
+
+ elif develop_egg_link:
+ # develop egg
+ with open(develop_egg_link, 'r') as fh:
+ link_pointer = os.path.normcase(fh.readline().strip())
+ assert (link_pointer == dist.location), (
+ 'Egg-link %s does not match installed location of %s '
+ '(at %s)' % (link_pointer, dist.project_name, dist.location)
+ )
+ paths_to_remove.add(develop_egg_link)
+ easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
+ 'easy-install.pth')
+ paths_to_remove.add_pth(easy_install_pth, dist.location)
+
+ else:
+ logger.debug(
+ 'Not sure how to uninstall: %s - Check: %s',
+ dist, dist.location,
+ )
+
+        # find scripts installed via the distutils scripts= argument
+ if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
+ for script in dist.metadata_listdir('scripts'):
+ if dist_in_usersite(dist):
+ bin_dir = bin_user
+ else:
+ bin_dir = bin_py
+ paths_to_remove.add(os.path.join(bin_dir, script))
+ if WINDOWS:
+ paths_to_remove.add(os.path.join(bin_dir, script) + '.bat')
+
+ # find console_scripts
+ _scripts_to_remove = []
+ console_scripts = dist.get_entry_map(group='console_scripts')
+ for name in console_scripts.keys():
+ _scripts_to_remove.extend(_script_names(dist, name, False))
+ # find gui_scripts
+ gui_scripts = dist.get_entry_map(group='gui_scripts')
+ for name in gui_scripts.keys():
+ _scripts_to_remove.extend(_script_names(dist, name, True))
+
+ for s in _scripts_to_remove:
+ paths_to_remove.add(s)
+
+ return paths_to_remove
+
+
+class UninstallPthEntries(object):
+ def __init__(self, pth_file):
+ if not os.path.isfile(pth_file):
+ raise UninstallationError(
+ "Cannot remove entries from nonexistent file %s" % pth_file
+ )
+ self.file = pth_file
+ self.entries = set()
+ self._saved_lines = None
+
+ def add(self, entry):
+ entry = os.path.normcase(entry)
+ # On Windows, os.path.normcase converts the entry to use
+ # backslashes. This is correct for entries that describe absolute
+ # paths outside of site-packages, but all the others use forward
+ # slashes.
+ if WINDOWS and not os.path.splitdrive(entry)[0]:
+ entry = entry.replace('\\', '/')
+ self.entries.add(entry)
+
+ def remove(self):
+ logger.debug('Removing pth entries from %s:', self.file)
+ with open(self.file, 'rb') as fh:
+            # Windows uses '\r\n' line endings with Python 3, but '\n' with
+            # Python 2
+ lines = fh.readlines()
+ self._saved_lines = lines
+ if any(b'\r\n' in line for line in lines):
+ endline = '\r\n'
+ else:
+ endline = '\n'
+ # handle missing trailing newline
+ if lines and not lines[-1].endswith(endline.encode("utf-8")):
+ lines[-1] = lines[-1] + endline.encode("utf-8")
+ for entry in self.entries:
+ try:
+ logger.debug('Removing entry: %s', entry)
+ lines.remove((entry + endline).encode("utf-8"))
+ except ValueError:
+ pass
+ with open(self.file, 'wb') as fh:
+ fh.writelines(lines)
+
+ def rollback(self):
+ if self._saved_lines is None:
+ logger.error(
+ 'Cannot roll back changes to %s, none were made', self.file
+ )
+ return False
+ logger.debug('Rolling %s back to previous state', self.file)
+ with open(self.file, 'wb') as fh:
+ fh.writelines(self._saved_lines)
+ return True
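
The remove()/rollback() pair above follows a snapshot pattern: read the raw
lines, detect the file's existing line-ending convention, rewrite the file
without the entry, and restore the snapshot if anything goes wrong. A minimal,
self-contained sketch of that pattern (the helper names are illustrative, not
pip API):

# Sketch of the .pth remove/rollback pattern used by UninstallPthEntries;
# pip's class additionally normalizes Windows paths and logs each step.
def remove_pth_entry(pth_file, entry):
    """Remove one entry from a .pth file; return the original raw lines
    so the caller can roll back."""
    with open(pth_file, 'rb') as fh:
        lines = fh.readlines()
    saved = list(lines)  # snapshot kept for rollback
    # Match whichever line ending the file already uses.
    endline = b'\r\n' if any(b'\r\n' in line for line in lines) else b'\n'
    try:
        lines.remove(entry.encode('utf-8') + endline)
    except ValueError:
        pass  # entry was not present; nothing to remove
    with open(pth_file, 'wb') as fh:
        fh.writelines(lines)
    return saved

def rollback_pth(pth_file, saved_lines):
    # Restore the snapshot taken before the edit.
    with open(pth_file, 'wb') as fh:
        fh.writelines(saved_lines)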
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/resolve.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/resolve.py
index 3200fca..189827e 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/resolve.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/resolve.py
@@ -1,354 +1,354 @@
-"""Dependency Resolution
-
-The dependency resolution in pip is performed as follows:
-
-for top-level requirements:
- a. only one spec allowed per project, regardless of conflicts or not.
- otherwise a "double requirement" exception is raised
- b. they override sub-dependency requirements.
-for sub-dependencies
- a. "first found, wins" (where the order is breadth first)
-"""
-
-import logging
-from collections import defaultdict
-from itertools import chain
-
-from pip._internal.exceptions import (
- BestVersionAlreadyInstalled, DistributionNotFound, HashError, HashErrors,
- UnsupportedPythonVersion,
-)
-
-from pip._internal.req.req_install import InstallRequirement
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import dist_in_usersite, ensure_dir
-from pip._internal.utils.packaging import check_dist_requires_python
-
-logger = logging.getLogger(__name__)
-
-
-class Resolver(object):
- """Resolves which packages need to be installed/uninstalled to perform \
- the requested operation without breaking the requirements of any package.
- """
-
- _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"}
-
- def __init__(self, preparer, session, finder, wheel_cache, use_user_site,
- ignore_dependencies, ignore_installed, ignore_requires_python,
- force_reinstall, isolated, upgrade_strategy):
- super(Resolver, self).__init__()
- assert upgrade_strategy in self._allowed_strategies
-
- self.preparer = preparer
- self.finder = finder
- self.session = session
-
- # NOTE: This would eventually be replaced with a cache that can give
- # information about both sdist and wheels transparently.
- self.wheel_cache = wheel_cache
-
- self.require_hashes = None # This is set in resolve
-
- self.upgrade_strategy = upgrade_strategy
- self.force_reinstall = force_reinstall
- self.isolated = isolated
- self.ignore_dependencies = ignore_dependencies
- self.ignore_installed = ignore_installed
- self.ignore_requires_python = ignore_requires_python
- self.use_user_site = use_user_site
-
- self._discovered_dependencies = defaultdict(list)
-
- def resolve(self, requirement_set):
- """Resolve what operations need to be done
-
- As a side-effect of this method, the packages (and their dependencies)
- are downloaded, unpacked and prepared for installation. This
- preparation is done by ``pip.operations.prepare``.
-
- Once PyPI has static dependency metadata available, it would be
- possible to move the preparation to become a step separated from
- dependency resolution.
- """
- # make the wheelhouse
- if self.preparer.wheel_download_dir:
- ensure_dir(self.preparer.wheel_download_dir)
-
- # If any top-level requirement has a hash specified, enter
- # hash-checking mode, which requires hashes from all.
- root_reqs = (
- requirement_set.unnamed_requirements +
- list(requirement_set.requirements.values())
- )
- self.require_hashes = (
- requirement_set.require_hashes or
- any(req.has_hash_options for req in root_reqs)
- )
-
- # Display where finder is looking for packages
- locations = self.finder.get_formatted_locations()
- if locations:
- logger.info(locations)
-
- # Actually prepare the files, and collect any exceptions. Most hash
- # exceptions cannot be checked ahead of time, because
- # req.populate_link() needs to be called before we can make decisions
- # based on link type.
- discovered_reqs = []
- hash_errors = HashErrors()
- for req in chain(root_reqs, discovered_reqs):
- try:
- discovered_reqs.extend(
- self._resolve_one(requirement_set, req)
- )
- except HashError as exc:
- exc.req = req
- hash_errors.append(exc)
-
- if hash_errors:
- raise hash_errors
-
- def _is_upgrade_allowed(self, req):
- if self.upgrade_strategy == "to-satisfy-only":
- return False
- elif self.upgrade_strategy == "eager":
- return True
- else:
- assert self.upgrade_strategy == "only-if-needed"
- return req.is_direct
-
- def _set_req_to_reinstall(self, req):
- """
- Set a requirement to be installed.
- """
- # Don't uninstall the conflict if doing a user install and the
- # conflict is not a user install.
- if not self.use_user_site or dist_in_usersite(req.satisfied_by):
- req.conflicts_with = req.satisfied_by
- req.satisfied_by = None
-
- # XXX: Stop passing requirement_set for options
- def _check_skip_installed(self, req_to_install):
- """Check if req_to_install should be skipped.
-
- This will check if the req is installed, and whether we should upgrade
- or reinstall it, taking into account all the relevant user options.
-
- After calling this req_to_install will only have satisfied_by set to
- None if the req_to_install is to be upgraded/reinstalled etc. Any
- other value will be a dist recording the current thing installed that
- satisfies the requirement.
-
- Note that for vcs urls and the like we can't assess skipping in this
- routine - we simply identify that we need to pull the thing down,
- then later on it is pulled down and introspected to assess upgrade/
- reinstalls etc.
-
- :return: A text reason for why it was skipped, or None.
- """
- if self.ignore_installed:
- return None
-
- req_to_install.check_if_exists(self.use_user_site)
- if not req_to_install.satisfied_by:
- return None
-
- if self.force_reinstall:
- self._set_req_to_reinstall(req_to_install)
- return None
-
- if not self._is_upgrade_allowed(req_to_install):
- if self.upgrade_strategy == "only-if-needed":
- return 'not upgraded as not directly required'
- return 'already satisfied'
-
- # Check for the possibility of an upgrade. For link-based
- # requirements we have to pull the tree down and inspect to assess
- # the version #, so it's handled way down.
- if not req_to_install.link:
- try:
- self.finder.find_requirement(req_to_install, upgrade=True)
- except BestVersionAlreadyInstalled:
- # Then the best version is installed.
- return 'already up-to-date'
- except DistributionNotFound:
-                # No distribution found, so we squash the error. It will
-                # be raised later, when we re-try the install.
- # Why don't we just raise here?
- pass
-
- self._set_req_to_reinstall(req_to_install)
- return None
-
- def _get_abstract_dist_for(self, req):
-        """Takes an InstallRequirement and returns a single AbstractDist \
- representing a prepared variant of the same.
- """
- assert self.require_hashes is not None, (
- "require_hashes should have been set in Resolver.resolve()"
- )
-
- if req.editable:
- return self.preparer.prepare_editable_requirement(
- req, self.require_hashes, self.use_user_site, self.finder,
- )
-
- # satisfied_by is only evaluated by calling _check_skip_installed,
- # so it must be None here.
- assert req.satisfied_by is None
- skip_reason = self._check_skip_installed(req)
-
- if req.satisfied_by:
- return self.preparer.prepare_installed_requirement(
- req, self.require_hashes, skip_reason
- )
-
- upgrade_allowed = self._is_upgrade_allowed(req)
- abstract_dist = self.preparer.prepare_linked_requirement(
- req, self.session, self.finder, upgrade_allowed,
- self.require_hashes
- )
-
- # NOTE
- # The following portion is for determining if a certain package is
- # going to be re-installed/upgraded or not and reporting to the user.
- # This should probably get cleaned up in a future refactor.
-
- # req.req is only avail after unpack for URL
- # pkgs repeat check_if_exists to uninstall-on-upgrade
- # (#14)
- if not self.ignore_installed:
- req.check_if_exists(self.use_user_site)
-
- if req.satisfied_by:
- should_modify = (
- self.upgrade_strategy != "to-satisfy-only" or
- self.force_reinstall or
- self.ignore_installed or
- req.link.scheme == 'file'
- )
- if should_modify:
- self._set_req_to_reinstall(req)
- else:
- logger.info(
- 'Requirement already satisfied (use --upgrade to upgrade):'
- ' %s', req,
- )
-
- return abstract_dist
-
- def _resolve_one(self, requirement_set, req_to_install):
-        """Resolve and prepare a single requirement.
-
- :return: A list of additional InstallRequirements to also install.
- """
- # Tell user what we are doing for this requirement:
- # obtain (editable), skipping, processing (local url), collecting
- # (remote url or package name)
- if req_to_install.constraint or req_to_install.prepared:
- return []
-
- req_to_install.prepared = True
-
- # register tmp src for cleanup in case something goes wrong
- requirement_set.reqs_to_cleanup.append(req_to_install)
-
- abstract_dist = self._get_abstract_dist_for(req_to_install)
-
- # Parse and return dependencies
- dist = abstract_dist.dist(self.finder)
- try:
- check_dist_requires_python(dist)
- except UnsupportedPythonVersion as err:
- if self.ignore_requires_python:
- logger.warning(err.args[0])
- else:
- raise
-
- more_reqs = []
-
- def add_req(subreq, extras_requested):
- sub_install_req = InstallRequirement.from_req(
- str(subreq),
- req_to_install,
- isolated=self.isolated,
- wheel_cache=self.wheel_cache,
- )
- parent_req_name = req_to_install.name
- to_scan_again, add_to_parent = requirement_set.add_requirement(
- sub_install_req,
- parent_req_name=parent_req_name,
- extras_requested=extras_requested,
- )
- if parent_req_name and add_to_parent:
- self._discovered_dependencies[parent_req_name].append(
- add_to_parent
- )
- more_reqs.extend(to_scan_again)
-
- with indent_log():
- # We add req_to_install before its dependencies, so that we
- # can refer to it when adding dependencies.
- if not requirement_set.has_requirement(req_to_install.name):
- # 'unnamed' requirements will get added here
- req_to_install.is_direct = True
- requirement_set.add_requirement(
- req_to_install, parent_req_name=None,
- )
-
- if not self.ignore_dependencies:
- if req_to_install.extras:
- logger.debug(
- "Installing extra requirements: %r",
- ','.join(req_to_install.extras),
- )
- missing_requested = sorted(
- set(req_to_install.extras) - set(dist.extras)
- )
- for missing in missing_requested:
- logger.warning(
- '%s does not provide the extra \'%s\'',
- dist, missing
- )
-
- available_requested = sorted(
- set(dist.extras) & set(req_to_install.extras)
- )
- for subreq in dist.requires(available_requested):
- add_req(subreq, extras_requested=available_requested)
-
- if not req_to_install.editable and not req_to_install.satisfied_by:
- # XXX: --no-install leads this to report 'Successfully
- # downloaded' for only non-editable reqs, even though we took
- # action on them.
- requirement_set.successfully_downloaded.append(req_to_install)
-
- return more_reqs
-
- def get_installation_order(self, req_set):
- """Create the installation order.
-
- The installation order is topological - requirements are installed
- before the requiring thing. We break cycles at an arbitrary point,
- and make no other guarantees.
- """
- # The current implementation, which we may change at any point
- # installs the user specified things in the order given, except when
- # dependencies must come earlier to achieve topological order.
- order = []
- ordered_reqs = set()
-
- def schedule(req):
- if req.satisfied_by or req in ordered_reqs:
- return
- if req.constraint:
- return
- ordered_reqs.add(req)
- for dep in self._discovered_dependencies[req.name]:
- schedule(dep)
- order.append(req)
-
- for install_req in req_set.requirements.values():
- schedule(install_req)
- return order
+"""Dependency Resolution
+
+The dependency resolution in pip is performed as follows:
+
+for top-level requirements:
+ a. only one spec allowed per project, regardless of conflicts or not.
+ otherwise a "double requirement" exception is raised
+ b. they override sub-dependency requirements.
+for sub-dependencies
+ a. "first found, wins" (where the order is breadth first)
+"""
+
+import logging
+from collections import defaultdict
+from itertools import chain
+
+from pip._internal.exceptions import (
+ BestVersionAlreadyInstalled, DistributionNotFound, HashError, HashErrors,
+ UnsupportedPythonVersion,
+)
+
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import dist_in_usersite, ensure_dir
+from pip._internal.utils.packaging import check_dist_requires_python
+
+logger = logging.getLogger(__name__)
+
+
+class Resolver(object):
+ """Resolves which packages need to be installed/uninstalled to perform \
+ the requested operation without breaking the requirements of any package.
+ """
+
+ _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"}
+
+ def __init__(self, preparer, session, finder, wheel_cache, use_user_site,
+ ignore_dependencies, ignore_installed, ignore_requires_python,
+ force_reinstall, isolated, upgrade_strategy):
+ super(Resolver, self).__init__()
+ assert upgrade_strategy in self._allowed_strategies
+
+ self.preparer = preparer
+ self.finder = finder
+ self.session = session
+
+ # NOTE: This would eventually be replaced with a cache that can give
+ # information about both sdist and wheels transparently.
+ self.wheel_cache = wheel_cache
+
+ self.require_hashes = None # This is set in resolve
+
+ self.upgrade_strategy = upgrade_strategy
+ self.force_reinstall = force_reinstall
+ self.isolated = isolated
+ self.ignore_dependencies = ignore_dependencies
+ self.ignore_installed = ignore_installed
+ self.ignore_requires_python = ignore_requires_python
+ self.use_user_site = use_user_site
+
+ self._discovered_dependencies = defaultdict(list)
+
+ def resolve(self, requirement_set):
+ """Resolve what operations need to be done
+
+ As a side-effect of this method, the packages (and their dependencies)
+ are downloaded, unpacked and prepared for installation. This
+ preparation is done by ``pip.operations.prepare``.
+
+ Once PyPI has static dependency metadata available, it would be
+ possible to move the preparation to become a step separated from
+ dependency resolution.
+ """
+ # make the wheelhouse
+ if self.preparer.wheel_download_dir:
+ ensure_dir(self.preparer.wheel_download_dir)
+
+ # If any top-level requirement has a hash specified, enter
+ # hash-checking mode, which requires hashes from all.
+ root_reqs = (
+ requirement_set.unnamed_requirements +
+ list(requirement_set.requirements.values())
+ )
+ self.require_hashes = (
+ requirement_set.require_hashes or
+ any(req.has_hash_options for req in root_reqs)
+ )
+
+ # Display where finder is looking for packages
+ locations = self.finder.get_formatted_locations()
+ if locations:
+ logger.info(locations)
+
+ # Actually prepare the files, and collect any exceptions. Most hash
+ # exceptions cannot be checked ahead of time, because
+ # req.populate_link() needs to be called before we can make decisions
+ # based on link type.
+ discovered_reqs = []
+ hash_errors = HashErrors()
+ for req in chain(root_reqs, discovered_reqs):
+ try:
+ discovered_reqs.extend(
+ self._resolve_one(requirement_set, req)
+ )
+ except HashError as exc:
+ exc.req = req
+ hash_errors.append(exc)
+
+ if hash_errors:
+ raise hash_errors
+
+ def _is_upgrade_allowed(self, req):
+ if self.upgrade_strategy == "to-satisfy-only":
+ return False
+ elif self.upgrade_strategy == "eager":
+ return True
+ else:
+ assert self.upgrade_strategy == "only-if-needed"
+ return req.is_direct
+
+ def _set_req_to_reinstall(self, req):
+ """
+ Set a requirement to be installed.
+ """
+ # Don't uninstall the conflict if doing a user install and the
+ # conflict is not a user install.
+ if not self.use_user_site or dist_in_usersite(req.satisfied_by):
+ req.conflicts_with = req.satisfied_by
+ req.satisfied_by = None
+
+ # XXX: Stop passing requirement_set for options
+ def _check_skip_installed(self, req_to_install):
+ """Check if req_to_install should be skipped.
+
+ This will check if the req is installed, and whether we should upgrade
+ or reinstall it, taking into account all the relevant user options.
+
+ After calling this req_to_install will only have satisfied_by set to
+ None if the req_to_install is to be upgraded/reinstalled etc. Any
+ other value will be a dist recording the current thing installed that
+ satisfies the requirement.
+
+ Note that for vcs urls and the like we can't assess skipping in this
+ routine - we simply identify that we need to pull the thing down,
+ then later on it is pulled down and introspected to assess upgrade/
+ reinstalls etc.
+
+ :return: A text reason for why it was skipped, or None.
+ """
+ if self.ignore_installed:
+ return None
+
+ req_to_install.check_if_exists(self.use_user_site)
+ if not req_to_install.satisfied_by:
+ return None
+
+ if self.force_reinstall:
+ self._set_req_to_reinstall(req_to_install)
+ return None
+
+ if not self._is_upgrade_allowed(req_to_install):
+ if self.upgrade_strategy == "only-if-needed":
+ return 'not upgraded as not directly required'
+ return 'already satisfied'
+
+ # Check for the possibility of an upgrade. For link-based
+ # requirements we have to pull the tree down and inspect to assess
+ # the version #, so it's handled way down.
+ if not req_to_install.link:
+ try:
+ self.finder.find_requirement(req_to_install, upgrade=True)
+ except BestVersionAlreadyInstalled:
+ # Then the best version is installed.
+ return 'already up-to-date'
+ except DistributionNotFound:
+                # No distribution found, so we squash the error. It will
+                # be raised later, when we re-try the install.
+ # Why don't we just raise here?
+ pass
+
+ self._set_req_to_reinstall(req_to_install)
+ return None
+
+ def _get_abstract_dist_for(self, req):
+        """Takes an InstallRequirement and returns a single AbstractDist \
+ representing a prepared variant of the same.
+ """
+ assert self.require_hashes is not None, (
+ "require_hashes should have been set in Resolver.resolve()"
+ )
+
+ if req.editable:
+ return self.preparer.prepare_editable_requirement(
+ req, self.require_hashes, self.use_user_site, self.finder,
+ )
+
+ # satisfied_by is only evaluated by calling _check_skip_installed,
+ # so it must be None here.
+ assert req.satisfied_by is None
+ skip_reason = self._check_skip_installed(req)
+
+ if req.satisfied_by:
+ return self.preparer.prepare_installed_requirement(
+ req, self.require_hashes, skip_reason
+ )
+
+ upgrade_allowed = self._is_upgrade_allowed(req)
+ abstract_dist = self.preparer.prepare_linked_requirement(
+ req, self.session, self.finder, upgrade_allowed,
+ self.require_hashes
+ )
+
+ # NOTE
+ # The following portion is for determining if a certain package is
+ # going to be re-installed/upgraded or not and reporting to the user.
+ # This should probably get cleaned up in a future refactor.
+
+ # req.req is only avail after unpack for URL
+ # pkgs repeat check_if_exists to uninstall-on-upgrade
+ # (#14)
+ if not self.ignore_installed:
+ req.check_if_exists(self.use_user_site)
+
+ if req.satisfied_by:
+ should_modify = (
+ self.upgrade_strategy != "to-satisfy-only" or
+ self.force_reinstall or
+ self.ignore_installed or
+ req.link.scheme == 'file'
+ )
+ if should_modify:
+ self._set_req_to_reinstall(req)
+ else:
+ logger.info(
+ 'Requirement already satisfied (use --upgrade to upgrade):'
+ ' %s', req,
+ )
+
+ return abstract_dist
+
+ def _resolve_one(self, requirement_set, req_to_install):
+        """Resolve and prepare a single requirement.
+
+ :return: A list of additional InstallRequirements to also install.
+ """
+ # Tell user what we are doing for this requirement:
+ # obtain (editable), skipping, processing (local url), collecting
+ # (remote url or package name)
+ if req_to_install.constraint or req_to_install.prepared:
+ return []
+
+ req_to_install.prepared = True
+
+ # register tmp src for cleanup in case something goes wrong
+ requirement_set.reqs_to_cleanup.append(req_to_install)
+
+ abstract_dist = self._get_abstract_dist_for(req_to_install)
+
+ # Parse and return dependencies
+ dist = abstract_dist.dist(self.finder)
+ try:
+ check_dist_requires_python(dist)
+ except UnsupportedPythonVersion as err:
+ if self.ignore_requires_python:
+ logger.warning(err.args[0])
+ else:
+ raise
+
+ more_reqs = []
+
+ def add_req(subreq, extras_requested):
+ sub_install_req = InstallRequirement.from_req(
+ str(subreq),
+ req_to_install,
+ isolated=self.isolated,
+ wheel_cache=self.wheel_cache,
+ )
+ parent_req_name = req_to_install.name
+ to_scan_again, add_to_parent = requirement_set.add_requirement(
+ sub_install_req,
+ parent_req_name=parent_req_name,
+ extras_requested=extras_requested,
+ )
+ if parent_req_name and add_to_parent:
+ self._discovered_dependencies[parent_req_name].append(
+ add_to_parent
+ )
+ more_reqs.extend(to_scan_again)
+
+ with indent_log():
+ # We add req_to_install before its dependencies, so that we
+ # can refer to it when adding dependencies.
+ if not requirement_set.has_requirement(req_to_install.name):
+ # 'unnamed' requirements will get added here
+ req_to_install.is_direct = True
+ requirement_set.add_requirement(
+ req_to_install, parent_req_name=None,
+ )
+
+ if not self.ignore_dependencies:
+ if req_to_install.extras:
+ logger.debug(
+ "Installing extra requirements: %r",
+ ','.join(req_to_install.extras),
+ )
+ missing_requested = sorted(
+ set(req_to_install.extras) - set(dist.extras)
+ )
+ for missing in missing_requested:
+ logger.warning(
+ '%s does not provide the extra \'%s\'',
+ dist, missing
+ )
+
+ available_requested = sorted(
+ set(dist.extras) & set(req_to_install.extras)
+ )
+ for subreq in dist.requires(available_requested):
+ add_req(subreq, extras_requested=available_requested)
+
+ if not req_to_install.editable and not req_to_install.satisfied_by:
+ # XXX: --no-install leads this to report 'Successfully
+ # downloaded' for only non-editable reqs, even though we took
+ # action on them.
+ requirement_set.successfully_downloaded.append(req_to_install)
+
+ return more_reqs
+
+ def get_installation_order(self, req_set):
+ """Create the installation order.
+
+ The installation order is topological - requirements are installed
+ before the requiring thing. We break cycles at an arbitrary point,
+ and make no other guarantees.
+ """
+ # The current implementation, which we may change at any point
+ # installs the user specified things in the order given, except when
+ # dependencies must come earlier to achieve topological order.
+ order = []
+ ordered_reqs = set()
+
+ def schedule(req):
+ if req.satisfied_by or req in ordered_reqs:
+ return
+ if req.constraint:
+ return
+ ordered_reqs.add(req)
+ for dep in self._discovered_dependencies[req.name]:
+ schedule(dep)
+ order.append(req)
+
+ for install_req in req_set.requirements.values():
+ schedule(install_req)
+ return order
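
The two rules in the module docstring -- top-level requirements always win
over sub-dependency requirements, and among sub-dependencies the first spec
found wins in a breadth-first walk -- can be illustrated with a toy resolver.
This is a sketch over plain dicts with assumed inputs, not pip's actual data
structures:

from collections import deque

def toy_resolve(top_level, deps):
    """top_level: {name: version}; deps: {(name, version): {name: version}}."""
    pinned = dict(top_level)       # rule (b): top-level specs always win
    queue = deque(pinned.items())
    while queue:                   # breadth-first over discovered deps
        name, version = queue.popleft()
        for sub, sub_version in deps.get((name, version), {}).items():
            if sub not in pinned:  # sub-dependencies: "first found, wins"
                pinned[sub] = sub_version
                queue.append((sub, sub_version))
    return pinned

# B stays pinned at 1.0 by the top level even though A asks for 2.0, and C
# takes the first version discovered (via A), not the one requested via B.
print(toy_resolve(
    {"A": "1.0", "B": "1.0"},
    {("A", "1.0"): {"B": "2.0", "C": "1.0"},
     ("B", "1.0"): {"C": "2.0"}},
))  # -> {'A': '1.0', 'B': '1.0', 'C': '1.0'}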
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/status_codes.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/status_codes.py
index 275360a..2b56931 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/status_codes.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/status_codes.py
@@ -1,8 +1,8 @@
-from __future__ import absolute_import
-
-SUCCESS = 0
-ERROR = 1
-UNKNOWN_ERROR = 2
-VIRTUALENV_NOT_FOUND = 3
-PREVIOUS_BUILD_DIR_ERROR = 4
-NO_MATCHES_FOUND = 23
+from __future__ import absolute_import
+
+SUCCESS = 0
+ERROR = 1
+UNKNOWN_ERROR = 2
+VIRTUALENV_NOT_FOUND = 3
+PREVIOUS_BUILD_DIR_ERROR = 4
+NO_MATCHES_FOUND = 23
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/appdirs.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/appdirs.py
index 28c5d4b..0eb87ca 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/appdirs.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/appdirs.py
@@ -1,258 +1,258 @@
-"""
-This code was taken from https://github.com/ActiveState/appdirs and modified
-to suit our purposes.
-"""
-from __future__ import absolute_import
-
-import os
-import sys
-
-from pip._vendor.six import PY2, text_type
-
-from pip._internal.compat import WINDOWS, expanduser
-
-
-def user_cache_dir(appname):
- r"""
- Return full path to the user-specific cache dir for this application.
-
- "appname" is the name of application.
-
- Typical user cache directories are:
-        macOS:      ~/Library/Caches/<AppName>
-        Unix:       ~/.cache/<AppName> (XDG default)
-        Windows:    C:\Users\<username>\AppData\Local\<AppName>\Cache
-
- On Windows the only suggestion in the MSDN docs is that local settings go
- in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the
- non-roaming app data dir (the default returned by `user_data_dir`). Apps
- typically put cache data somewhere *under* the given dir here. Some
- examples:
-        ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
- ...\Acme\SuperApp\Cache\1.0
-
- OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
- """
- if WINDOWS:
- # Get the base path
- path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
-
- # When using Python 2, return paths as bytes on Windows like we do on
- # other operating systems. See helper function docs for more details.
- if PY2 and isinstance(path, text_type):
- path = _win_path_to_bytes(path)
-
- # Add our app name and Cache directory to it
- path = os.path.join(path, appname, "Cache")
- elif sys.platform == "darwin":
- # Get the base path
- path = expanduser("~/Library/Caches")
-
- # Add our app name to it
- path = os.path.join(path, appname)
- else:
- # Get the base path
- path = os.getenv("XDG_CACHE_HOME", expanduser("~/.cache"))
-
- # Add our app name to it
- path = os.path.join(path, appname)
-
- return path
-
-
-def user_data_dir(appname, roaming=False):
- r"""
- Return full path to the user-specific data dir for this application.
-
- "appname" is the name of application.
- If None, just the system directory is returned.
- "roaming" (boolean, default False) can be set True to use the Windows
- roaming appdata directory. That means that for users on a Windows
- network setup for roaming profiles, this user data will be
-            sync'd on login. See
-            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
-            for a discussion of issues.
-
- Typical user data directories are:
-        macOS:                  ~/Library/Application Support/<AppName>
-                                if it exists, else ~/.config/<AppName>
-        Unix:                   ~/.local/share/<AppName>    # or in
-                                $XDG_DATA_HOME, if defined
-        Win XP (not roaming):   C:\Documents and Settings\<username>\ ...
-                                ...Application Data\<AppName>
-        Win XP (roaming):       C:\Documents and Settings\<username>\Local ...
-                                ...Settings\Application Data\<AppName>
-        Win 7  (not roaming):   C:\Users\<username>\AppData\Local\<AppName>
-        Win 7  (roaming):       C:\Users\<username>\AppData\Roaming\<AppName>
-
- For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
-    That means, by default "~/.local/share/<AppName>".
- """
- if WINDOWS:
- const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"
- path = os.path.join(os.path.normpath(_get_win_folder(const)), appname)
- elif sys.platform == "darwin":
- path = os.path.join(
- expanduser('~/Library/Application Support/'),
- appname,
- ) if os.path.isdir(os.path.join(
- expanduser('~/Library/Application Support/'),
- appname,
- )
- ) else os.path.join(
- expanduser('~/.config/'),
- appname,
- )
- else:
- path = os.path.join(
- os.getenv('XDG_DATA_HOME', expanduser("~/.local/share")),
- appname,
- )
-
- return path
-
-
-def user_config_dir(appname, roaming=True):
- """Return full path to the user-specific config dir for this application.
-
- "appname" is the name of application.
- If None, just the system directory is returned.
- "roaming" (boolean, default True) can be set False to not use the
- Windows roaming appdata directory. That means that for users on a
- Windows network setup for roaming profiles, this user data will be
-            sync'd on login. See
-            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
-            for a discussion of issues.
-
- Typical user data directories are:
- macOS: same as user_data_dir
-        Unix:     ~/.config/<AppName>
- Win *: same as user_data_dir
-
- For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
-    That means, by default "~/.config/<AppName>".
- """
- if WINDOWS:
- path = user_data_dir(appname, roaming=roaming)
- elif sys.platform == "darwin":
- path = user_data_dir(appname)
- else:
- path = os.getenv('XDG_CONFIG_HOME', expanduser("~/.config"))
- path = os.path.join(path, appname)
-
- return path
-
-
-# for the discussion regarding site_config_dirs locations
-# see <https://github.com/pypa/pip/issues/1733>
-def site_config_dirs(appname):
- r"""Return a list of potential user-shared config dirs for this application.
-
- "appname" is the name of application.
-
- Typical user config directories are:
-        macOS:      /Library/Application Support/<AppName>/
-        Unix:       /etc or $XDG_CONFIG_DIRS[i]/<AppName>/ for each value in
-                    $XDG_CONFIG_DIRS
-        Win XP:     C:\Documents and Settings\All Users\Application ...
-                    ...Data\<AppName>\
-        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory
-                    on Vista.)
-        Win 7:      Hidden, but writeable on Win 7:
-                    C:\ProgramData\<AppName>\
- """
- if WINDOWS:
- path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
- pathlist = [os.path.join(path, appname)]
- elif sys.platform == 'darwin':
- pathlist = [os.path.join('/Library/Application Support', appname)]
- else:
- # try looking in $XDG_CONFIG_DIRS
- xdg_config_dirs = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
- if xdg_config_dirs:
- pathlist = [
- os.path.join(expanduser(x), appname)
- for x in xdg_config_dirs.split(os.pathsep)
- ]
- else:
- pathlist = []
-
- # always look in /etc directly as well
- pathlist.append('/etc')
-
- return pathlist
-
-
-# -- Windows support functions --
-
-def _get_win_folder_from_registry(csidl_name):
- """
- This is a fallback technique at best. I'm not sure if using the
- registry for this guarantees us the correct answer for all CSIDL_*
- names.
- """
- import _winreg
-
- shell_folder_name = {
- "CSIDL_APPDATA": "AppData",
- "CSIDL_COMMON_APPDATA": "Common AppData",
- "CSIDL_LOCAL_APPDATA": "Local AppData",
- }[csidl_name]
-
- key = _winreg.OpenKey(
- _winreg.HKEY_CURRENT_USER,
- r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
- )
- directory, _type = _winreg.QueryValueEx(key, shell_folder_name)
- return directory
-
-
-def _get_win_folder_with_ctypes(csidl_name):
- csidl_const = {
- "CSIDL_APPDATA": 26,
- "CSIDL_COMMON_APPDATA": 35,
- "CSIDL_LOCAL_APPDATA": 28,
- }[csidl_name]
-
- buf = ctypes.create_unicode_buffer(1024)
- ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
-
- # Downgrade to short path name if have highbit chars. See
-    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
- has_high_char = False
- for c in buf:
- if ord(c) > 255:
- has_high_char = True
- break
- if has_high_char:
- buf2 = ctypes.create_unicode_buffer(1024)
- if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
- buf = buf2
-
- return buf.value
-
-
-if WINDOWS:
- try:
- import ctypes
- _get_win_folder = _get_win_folder_with_ctypes
- except ImportError:
- _get_win_folder = _get_win_folder_from_registry
-
-
-def _win_path_to_bytes(path):
- """Encode Windows paths to bytes. Only used on Python 2.
-
- Motivation is to be consistent with other operating systems where paths
- are also returned as bytes. This avoids problems mixing bytes and Unicode
- elsewhere in the codebase. For more details and discussion see
-    <https://github.com/pypa/pip/issues/3463>.
-
- If encoding using ASCII and MBCS fails, return the original Unicode path.
- """
- for encoding in ('ASCII', 'MBCS'):
- try:
- return path.encode(encoding)
- except (UnicodeEncodeError, LookupError):
- pass
- return path
+"""
+This code was taken from https://github.com/ActiveState/appdirs and modified
+to suit our purposes.
+"""
+from __future__ import absolute_import
+
+import os
+import sys
+
+from pip._vendor.six import PY2, text_type
+
+from pip._internal.compat import WINDOWS, expanduser
+
+
+def user_cache_dir(appname):
+ r"""
+ Return full path to the user-specific cache dir for this application.
+
+ "appname" is the name of application.
+
+ Typical user cache directories are:
+        macOS:      ~/Library/Caches/<AppName>
+        Unix:       ~/.cache/<AppName> (XDG default)
+        Windows:    C:\Users\<username>\AppData\Local\<AppName>\Cache
+
+ On Windows the only suggestion in the MSDN docs is that local settings go
+ in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the
+ non-roaming app data dir (the default returned by `user_data_dir`). Apps
+ typically put cache data somewhere *under* the given dir here. Some
+ examples:
+        ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
+ ...\Acme\SuperApp\Cache\1.0
+
+ OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
+ """
+ if WINDOWS:
+ # Get the base path
+ path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
+
+ # When using Python 2, return paths as bytes on Windows like we do on
+ # other operating systems. See helper function docs for more details.
+ if PY2 and isinstance(path, text_type):
+ path = _win_path_to_bytes(path)
+
+ # Add our app name and Cache directory to it
+ path = os.path.join(path, appname, "Cache")
+ elif sys.platform == "darwin":
+ # Get the base path
+ path = expanduser("~/Library/Caches")
+
+ # Add our app name to it
+ path = os.path.join(path, appname)
+ else:
+ # Get the base path
+ path = os.getenv("XDG_CACHE_HOME", expanduser("~/.cache"))
+
+ # Add our app name to it
+ path = os.path.join(path, appname)
+
+ return path
+
+
+def user_data_dir(appname, roaming=False):
+ r"""
+ Return full path to the user-specific data dir for this application.
+
+ "appname" is the name of application.
+ If None, just the system directory is returned.
+ "roaming" (boolean, default False) can be set True to use the Windows
+ roaming appdata directory. That means that for users on a Windows
+ network setup for roaming profiles, this user data will be
+            sync'd on login. See
+            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
+            for a discussion of issues.
+
+ Typical user data directories are:
+        macOS:                  ~/Library/Application Support/<AppName>
+                                if it exists, else ~/.config/<AppName>
+        Unix:                   ~/.local/share/<AppName>    # or in
+                                $XDG_DATA_HOME, if defined
+        Win XP (not roaming):   C:\Documents and Settings\<username>\ ...
+                                ...Application Data\<AppName>
+        Win XP (roaming):       C:\Documents and Settings\<username>\Local ...
+                                ...Settings\Application Data\<AppName>
+        Win 7  (not roaming):   C:\Users\<username>\AppData\Local\<AppName>
+        Win 7  (roaming):       C:\Users\<username>\AppData\Roaming\<AppName>
+
+ For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
+    That means, by default "~/.local/share/<AppName>".
+ """
+ if WINDOWS:
+ const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"
+ path = os.path.join(os.path.normpath(_get_win_folder(const)), appname)
+ elif sys.platform == "darwin":
+ path = os.path.join(
+ expanduser('~/Library/Application Support/'),
+ appname,
+ ) if os.path.isdir(os.path.join(
+ expanduser('~/Library/Application Support/'),
+ appname,
+ )
+ ) else os.path.join(
+ expanduser('~/.config/'),
+ appname,
+ )
+ else:
+ path = os.path.join(
+ os.getenv('XDG_DATA_HOME', expanduser("~/.local/share")),
+ appname,
+ )
+
+ return path
+
+
+def user_config_dir(appname, roaming=True):
+ """Return full path to the user-specific config dir for this application.
+
+ "appname" is the name of application.
+ If None, just the system directory is returned.
+ "roaming" (boolean, default True) can be set False to not use the
+ Windows roaming appdata directory. That means that for users on a
+ Windows network setup for roaming profiles, this user data will be
+            sync'd on login. See
+            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
+            for a discussion of issues.
+
+ Typical user data directories are:
+ macOS: same as user_data_dir
+        Unix:     ~/.config/<AppName>
+ Win *: same as user_data_dir
+
+ For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
+    That means, by default "~/.config/<AppName>".
+ """
+ if WINDOWS:
+ path = user_data_dir(appname, roaming=roaming)
+ elif sys.platform == "darwin":
+ path = user_data_dir(appname)
+ else:
+ path = os.getenv('XDG_CONFIG_HOME', expanduser("~/.config"))
+ path = os.path.join(path, appname)
+
+ return path
+
+
+# for the discussion regarding site_config_dirs locations
+# see <https://github.com/pypa/pip/issues/1733>
+def site_config_dirs(appname):
+ r"""Return a list of potential user-shared config dirs for this application.
+
+ "appname" is the name of application.
+
+ Typical user config directories are:
+        macOS:      /Library/Application Support/<AppName>/
+        Unix:       /etc or $XDG_CONFIG_DIRS[i]/<AppName>/ for each value in
+                    $XDG_CONFIG_DIRS
+        Win XP:     C:\Documents and Settings\All Users\Application ...
+                    ...Data\<AppName>\
+        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory
+                    on Vista.)
+        Win 7:      Hidden, but writeable on Win 7:
+                    C:\ProgramData\<AppName>\
+ """
+ if WINDOWS:
+ path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
+ pathlist = [os.path.join(path, appname)]
+ elif sys.platform == 'darwin':
+ pathlist = [os.path.join('/Library/Application Support', appname)]
+ else:
+ # try looking in $XDG_CONFIG_DIRS
+ xdg_config_dirs = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
+ if xdg_config_dirs:
+ pathlist = [
+ os.path.join(expanduser(x), appname)
+ for x in xdg_config_dirs.split(os.pathsep)
+ ]
+ else:
+ pathlist = []
+
+ # always look in /etc directly as well
+ pathlist.append('/etc')
+
+ return pathlist
+
+
+# -- Windows support functions --
+
+def _get_win_folder_from_registry(csidl_name):
+ """
+ This is a fallback technique at best. I'm not sure if using the
+ registry for this guarantees us the correct answer for all CSIDL_*
+ names.
+ """
+ import _winreg
+
+ shell_folder_name = {
+ "CSIDL_APPDATA": "AppData",
+ "CSIDL_COMMON_APPDATA": "Common AppData",
+ "CSIDL_LOCAL_APPDATA": "Local AppData",
+ }[csidl_name]
+
+ key = _winreg.OpenKey(
+ _winreg.HKEY_CURRENT_USER,
+ r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
+ )
+ directory, _type = _winreg.QueryValueEx(key, shell_folder_name)
+ return directory
+
+
+def _get_win_folder_with_ctypes(csidl_name):
+ csidl_const = {
+ "CSIDL_APPDATA": 26,
+ "CSIDL_COMMON_APPDATA": 35,
+ "CSIDL_LOCAL_APPDATA": 28,
+ }[csidl_name]
+
+ buf = ctypes.create_unicode_buffer(1024)
+ ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
+
+ # Downgrade to short path name if have highbit chars. See
+    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
+ has_high_char = False
+ for c in buf:
+ if ord(c) > 255:
+ has_high_char = True
+ break
+ if has_high_char:
+ buf2 = ctypes.create_unicode_buffer(1024)
+ if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
+ buf = buf2
+
+ return buf.value
+
+
+if WINDOWS:
+ try:
+ import ctypes
+ _get_win_folder = _get_win_folder_with_ctypes
+ except ImportError:
+ _get_win_folder = _get_win_folder_from_registry
+
+
+def _win_path_to_bytes(path):
+ """Encode Windows paths to bytes. Only used on Python 2.
+
+ Motivation is to be consistent with other operating systems where paths
+ are also returned as bytes. This avoids problems mixing bytes and Unicode
+ elsewhere in the codebase. For more details and discussion see
+    <https://github.com/pypa/pip/issues/3463>.
+
+ If encoding using ASCII and MBCS fails, return the original Unicode path.
+ """
+ for encoding in ('ASCII', 'MBCS'):
+ try:
+ return path.encode(encoding)
+ except (UnicodeEncodeError, LookupError):
+ pass
+ return path
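
As a quick sanity check of the platform branches above: with no XDG overrides
set, user_cache_dir("pip") resolves to ~/.cache/pip on Linux,
~/Library/Caches/pip on macOS, and C:\Users\<username>\AppData\Local\pip\Cache
on Windows. A two-line probe (output paths shown are illustrative):

from pip._internal.utils.appdirs import user_cache_dir, user_config_dir

print(user_cache_dir("pip"))   # e.g. /home/me/.cache/pip on Linux
print(user_config_dir("pip"))  # e.g. /home/me/.config/pip on Linux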
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/deprecation.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/deprecation.py
index a907172..c0e3884 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/deprecation.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/deprecation.py
@@ -1,77 +1,77 @@
-"""
-A module that implements tooling to enable easy warnings about deprecations.
-"""
-from __future__ import absolute_import
-
-import logging
-import warnings
-
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import Any
-
-
-class PipDeprecationWarning(Warning):
- pass
-
-
-class Pending(object):
- pass
-
-
-class RemovedInPip11Warning(PipDeprecationWarning):
- pass
-
-
-class RemovedInPip12Warning(PipDeprecationWarning, Pending):
- pass
-
-
-# Warnings <-> Logging Integration
-
-
-_warnings_showwarning = None # type: Any
-
-
-def _showwarning(message, category, filename, lineno, file=None, line=None):
- if file is not None:
- if _warnings_showwarning is not None:
- _warnings_showwarning(
- message, category, filename, lineno, file, line,
- )
- else:
- if issubclass(category, PipDeprecationWarning):
- # We use a specially named logger which will handle all of the
- # deprecation messages for pip.
- logger = logging.getLogger("pip._internal.deprecations")
-
- # This is purposely using the % formatter here instead of letting
- # the logging module handle the interpolation. This is because we
- # want it to appear as if someone typed this entire message out.
- log_message = "DEPRECATION: %s" % message
-
- # PipDeprecationWarnings that are Pending still have at least 2
- # versions to go until they are removed so they can just be
- # warnings. Otherwise, they will be removed in the very next
- # version of pip. We want these to be more obvious so we use the
- # ERROR logging level.
- if issubclass(category, Pending):
- logger.warning(log_message)
- else:
- logger.error(log_message)
- else:
- _warnings_showwarning(
- message, category, filename, lineno, file, line,
- )
-
-
-def install_warning_logger():
- # Enable our Deprecation Warnings
- warnings.simplefilter("default", PipDeprecationWarning, append=True)
-
- global _warnings_showwarning
-
- if _warnings_showwarning is None:
- _warnings_showwarning = warnings.showwarning
- warnings.showwarning = _showwarning
+"""
+A module that implements tooling to enable easy warnings about deprecations.
+"""
+from __future__ import absolute_import
+
+import logging
+import warnings
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Any
+
+
+class PipDeprecationWarning(Warning):
+ pass
+
+
+class Pending(object):
+ pass
+
+
+class RemovedInPip11Warning(PipDeprecationWarning):
+ pass
+
+
+class RemovedInPip12Warning(PipDeprecationWarning, Pending):
+ pass
+
+
+# Warnings <-> Logging Integration
+
+
+_warnings_showwarning = None # type: Any
+
+
+def _showwarning(message, category, filename, lineno, file=None, line=None):
+ if file is not None:
+ if _warnings_showwarning is not None:
+ _warnings_showwarning(
+ message, category, filename, lineno, file, line,
+ )
+ else:
+ if issubclass(category, PipDeprecationWarning):
+ # We use a specially named logger which will handle all of the
+ # deprecation messages for pip.
+ logger = logging.getLogger("pip._internal.deprecations")
+
+ # This is purposely using the % formatter here instead of letting
+ # the logging module handle the interpolation. This is because we
+ # want it to appear as if someone typed this entire message out.
+ log_message = "DEPRECATION: %s" % message
+
+ # PipDeprecationWarnings that are Pending still have at least 2
+ # versions to go until they are removed so they can just be
+ # warnings. Otherwise, they will be removed in the very next
+ # version of pip. We want these to be more obvious so we use the
+ # ERROR logging level.
+ if issubclass(category, Pending):
+ logger.warning(log_message)
+ else:
+ logger.error(log_message)
+ else:
+ _warnings_showwarning(
+ message, category, filename, lineno, file, line,
+ )
+
+
+def install_warning_logger():
+ # Enable our Deprecation Warnings
+ warnings.simplefilter("default", PipDeprecationWarning, append=True)
+
+ global _warnings_showwarning
+
+ if _warnings_showwarning is None:
+ _warnings_showwarning = warnings.showwarning
+ warnings.showwarning = _showwarning
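
A short usage sketch of the warnings-to-logging bridge above: once
install_warning_logger() is called, pip deprecation warnings are routed to
the "pip._internal.deprecations" logger instead of the default warning
printer:

import logging
import warnings

from pip._internal.utils.deprecation import (
    RemovedInPip11Warning, install_warning_logger,
)

logging.basicConfig(level=logging.DEBUG)
install_warning_logger()

# RemovedInPip11Warning is not Pending, so _showwarning logs it at ERROR
# level as "DEPRECATION: old option is going away".
warnings.warn("old option is going away", RemovedInPip11Warning)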
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/encoding.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/encoding.py
index 56f6036..831f3f6 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/encoding.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/encoding.py
@@ -1,33 +1,33 @@
-import codecs
-import locale
-import re
-import sys
-
-BOMS = [
- (codecs.BOM_UTF8, 'utf8'),
- (codecs.BOM_UTF16, 'utf16'),
- (codecs.BOM_UTF16_BE, 'utf16-be'),
- (codecs.BOM_UTF16_LE, 'utf16-le'),
- (codecs.BOM_UTF32, 'utf32'),
- (codecs.BOM_UTF32_BE, 'utf32-be'),
- (codecs.BOM_UTF32_LE, 'utf32-le'),
-]
-
-ENCODING_RE = re.compile(br'coding[:=]\s*([-\w.]+)')
-
-
-def auto_decode(data):
- """Check a bytes string for a BOM to correctly detect the encoding
-
- Fallback to locale.getpreferredencoding(False) like open() on Python3"""
- for bom, encoding in BOMS:
- if data.startswith(bom):
- return data[len(bom):].decode(encoding)
-    # Let's check the first two lines as in PEP 263
- for line in data.split(b'\n')[:2]:
- if line[0:1] == b'#' and ENCODING_RE.search(line):
- encoding = ENCODING_RE.search(line).groups()[0].decode('ascii')
- return data.decode(encoding)
- return data.decode(
- locale.getpreferredencoding(False) or sys.getdefaultencoding(),
- )
+import codecs
+import locale
+import re
+import sys
+
+BOMS = [
+ (codecs.BOM_UTF8, 'utf8'),
+ (codecs.BOM_UTF16, 'utf16'),
+ (codecs.BOM_UTF16_BE, 'utf16-be'),
+ (codecs.BOM_UTF16_LE, 'utf16-le'),
+ (codecs.BOM_UTF32, 'utf32'),
+ (codecs.BOM_UTF32_BE, 'utf32-be'),
+ (codecs.BOM_UTF32_LE, 'utf32-le'),
+]
+
+ENCODING_RE = re.compile(br'coding[:=]\s*([-\w.]+)')
+
+
+def auto_decode(data):
+ """Check a bytes string for a BOM to correctly detect the encoding
+
+ Fallback to locale.getpreferredencoding(False) like open() on Python3"""
+ for bom, encoding in BOMS:
+ if data.startswith(bom):
+ return data[len(bom):].decode(encoding)
+ # Lets check the first two lines as in PEP263
+    # Let's check the first two lines as in PEP 263
+ if line[0:1] == b'#' and ENCODING_RE.search(line):
+ encoding = ENCODING_RE.search(line).groups()[0].decode('ascii')
+ return data.decode(encoding)
+ return data.decode(
+ locale.getpreferredencoding(False) or sys.getdefaultencoding(),
+ )
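
auto_decode() above tries, in order: a known BOM, a PEP 263 coding comment on
one of the first two lines, and finally the locale's preferred encoding. Two
small examples of the first two branches:

import codecs

from pip._internal.utils.encoding import auto_decode

# BOM wins: the UTF-8 BOM is stripped and the rest decoded as UTF-8.
assert auto_decode(codecs.BOM_UTF8 + b'caf\xc3\xa9') == u'caf\xe9'

# No BOM, so the PEP 263 coding comment on the first line is honoured and
# the Latin-1 byte \xe9 decodes to e-acute.
assert auto_decode(b'# -*- coding: latin-1 -*-\n\xe9') == \
    u'# -*- coding: latin-1 -*-\n\xe9'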
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/filesystem.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/filesystem.py
index ee45501..94fa2c6 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/filesystem.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/filesystem.py
@@ -1,28 +1,28 @@
-import os
-import os.path
-
-from pip._internal.compat import get_path_uid
-
-
-def check_path_owner(path):
- # If we don't have a way to check the effective uid of this process, then
- # we'll just assume that we own the directory.
- if not hasattr(os, "geteuid"):
- return True
-
- previous = None
- while path != previous:
- if os.path.lexists(path):
- # Check if path is writable by current user.
- if os.geteuid() == 0:
- # Special handling for root user in order to handle properly
- # cases where users use sudo without -H flag.
- try:
- path_uid = get_path_uid(path)
- except OSError:
- return False
- return path_uid == 0
- else:
- return os.access(path, os.W_OK)
- else:
- previous, path = path, os.path.dirname(path)
+import os
+import os.path
+
+from pip._internal.compat import get_path_uid
+
+
+def check_path_owner(path):
+ # If we don't have a way to check the effective uid of this process, then
+ # we'll just assume that we own the directory.
+ if not hasattr(os, "geteuid"):
+ return True
+
+ previous = None
+ while path != previous:
+ if os.path.lexists(path):
+ # Check if path is writable by current user.
+ if os.geteuid() == 0:
+ # Special handling for root user in order to handle properly
+ # cases where users use sudo without -H flag.
+ try:
+ path_uid = get_path_uid(path)
+ except OSError:
+ return False
+ return path_uid == 0
+ else:
+ return os.access(path, os.W_OK)
+ else:
+ previous, path = path, os.path.dirname(path)
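
check_path_owner() walks up from the given path to the first component that
actually exists and answers "may the current user write here?"; for root it
checks ownership (uid 0) rather than bare writability, to behave sensibly
when sudo is used without -H. A hypothetical call (the path itself is
illustrative):

from pip._internal.utils.filesystem import check_path_owner

# The first existing ancestor of this not-yet-created cache path is what
# actually gets checked.
print(check_path_owner('/home/me/.cache/pip/wheels/new-subdir'))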
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/glibc.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/glibc.py
index ebcfc5b..5900a10 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/glibc.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/glibc.py
@@ -1,84 +1,84 @@
-from __future__ import absolute_import
-
-import ctypes
-import re
-import warnings
-
-
-def glibc_version_string():
- "Returns glibc version string, or None if not using glibc."
-
- # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
- # manpage says, "If filename is NULL, then the returned handle is for the
- # main program". This way we can let the linker do the work to figure out
- # which libc our process is actually using.
- process_namespace = ctypes.CDLL(None)
- try:
- gnu_get_libc_version = process_namespace.gnu_get_libc_version
- except AttributeError:
- # Symbol doesn't exist -> therefore, we are not linked to
- # glibc.
- return None
-
- # Call gnu_get_libc_version, which returns a string like "2.5"
- gnu_get_libc_version.restype = ctypes.c_char_p
- version_str = gnu_get_libc_version()
- # py2 / py3 compatibility:
- if not isinstance(version_str, str):
- version_str = version_str.decode("ascii")
-
- return version_str
-
-
-# Separated out from have_compatible_glibc for easier unit testing
-def check_glibc_version(version_str, required_major, minimum_minor):
- # Parse string and check against requested version.
- #
- # We use a regexp instead of str.split because we want to discard any
- # random junk that might come after the minor version -- this might happen
- # in patched/forked versions of glibc (e.g. Linaro's version of glibc
- # uses version strings like "2.20-2014.11"). See gh-3588.
-    m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
- if not m:
- warnings.warn("Expected glibc version with 2 components major.minor,"
- " got: %s" % version_str, RuntimeWarning)
- return False
- return (int(m.group("major")) == required_major and
- int(m.group("minor")) >= minimum_minor)
-
-
-def have_compatible_glibc(required_major, minimum_minor):
- version_str = glibc_version_string()
- if version_str is None:
- return False
- return check_glibc_version(version_str, required_major, minimum_minor)
-
-
-# platform.libc_ver regularly returns completely nonsensical glibc
-# versions. E.g. on my computer, platform says:
-#
-# ~$ python2.7 -c 'import platform; print(platform.libc_ver())'
-# ('glibc', '2.7')
-# ~$ python3.5 -c 'import platform; print(platform.libc_ver())'
-# ('glibc', '2.9')
-#
-# But the truth is:
-#
-# ~$ ldd --version
-# ldd (Debian GLIBC 2.22-11) 2.22
-#
-# This is unfortunate, because it means that the linehaul data on libc
-# versions that was generated by pip 8.1.2 and earlier is useless and
-# misleading. Solution: instead of using platform, use our code that actually
-# works.
-def libc_ver():
- """Try to determine the glibc version
-
- Returns a tuple of strings (lib, version) which default to empty strings
- in case the lookup fails.
- """
- glibc_version = glibc_version_string()
- if glibc_version is None:
- return ("", "")
- else:
- return ("glibc", glibc_version)
+from __future__ import absolute_import
+
+import ctypes
+import re
+import warnings
+
+
+def glibc_version_string():
+ "Returns glibc version string, or None if not using glibc."
+
+ # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
+ # manpage says, "If filename is NULL, then the returned handle is for the
+ # main program". This way we can let the linker do the work to figure out
+ # which libc our process is actually using.
+ process_namespace = ctypes.CDLL(None)
+ try:
+ gnu_get_libc_version = process_namespace.gnu_get_libc_version
+ except AttributeError:
+ # Symbol doesn't exist -> therefore, we are not linked to
+ # glibc.
+ return None
+
+ # Call gnu_get_libc_version, which returns a string like "2.5"
+ gnu_get_libc_version.restype = ctypes.c_char_p
+ version_str = gnu_get_libc_version()
+ # py2 / py3 compatibility:
+ if not isinstance(version_str, str):
+ version_str = version_str.decode("ascii")
+
+ return version_str
+
+
+# Separated out from have_compatible_glibc for easier unit testing
+def check_glibc_version(version_str, required_major, minimum_minor):
+ # Parse string and check against requested version.
+ #
+ # We use a regexp instead of str.split because we want to discard any
+ # random junk that might come after the minor version -- this might happen
+ # in patched/forked versions of glibc (e.g. Linaro's version of glibc
+ # uses version strings like "2.20-2014.11"). See gh-3588.
+    m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
+ if not m:
+ warnings.warn("Expected glibc version with 2 components major.minor,"
+ " got: %s" % version_str, RuntimeWarning)
+ return False
+ return (int(m.group("major")) == required_major and
+ int(m.group("minor")) >= minimum_minor)
+
+
+def have_compatible_glibc(required_major, minimum_minor):
+ version_str = glibc_version_string()
+ if version_str is None:
+ return False
+ return check_glibc_version(version_str, required_major, minimum_minor)
+
+
+# platform.libc_ver regularly returns completely nonsensical glibc
+# versions. E.g. on my computer, platform says:
+#
+# ~$ python2.7 -c 'import platform; print(platform.libc_ver())'
+# ('glibc', '2.7')
+# ~$ python3.5 -c 'import platform; print(platform.libc_ver())'
+# ('glibc', '2.9')
+#
+# But the truth is:
+#
+# ~$ ldd --version
+# ldd (Debian GLIBC 2.22-11) 2.22
+#
+# This is unfortunate, because it means that the linehaul data on libc
+# versions that was generated by pip 8.1.2 and earlier is useless and
+# misleading. Solution: instead of using platform, use our code that actually
+# works.
+def libc_ver():
+ """Try to determine the glibc version
+
+ Returns a tuple of strings (lib, version) which default to empty strings
+ in case the lookup fails.
+ """
+ glibc_version = glibc_version_string()
+ if glibc_version is None:
+ return ("", "")
+ else:
+ return ("glibc", glibc_version)
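
Quick checks of the helpers above: libc_ver() reports the glibc version via
gnu_get_libc_version() and falls back to ("", "") on non-glibc platforms such
as macOS, while check_glibc_version() tolerates vendor suffixes like Linaro's
(see gh-3588):

from pip._internal.utils.glibc import check_glibc_version, libc_ver

print(libc_ver())  # e.g. ('glibc', '2.27') on a glibc-based Linux

# The "-2014.11" suffix after the minor version is ignored by the parser.
assert check_glibc_version("2.20-2014.11", 2, 17) is True
assert check_glibc_version("2.12", 2, 17) is False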
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/hashes.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/hashes.py
index 8b909ba..8cf6367 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/hashes.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/hashes.py
@@ -1,94 +1,94 @@
-from __future__ import absolute_import
-
-import hashlib
-
-from pip._vendor.six import iteritems, iterkeys, itervalues
-
-from pip._internal.exceptions import (
- HashMismatch, HashMissing, InstallationError,
-)
-from pip._internal.utils.misc import read_chunks
-
-# The recommended hash algo of the moment. Change this whenever the state of
-# the art changes; it won't hurt backward compatibility.
-FAVORITE_HASH = 'sha256'
-
-
-# Names of hashlib algorithms allowed by the --hash option and ``pip hash``
-# Currently, those are the ones at least as collision-resistant as sha256.
-STRONG_HASHES = ['sha256', 'sha384', 'sha512']
-
-
-class Hashes(object):
- """A wrapper that builds multiple hashes at once and checks them against
- known-good values
-
- """
- def __init__(self, hashes=None):
- """
- :param hashes: A dict of algorithm names pointing to lists of allowed
- hex digests
- """
- self._allowed = {} if hashes is None else hashes
-
- def check_against_chunks(self, chunks):
- """Check good hashes against ones built from iterable of chunks of
- data.
-
- Raise HashMismatch if none match.
-
- """
- gots = {}
- for hash_name in iterkeys(self._allowed):
- try:
- gots[hash_name] = hashlib.new(hash_name)
- except (ValueError, TypeError):
- raise InstallationError('Unknown hash name: %s' % hash_name)
-
- for chunk in chunks:
- for hash in itervalues(gots):
- hash.update(chunk)
-
- for hash_name, got in iteritems(gots):
- if got.hexdigest() in self._allowed[hash_name]:
- return
- self._raise(gots)
-
- def _raise(self, gots):
- raise HashMismatch(self._allowed, gots)
-
- def check_against_file(self, file):
- """Check good hashes against a file-like object
-
- Raise HashMismatch if none match.
-
- """
- return self.check_against_chunks(read_chunks(file))
-
- def check_against_path(self, path):
- with open(path, 'rb') as file:
- return self.check_against_file(file)
-
- def __nonzero__(self):
- """Return whether I know any known-good hashes."""
- return bool(self._allowed)
-
- def __bool__(self):
- return self.__nonzero__()
-
-
-class MissingHashes(Hashes):
- """A workalike for Hashes used when we're missing a hash for a requirement
-
- It computes the actual hash of the requirement and raises a HashMissing
- exception showing it to the user.
-
- """
- def __init__(self):
- """Don't offer the ``hashes`` kwarg."""
- # Pass our favorite hash in to generate a "gotten hash". With the
- # empty list, it will never match, so an error will always raise.
- super(MissingHashes, self).__init__(hashes={FAVORITE_HASH: []})
-
- def _raise(self, gots):
- raise HashMissing(gots[FAVORITE_HASH].hexdigest())
+from __future__ import absolute_import
+
+import hashlib
+
+from pip._vendor.six import iteritems, iterkeys, itervalues
+
+from pip._internal.exceptions import (
+ HashMismatch, HashMissing, InstallationError,
+)
+from pip._internal.utils.misc import read_chunks
+
+# The recommended hash algo of the moment. Change this whenever the state of
+# the art changes; it won't hurt backward compatibility.
+FAVORITE_HASH = 'sha256'
+
+
+# Names of hashlib algorithms allowed by the --hash option and ``pip hash``
+# Currently, those are the ones at least as collision-resistant as sha256.
+STRONG_HASHES = ['sha256', 'sha384', 'sha512']
+
+
+class Hashes(object):
+ """A wrapper that builds multiple hashes at once and checks them against
+ known-good values
+
+ """
+ def __init__(self, hashes=None):
+ """
+ :param hashes: A dict of algorithm names pointing to lists of allowed
+ hex digests
+ """
+ self._allowed = {} if hashes is None else hashes
+
+ def check_against_chunks(self, chunks):
+ """Check good hashes against ones built from iterable of chunks of
+ data.
+
+ Raise HashMismatch if none match.
+
+ """
+ gots = {}
+ for hash_name in iterkeys(self._allowed):
+ try:
+ gots[hash_name] = hashlib.new(hash_name)
+ except (ValueError, TypeError):
+ raise InstallationError('Unknown hash name: %s' % hash_name)
+
+ for chunk in chunks:
+ for hash in itervalues(gots):
+ hash.update(chunk)
+
+ for hash_name, got in iteritems(gots):
+ if got.hexdigest() in self._allowed[hash_name]:
+ return
+ self._raise(gots)
+
+ def _raise(self, gots):
+ raise HashMismatch(self._allowed, gots)
+
+ def check_against_file(self, file):
+ """Check good hashes against a file-like object
+
+ Raise HashMismatch if none match.
+
+ """
+ return self.check_against_chunks(read_chunks(file))
+
+ def check_against_path(self, path):
+ with open(path, 'rb') as file:
+ return self.check_against_file(file)
+
+ def __nonzero__(self):
+ """Return whether I know any known-good hashes."""
+ return bool(self._allowed)
+
+ def __bool__(self):
+ return self.__nonzero__()
+
+
+class MissingHashes(Hashes):
+ """A workalike for Hashes used when we're missing a hash for a requirement
+
+ It computes the actual hash of the requirement and raises a HashMissing
+ exception showing it to the user.
+
+ """
+ def __init__(self):
+ """Don't offer the ``hashes`` kwarg."""
+ # Pass our favorite hash in to generate a "gotten hash". With the
+ # empty list, it will never match, so an error will always raise.
+ super(MissingHashes, self).__init__(hashes={FAVORITE_HASH: []})
+
+ def _raise(self, gots):
+ raise HashMissing(gots[FAVORITE_HASH].hexdigest())
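A brief usage sketch for the hashes module (this is an internal API, so treat it as illustrative only; all values below are made up):

    import hashlib
    from pip._internal.utils.hashes import Hashes

    expected = hashlib.sha256(b"payload").hexdigest()
    good = Hashes({'sha256': [expected]})

    # Returns silently when any allowed digest matches the data...
    good.check_against_chunks(iter([b"payload"]))

    # ...and raises HashMismatch when none do:
    # good.check_against_chunks(iter([b"tampered"]))

    # A Hashes object is falsy when it carries no known-good digests.
    assert not Hashes()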
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/logging.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/logging.py
index 5a5a7d7..1fb3e8a 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/logging.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/logging.py
@@ -1,132 +1,132 @@
-from __future__ import absolute_import
-
-import contextlib
-import logging
-import logging.handlers
-import os
-
-from pip._internal.compat import WINDOWS
-from pip._internal.utils.misc import ensure_dir
-
-try:
- import threading
-except ImportError:
- import dummy_threading as threading # type: ignore
-
-
-try:
- from pip._vendor import colorama
-# Lots of different errors can come from this, including SystemError and
-# ImportError.
-except Exception:
- colorama = None
-
-
-_log_state = threading.local()
-_log_state.indentation = 0
-
-
-@contextlib.contextmanager
-def indent_log(num=2):
- """
- A context manager which will cause the log output to be indented for any
- log messages emitted inside it.
- """
- _log_state.indentation += num
- try:
- yield
- finally:
- _log_state.indentation -= num
-
-
-def get_indentation():
- return getattr(_log_state, 'indentation', 0)
-
-
-class IndentingFormatter(logging.Formatter):
-
- def format(self, record):
- """
- Calls the standard formatter, but will indent all of the log messages
- by our current indentation level.
- """
- formatted = logging.Formatter.format(self, record)
- formatted = "".join([
- (" " * get_indentation()) + line
- for line in formatted.splitlines(True)
- ])
- return formatted
-
-
-def _color_wrap(*colors):
- def wrapped(inp):
- return "".join(list(colors) + [inp, colorama.Style.RESET_ALL])
- return wrapped
-
-
-class ColorizedStreamHandler(logging.StreamHandler):
-
- # Don't build up a list of colors if we don't have colorama
- if colorama:
- COLORS = [
- # This needs to be in order from highest logging level to lowest.
- (logging.ERROR, _color_wrap(colorama.Fore.RED)),
- (logging.WARNING, _color_wrap(colorama.Fore.YELLOW)),
- ]
- else:
- COLORS = []
-
- def __init__(self, stream=None, no_color=None):
- logging.StreamHandler.__init__(self, stream)
- self._no_color = no_color
-
- if WINDOWS and colorama:
- self.stream = colorama.AnsiToWin32(self.stream)
-
- def should_color(self):
- # Don't colorize things if we do not have colorama or if told not to
- if not colorama or self._no_color:
- return False
-
- real_stream = (
- self.stream if not isinstance(self.stream, colorama.AnsiToWin32)
- else self.stream.wrapped
- )
-
- # If the stream is a tty we should color it
- if hasattr(real_stream, "isatty") and real_stream.isatty():
- return True
-
- # If we have an ASNI term we should color it
- if os.environ.get("TERM") == "ANSI":
- return True
-
- # If anything else we should not color it
- return False
-
- def format(self, record):
- msg = logging.StreamHandler.format(self, record)
-
- if self.should_color():
- for level, color in self.COLORS:
- if record.levelno >= level:
- msg = color(msg)
- break
-
- return msg
-
-
-class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler):
-
- def _open(self):
- ensure_dir(os.path.dirname(self.baseFilename))
- return logging.handlers.RotatingFileHandler._open(self)
-
-
-class MaxLevelFilter(logging.Filter):
-
- def __init__(self, level):
- self.level = level
-
- def filter(self, record):
- return record.levelno < self.level
+from __future__ import absolute_import
+
+import contextlib
+import logging
+import logging.handlers
+import os
+
+from pip._internal.compat import WINDOWS
+from pip._internal.utils.misc import ensure_dir
+
+try:
+ import threading
+except ImportError:
+ import dummy_threading as threading # type: ignore
+
+
+try:
+ from pip._vendor import colorama
+# Lots of different errors can come from this, including SystemError and
+# ImportError.
+except Exception:
+ colorama = None
+
+
+_log_state = threading.local()
+_log_state.indentation = 0
+
+
+@contextlib.contextmanager
+def indent_log(num=2):
+ """
+ A context manager which will cause the log output to be indented for any
+ log messages emitted inside it.
+ """
+ _log_state.indentation += num
+ try:
+ yield
+ finally:
+ _log_state.indentation -= num
+
+
+def get_indentation():
+ return getattr(_log_state, 'indentation', 0)
+
+
+class IndentingFormatter(logging.Formatter):
+
+ def format(self, record):
+ """
+ Calls the standard formatter, but will indent all of the log messages
+ by our current indentation level.
+ """
+ formatted = logging.Formatter.format(self, record)
+ formatted = "".join([
+ (" " * get_indentation()) + line
+ for line in formatted.splitlines(True)
+ ])
+ return formatted
+
+
+def _color_wrap(*colors):
+ def wrapped(inp):
+ return "".join(list(colors) + [inp, colorama.Style.RESET_ALL])
+ return wrapped
+
+
+class ColorizedStreamHandler(logging.StreamHandler):
+
+ # Don't build up a list of colors if we don't have colorama
+ if colorama:
+ COLORS = [
+ # This needs to be in order from highest logging level to lowest.
+ (logging.ERROR, _color_wrap(colorama.Fore.RED)),
+ (logging.WARNING, _color_wrap(colorama.Fore.YELLOW)),
+ ]
+ else:
+ COLORS = []
+
+ def __init__(self, stream=None, no_color=None):
+ logging.StreamHandler.__init__(self, stream)
+ self._no_color = no_color
+
+ if WINDOWS and colorama:
+ self.stream = colorama.AnsiToWin32(self.stream)
+
+ def should_color(self):
+ # Don't colorize things if we do not have colorama or if told not to
+ if not colorama or self._no_color:
+ return False
+
+ real_stream = (
+ self.stream if not isinstance(self.stream, colorama.AnsiToWin32)
+ else self.stream.wrapped
+ )
+
+ # If the stream is a tty we should color it
+ if hasattr(real_stream, "isatty") and real_stream.isatty():
+ return True
+
+ # If we have an ANSI term we should color it
+ if os.environ.get("TERM") == "ANSI":
+ return True
+
+ # If anything else we should not color it
+ return False
+
+ def format(self, record):
+ msg = logging.StreamHandler.format(self, record)
+
+ if self.should_color():
+ for level, color in self.COLORS:
+ if record.levelno >= level:
+ msg = color(msg)
+ break
+
+ return msg
+
+
+class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler):
+
+ def _open(self):
+ ensure_dir(os.path.dirname(self.baseFilename))
+ return logging.handlers.RotatingFileHandler._open(self)
+
+
+class MaxLevelFilter(logging.Filter):
+
+ def __init__(self, level):
+ self.level = level
+
+ def filter(self, record):
+ return record.levelno < self.level
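A short sketch of how indent_log and IndentingFormatter cooperate (standard library logging wiring assumed; pip performs the equivalent setup in its own logging configuration):

    import logging
    from pip._internal.utils.logging import IndentingFormatter, indent_log

    handler = logging.StreamHandler()
    handler.setFormatter(IndentingFormatter(fmt="%(message)s"))
    demo = logging.getLogger("demo")
    demo.addHandler(handler)
    demo.setLevel(logging.INFO)

    demo.info("Collecting example-pkg")   # flush left
    with indent_log():
        demo.info("Downloading ...")      # indented two spaces
        with indent_log():
            demo.info("Saved ./example")  # indented four spaces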
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/misc.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/misc.py
index 9d4c9b1..db84a7c 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/misc.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/misc.py
@@ -1,851 +1,851 @@
-from __future__ import absolute_import
-
-import contextlib
-import errno
-import io
-import locale
-# we have a submodule named 'logging' which would shadow this if we used the
-# regular name:
-import logging as std_logging
-import os
-import posixpath
-import re
-import shutil
-import stat
-import subprocess
-import sys
-import tarfile
-import zipfile
-from collections import deque
-
-from pip._vendor import pkg_resources
-# NOTE: retrying is not annotated in typeshed as on 2017-07-17, which is
-# why we ignore the type on this import.
-from pip._vendor.retrying import retry # type: ignore
-from pip._vendor.six import PY2
-from pip._vendor.six.moves import input
-
-from pip._internal.compat import console_to_str, expanduser, stdlib_pkgs
-from pip._internal.exceptions import InstallationError
-from pip._internal.locations import (
- running_under_virtualenv, site_packages, user_site, virtualenv_no_global,
- write_delete_marker_file,
-)
-
-if PY2:
- from io import BytesIO as StringIO
-else:
- from io import StringIO
-
-__all__ = ['rmtree', 'display_path', 'backup_dir',
- 'ask', 'splitext',
- 'format_size', 'is_installable_dir',
- 'is_svn_page', 'file_contents',
- 'split_leading_dir', 'has_leading_dir',
- 'normalize_path',
- 'renames', 'get_prog',
- 'unzip_file', 'untar_file', 'unpack_file', 'call_subprocess',
- 'captured_stdout', 'ensure_dir',
- 'ARCHIVE_EXTENSIONS', 'SUPPORTED_EXTENSIONS',
- 'get_installed_version']
-
-
-logger = std_logging.getLogger(__name__)
-
-BZ2_EXTENSIONS = ('.tar.bz2', '.tbz')
-XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz', '.tar.lz', '.tar.lzma')
-ZIP_EXTENSIONS = ('.zip', '.whl')
-TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar')
-ARCHIVE_EXTENSIONS = (
- ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS)
-SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS
-try:
- import bz2 # noqa
- SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS
-except ImportError:
- logger.debug('bz2 module is not available')
-
-try:
- # Only for Python 3.3+
- import lzma # noqa
- SUPPORTED_EXTENSIONS += XZ_EXTENSIONS
-except ImportError:
- logger.debug('lzma module is not available')
-
-
-def import_or_raise(pkg_or_module_string, ExceptionType, *args, **kwargs):
- try:
- return __import__(pkg_or_module_string)
- except ImportError:
- raise ExceptionType(*args, **kwargs)
-
-
-def ensure_dir(path):
- """os.path.makedirs without EEXIST."""
- try:
- os.makedirs(path)
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
-
-
-def get_prog():
- try:
- prog = os.path.basename(sys.argv[0])
- if prog in ('__main__.py', '-c'):
- return "%s -m pip" % sys.executable
- else:
- return prog
- except (AttributeError, TypeError, IndexError):
- pass
- return 'pip'
-
-
-# Retry every half second for up to 3 seconds
-@retry(stop_max_delay=3000, wait_fixed=500)
-def rmtree(dir, ignore_errors=False):
- shutil.rmtree(dir, ignore_errors=ignore_errors,
- onerror=rmtree_errorhandler)
-
-
-def rmtree_errorhandler(func, path, exc_info):
- """On Windows, the files in .svn are read-only, so when rmtree() tries to
- remove them, an exception is thrown. We catch that here, remove the
- read-only attribute, and hopefully continue without problems."""
- # if file type currently read only
- if os.stat(path).st_mode & stat.S_IREAD:
- # convert to read/write
- os.chmod(path, stat.S_IWRITE)
- # use the original function to repeat the operation
- func(path)
- return
- else:
- raise
-
-
-def display_path(path):
- """Gives the display value for a given path, making it relative to cwd
- if possible."""
- path = os.path.normcase(os.path.abspath(path))
- if sys.version_info[0] == 2:
- path = path.decode(sys.getfilesystemencoding(), 'replace')
- path = path.encode(sys.getdefaultencoding(), 'replace')
- if path.startswith(os.getcwd() + os.path.sep):
- path = '.' + path[len(os.getcwd()):]
- return path
-
-
-def backup_dir(dir, ext='.bak'):
- """Figure out the name of a directory to back up the given dir to
- (adding .bak, .bak2, etc)"""
- n = 1
- extension = ext
- while os.path.exists(dir + extension):
- n += 1
- extension = ext + str(n)
- return dir + extension
-
-
-def ask_path_exists(message, options):
- for action in os.environ.get('PIP_EXISTS_ACTION', '').split():
- if action in options:
- return action
- return ask(message, options)
-
-
-def ask(message, options):
- """Ask the message interactively, with the given possible responses"""
- while 1:
- if os.environ.get('PIP_NO_INPUT'):
- raise Exception(
- 'No input was expected ($PIP_NO_INPUT set); question: %s' %
- message
- )
- response = input(message)
- response = response.strip().lower()
- if response not in options:
- print(
- 'Your response (%r) was not one of the expected responses: '
- '%s' % (response, ', '.join(options))
- )
- else:
- return response
-
-
-def format_size(bytes):
- if bytes > 1000 * 1000:
- return '%.1fMB' % (bytes / 1000.0 / 1000)
- elif bytes > 10 * 1000:
- return '%ikB' % (bytes / 1000)
- elif bytes > 1000:
- return '%.1fkB' % (bytes / 1000.0)
- else:
- return '%ibytes' % bytes
-
-
-def is_installable_dir(path):
- """Return True if `path` is a directory containing a setup.py file."""
- if not os.path.isdir(path):
- return False
- setup_py = os.path.join(path, 'setup.py')
- if os.path.isfile(setup_py):
- return True
- return False
-
-
-def is_svn_page(html):
- """
- Returns true if the page appears to be the index page of an svn repository
- """
- return (re.search(r'<title>[^<]*Revision \d+:', html) and
- re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I))
-
-
-def file_contents(filename):
- with open(filename, 'rb') as fp:
- return fp.read().decode('utf-8')
-
-
-def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE):
- """Yield pieces of data from a file-like object until EOF."""
- while True:
- chunk = file.read(size)
- if not chunk:
- break
- yield chunk
-
-
-def split_leading_dir(path):
- path = path.lstrip('/').lstrip('\\')
- if '/' in path and (('\\' in path and path.find('/') < path.find('\\')) or
- '\\' not in path):
- return path.split('/', 1)
- elif '\\' in path:
- return path.split('\\', 1)
- else:
- return path, ''
-
-
-def has_leading_dir(paths):
- """Returns true if all the paths have the same leading path name
- (i.e., everything is in one subdirectory in an archive)"""
- common_prefix = None
- for path in paths:
- prefix, rest = split_leading_dir(path)
- if not prefix:
- return False
- elif common_prefix is None:
- common_prefix = prefix
- elif prefix != common_prefix:
- return False
- return True
-
-
-def normalize_path(path, resolve_symlinks=True):
- """
- Convert a path to its canonical, case-normalized, absolute version.
-
- """
- path = expanduser(path)
- if resolve_symlinks:
- path = os.path.realpath(path)
- else:
- path = os.path.abspath(path)
- return os.path.normcase(path)
-
-
-def splitext(path):
- """Like os.path.splitext, but take off .tar too"""
- base, ext = posixpath.splitext(path)
- if base.lower().endswith('.tar'):
- ext = base[-4:] + ext
- base = base[:-4]
- return base, ext
-
-
-def renames(old, new):
- """Like os.renames(), but handles renaming across devices."""
- # Implementation borrowed from os.renames().
- head, tail = os.path.split(new)
- if head and tail and not os.path.exists(head):
- os.makedirs(head)
-
- shutil.move(old, new)
-
- head, tail = os.path.split(old)
- if head and tail:
- try:
- os.removedirs(head)
- except OSError:
- pass
-
-
-def is_local(path):
- """
- Return True if path is within sys.prefix, if we're running in a virtualenv.
-
- If we're not in a virtualenv, all paths are considered "local."
-
- """
- if not running_under_virtualenv():
- return True
- return normalize_path(path).startswith(normalize_path(sys.prefix))
-
-
-def dist_is_local(dist):
- """
- Return True if given Distribution object is installed locally
- (i.e. within current virtualenv).
-
- Always True if we're not in a virtualenv.
-
- """
- return is_local(dist_location(dist))
-
-
-def dist_in_usersite(dist):
- """
- Return True if given Distribution is installed in user site.
- """
- norm_path = normalize_path(dist_location(dist))
- return norm_path.startswith(normalize_path(user_site))
-
-
-def dist_in_site_packages(dist):
- """
- Return True if given Distribution is installed in
- sysconfig.get_python_lib().
- """
- return normalize_path(
- dist_location(dist)
- ).startswith(normalize_path(site_packages))
-
-
-def dist_is_editable(dist):
- """Is distribution an editable install?"""
- for path_item in sys.path:
- egg_link = os.path.join(path_item, dist.project_name + '.egg-link')
- if os.path.isfile(egg_link):
- return True
- return False
-
-
-def get_installed_distributions(local_only=True,
- skip=stdlib_pkgs,
- include_editables=True,
- editables_only=False,
- user_only=False):
- """
- Return a list of installed Distribution objects.
-
- If ``local_only`` is True (default), only return installations
- local to the current virtualenv, if in a virtualenv.
-
- ``skip`` argument is an iterable of lower-case project names to
- ignore; defaults to stdlib_pkgs
-
- If ``include_editables`` is False, don't report editables.
-
- If ``editables_only`` is True , only report editables.
-
- If ``user_only`` is True , only report installations in the user
- site directory.
-
- """
- if local_only:
- local_test = dist_is_local
- else:
- def local_test(d):
- return True
-
- if include_editables:
- def editable_test(d):
- return True
- else:
- def editable_test(d):
- return not dist_is_editable(d)
-
- if editables_only:
- def editables_only_test(d):
- return dist_is_editable(d)
- else:
- def editables_only_test(d):
- return True
-
- if user_only:
- user_test = dist_in_usersite
- else:
- def user_test(d):
- return True
-
- return [d for d in pkg_resources.working_set
- if local_test(d) and
- d.key not in skip and
- editable_test(d) and
- editables_only_test(d) and
- user_test(d)
- ]
-
-
-def egg_link_path(dist):
- """
- Return the path for the .egg-link file if it exists, otherwise, None.
-
- There's 3 scenarios:
- 1) not in a virtualenv
- try to find in site.USER_SITE, then site_packages
- 2) in a no-global virtualenv
- try to find in site_packages
- 3) in a yes-global virtualenv
- try to find in site_packages, then site.USER_SITE
- (don't look in global location)
-
- For #1 and #3, there could be odd cases, where there's an egg-link in 2
- locations.
-
- This method will just return the first one found.
- """
- sites = []
- if running_under_virtualenv():
- if virtualenv_no_global():
- sites.append(site_packages)
- else:
- sites.append(site_packages)
- if user_site:
- sites.append(user_site)
- else:
- if user_site:
- sites.append(user_site)
- sites.append(site_packages)
-
- for site in sites:
- egglink = os.path.join(site, dist.project_name) + '.egg-link'
- if os.path.isfile(egglink):
- return egglink
-
-
-def dist_location(dist):
- """
- Get the site-packages location of this distribution. Generally
- this is dist.location, except in the case of develop-installed
- packages, where dist.location is the source code location, and we
- want to know where the egg-link file is.
-
- """
- egg_link = egg_link_path(dist)
- if egg_link:
- return egg_link
- return dist.location
-
-
-def current_umask():
- """Get the current umask which involves having to set it temporarily."""
- mask = os.umask(0)
- os.umask(mask)
- return mask
-
-
-def unzip_file(filename, location, flatten=True):
- """
- Unzip the file (with path `filename`) to the destination `location`. All
- files are written based on system defaults and umask (i.e. permissions are
- not preserved), except that regular file members with any execute
- permissions (user, group, or world) have "chmod +x" applied after being
- written. Note that for windows, any execute changes using os.chmod are
- no-ops per the python docs.
- """
- ensure_dir(location)
- zipfp = open(filename, 'rb')
- try:
- zip = zipfile.ZipFile(zipfp, allowZip64=True)
- leading = has_leading_dir(zip.namelist()) and flatten
- for info in zip.infolist():
- name = info.filename
- data = zip.read(name)
- fn = name
- if leading:
- fn = split_leading_dir(name)[1]
- fn = os.path.join(location, fn)
- dir = os.path.dirname(fn)
- if fn.endswith('/') or fn.endswith('\\'):
- # A directory
- ensure_dir(fn)
- else:
- ensure_dir(dir)
- fp = open(fn, 'wb')
- try:
- fp.write(data)
- finally:
- fp.close()
- mode = info.external_attr >> 16
- # if mode and regular file and any execute permissions for
- # user/group/world?
- if mode and stat.S_ISREG(mode) and mode & 0o111:
- # make dest file have execute for user/group/world
- # (chmod +x) no-op on windows per python docs
- os.chmod(fn, (0o777 - current_umask() | 0o111))
- finally:
- zipfp.close()
-
-
-def untar_file(filename, location):
- """
- Untar the file (with path `filename`) to the destination `location`.
- All files are written based on system defaults and umask (i.e. permissions
- are not preserved), except that regular file members with any execute
- permissions (user, group, or world) have "chmod +x" applied after being
- written. Note that for windows, any execute changes using os.chmod are
- no-ops per the python docs.
- """
- ensure_dir(location)
- if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
- mode = 'r:gz'
- elif filename.lower().endswith(BZ2_EXTENSIONS):
- mode = 'r:bz2'
- elif filename.lower().endswith(XZ_EXTENSIONS):
- mode = 'r:xz'
- elif filename.lower().endswith('.tar'):
- mode = 'r'
- else:
- logger.warning(
- 'Cannot determine compression type for file %s', filename,
- )
- mode = 'r:*'
- tar = tarfile.open(filename, mode)
- try:
- # note: python<=2.5 doesn't seem to know about pax headers, filter them
- leading = has_leading_dir([
- member.name for member in tar.getmembers()
- if member.name != 'pax_global_header'
- ])
- for member in tar.getmembers():
- fn = member.name
- if fn == 'pax_global_header':
- continue
- if leading:
- fn = split_leading_dir(fn)[1]
- path = os.path.join(location, fn)
- if member.isdir():
- ensure_dir(path)
- elif member.issym():
- try:
- tar._extract_member(member, path)
- except Exception as exc:
- # Some corrupt tar files seem to produce this
- # (specifically bad symlinks)
- logger.warning(
- 'In the tar file %s the member %s is invalid: %s',
- filename, member.name, exc,
- )
- continue
- else:
- try:
- fp = tar.extractfile(member)
- except (KeyError, AttributeError) as exc:
- # Some corrupt tar files seem to produce this
- # (specifically bad symlinks)
- logger.warning(
- 'In the tar file %s the member %s is invalid: %s',
- filename, member.name, exc,
- )
- continue
- ensure_dir(os.path.dirname(path))
- with open(path, 'wb') as destfp:
- shutil.copyfileobj(fp, destfp)
- fp.close()
- # Update the timestamp (useful for cython compiled files)
- tar.utime(member, path)
- # member have any execute permissions for user/group/world?
- if member.mode & 0o111:
- # make dest file have execute for user/group/world
- # no-op on windows per python docs
- os.chmod(path, (0o777 - current_umask() | 0o111))
- finally:
- tar.close()
-
-
-def unpack_file(filename, location, content_type, link):
- filename = os.path.realpath(filename)
- if (content_type == 'application/zip' or
- filename.lower().endswith(ZIP_EXTENSIONS) or
- zipfile.is_zipfile(filename)):
- unzip_file(
- filename,
- location,
- flatten=not filename.endswith('.whl')
- )
- elif (content_type == 'application/x-gzip' or
- tarfile.is_tarfile(filename) or
- filename.lower().endswith(
- TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS)):
- untar_file(filename, location)
- elif (content_type and content_type.startswith('text/html') and
- is_svn_page(file_contents(filename))):
- # We don't really care about this
- from pip._internal.vcs.subversion import Subversion
- Subversion('svn+' + link.url).unpack(location)
- else:
- # FIXME: handle?
- # FIXME: magic signatures?
- logger.critical(
- 'Cannot unpack file %s (downloaded from %s, content-type: %s); '
- 'cannot detect archive format',
- filename, location, content_type,
- )
- raise InstallationError(
- 'Cannot determine archive format of %s' % location
- )
-
-
-def call_subprocess(cmd, show_stdout=True, cwd=None,
- on_returncode='raise',
- command_desc=None,
- extra_environ=None, unset_environ=None, spinner=None):
- """
- Args:
- unset_environ: an iterable of environment variable names to unset
- prior to calling subprocess.Popen().
- """
- if unset_environ is None:
- unset_environ = []
- # This function's handling of subprocess output is confusing and I
- # previously broke it terribly, so as penance I will write a long comment
- # explaining things.
- #
- # The obvious thing that affects output is the show_stdout=
- # kwarg. show_stdout=True means, let the subprocess write directly to our
- # stdout. Even though it is nominally the default, it is almost never used
- # inside pip (and should not be used in new code without a very good
- # reason); as of 2016-02-22 it is only used in a few places inside the VCS
- # wrapper code. Ideally we should get rid of it entirely, because it
- # creates a lot of complexity here for a rarely used feature.
- #
- # Most places in pip set show_stdout=False. What this means is:
- # - We connect the child stdout to a pipe, which we read.
- # - By default, we hide the output but show a spinner -- unless the
- # subprocess exits with an error, in which case we show the output.
- # - If the --verbose option was passed (= loglevel is DEBUG), then we show
- # the output unconditionally. (But in this case we don't want to show
- # the output a second time if it turns out that there was an error.)
- #
- # stderr is always merged with stdout (even if show_stdout=True).
- if show_stdout:
- stdout = None
- else:
- stdout = subprocess.PIPE
- if command_desc is None:
- cmd_parts = []
- for part in cmd:
- if ' ' in part or '\n' in part or '"' in part or "'" in part:
- part = '"%s"' % part.replace('"', '\\"')
- cmd_parts.append(part)
- command_desc = ' '.join(cmd_parts)
- logger.debug("Running command %s", command_desc)
- env = os.environ.copy()
- if extra_environ:
- env.update(extra_environ)
- for name in unset_environ:
- env.pop(name, None)
- try:
- proc = subprocess.Popen(
- cmd, stderr=subprocess.STDOUT, stdin=subprocess.PIPE,
- stdout=stdout, cwd=cwd, env=env,
- )
- proc.stdin.close()
- except Exception as exc:
- logger.critical(
- "Error %s while executing command %s", exc, command_desc,
- )
- raise
- all_output = []
- if stdout is not None:
- while True:
- line = console_to_str(proc.stdout.readline())
- if not line:
- break
- line = line.rstrip()
- all_output.append(line + '\n')
- if logger.getEffectiveLevel() <= std_logging.DEBUG:
- # Show the line immediately
- logger.debug(line)
- else:
- # Update the spinner
- if spinner is not None:
- spinner.spin()
- try:
- proc.wait()
- finally:
- if proc.stdout:
- proc.stdout.close()
- if spinner is not None:
- if proc.returncode:
- spinner.finish("error")
- else:
- spinner.finish("done")
- if proc.returncode:
- if on_returncode == 'raise':
- if (logger.getEffectiveLevel() > std_logging.DEBUG and
- not show_stdout):
- logger.info(
- 'Complete output from command %s:', command_desc,
- )
- logger.info(
- ''.join(all_output) +
- '\n----------------------------------------'
- )
- raise InstallationError(
- 'Command "%s" failed with error code %s in %s'
- % (command_desc, proc.returncode, cwd))
- elif on_returncode == 'warn':
- logger.warning(
- 'Command "%s" had error code %s in %s',
- command_desc, proc.returncode, cwd,
- )
- elif on_returncode == 'ignore':
- pass
- else:
- raise ValueError('Invalid value: on_returncode=%s' %
- repr(on_returncode))
- if not show_stdout:
- return ''.join(all_output)
-
-
-def read_text_file(filename):
- """Return the contents of *filename*.
-
- Try to decode the file contents with utf-8, the preferred system encoding
- (e.g., cp1252 on some Windows machines), and latin1, in that order.
- Decoding a byte string with latin1 will never raise an error. In the worst
- case, the returned string will contain some garbage characters.
-
- """
- with open(filename, 'rb') as fp:
- data = fp.read()
-
- encodings = ['utf-8', locale.getpreferredencoding(False), 'latin1']
- for enc in encodings:
- try:
- data = data.decode(enc)
- except UnicodeDecodeError:
- continue
- break
-
- assert type(data) != bytes # Latin1 should have worked.
- return data
-
-
-def _make_build_dir(build_dir):
- os.makedirs(build_dir)
- write_delete_marker_file(build_dir)
-
-
-class FakeFile(object):
- """Wrap a list of lines in an object with readline() to make
- ConfigParser happy."""
- def __init__(self, lines):
- self._gen = (l for l in lines)
-
- def readline(self):
- try:
- try:
- return next(self._gen)
- except NameError:
- return self._gen.next()
- except StopIteration:
- return ''
-
- def __iter__(self):
- return self._gen
-
-
-class StreamWrapper(StringIO):
-
- @classmethod
- def from_stream(cls, orig_stream):
- cls.orig_stream = orig_stream
- return cls()
-
- # compileall.compile_dir() needs stdout.encoding to print to stdout
- @property
- def encoding(self):
- return self.orig_stream.encoding
-
-
-@contextlib.contextmanager
-def captured_output(stream_name):
- """Return a context manager used by captured_stdout/stdin/stderr
- that temporarily replaces the sys stream *stream_name* with a StringIO.
-
- Taken from Lib/support/__init__.py in the CPython repo.
- """
- orig_stdout = getattr(sys, stream_name)
- setattr(sys, stream_name, StreamWrapper.from_stream(orig_stdout))
- try:
- yield getattr(sys, stream_name)
- finally:
- setattr(sys, stream_name, orig_stdout)
-
-
-def captured_stdout():
- """Capture the output of sys.stdout:
-
- with captured_stdout() as stdout:
- print('hello')
- self.assertEqual(stdout.getvalue(), 'hello\n')
-
- Taken from Lib/support/__init__.py in the CPython repo.
- """
- return captured_output('stdout')
-
-
-class cached_property(object):
- """A property that is only computed once per instance and then replaces
- itself with an ordinary attribute. Deleting the attribute resets the
- property.
-
- Source: https://github.com/bottlepy/bottle/blob/0.11.5/bottle.py#L175
- """
-
- def __init__(self, func):
- self.__doc__ = getattr(func, '__doc__')
- self.func = func
-
- def __get__(self, obj, cls):
- if obj is None:
- # We're being accessed from the class itself, not from an object
- return self
- value = obj.__dict__[self.func.__name__] = self.func(obj)
- return value
-
-
-def get_installed_version(dist_name, lookup_dirs=None):
- """Get the installed version of dist_name avoiding pkg_resources cache"""
- # Create a requirement that we'll look for inside of setuptools.
- req = pkg_resources.Requirement.parse(dist_name)
-
- # We want to avoid having this cached, so we need to construct a new
- # working set each time.
- if lookup_dirs is None:
- working_set = pkg_resources.WorkingSet()
- else:
- working_set = pkg_resources.WorkingSet(lookup_dirs)
-
- # Get the installed distribution from our working set
- dist = working_set.find(req)
-
- # Check to see if we got an installed distribution or not, if we did
- # we want to return it's version.
- return dist.version if dist else None
-
-
-def consume(iterator):
- """Consume an iterable at C speed."""
- deque(iterator, maxlen=0)
-
-
-# Simulates an enum
-def enum(*sequential, **named):
- enums = dict(zip(sequential, range(len(sequential))), **named)
- reverse = {value: key for key, value in enums.items()}
- enums['reverse_mapping'] = reverse
- return type('Enum', (), enums)
+from __future__ import absolute_import
+
+import contextlib
+import errno
+import io
+import locale
+# we have a submodule named 'logging' which would shadow this if we used the
+# regular name:
+import logging as std_logging
+import os
+import posixpath
+import re
+import shutil
+import stat
+import subprocess
+import sys
+import tarfile
+import zipfile
+from collections import deque
+
+from pip._vendor import pkg_resources
+# NOTE: retrying is not annotated in typeshed as on 2017-07-17, which is
+# why we ignore the type on this import.
+from pip._vendor.retrying import retry # type: ignore
+from pip._vendor.six import PY2
+from pip._vendor.six.moves import input
+
+from pip._internal.compat import console_to_str, expanduser, stdlib_pkgs
+from pip._internal.exceptions import InstallationError
+from pip._internal.locations import (
+ running_under_virtualenv, site_packages, user_site, virtualenv_no_global,
+ write_delete_marker_file,
+)
+
+if PY2:
+ from io import BytesIO as StringIO
+else:
+ from io import StringIO
+
+__all__ = ['rmtree', 'display_path', 'backup_dir',
+ 'ask', 'splitext',
+ 'format_size', 'is_installable_dir',
+ 'is_svn_page', 'file_contents',
+ 'split_leading_dir', 'has_leading_dir',
+ 'normalize_path',
+ 'renames', 'get_prog',
+ 'unzip_file', 'untar_file', 'unpack_file', 'call_subprocess',
+ 'captured_stdout', 'ensure_dir',
+ 'ARCHIVE_EXTENSIONS', 'SUPPORTED_EXTENSIONS',
+ 'get_installed_version']
+
+
+logger = std_logging.getLogger(__name__)
+
+BZ2_EXTENSIONS = ('.tar.bz2', '.tbz')
+XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz', '.tar.lz', '.tar.lzma')
+ZIP_EXTENSIONS = ('.zip', '.whl')
+TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar')
+ARCHIVE_EXTENSIONS = (
+ ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS)
+SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS
+try:
+ import bz2 # noqa
+ SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS
+except ImportError:
+ logger.debug('bz2 module is not available')
+
+try:
+ # Only for Python 3.3+
+ import lzma # noqa
+ SUPPORTED_EXTENSIONS += XZ_EXTENSIONS
+except ImportError:
+ logger.debug('lzma module is not available')
+
+
+def import_or_raise(pkg_or_module_string, ExceptionType, *args, **kwargs):
+ try:
+ return __import__(pkg_or_module_string)
+ except ImportError:
+ raise ExceptionType(*args, **kwargs)
+
+
+def ensure_dir(path):
+ """os.makedirs without raising on EEXIST."""
+ try:
+ os.makedirs(path)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+
+
+def get_prog():
+ try:
+ prog = os.path.basename(sys.argv[0])
+ if prog in ('__main__.py', '-c'):
+ return "%s -m pip" % sys.executable
+ else:
+ return prog
+ except (AttributeError, TypeError, IndexError):
+ pass
+ return 'pip'
+
+
+# Retry every half second for up to 3 seconds
+@retry(stop_max_delay=3000, wait_fixed=500)
+def rmtree(dir, ignore_errors=False):
+ shutil.rmtree(dir, ignore_errors=ignore_errors,
+ onerror=rmtree_errorhandler)
+
+
+def rmtree_errorhandler(func, path, exc_info):
+ """On Windows, the files in .svn are read-only, so when rmtree() tries to
+ remove them, an exception is thrown. We catch that here, remove the
+ read-only attribute, and hopefully continue without problems."""
+ # if file type currently read only
+ if os.stat(path).st_mode & stat.S_IREAD:
+ # convert to read/write
+ os.chmod(path, stat.S_IWRITE)
+ # use the original function to repeat the operation
+ func(path)
+ return
+ else:
+ raise
+
+
+def display_path(path):
+ """Gives the display value for a given path, making it relative to cwd
+ if possible."""
+ path = os.path.normcase(os.path.abspath(path))
+ if sys.version_info[0] == 2:
+ path = path.decode(sys.getfilesystemencoding(), 'replace')
+ path = path.encode(sys.getdefaultencoding(), 'replace')
+ if path.startswith(os.getcwd() + os.path.sep):
+ path = '.' + path[len(os.getcwd()):]
+ return path
+
+
+def backup_dir(dir, ext='.bak'):
+ """Figure out the name of a directory to back up the given dir to
+ (adding .bak, .bak2, etc)"""
+ n = 1
+ extension = ext
+ while os.path.exists(dir + extension):
+ n += 1
+ extension = ext + str(n)
+ return dir + extension
+
+
+def ask_path_exists(message, options):
+ for action in os.environ.get('PIP_EXISTS_ACTION', '').split():
+ if action in options:
+ return action
+ return ask(message, options)
+
+
+def ask(message, options):
+ """Ask the message interactively, with the given possible responses"""
+ while 1:
+ if os.environ.get('PIP_NO_INPUT'):
+ raise Exception(
+ 'No input was expected ($PIP_NO_INPUT set); question: %s' %
+ message
+ )
+ response = input(message)
+ response = response.strip().lower()
+ if response not in options:
+ print(
+ 'Your response (%r) was not one of the expected responses: '
+ '%s' % (response, ', '.join(options))
+ )
+ else:
+ return response
+
+
+def format_size(bytes):
+ if bytes > 1000 * 1000:
+ return '%.1fMB' % (bytes / 1000.0 / 1000)
+ elif bytes > 10 * 1000:
+ return '%ikB' % (bytes / 1000)
+ elif bytes > 1000:
+ return '%.1fkB' % (bytes / 1000.0)
+ else:
+ return '%ibytes' % bytes
+
+
+def is_installable_dir(path):
+ """Return True if `path` is a directory containing a setup.py file."""
+ if not os.path.isdir(path):
+ return False
+ setup_py = os.path.join(path, 'setup.py')
+ if os.path.isfile(setup_py):
+ return True
+ return False
+
+
+def is_svn_page(html):
+ """
+ Returns true if the page appears to be the index page of an svn repository
+ """
+ return (re.search(r'<title>[^<]*Revision \d+:', html) and
+ re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I))
+
+
+def file_contents(filename):
+ with open(filename, 'rb') as fp:
+ return fp.read().decode('utf-8')
+
+
+def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE):
+ """Yield pieces of data from a file-like object until EOF."""
+ while True:
+ chunk = file.read(size)
+ if not chunk:
+ break
+ yield chunk
+
+
+def split_leading_dir(path):
+ path = path.lstrip('/').lstrip('\\')
+ if '/' in path and (('\\' in path and path.find('/') < path.find('\\')) or
+ '\\' not in path):
+ return path.split('/', 1)
+ elif '\\' in path:
+ return path.split('\\', 1)
+ else:
+ return path, ''
+
+
+def has_leading_dir(paths):
+ """Returns true if all the paths have the same leading path name
+ (i.e., everything is in one subdirectory in an archive)"""
+ common_prefix = None
+ for path in paths:
+ prefix, rest = split_leading_dir(path)
+ if not prefix:
+ return False
+ elif common_prefix is None:
+ common_prefix = prefix
+ elif prefix != common_prefix:
+ return False
+ return True
+
+
+def normalize_path(path, resolve_symlinks=True):
+ """
+ Convert a path to its canonical, case-normalized, absolute version.
+
+ """
+ path = expanduser(path)
+ if resolve_symlinks:
+ path = os.path.realpath(path)
+ else:
+ path = os.path.abspath(path)
+ return os.path.normcase(path)
+
+
+def splitext(path):
+ """Like os.path.splitext, but take off .tar too"""
+ base, ext = posixpath.splitext(path)
+ if base.lower().endswith('.tar'):
+ ext = base[-4:] + ext
+ base = base[:-4]
+ return base, ext
+
+
+def renames(old, new):
+ """Like os.renames(), but handles renaming across devices."""
+ # Implementation borrowed from os.renames().
+ head, tail = os.path.split(new)
+ if head and tail and not os.path.exists(head):
+ os.makedirs(head)
+
+ shutil.move(old, new)
+
+ head, tail = os.path.split(old)
+ if head and tail:
+ try:
+ os.removedirs(head)
+ except OSError:
+ pass
+
+
+def is_local(path):
+ """
+ Return True if path is within sys.prefix, if we're running in a virtualenv.
+
+ If we're not in a virtualenv, all paths are considered "local."
+
+ """
+ if not running_under_virtualenv():
+ return True
+ return normalize_path(path).startswith(normalize_path(sys.prefix))
+
+
+def dist_is_local(dist):
+ """
+ Return True if given Distribution object is installed locally
+ (i.e. within current virtualenv).
+
+ Always True if we're not in a virtualenv.
+
+ """
+ return is_local(dist_location(dist))
+
+
+def dist_in_usersite(dist):
+ """
+ Return True if given Distribution is installed in user site.
+ """
+ norm_path = normalize_path(dist_location(dist))
+ return norm_path.startswith(normalize_path(user_site))
+
+
+def dist_in_site_packages(dist):
+ """
+ Return True if given Distribution is installed in
+ sysconfig.get_python_lib().
+ """
+ return normalize_path(
+ dist_location(dist)
+ ).startswith(normalize_path(site_packages))
+
+
+def dist_is_editable(dist):
+ """Is distribution an editable install?"""
+ for path_item in sys.path:
+ egg_link = os.path.join(path_item, dist.project_name + '.egg-link')
+ if os.path.isfile(egg_link):
+ return True
+ return False
+
+
+def get_installed_distributions(local_only=True,
+ skip=stdlib_pkgs,
+ include_editables=True,
+ editables_only=False,
+ user_only=False):
+ """
+ Return a list of installed Distribution objects.
+
+ If ``local_only`` is True (default), only return installations
+ local to the current virtualenv, if in a virtualenv.
+
+ The ``skip`` argument is an iterable of lower-case project names to
+ ignore; it defaults to stdlib_pkgs.
+
+ If ``include_editables`` is False, don't report editables.
+
+ If ``editables_only`` is True, only report editables.
+
+ If ``user_only`` is True, only report installations in the user
+ site directory.
+
+ """
+ if local_only:
+ local_test = dist_is_local
+ else:
+ def local_test(d):
+ return True
+
+ if include_editables:
+ def editable_test(d):
+ return True
+ else:
+ def editable_test(d):
+ return not dist_is_editable(d)
+
+ if editables_only:
+ def editables_only_test(d):
+ return dist_is_editable(d)
+ else:
+ def editables_only_test(d):
+ return True
+
+ if user_only:
+ user_test = dist_in_usersite
+ else:
+ def user_test(d):
+ return True
+
+ return [d for d in pkg_resources.working_set
+ if local_test(d) and
+ d.key not in skip and
+ editable_test(d) and
+ editables_only_test(d) and
+ user_test(d)
+ ]
+
+
+def egg_link_path(dist):
+ """
+ Return the path for the .egg-link file if it exists, otherwise, None.
+
+ There are three scenarios:
+ 1) not in a virtualenv
+ try to find in site.USER_SITE, then site_packages
+ 2) in a no-global virtualenv
+ try to find in site_packages
+ 3) in a yes-global virtualenv
+ try to find in site_packages, then site.USER_SITE
+ (don't look in global location)
+
+ For #1 and #3, there could be odd cases, where there's an egg-link in 2
+ locations.
+
+ This method will just return the first one found.
+ """
+ sites = []
+ if running_under_virtualenv():
+ if virtualenv_no_global():
+ sites.append(site_packages)
+ else:
+ sites.append(site_packages)
+ if user_site:
+ sites.append(user_site)
+ else:
+ if user_site:
+ sites.append(user_site)
+ sites.append(site_packages)
+
+ for site in sites:
+ egglink = os.path.join(site, dist.project_name) + '.egg-link'
+ if os.path.isfile(egglink):
+ return egglink
+
+
+def dist_location(dist):
+ """
+ Get the site-packages location of this distribution. Generally
+ this is dist.location, except in the case of develop-installed
+ packages, where dist.location is the source code location, and we
+ want to know where the egg-link file is.
+
+ """
+ egg_link = egg_link_path(dist)
+ if egg_link:
+ return egg_link
+ return dist.location
+
+
+def current_umask():
+ """Get the current umask which involves having to set it temporarily."""
+ mask = os.umask(0)
+ os.umask(mask)
+ return mask
+
+
+def unzip_file(filename, location, flatten=True):
+ """
+ Unzip the file (with path `filename`) to the destination `location`. All
+ files are written based on system defaults and umask (i.e. permissions are
+ not preserved), except that regular file members with any execute
+ permissions (user, group, or world) have "chmod +x" applied after being
+ written. Note that for windows, any execute changes using os.chmod are
+ no-ops per the python docs.
+ """
+ ensure_dir(location)
+ zipfp = open(filename, 'rb')
+ try:
+ zip = zipfile.ZipFile(zipfp, allowZip64=True)
+ leading = has_leading_dir(zip.namelist()) and flatten
+ for info in zip.infolist():
+ name = info.filename
+ data = zip.read(name)
+ fn = name
+ if leading:
+ fn = split_leading_dir(name)[1]
+ fn = os.path.join(location, fn)
+ dir = os.path.dirname(fn)
+ if fn.endswith('/') or fn.endswith('\\'):
+ # A directory
+ ensure_dir(fn)
+ else:
+ ensure_dir(dir)
+ fp = open(fn, 'wb')
+ try:
+ fp.write(data)
+ finally:
+ fp.close()
+ mode = info.external_attr >> 16
+ # if mode and regular file and any execute permissions for
+ # user/group/world?
+ if mode and stat.S_ISREG(mode) and mode & 0o111:
+ # make dest file have execute for user/group/world
+ # (chmod +x) no-op on windows per python docs
+ os.chmod(fn, (0o777 - current_umask() | 0o111))
+ finally:
+ zipfp.close()
+
+
+def untar_file(filename, location):
+ """
+ Untar the file (with path `filename`) to the destination `location`.
+ All files are written based on system defaults and umask (i.e. permissions
+ are not preserved), except that regular file members with any execute
+ permissions (user, group, or world) have "chmod +x" applied after being
+ written. Note that for windows, any execute changes using os.chmod are
+ no-ops per the python docs.
+ """
+ ensure_dir(location)
+ if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
+ mode = 'r:gz'
+ elif filename.lower().endswith(BZ2_EXTENSIONS):
+ mode = 'r:bz2'
+ elif filename.lower().endswith(XZ_EXTENSIONS):
+ mode = 'r:xz'
+ elif filename.lower().endswith('.tar'):
+ mode = 'r'
+ else:
+ logger.warning(
+ 'Cannot determine compression type for file %s', filename,
+ )
+ mode = 'r:*'
+ tar = tarfile.open(filename, mode)
+ try:
+ # note: python<=2.5 doesn't seem to know about pax headers, filter them
+ leading = has_leading_dir([
+ member.name for member in tar.getmembers()
+ if member.name != 'pax_global_header'
+ ])
+ for member in tar.getmembers():
+ fn = member.name
+ if fn == 'pax_global_header':
+ continue
+ if leading:
+ fn = split_leading_dir(fn)[1]
+ path = os.path.join(location, fn)
+ if member.isdir():
+ ensure_dir(path)
+ elif member.issym():
+ try:
+ tar._extract_member(member, path)
+ except Exception as exc:
+ # Some corrupt tar files seem to produce this
+ # (specifically bad symlinks)
+ logger.warning(
+ 'In the tar file %s the member %s is invalid: %s',
+ filename, member.name, exc,
+ )
+ continue
+ else:
+ try:
+ fp = tar.extractfile(member)
+ except (KeyError, AttributeError) as exc:
+ # Some corrupt tar files seem to produce this
+ # (specifically bad symlinks)
+ logger.warning(
+ 'In the tar file %s the member %s is invalid: %s',
+ filename, member.name, exc,
+ )
+ continue
+ ensure_dir(os.path.dirname(path))
+ with open(path, 'wb') as destfp:
+ shutil.copyfileobj(fp, destfp)
+ fp.close()
+ # Update the timestamp (useful for cython compiled files)
+ tar.utime(member, path)
+ # does the member have any execute permissions for user/group/world?
+ if member.mode & 0o111:
+ # make dest file have execute for user/group/world
+ # no-op on windows per python docs
+ os.chmod(path, (0o777 - current_umask() | 0o111))
+ finally:
+ tar.close()
+
+
+def unpack_file(filename, location, content_type, link):
+ filename = os.path.realpath(filename)
+ if (content_type == 'application/zip' or
+ filename.lower().endswith(ZIP_EXTENSIONS) or
+ zipfile.is_zipfile(filename)):
+ unzip_file(
+ filename,
+ location,
+ flatten=not filename.endswith('.whl')
+ )
+ elif (content_type == 'application/x-gzip' or
+ tarfile.is_tarfile(filename) or
+ filename.lower().endswith(
+ TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS)):
+ untar_file(filename, location)
+ elif (content_type and content_type.startswith('text/html') and
+ is_svn_page(file_contents(filename))):
+ # We don't really care about this
+ from pip._internal.vcs.subversion import Subversion
+ Subversion('svn+' + link.url).unpack(location)
+ else:
+ # FIXME: handle?
+ # FIXME: magic signatures?
+ logger.critical(
+ 'Cannot unpack file %s (downloaded from %s, content-type: %s); '
+ 'cannot detect archive format',
+ filename, location, content_type,
+ )
+ raise InstallationError(
+ 'Cannot determine archive format of %s' % location
+ )
+
+
+def call_subprocess(cmd, show_stdout=True, cwd=None,
+ on_returncode='raise',
+ command_desc=None,
+ extra_environ=None, unset_environ=None, spinner=None):
+ """
+ Args:
+ unset_environ: an iterable of environment variable names to unset
+ prior to calling subprocess.Popen().
+ """
+ if unset_environ is None:
+ unset_environ = []
+ # This function's handling of subprocess output is confusing and I
+ # previously broke it terribly, so as penance I will write a long comment
+ # explaining things.
+ #
+ # The obvious thing that affects output is the show_stdout=
+ # kwarg. show_stdout=True means, let the subprocess write directly to our
+ # stdout. Even though it is nominally the default, it is almost never used
+ # inside pip (and should not be used in new code without a very good
+ # reason); as of 2016-02-22 it is only used in a few places inside the VCS
+ # wrapper code. Ideally we should get rid of it entirely, because it
+ # creates a lot of complexity here for a rarely used feature.
+ #
+ # Most places in pip set show_stdout=False. What this means is:
+ # - We connect the child stdout to a pipe, which we read.
+ # - By default, we hide the output but show a spinner -- unless the
+ # subprocess exits with an error, in which case we show the output.
+ # - If the --verbose option was passed (= loglevel is DEBUG), then we show
+ # the output unconditionally. (But in this case we don't want to show
+ # the output a second time if it turns out that there was an error.)
+ #
+ # stderr is always merged with stdout (even if show_stdout=True).
+ if show_stdout:
+ stdout = None
+ else:
+ stdout = subprocess.PIPE
+ if command_desc is None:
+ cmd_parts = []
+ for part in cmd:
+ if ' ' in part or '\n' in part or '"' in part or "'" in part:
+ part = '"%s"' % part.replace('"', '\\"')
+ cmd_parts.append(part)
+ command_desc = ' '.join(cmd_parts)
+ logger.debug("Running command %s", command_desc)
+ env = os.environ.copy()
+ if extra_environ:
+ env.update(extra_environ)
+ for name in unset_environ:
+ env.pop(name, None)
+ try:
+ proc = subprocess.Popen(
+ cmd, stderr=subprocess.STDOUT, stdin=subprocess.PIPE,
+ stdout=stdout, cwd=cwd, env=env,
+ )
+ proc.stdin.close()
+ except Exception as exc:
+ logger.critical(
+ "Error %s while executing command %s", exc, command_desc,
+ )
+ raise
+ all_output = []
+ if stdout is not None:
+ while True:
+ line = console_to_str(proc.stdout.readline())
+ if not line:
+ break
+ line = line.rstrip()
+ all_output.append(line + '\n')
+ if logger.getEffectiveLevel() <= std_logging.DEBUG:
+ # Show the line immediately
+ logger.debug(line)
+ else:
+ # Update the spinner
+ if spinner is not None:
+ spinner.spin()
+ try:
+ proc.wait()
+ finally:
+ if proc.stdout:
+ proc.stdout.close()
+ if spinner is not None:
+ if proc.returncode:
+ spinner.finish("error")
+ else:
+ spinner.finish("done")
+ if proc.returncode:
+ if on_returncode == 'raise':
+ if (logger.getEffectiveLevel() > std_logging.DEBUG and
+ not show_stdout):
+ logger.info(
+ 'Complete output from command %s:', command_desc,
+ )
+ logger.info(
+ ''.join(all_output) +
+ '\n----------------------------------------'
+ )
+ raise InstallationError(
+ 'Command "%s" failed with error code %s in %s'
+ % (command_desc, proc.returncode, cwd))
+ elif on_returncode == 'warn':
+ logger.warning(
+ 'Command "%s" had error code %s in %s',
+ command_desc, proc.returncode, cwd,
+ )
+ elif on_returncode == 'ignore':
+ pass
+ else:
+ raise ValueError('Invalid value: on_returncode=%s' %
+ repr(on_returncode))
+ if not show_stdout:
+ return ''.join(all_output)
+
+
+def read_text_file(filename):
+ """Return the contents of *filename*.
+
+ Try to decode the file contents with utf-8, the preferred system encoding
+ (e.g., cp1252 on some Windows machines), and latin1, in that order.
+ Decoding a byte string with latin1 will never raise an error. In the worst
+ case, the returned string will contain some garbage characters.
+
+ """
+ with open(filename, 'rb') as fp:
+ data = fp.read()
+
+ encodings = ['utf-8', locale.getpreferredencoding(False), 'latin1']
+ for enc in encodings:
+ try:
+ data = data.decode(enc)
+ except UnicodeDecodeError:
+ continue
+ break
+
+ assert type(data) != bytes # Latin1 should have worked.
+ return data
+
+
+def _make_build_dir(build_dir):
+ os.makedirs(build_dir)
+ write_delete_marker_file(build_dir)
+
+
+class FakeFile(object):
+ """Wrap a list of lines in an object with readline() to make
+ ConfigParser happy."""
+ def __init__(self, lines):
+ self._gen = (l for l in lines)
+
+ def readline(self):
+ try:
+ try:
+ return next(self._gen)
+ except NameError:
+ return self._gen.next()
+ except StopIteration:
+ return ''
+
+ def __iter__(self):
+ return self._gen
+
+
+class StreamWrapper(StringIO):
+
+ @classmethod
+ def from_stream(cls, orig_stream):
+ cls.orig_stream = orig_stream
+ return cls()
+
+ # compileall.compile_dir() needs stdout.encoding to print to stdout
+ @property
+ def encoding(self):
+ return self.orig_stream.encoding
+
+
+@contextlib.contextmanager
+def captured_output(stream_name):
+ """Return a context manager used by captured_stdout/stdin/stderr
+ that temporarily replaces the sys stream *stream_name* with a StringIO.
+
+ Taken from Lib/support/__init__.py in the CPython repo.
+ """
+ orig_stdout = getattr(sys, stream_name)
+ setattr(sys, stream_name, StreamWrapper.from_stream(orig_stdout))
+ try:
+ yield getattr(sys, stream_name)
+ finally:
+ setattr(sys, stream_name, orig_stdout)
+
+
+def captured_stdout():
+ """Capture the output of sys.stdout:
+
+ with captured_stdout() as stdout:
+ print('hello')
+ self.assertEqual(stdout.getvalue(), 'hello\n')
+
+ Taken from Lib/support/__init__.py in the CPython repo.
+ """
+ return captured_output('stdout')
+
+
+class cached_property(object):
+ """A property that is only computed once per instance and then replaces
+ itself with an ordinary attribute. Deleting the attribute resets the
+ property.
+
+ Source: https://github.com/bottlepy/bottle/blob/0.11.5/bottle.py#L175
+ """
+
+ def __init__(self, func):
+ self.__doc__ = getattr(func, '__doc__')
+ self.func = func
+
+ def __get__(self, obj, cls):
+ if obj is None:
+ # We're being accessed from the class itself, not from an object
+ return self
+ value = obj.__dict__[self.func.__name__] = self.func(obj)
+ return value
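+
+# Usage sketch (illustrative; Repo and expensive_lookup are hypothetical).
+# The first access computes the value and stores it in the instance's
+# __dict__, so later accesses never reach the descriptor:
+#
+#     class Repo(object):
+#         @cached_property
+#         def revision(self):
+#             return expensive_lookup()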
+
+
+def get_installed_version(dist_name, lookup_dirs=None):
+ """Get the installed version of dist_name avoiding pkg_resources cache"""
+ # Create a requirement that we'll look for inside of setuptools.
+ req = pkg_resources.Requirement.parse(dist_name)
+
+ # We want to avoid having this cached, so we need to construct a new
+ # working set each time.
+ if lookup_dirs is None:
+ working_set = pkg_resources.WorkingSet()
+ else:
+ working_set = pkg_resources.WorkingSet(lookup_dirs)
+
+ # Get the installed distribution from our working set
+ dist = working_set.find(req)
+
+    # Check to see if we got an installed distribution or not; if we did,
+    # we want to return its version.
+ return dist.version if dist else None
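+
+# Usage sketch (illustrative):
+#
+#     version = get_installed_version('setuptools')  # e.g. '39.0.1', or
+#                                                    # None if not installed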
+
+
+def consume(iterator):
+ """Consume an iterable at C speed."""
+ deque(iterator, maxlen=0)
+
+
+# Simulates an enum
+def enum(*sequential, **named):
+ enums = dict(zip(sequential, range(len(sequential))), **named)
+ reverse = {value: key for key, value in enums.items()}
+ enums['reverse_mapping'] = reverse
+ return type('Enum', (), enums)
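+
+# Usage sketch (illustrative, not pip code):
+#
+#     Color = enum('RED', 'GREEN', BLUE=10)
+#     Color.RED                    # 0
+#     Color.reverse_mapping[10]    # 'BLUE'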
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/outdated.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/outdated.py
index f8f6466..f4572ab 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/outdated.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/outdated.py
@@ -1,163 +1,163 @@
-from __future__ import absolute_import
-
-import datetime
-import json
-import logging
-import os.path
-import sys
-
-from pip._vendor import lockfile
-from pip._vendor.packaging import version as packaging_version
-
-from pip._internal.compat import WINDOWS
-from pip._internal.index import PackageFinder
-from pip._internal.locations import USER_CACHE_DIR, running_under_virtualenv
-from pip._internal.utils.filesystem import check_path_owner
-from pip._internal.utils.misc import ensure_dir, get_installed_version
-
-SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"
-
-
-logger = logging.getLogger(__name__)
-
-
-class VirtualenvSelfCheckState(object):
- def __init__(self):
- self.statefile_path = os.path.join(sys.prefix, "pip-selfcheck.json")
-
- # Load the existing state
- try:
- with open(self.statefile_path) as statefile:
- self.state = json.load(statefile)
- except (IOError, ValueError):
- self.state = {}
-
- def save(self, pypi_version, current_time):
- # Attempt to write out our version check file
- with open(self.statefile_path, "w") as statefile:
- json.dump(
- {
- "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
- "pypi_version": pypi_version,
- },
- statefile,
- sort_keys=True,
- separators=(",", ":")
- )
-
-
-class GlobalSelfCheckState(object):
- def __init__(self):
- self.statefile_path = os.path.join(USER_CACHE_DIR, "selfcheck.json")
-
- # Load the existing state
- try:
- with open(self.statefile_path) as statefile:
- self.state = json.load(statefile)[sys.prefix]
- except (IOError, ValueError, KeyError):
- self.state = {}
-
- def save(self, pypi_version, current_time):
- # Check to make sure that we own the directory
- if not check_path_owner(os.path.dirname(self.statefile_path)):
- return
-
- # Now that we've ensured the directory is owned by this user, we'll go
- # ahead and make sure that all our directories are created.
- ensure_dir(os.path.dirname(self.statefile_path))
-
- # Attempt to write out our version check file
- with lockfile.LockFile(self.statefile_path):
- if os.path.exists(self.statefile_path):
- with open(self.statefile_path) as statefile:
- state = json.load(statefile)
- else:
- state = {}
-
- state[sys.prefix] = {
- "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
- "pypi_version": pypi_version,
- }
-
- with open(self.statefile_path, "w") as statefile:
- json.dump(state, statefile, sort_keys=True,
- separators=(",", ":"))
-
-
-def load_selfcheck_statefile():
- if running_under_virtualenv():
- return VirtualenvSelfCheckState()
- else:
- return GlobalSelfCheckState()
-
-
-def pip_version_check(session, options):
- """Check for an update for pip.
-
- Limit the frequency of checks to once per week. State is stored either in
- the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix
- of the pip script path.
- """
- installed_version = get_installed_version("pip")
- if not installed_version:
- return
-
- pip_version = packaging_version.parse(installed_version)
- pypi_version = None
-
- try:
- state = load_selfcheck_statefile()
-
- current_time = datetime.datetime.utcnow()
- # Determine if we need to refresh the state
- if "last_check" in state.state and "pypi_version" in state.state:
- last_check = datetime.datetime.strptime(
- state.state["last_check"],
- SELFCHECK_DATE_FMT
- )
- if (current_time - last_check).total_seconds() < 7 * 24 * 60 * 60:
- pypi_version = state.state["pypi_version"]
-
- # Refresh the version if we need to or just see if we need to warn
- if pypi_version is None:
- # Lets use PackageFinder to see what the latest pip version is
- finder = PackageFinder(
- find_links=options.find_links,
- index_urls=[options.index_url] + options.extra_index_urls,
- allow_all_prereleases=False, # Explicitly set to False
- trusted_hosts=options.trusted_hosts,
- process_dependency_links=options.process_dependency_links,
- session=session,
- )
- all_candidates = finder.find_all_candidates("pip")
- if not all_candidates:
- return
- pypi_version = str(
- max(all_candidates, key=lambda c: c.version).version
- )
-
- # save that we've performed a check
- state.save(pypi_version, current_time)
-
- remote_version = packaging_version.parse(pypi_version)
-
- # Determine if our pypi_version is older
- if (pip_version < remote_version and
- pip_version.base_version != remote_version.base_version):
- # Advise "python -m pip" on Windows to avoid issues
- # with overwriting pip.exe.
- if WINDOWS:
- pip_cmd = "python -m pip"
- else:
- pip_cmd = "pip"
- logger.warning(
- "You are using pip version %s, however version %s is "
- "available.\nYou should consider upgrading via the "
- "'%s install --upgrade pip' command.",
- pip_version, pypi_version, pip_cmd
- )
- except Exception:
- logger.debug(
- "There was an error checking the latest version of pip",
- exc_info=True,
- )
+from __future__ import absolute_import
+
+import datetime
+import json
+import logging
+import os.path
+import sys
+
+from pip._vendor import lockfile
+from pip._vendor.packaging import version as packaging_version
+
+from pip._internal.compat import WINDOWS
+from pip._internal.index import PackageFinder
+from pip._internal.locations import USER_CACHE_DIR, running_under_virtualenv
+from pip._internal.utils.filesystem import check_path_owner
+from pip._internal.utils.misc import ensure_dir, get_installed_version
+
+SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"
+
+
+logger = logging.getLogger(__name__)
+
+
+class VirtualenvSelfCheckState(object):
+ def __init__(self):
+ self.statefile_path = os.path.join(sys.prefix, "pip-selfcheck.json")
+
+ # Load the existing state
+ try:
+ with open(self.statefile_path) as statefile:
+ self.state = json.load(statefile)
+ except (IOError, ValueError):
+ self.state = {}
+
+ def save(self, pypi_version, current_time):
+ # Attempt to write out our version check file
+ with open(self.statefile_path, "w") as statefile:
+ json.dump(
+ {
+ "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
+ "pypi_version": pypi_version,
+ },
+ statefile,
+ sort_keys=True,
+ separators=(",", ":")
+ )
+
+
+class GlobalSelfCheckState(object):
+ def __init__(self):
+ self.statefile_path = os.path.join(USER_CACHE_DIR, "selfcheck.json")
+
+ # Load the existing state
+ try:
+ with open(self.statefile_path) as statefile:
+ self.state = json.load(statefile)[sys.prefix]
+ except (IOError, ValueError, KeyError):
+ self.state = {}
+
+ def save(self, pypi_version, current_time):
+ # Check to make sure that we own the directory
+ if not check_path_owner(os.path.dirname(self.statefile_path)):
+ return
+
+ # Now that we've ensured the directory is owned by this user, we'll go
+ # ahead and make sure that all our directories are created.
+ ensure_dir(os.path.dirname(self.statefile_path))
+
+ # Attempt to write out our version check file
+ with lockfile.LockFile(self.statefile_path):
+ if os.path.exists(self.statefile_path):
+ with open(self.statefile_path) as statefile:
+ state = json.load(statefile)
+ else:
+ state = {}
+
+ state[sys.prefix] = {
+ "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
+ "pypi_version": pypi_version,
+ }
+
+ with open(self.statefile_path, "w") as statefile:
+ json.dump(state, statefile, sort_keys=True,
+ separators=(",", ":"))
+
+
+def load_selfcheck_statefile():
+ if running_under_virtualenv():
+ return VirtualenvSelfCheckState()
+ else:
+ return GlobalSelfCheckState()
+
+
+def pip_version_check(session, options):
+ """Check for an update for pip.
+
+ Limit the frequency of checks to once per week. State is stored either in
+ the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix
+ of the pip script path.
+ """
+ installed_version = get_installed_version("pip")
+ if not installed_version:
+ return
+
+ pip_version = packaging_version.parse(installed_version)
+ pypi_version = None
+
+ try:
+ state = load_selfcheck_statefile()
+
+ current_time = datetime.datetime.utcnow()
+ # Determine if we need to refresh the state
+ if "last_check" in state.state and "pypi_version" in state.state:
+ last_check = datetime.datetime.strptime(
+ state.state["last_check"],
+ SELFCHECK_DATE_FMT
+ )
+ if (current_time - last_check).total_seconds() < 7 * 24 * 60 * 60:
+ pypi_version = state.state["pypi_version"]
+
+ # Refresh the version if we need to or just see if we need to warn
+ if pypi_version is None:
+            # Let's use PackageFinder to see what the latest pip version is
+ finder = PackageFinder(
+ find_links=options.find_links,
+ index_urls=[options.index_url] + options.extra_index_urls,
+ allow_all_prereleases=False, # Explicitly set to False
+ trusted_hosts=options.trusted_hosts,
+ process_dependency_links=options.process_dependency_links,
+ session=session,
+ )
+ all_candidates = finder.find_all_candidates("pip")
+ if not all_candidates:
+ return
+ pypi_version = str(
+ max(all_candidates, key=lambda c: c.version).version
+ )
+
+ # save that we've performed a check
+ state.save(pypi_version, current_time)
+
+ remote_version = packaging_version.parse(pypi_version)
+
+ # Determine if our pypi_version is older
+ if (pip_version < remote_version and
+ pip_version.base_version != remote_version.base_version):
+ # Advise "python -m pip" on Windows to avoid issues
+ # with overwriting pip.exe.
+ if WINDOWS:
+ pip_cmd = "python -m pip"
+ else:
+ pip_cmd = "pip"
+ logger.warning(
+ "You are using pip version %s, however version %s is "
+ "available.\nYou should consider upgrading via the "
+ "'%s install --upgrade pip' command.",
+ pip_version, pypi_version, pip_cmd
+ )
+ except Exception:
+ logger.debug(
+ "There was an error checking the latest version of pip",
+ exc_info=True,
+ )
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/packaging.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/packaging.py
index 5f9bb93..d523953 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/packaging.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/packaging.py
@@ -1,70 +1,70 @@
-from __future__ import absolute_import
-
-import logging
-import sys
-from email.parser import FeedParser # type: ignore
-
-from pip._vendor import pkg_resources
-from pip._vendor.packaging import specifiers, version
-
-from pip._internal import exceptions
-
-logger = logging.getLogger(__name__)
-
-
-def check_requires_python(requires_python):
- """
- Check if the python version in use match the `requires_python` specifier.
-
- Returns `True` if the version of python in use matches the requirement.
- Returns `False` if the version of python in use does not matches the
- requirement.
-
- Raises an InvalidSpecifier if `requires_python` have an invalid format.
- """
- if requires_python is None:
- # The package provides no information
- return True
- requires_python_specifier = specifiers.SpecifierSet(requires_python)
-
- # We only use major.minor.micro
- python_version = version.parse('.'.join(map(str, sys.version_info[:3])))
- return python_version in requires_python_specifier
-
-
-def get_metadata(dist):
- if (isinstance(dist, pkg_resources.DistInfoDistribution) and
- dist.has_metadata('METADATA')):
- return dist.get_metadata('METADATA')
- elif dist.has_metadata('PKG-INFO'):
- return dist.get_metadata('PKG-INFO')
-
-
-def check_dist_requires_python(dist):
- metadata = get_metadata(dist)
- feed_parser = FeedParser()
- feed_parser.feed(metadata)
- pkg_info_dict = feed_parser.close()
- requires_python = pkg_info_dict.get('Requires-Python')
- try:
- if not check_requires_python(requires_python):
- raise exceptions.UnsupportedPythonVersion(
- "%s requires Python '%s' but the running Python is %s" % (
- dist.project_name,
- requires_python,
- '.'.join(map(str, sys.version_info[:3])),)
- )
- except specifiers.InvalidSpecifier as e:
- logger.warning(
- "Package %s has an invalid Requires-Python entry %s - %s",
- dist.project_name, requires_python, e,
- )
- return
-
-
-def get_installer(dist):
- if dist.has_metadata('INSTALLER'):
- for line in dist.get_metadata_lines('INSTALLER'):
- if line.strip():
- return line.strip()
- return ''
+from __future__ import absolute_import
+
+import logging
+import sys
+from email.parser import FeedParser # type: ignore
+
+from pip._vendor import pkg_resources
+from pip._vendor.packaging import specifiers, version
+
+from pip._internal import exceptions
+
+logger = logging.getLogger(__name__)
+
+
+def check_requires_python(requires_python):
+ """
+    Check if the python version in use matches the `requires_python` specifier.
+
+    Returns `True` if the version of python in use matches the requirement.
+    Returns `False` if the version of python in use does not match the
+    requirement.
+
+    Raises an InvalidSpecifier if `requires_python` has an invalid format.
+ """
+ if requires_python is None:
+ # The package provides no information
+ return True
+ requires_python_specifier = specifiers.SpecifierSet(requires_python)
+
+ # We only use major.minor.micro
+ python_version = version.parse('.'.join(map(str, sys.version_info[:3])))
+ return python_version in requires_python_specifier
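+
+# Usage sketch (illustrative):
+#
+#     check_requires_python('>=3.4')  # True when running on Python 3.6
+#     check_requires_python(None)     # True -- no constraint declared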
+
+
+def get_metadata(dist):
+ if (isinstance(dist, pkg_resources.DistInfoDistribution) and
+ dist.has_metadata('METADATA')):
+ return dist.get_metadata('METADATA')
+ elif dist.has_metadata('PKG-INFO'):
+ return dist.get_metadata('PKG-INFO')
+
+
+def check_dist_requires_python(dist):
+ metadata = get_metadata(dist)
+ feed_parser = FeedParser()
+ feed_parser.feed(metadata)
+ pkg_info_dict = feed_parser.close()
+ requires_python = pkg_info_dict.get('Requires-Python')
+ try:
+ if not check_requires_python(requires_python):
+ raise exceptions.UnsupportedPythonVersion(
+ "%s requires Python '%s' but the running Python is %s" % (
+ dist.project_name,
+ requires_python,
+ '.'.join(map(str, sys.version_info[:3])),)
+ )
+ except specifiers.InvalidSpecifier as e:
+ logger.warning(
+ "Package %s has an invalid Requires-Python entry %s - %s",
+ dist.project_name, requires_python, e,
+ )
+ return
+
+
+def get_installer(dist):
+ if dist.has_metadata('INSTALLER'):
+ for line in dist.get_metadata_lines('INSTALLER'):
+ if line.strip():
+ return line.strip()
+ return ''
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/setuptools_build.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/setuptools_build.py
index 03973e9..9d32174 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/setuptools_build.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/setuptools_build.py
@@ -1,8 +1,8 @@
-# Shim to wrap setup.py invocation with setuptools
-SETUPTOOLS_SHIM = (
- "import setuptools, tokenize;__file__=%r;"
- "f=getattr(tokenize, 'open', open)(__file__);"
- "code=f.read().replace('\\r\\n', '\\n');"
- "f.close();"
- "exec(compile(code, __file__, 'exec'))"
-)
+# Shim to wrap setup.py invocation with setuptools
+SETUPTOOLS_SHIM = (
+ "import setuptools, tokenize;__file__=%r;"
+ "f=getattr(tokenize, 'open', open)(__file__);"
+ "code=f.read().replace('\\r\\n', '\\n');"
+ "f.close();"
+ "exec(compile(code, __file__, 'exec'))"
+)
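+
+# Usage sketch (illustrative; the path is hypothetical): the shim is
+# interpolated with the path to a setup.py and run via "python -c", e.g.
+#
+#     args = [sys.executable, '-c', SETUPTOOLS_SHIM % '/path/to/setup.py']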
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/temp_dir.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/temp_dir.py
index edc506b..25bc0d9 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/temp_dir.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/temp_dir.py
@@ -1,82 +1,82 @@
-from __future__ import absolute_import
-
-import logging
-import os.path
-import tempfile
-
-from pip._internal.utils.misc import rmtree
-
-logger = logging.getLogger(__name__)
-
-
-class TempDirectory(object):
- """Helper class that owns and cleans up a temporary directory.
-
- This class can be used as a context manager or as an OO representation of a
- temporary directory.
-
- Attributes:
- path
- Location to the created temporary directory or None
- delete
- Whether the directory should be deleted when exiting
- (when used as a contextmanager)
-
- Methods:
- create()
- Creates a temporary directory and stores its path in the path
- attribute.
- cleanup()
- Deletes the temporary directory and sets path attribute to None
-
- When used as a context manager, a temporary directory is created on
- entering the context and, if the delete attribute is True, on exiting the
- context the created directory is deleted.
- """
-
- def __init__(self, path=None, delete=None, kind="temp"):
- super(TempDirectory, self).__init__()
-
- if path is None and delete is None:
- # If we were not given an explicit directory, and we were not given
- # an explicit delete option, then we'll default to deleting.
- delete = True
-
- self.path = path
- self.delete = delete
- self.kind = kind
-
- def __repr__(self):
- return "<{} {!r}>".format(self.__class__.__name__, self.path)
-
- def __enter__(self):
- self.create()
- return self
-
- def __exit__(self, exc, value, tb):
- if self.delete:
- self.cleanup()
-
- def create(self):
- """Create a temporary directory and store it's path in self.path
- """
- if self.path is not None:
- logger.debug(
- "Skipped creation of temporary directory: {}".format(self.path)
- )
- return
- # We realpath here because some systems have their default tmpdir
- # symlinked to another directory. This tends to confuse build
- # scripts, so we canonicalize the path by traversing potential
- # symlinks here.
- self.path = os.path.realpath(
- tempfile.mkdtemp(prefix="pip-{}-".format(self.kind))
- )
- logger.debug("Created temporary directory: {}".format(self.path))
-
- def cleanup(self):
- """Remove the temporary directory created and reset state
- """
- if self.path is not None and os.path.exists(self.path):
- rmtree(self.path)
- self.path = None
+from __future__ import absolute_import
+
+import logging
+import os.path
+import tempfile
+
+from pip._internal.utils.misc import rmtree
+
+logger = logging.getLogger(__name__)
+
+
+class TempDirectory(object):
+ """Helper class that owns and cleans up a temporary directory.
+
+ This class can be used as a context manager or as an OO representation of a
+ temporary directory.
+
+ Attributes:
+ path
+ Location to the created temporary directory or None
+ delete
+ Whether the directory should be deleted when exiting
+ (when used as a contextmanager)
+
+ Methods:
+ create()
+ Creates a temporary directory and stores its path in the path
+ attribute.
+ cleanup()
+ Deletes the temporary directory and sets path attribute to None
+
+ When used as a context manager, a temporary directory is created on
+ entering the context and, if the delete attribute is True, on exiting the
+ context the created directory is deleted.
+ """
+
+ def __init__(self, path=None, delete=None, kind="temp"):
+ super(TempDirectory, self).__init__()
+
+ if path is None and delete is None:
+ # If we were not given an explicit directory, and we were not given
+ # an explicit delete option, then we'll default to deleting.
+ delete = True
+
+ self.path = path
+ self.delete = delete
+ self.kind = kind
+
+ def __repr__(self):
+ return "<{} {!r}>".format(self.__class__.__name__, self.path)
+
+ def __enter__(self):
+ self.create()
+ return self
+
+ def __exit__(self, exc, value, tb):
+ if self.delete:
+ self.cleanup()
+
+ def create(self):
+ """Create a temporary directory and store it's path in self.path
+ """
+ if self.path is not None:
+ logger.debug(
+ "Skipped creation of temporary directory: {}".format(self.path)
+ )
+ return
+ # We realpath here because some systems have their default tmpdir
+ # symlinked to another directory. This tends to confuse build
+ # scripts, so we canonicalize the path by traversing potential
+ # symlinks here.
+ self.path = os.path.realpath(
+ tempfile.mkdtemp(prefix="pip-{}-".format(self.kind))
+ )
+ logger.debug("Created temporary directory: {}".format(self.path))
+
+ def cleanup(self):
+ """Remove the temporary directory created and reset state
+ """
+ if self.path is not None and os.path.exists(self.path):
+ rmtree(self.path)
+ self.path = None
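+
+# Usage sketch (illustrative):
+#
+#     with TempDirectory(kind='unpack') as tmp:
+#         ...  # work under tmp.path; the directory is removed on exit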
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/typing.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/typing.py
index cb57f8f..4e25ae6 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/typing.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/typing.py
@@ -1,29 +1,29 @@
-"""For neatly implementing static typing in pip.
-
-`mypy` - the static type analysis tool we use - uses the `typing` module, which
-provides core functionality fundamental to mypy's functioning.
-
-Generally, `typing` would be imported at runtime and used in that fashion -
-it acts as a no-op at runtime and does not have any run-time overhead by
-design.
-
-As it turns out, `typing` is not vendorable - it uses separate sources for
-Python 2/Python 3. Thus, this codebase can not expect it to be present.
-To work around this, mypy allows the typing import to be behind a False-y
-optional to prevent it from running at runtime and type-comments can be used
-to remove the need for the types to be accessible directly during runtime.
-
-This module provides the False-y guard in a nicely named fashion so that a
-curious maintainer can reach here to read this.
-
-In pip, all static-typing related imports should be guarded as follows:
-
- from pip.utils.typing import MYPY_CHECK_RUNNING
-
- if MYPY_CHECK_RUNNING:
- from typing import ...
-
-Ref: https://github.com/python/mypy/issues/3216
-"""
-
-MYPY_CHECK_RUNNING = False
+"""For neatly implementing static typing in pip.
+
+`mypy` - the static type analysis tool we use - uses the `typing` module, which
+provides core functionality fundamental to mypy's functioning.
+
+Generally, `typing` would be imported at runtime and used in that fashion -
+it acts as a no-op at runtime and does not have any run-time overhead by
+design.
+
+As it turns out, `typing` is not vendorable - it uses separate sources for
+Python 2/Python 3. Thus, this codebase can not expect it to be present.
+To work around this, mypy allows the typing import to be behind a False-y
+optional to prevent it from running at runtime and type-comments can be used
+to remove the need for the types to be accessible directly during runtime.
+
+This module provides the False-y guard in a nicely named fashion so that a
+curious maintainer can reach here to read this.
+
+In pip, all static-typing related imports should be guarded as follows:
+
+ from pip.utils.typing import MYPY_CHECK_RUNNING
+
+ if MYPY_CHECK_RUNNING:
+ from typing import ...
+
+Ref: https://github.com/python/mypy/issues/3216
+"""
+
+MYPY_CHECK_RUNNING = False
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/ui.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/ui.py
index 8ade1e2..d97ea36 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/ui.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/ui.py
@@ -1,421 +1,421 @@
-from __future__ import absolute_import, division
-
-import contextlib
-import itertools
-import logging
-import sys
-import time
-from signal import SIGINT, default_int_handler, signal
-
-from pip._vendor import six
-from pip._vendor.progress.bar import (
- Bar, ChargingBar, FillingCirclesBar, FillingSquaresBar, IncrementalBar,
- ShadyBar,
-)
-from pip._vendor.progress.helpers import HIDE_CURSOR, SHOW_CURSOR, WritelnMixin
-from pip._vendor.progress.spinner import Spinner
-
-from pip._internal.compat import WINDOWS
-from pip._internal.utils.logging import get_indentation
-from pip._internal.utils.misc import format_size
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import Any
-
-try:
- from pip._vendor import colorama
-# Lots of different errors can come from this, including SystemError and
-# ImportError.
-except Exception:
- colorama = None
-
-logger = logging.getLogger(__name__)
-
-
-def _select_progress_class(preferred, fallback):
- encoding = getattr(preferred.file, "encoding", None)
-
- # If we don't know what encoding this file is in, then we'll just assume
- # that it doesn't support unicode and use the ASCII bar.
- if not encoding:
- return fallback
-
- # Collect all of the possible characters we want to use with the preferred
- # bar.
- characters = [
- getattr(preferred, "empty_fill", six.text_type()),
- getattr(preferred, "fill", six.text_type()),
- ]
- characters += list(getattr(preferred, "phases", []))
-
- # Try to decode the characters we're using for the bar using the encoding
- # of the given file, if this works then we'll assume that we can use the
- # fancier bar and if not we'll fall back to the plaintext bar.
- try:
- six.text_type().join(characters).encode(encoding)
- except UnicodeEncodeError:
- return fallback
- else:
- return preferred
-
-
-_BaseBar = _select_progress_class(IncrementalBar, Bar) # type: Any
-
-
-class InterruptibleMixin(object):
- """
- Helper to ensure that self.finish() gets called on keyboard interrupt.
-
- This allows downloads to be interrupted without leaving temporary state
- (like hidden cursors) behind.
-
- This class is similar to the progress library's existing SigIntMixin
- helper, but as of version 1.2, that helper has the following problems:
-
- 1. It calls sys.exit().
- 2. It discards the existing SIGINT handler completely.
- 3. It leaves its own handler in place even after an uninterrupted finish,
- which will have unexpected delayed effects if the user triggers an
- unrelated keyboard interrupt some time after a progress-displaying
- download has already completed, for example.
- """
-
- def __init__(self, *args, **kwargs):
- """
- Save the original SIGINT handler for later.
- """
- super(InterruptibleMixin, self).__init__(*args, **kwargs)
-
- self.original_handler = signal(SIGINT, self.handle_sigint)
-
- # If signal() returns None, the previous handler was not installed from
- # Python, and we cannot restore it. This probably should not happen,
- # but if it does, we must restore something sensible instead, at least.
- # The least bad option should be Python's default SIGINT handler, which
- # just raises KeyboardInterrupt.
- if self.original_handler is None:
- self.original_handler = default_int_handler
-
- def finish(self):
- """
- Restore the original SIGINT handler after finishing.
-
- This should happen regardless of whether the progress display finishes
- normally, or gets interrupted.
- """
- super(InterruptibleMixin, self).finish()
- signal(SIGINT, self.original_handler)
-
- def handle_sigint(self, signum, frame):
- """
- Call self.finish() before delegating to the original SIGINT handler.
-
- This handler should only be in place while the progress display is
- active.
- """
- self.finish()
- self.original_handler(signum, frame)
-
-
-class SilentBar(Bar):
-
- def update(self):
- pass
-
-
-class BlueEmojiBar(IncrementalBar):
-
- suffix = "%(percent)d%%"
- bar_prefix = " "
- bar_suffix = " "
- phases = (u"\U0001F539", u"\U0001F537", u"\U0001F535") # type: Any
-
-
-class DownloadProgressMixin(object):
-
- def __init__(self, *args, **kwargs):
- super(DownloadProgressMixin, self).__init__(*args, **kwargs)
- self.message = (" " * (get_indentation() + 2)) + self.message
-
- @property
- def downloaded(self):
- return format_size(self.index)
-
- @property
- def download_speed(self):
- # Avoid zero division errors...
- if self.avg == 0.0:
- return "..."
- return format_size(1 / self.avg) + "/s"
-
- @property
- def pretty_eta(self):
- if self.eta:
- return "eta %s" % self.eta_td
- return ""
-
- def iter(self, it, n=1):
- for x in it:
- yield x
- self.next(n)
- self.finish()
-
-
-class WindowsMixin(object):
-
- def __init__(self, *args, **kwargs):
- # The Windows terminal does not support the hide/show cursor ANSI codes
- # even with colorama. So we'll ensure that hide_cursor is False on
- # Windows.
- # This call neds to go before the super() call, so that hide_cursor
- # is set in time. The base progress bar class writes the "hide cursor"
- # code to the terminal in its init, so if we don't set this soon
- # enough, we get a "hide" with no corresponding "show"...
- if WINDOWS and self.hide_cursor:
- self.hide_cursor = False
-
- super(WindowsMixin, self).__init__(*args, **kwargs)
-
- # Check if we are running on Windows and we have the colorama module,
- # if we do then wrap our file with it.
- if WINDOWS and colorama:
- self.file = colorama.AnsiToWin32(self.file)
- # The progress code expects to be able to call self.file.isatty()
- # but the colorama.AnsiToWin32() object doesn't have that, so we'll
- # add it.
- self.file.isatty = lambda: self.file.wrapped.isatty()
- # The progress code expects to be able to call self.file.flush()
- # but the colorama.AnsiToWin32() object doesn't have that, so we'll
- # add it.
- self.file.flush = lambda: self.file.wrapped.flush()
-
-
-class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin,
- DownloadProgressMixin):
-
- file = sys.stdout
- message = "%(percent)d%%"
- suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"
-
-# NOTE: The "type: ignore" comments on the following classes are there to
-# work around https://github.com/python/typing/issues/241
-
-
-class DefaultDownloadProgressBar(BaseDownloadProgressBar,
- _BaseBar): # type: ignore
- pass
-
-
-class DownloadSilentBar(BaseDownloadProgressBar, SilentBar): # type: ignore
- pass
-
-
-class DownloadIncrementalBar(BaseDownloadProgressBar, # type: ignore
- IncrementalBar):
- pass
-
-
-class DownloadChargingBar(BaseDownloadProgressBar, # type: ignore
- ChargingBar):
- pass
-
-
-class DownloadShadyBar(BaseDownloadProgressBar, ShadyBar): # type: ignore
- pass
-
-
-class DownloadFillingSquaresBar(BaseDownloadProgressBar, # type: ignore
- FillingSquaresBar):
- pass
-
-
-class DownloadFillingCirclesBar(BaseDownloadProgressBar, # type: ignore
- FillingCirclesBar):
- pass
-
-
-class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar, # type: ignore
- BlueEmojiBar):
- pass
-
-
-class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin,
- DownloadProgressMixin, WritelnMixin, Spinner):
-
- file = sys.stdout
- suffix = "%(downloaded)s %(download_speed)s"
-
- def next_phase(self):
- if not hasattr(self, "_phaser"):
- self._phaser = itertools.cycle(self.phases)
- return next(self._phaser)
-
- def update(self):
- message = self.message % self
- phase = self.next_phase()
- suffix = self.suffix % self
- line = ''.join([
- message,
- " " if message else "",
- phase,
- " " if suffix else "",
- suffix,
- ])
-
- self.writeln(line)
-
-
-BAR_TYPES = {
- "off": (DownloadSilentBar, DownloadSilentBar),
- "on": (DefaultDownloadProgressBar, DownloadProgressSpinner),
- "ascii": (DownloadIncrementalBar, DownloadProgressSpinner),
- "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner),
- "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner)
-}
-
-
-def DownloadProgressProvider(progress_bar, max=None):
- if max is None or max == 0:
- return BAR_TYPES[progress_bar][1]().iter
- else:
- return BAR_TYPES[progress_bar][0](max=max).iter
-
-
-################################################################
-# Generic "something is happening" spinners
-#
-# We don't even try using progress.spinner.Spinner here because it's actually
-# simpler to reimplement from scratch than to coerce their code into doing
-# what we need.
-################################################################
-
-@contextlib.contextmanager
-def hidden_cursor(file):
- # The Windows terminal does not support the hide/show cursor ANSI codes,
- # even via colorama. So don't even try.
- if WINDOWS:
- yield
- # We don't want to clutter the output with control characters if we're
- # writing to a file, or if the user is running with --quiet.
- # See https://github.com/pypa/pip/issues/3418
- elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO:
- yield
- else:
- file.write(HIDE_CURSOR)
- try:
- yield
- finally:
- file.write(SHOW_CURSOR)
-
-
-class RateLimiter(object):
- def __init__(self, min_update_interval_seconds):
- self._min_update_interval_seconds = min_update_interval_seconds
- self._last_update = 0
-
- def ready(self):
- now = time.time()
- delta = now - self._last_update
- return delta >= self._min_update_interval_seconds
-
- def reset(self):
- self._last_update = time.time()
-
-
-class InteractiveSpinner(object):
- def __init__(self, message, file=None, spin_chars="-\\|/",
- # Empirically, 8 updates/second looks nice
- min_update_interval_seconds=0.125):
- self._message = message
- if file is None:
- file = sys.stdout
- self._file = file
- self._rate_limiter = RateLimiter(min_update_interval_seconds)
- self._finished = False
-
- self._spin_cycle = itertools.cycle(spin_chars)
-
- self._file.write(" " * get_indentation() + self._message + " ... ")
- self._width = 0
-
- def _write(self, status):
- assert not self._finished
- # Erase what we wrote before by backspacing to the beginning, writing
- # spaces to overwrite the old text, and then backspacing again
- backup = "\b" * self._width
- self._file.write(backup + " " * self._width + backup)
- # Now we have a blank slate to add our status
- self._file.write(status)
- self._width = len(status)
- self._file.flush()
- self._rate_limiter.reset()
-
- def spin(self):
- if self._finished:
- return
- if not self._rate_limiter.ready():
- return
- self._write(next(self._spin_cycle))
-
- def finish(self, final_status):
- if self._finished:
- return
- self._write(final_status)
- self._file.write("\n")
- self._file.flush()
- self._finished = True
-
-
-# Used for dumb terminals, non-interactive installs (no tty), etc.
-# We still print updates occasionally (once every 60 seconds by default) to
-# act as a keep-alive for systems like Travis-CI that take lack-of-output as
-# an indication that a task has frozen.
-class NonInteractiveSpinner(object):
- def __init__(self, message, min_update_interval_seconds=60):
- self._message = message
- self._finished = False
- self._rate_limiter = RateLimiter(min_update_interval_seconds)
- self._update("started")
-
- def _update(self, status):
- assert not self._finished
- self._rate_limiter.reset()
- logger.info("%s: %s", self._message, status)
-
- def spin(self):
- if self._finished:
- return
- if not self._rate_limiter.ready():
- return
- self._update("still running...")
-
- def finish(self, final_status):
- if self._finished:
- return
- self._update("finished with status '%s'" % (final_status,))
- self._finished = True
-
-
-@contextlib.contextmanager
-def open_spinner(message):
- # Interactive spinner goes directly to sys.stdout rather than being routed
- # through the logging system, but it acts like it has level INFO,
- # i.e. it's only displayed if we're at level INFO or better.
- # Non-interactive spinner goes through the logging system, so it is always
- # in sync with logging configuration.
- if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
- spinner = InteractiveSpinner(message)
- else:
- spinner = NonInteractiveSpinner(message)
- try:
- with hidden_cursor(sys.stdout):
- yield spinner
- except KeyboardInterrupt:
- spinner.finish("canceled")
- raise
- except Exception:
- spinner.finish("error")
- raise
- else:
- spinner.finish("done")
+from __future__ import absolute_import, division
+
+import contextlib
+import itertools
+import logging
+import sys
+import time
+from signal import SIGINT, default_int_handler, signal
+
+from pip._vendor import six
+from pip._vendor.progress.bar import (
+ Bar, ChargingBar, FillingCirclesBar, FillingSquaresBar, IncrementalBar,
+ ShadyBar,
+)
+from pip._vendor.progress.helpers import HIDE_CURSOR, SHOW_CURSOR, WritelnMixin
+from pip._vendor.progress.spinner import Spinner
+
+from pip._internal.compat import WINDOWS
+from pip._internal.utils.logging import get_indentation
+from pip._internal.utils.misc import format_size
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Any
+
+try:
+ from pip._vendor import colorama
+# Lots of different errors can come from this, including SystemError and
+# ImportError.
+except Exception:
+ colorama = None
+
+logger = logging.getLogger(__name__)
+
+
+def _select_progress_class(preferred, fallback):
+ encoding = getattr(preferred.file, "encoding", None)
+
+ # If we don't know what encoding this file is in, then we'll just assume
+ # that it doesn't support unicode and use the ASCII bar.
+ if not encoding:
+ return fallback
+
+ # Collect all of the possible characters we want to use with the preferred
+ # bar.
+ characters = [
+ getattr(preferred, "empty_fill", six.text_type()),
+ getattr(preferred, "fill", six.text_type()),
+ ]
+ characters += list(getattr(preferred, "phases", []))
+
+    # Try to decode the characters we're using for the bar using the encoding
+    # of the given file; if this works, we'll assume that we can use the
+    # fancier bar, and if not we'll fall back to the plaintext bar.
+ try:
+ six.text_type().join(characters).encode(encoding)
+ except UnicodeEncodeError:
+ return fallback
+ else:
+ return preferred
+
+
+_BaseBar = _select_progress_class(IncrementalBar, Bar) # type: Any
+
+
+class InterruptibleMixin(object):
+ """
+ Helper to ensure that self.finish() gets called on keyboard interrupt.
+
+ This allows downloads to be interrupted without leaving temporary state
+ (like hidden cursors) behind.
+
+ This class is similar to the progress library's existing SigIntMixin
+ helper, but as of version 1.2, that helper has the following problems:
+
+ 1. It calls sys.exit().
+ 2. It discards the existing SIGINT handler completely.
+ 3. It leaves its own handler in place even after an uninterrupted finish,
+ which will have unexpected delayed effects if the user triggers an
+ unrelated keyboard interrupt some time after a progress-displaying
+ download has already completed, for example.
+ """
+
+ def __init__(self, *args, **kwargs):
+ """
+ Save the original SIGINT handler for later.
+ """
+ super(InterruptibleMixin, self).__init__(*args, **kwargs)
+
+ self.original_handler = signal(SIGINT, self.handle_sigint)
+
+ # If signal() returns None, the previous handler was not installed from
+ # Python, and we cannot restore it. This probably should not happen,
+ # but if it does, we must restore something sensible instead, at least.
+ # The least bad option should be Python's default SIGINT handler, which
+ # just raises KeyboardInterrupt.
+ if self.original_handler is None:
+ self.original_handler = default_int_handler
+
+ def finish(self):
+ """
+ Restore the original SIGINT handler after finishing.
+
+ This should happen regardless of whether the progress display finishes
+ normally, or gets interrupted.
+ """
+ super(InterruptibleMixin, self).finish()
+ signal(SIGINT, self.original_handler)
+
+ def handle_sigint(self, signum, frame):
+ """
+ Call self.finish() before delegating to the original SIGINT handler.
+
+ This handler should only be in place while the progress display is
+ active.
+ """
+ self.finish()
+ self.original_handler(signum, frame)
+
+
+class SilentBar(Bar):
+
+ def update(self):
+ pass
+
+
+class BlueEmojiBar(IncrementalBar):
+
+ suffix = "%(percent)d%%"
+ bar_prefix = " "
+ bar_suffix = " "
+ phases = (u"\U0001F539", u"\U0001F537", u"\U0001F535") # type: Any
+
+
+class DownloadProgressMixin(object):
+
+ def __init__(self, *args, **kwargs):
+ super(DownloadProgressMixin, self).__init__(*args, **kwargs)
+ self.message = (" " * (get_indentation() + 2)) + self.message
+
+ @property
+ def downloaded(self):
+ return format_size(self.index)
+
+ @property
+ def download_speed(self):
+ # Avoid zero division errors...
+ if self.avg == 0.0:
+ return "..."
+ return format_size(1 / self.avg) + "/s"
+
+ @property
+ def pretty_eta(self):
+ if self.eta:
+ return "eta %s" % self.eta_td
+ return ""
+
+ def iter(self, it, n=1):
+ for x in it:
+ yield x
+ self.next(n)
+ self.finish()
+
+
+class WindowsMixin(object):
+
+ def __init__(self, *args, **kwargs):
+ # The Windows terminal does not support the hide/show cursor ANSI codes
+ # even with colorama. So we'll ensure that hide_cursor is False on
+ # Windows.
+        # This call needs to go before the super() call, so that hide_cursor
+ # is set in time. The base progress bar class writes the "hide cursor"
+ # code to the terminal in its init, so if we don't set this soon
+ # enough, we get a "hide" with no corresponding "show"...
+ if WINDOWS and self.hide_cursor:
+ self.hide_cursor = False
+
+ super(WindowsMixin, self).__init__(*args, **kwargs)
+
+        # Check if we are running on Windows and have the colorama module;
+        # if we do, wrap our file with it.
+ if WINDOWS and colorama:
+ self.file = colorama.AnsiToWin32(self.file)
+ # The progress code expects to be able to call self.file.isatty()
+ # but the colorama.AnsiToWin32() object doesn't have that, so we'll
+ # add it.
+ self.file.isatty = lambda: self.file.wrapped.isatty()
+ # The progress code expects to be able to call self.file.flush()
+ # but the colorama.AnsiToWin32() object doesn't have that, so we'll
+ # add it.
+ self.file.flush = lambda: self.file.wrapped.flush()
+
+
+class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin,
+ DownloadProgressMixin):
+
+ file = sys.stdout
+ message = "%(percent)d%%"
+ suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"
+
+# NOTE: The "type: ignore" comments on the following classes are there to
+# work around https://github.com/python/typing/issues/241
+
+
+class DefaultDownloadProgressBar(BaseDownloadProgressBar,
+ _BaseBar): # type: ignore
+ pass
+
+
+class DownloadSilentBar(BaseDownloadProgressBar, SilentBar): # type: ignore
+ pass
+
+
+class DownloadIncrementalBar(BaseDownloadProgressBar, # type: ignore
+ IncrementalBar):
+ pass
+
+
+class DownloadChargingBar(BaseDownloadProgressBar, # type: ignore
+ ChargingBar):
+ pass
+
+
+class DownloadShadyBar(BaseDownloadProgressBar, ShadyBar): # type: ignore
+ pass
+
+
+class DownloadFillingSquaresBar(BaseDownloadProgressBar, # type: ignore
+ FillingSquaresBar):
+ pass
+
+
+class DownloadFillingCirclesBar(BaseDownloadProgressBar, # type: ignore
+ FillingCirclesBar):
+ pass
+
+
+class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar, # type: ignore
+ BlueEmojiBar):
+ pass
+
+
+class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin,
+ DownloadProgressMixin, WritelnMixin, Spinner):
+
+ file = sys.stdout
+ suffix = "%(downloaded)s %(download_speed)s"
+
+ def next_phase(self):
+ if not hasattr(self, "_phaser"):
+ self._phaser = itertools.cycle(self.phases)
+ return next(self._phaser)
+
+ def update(self):
+ message = self.message % self
+ phase = self.next_phase()
+ suffix = self.suffix % self
+ line = ''.join([
+ message,
+ " " if message else "",
+ phase,
+ " " if suffix else "",
+ suffix,
+ ])
+
+ self.writeln(line)
+
+
+BAR_TYPES = {
+ "off": (DownloadSilentBar, DownloadSilentBar),
+ "on": (DefaultDownloadProgressBar, DownloadProgressSpinner),
+ "ascii": (DownloadIncrementalBar, DownloadProgressSpinner),
+ "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner),
+ "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner)
+}
+
+
+def DownloadProgressProvider(progress_bar, max=None):
+ if max is None or max == 0:
+ return BAR_TYPES[progress_bar][1]().iter
+ else:
+ return BAR_TYPES[progress_bar][0](max=max).iter
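+
+# Usage sketch (illustrative; total_bytes and chunks are hypothetical): the
+# provider returns a bound iter() that advances the bar as items are drawn:
+#
+#     for chunk in DownloadProgressProvider('on', max=total_bytes)(chunks):
+#         ...  # write the chunk somewhere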
+
+
+################################################################
+# Generic "something is happening" spinners
+#
+# We don't even try using progress.spinner.Spinner here because it's actually
+# simpler to reimplement from scratch than to coerce their code into doing
+# what we need.
+################################################################
+
+@contextlib.contextmanager
+def hidden_cursor(file):
+ # The Windows terminal does not support the hide/show cursor ANSI codes,
+ # even via colorama. So don't even try.
+ if WINDOWS:
+ yield
+ # We don't want to clutter the output with control characters if we're
+ # writing to a file, or if the user is running with --quiet.
+ # See https://github.com/pypa/pip/issues/3418
+ elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO:
+ yield
+ else:
+ file.write(HIDE_CURSOR)
+ try:
+ yield
+ finally:
+ file.write(SHOW_CURSOR)
+
+
+class RateLimiter(object):
+ def __init__(self, min_update_interval_seconds):
+ self._min_update_interval_seconds = min_update_interval_seconds
+ self._last_update = 0
+
+ def ready(self):
+ now = time.time()
+ delta = now - self._last_update
+ return delta >= self._min_update_interval_seconds
+
+ def reset(self):
+ self._last_update = time.time()
+
+
+class InteractiveSpinner(object):
+ def __init__(self, message, file=None, spin_chars="-\\|/",
+ # Empirically, 8 updates/second looks nice
+ min_update_interval_seconds=0.125):
+ self._message = message
+ if file is None:
+ file = sys.stdout
+ self._file = file
+ self._rate_limiter = RateLimiter(min_update_interval_seconds)
+ self._finished = False
+
+ self._spin_cycle = itertools.cycle(spin_chars)
+
+ self._file.write(" " * get_indentation() + self._message + " ... ")
+ self._width = 0
+
+ def _write(self, status):
+ assert not self._finished
+ # Erase what we wrote before by backspacing to the beginning, writing
+ # spaces to overwrite the old text, and then backspacing again
+ backup = "\b" * self._width
+ self._file.write(backup + " " * self._width + backup)
+ # Now we have a blank slate to add our status
+ self._file.write(status)
+ self._width = len(status)
+ self._file.flush()
+ self._rate_limiter.reset()
+
+ def spin(self):
+ if self._finished:
+ return
+ if not self._rate_limiter.ready():
+ return
+ self._write(next(self._spin_cycle))
+
+ def finish(self, final_status):
+ if self._finished:
+ return
+ self._write(final_status)
+ self._file.write("\n")
+ self._file.flush()
+ self._finished = True
+
+
+# Used for dumb terminals, non-interactive installs (no tty), etc.
+# We still print updates occasionally (once every 60 seconds by default) to
+# act as a keep-alive for systems like Travis-CI that take lack-of-output as
+# an indication that a task has frozen.
+class NonInteractiveSpinner(object):
+ def __init__(self, message, min_update_interval_seconds=60):
+ self._message = message
+ self._finished = False
+ self._rate_limiter = RateLimiter(min_update_interval_seconds)
+ self._update("started")
+
+ def _update(self, status):
+ assert not self._finished
+ self._rate_limiter.reset()
+ logger.info("%s: %s", self._message, status)
+
+ def spin(self):
+ if self._finished:
+ return
+ if not self._rate_limiter.ready():
+ return
+ self._update("still running...")
+
+ def finish(self, final_status):
+ if self._finished:
+ return
+ self._update("finished with status '%s'" % (final_status,))
+ self._finished = True
+
+
+@contextlib.contextmanager
+def open_spinner(message):
+ # Interactive spinner goes directly to sys.stdout rather than being routed
+ # through the logging system, but it acts like it has level INFO,
+ # i.e. it's only displayed if we're at level INFO or better.
+ # Non-interactive spinner goes through the logging system, so it is always
+ # in sync with logging configuration.
+ if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
+ spinner = InteractiveSpinner(message)
+ else:
+ spinner = NonInteractiveSpinner(message)
+ try:
+ with hidden_cursor(sys.stdout):
+ yield spinner
+ except KeyboardInterrupt:
+ spinner.finish("canceled")
+ raise
+ except Exception:
+ spinner.finish("error")
+ raise
+ else:
+ spinner.finish("done")
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/vcs/__init__.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/vcs/__init__.py
index 8b159cb..bff94fa 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/vcs/__init__.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/vcs/__init__.py
@@ -1,471 +1,471 @@
-"""Handles all VCS (version control) support"""
-from __future__ import absolute_import
-
-import copy
-import errno
-import logging
-import os
-import shutil
-import sys
-
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-
-from pip._internal.exceptions import BadCommand
-from pip._internal.utils.misc import (
- display_path, backup_dir, call_subprocess, rmtree, ask_path_exists,
-)
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import Dict, Optional, Tuple
- from pip._internal.basecommand import Command
-
-__all__ = ['vcs', 'get_src_requirement']
-
-
-logger = logging.getLogger(__name__)
-
-
-class RevOptions(object):
-
- """
- Encapsulates a VCS-specific revision to install, along with any VCS
- install options.
-
- Instances of this class should be treated as if immutable.
- """
-
- def __init__(self, vcs, rev=None, extra_args=None):
- """
- Args:
- vcs: a VersionControl object.
- rev: the name of the revision to install.
- extra_args: a list of extra options.
- """
- if extra_args is None:
- extra_args = []
-
- self.extra_args = extra_args
- self.rev = rev
- self.vcs = vcs
-
- def __repr__(self):
-        return '<RevOptions {}: rev={!r}>'.format(self.vcs.name, self.rev)
-
- @property
- def arg_rev(self):
- if self.rev is None:
- return self.vcs.default_arg_rev
-
- return self.rev
-
- def to_args(self):
- """
- Return the VCS-specific command arguments.
- """
- args = []
- rev = self.arg_rev
- if rev is not None:
- args += self.vcs.get_base_rev_args(rev)
- args += self.extra_args
-
- return args
-
- def to_display(self):
- if not self.rev:
- return ''
-
- return ' (to revision {})'.format(self.rev)
-
- def make_new(self, rev):
- """
- Make a copy of the current instance, but with a new rev.
-
- Args:
- rev: the name of the revision for the new object.
- """
- return self.vcs.make_rev_options(rev, extra_args=self.extra_args)
-
-
-class VcsSupport(object):
- _registry = {} # type: Dict[str, Command]
- schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn']
-
- def __init__(self):
- # Register more schemes with urlparse for various version control
- # systems
- urllib_parse.uses_netloc.extend(self.schemes)
- # Python >= 2.7.4, 3.3 doesn't have uses_fragment
- if getattr(urllib_parse, 'uses_fragment', None):
- urllib_parse.uses_fragment.extend(self.schemes)
- super(VcsSupport, self).__init__()
-
- def __iter__(self):
- return self._registry.__iter__()
-
- @property
- def backends(self):
- return list(self._registry.values())
-
- @property
- def dirnames(self):
- return [backend.dirname for backend in self.backends]
-
- @property
- def all_schemes(self):
- schemes = []
- for backend in self.backends:
- schemes.extend(backend.schemes)
- return schemes
-
- def register(self, cls):
- if not hasattr(cls, 'name'):
- logger.warning('Cannot register VCS %s', cls.__name__)
- return
- if cls.name not in self._registry:
- self._registry[cls.name] = cls
- logger.debug('Registered VCS backend: %s', cls.name)
-
- def unregister(self, cls=None, name=None):
- if name in self._registry:
- del self._registry[name]
- elif cls in self._registry.values():
- del self._registry[cls.name]
- else:
- logger.warning('Cannot unregister because no class or name given')
-
- def get_backend_name(self, location):
- """
- Return the name of the version control backend if found at given
- location, e.g. vcs.get_backend_name('/path/to/vcs/checkout')
- """
- for vc_type in self._registry.values():
- if vc_type.controls_location(location):
- logger.debug('Determine that %s uses VCS: %s',
- location, vc_type.name)
- return vc_type.name
- return None
-
- def get_backend(self, name):
- name = name.lower()
- if name in self._registry:
- return self._registry[name]
-
- def get_backend_from_location(self, location):
- vc_type = self.get_backend_name(location)
- if vc_type:
- return self.get_backend(vc_type)
- return None
-
-
-vcs = VcsSupport()
-
-
-class VersionControl(object):
- name = ''
- dirname = ''
- # List of supported schemes for this Version Control
- schemes = () # type: Tuple[str, ...]
- # Iterable of environment variable names to pass to call_subprocess().
- unset_environ = () # type: Tuple[str, ...]
- default_arg_rev = None # type: Optional[str]
-
- def __init__(self, url=None, *args, **kwargs):
- self.url = url
- super(VersionControl, self).__init__(*args, **kwargs)
-
- def get_base_rev_args(self, rev):
- """
- Return the base revision arguments for a vcs command.
-
- Args:
- rev: the name of a revision to install. Cannot be None.
- """
- raise NotImplementedError
-
- def make_rev_options(self, rev=None, extra_args=None):
- """
- Return a RevOptions object.
-
- Args:
- rev: the name of a revision to install.
- extra_args: a list of extra options.
- """
- return RevOptions(self, rev, extra_args=extra_args)
-
- def _is_local_repository(self, repo):
- """
- posix absolute paths start with os.path.sep,
- win32 ones start with drive (like c:\\folder)
- """
- drive, tail = os.path.splitdrive(repo)
- return repo.startswith(os.path.sep) or drive
-
- # See issue #1083 for why this method was introduced:
- # https://github.com/pypa/pip/issues/1083
- def translate_egg_surname(self, surname):
- # For example, Django has branches of the form "stable/1.7.x".
- return surname.replace('/', '_')
-
- def export(self, location):
- """
- Export the repository at the url to the destination location
- i.e. only download the files, without vcs informations
- """
- raise NotImplementedError
-
- def get_url_rev(self):
- """
- Returns the correct repository URL and revision by parsing the given
- repository URL
- """
- error_message = (
- "Sorry, '%s' is a malformed VCS url. "
- "The format is +://, "
- "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp"
- )
- assert '+' in self.url, error_message % self.url
- url = self.url.split('+', 1)[1]
- scheme, netloc, path, query, frag = urllib_parse.urlsplit(url)
- rev = None
- if '@' in path:
- path, rev = path.rsplit('@', 1)
- url = urllib_parse.urlunsplit((scheme, netloc, path, query, ''))
- return url, rev
-
- def get_info(self, location):
- """
- Returns (url, revision), where both are strings
- """
- assert not location.rstrip('/').endswith(self.dirname), \
- 'Bad directory: %s' % location
- return self.get_url(location), self.get_revision(location)
-
- def normalize_url(self, url):
- """
- Normalize a URL for comparison by unquoting it and removing any
- trailing slash.
- """
- return urllib_parse.unquote(url).rstrip('/')
-
- def compare_urls(self, url1, url2):
- """
- Compare two repo URLs for identity, ignoring incidental differences.
- """
- return (self.normalize_url(url1) == self.normalize_url(url2))
-
- def obtain(self, dest):
- """
- Called when installing or updating an editable package, takes the
- source path of the checkout.
- """
- raise NotImplementedError
-
- def switch(self, dest, url, rev_options):
- """
- Switch the repo at ``dest`` to point to ``URL``.
-
- Args:
- rev_options: a RevOptions object.
- """
- raise NotImplementedError
-
- def update(self, dest, rev_options):
- """
- Update an already-existing repo to the given ``rev_options``.
-
- Args:
- rev_options: a RevOptions object.
- """
- raise NotImplementedError
-
- def is_commit_id_equal(self, dest, name):
- """
- Return whether the id of the current commit equals the given name.
-
- Args:
- dest: the repository directory.
- name: a string name.
- """
- raise NotImplementedError
-
- def check_destination(self, dest, url, rev_options):
- """
- Prepare a location to receive a checkout/clone.
-
- Return True if the location is ready for (and requires) a
- checkout/clone, False otherwise.
-
- Args:
- rev_options: a RevOptions object.
- """
- checkout = True
- prompt = False
- rev_display = rev_options.to_display()
- if os.path.exists(dest):
- checkout = False
- if os.path.exists(os.path.join(dest, self.dirname)):
- existing_url = self.get_url(dest)
- if self.compare_urls(existing_url, url):
- logger.debug(
- '%s in %s exists, and has correct URL (%s)',
- self.repo_name.title(),
- display_path(dest),
- url,
- )
- if not self.is_commit_id_equal(dest, rev_options.rev):
- logger.info(
- 'Updating %s %s%s',
- display_path(dest),
- self.repo_name,
- rev_display,
- )
- self.update(dest, rev_options)
- else:
- logger.info(
- 'Skipping because already up-to-date.')
- else:
- logger.warning(
- '%s %s in %s exists with URL %s',
- self.name,
- self.repo_name,
- display_path(dest),
- existing_url,
- )
- prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
- ('s', 'i', 'w', 'b'))
- else:
- logger.warning(
- 'Directory %s already exists, and is not a %s %s.',
- dest,
- self.name,
- self.repo_name,
- )
- prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
- if prompt:
- logger.warning(
- 'The plan is to install the %s repository %s',
- self.name,
- url,
- )
- response = ask_path_exists('What to do? %s' % prompt[0],
- prompt[1])
-
- if response == 's':
- logger.info(
- 'Switching %s %s to %s%s',
- self.repo_name,
- display_path(dest),
- url,
- rev_display,
- )
- self.switch(dest, url, rev_options)
- elif response == 'i':
- # do nothing
- pass
- elif response == 'w':
- logger.warning('Deleting %s', display_path(dest))
- rmtree(dest)
- checkout = True
- elif response == 'b':
- dest_dir = backup_dir(dest)
- logger.warning(
- 'Backing up %s to %s', display_path(dest), dest_dir,
- )
- shutil.move(dest, dest_dir)
- checkout = True
- elif response == 'a':
- sys.exit(-1)
- return checkout
-
- def unpack(self, location):
- """
- Clean up current location and download the url repository
- (and vcs info) into location
- """
- if os.path.exists(location):
- rmtree(location)
- self.obtain(location)
-
- def get_src_requirement(self, dist, location):
- """
- Return a string representing the requirement needed to
- redownload the files currently present in location, something
- like:
- {repository_url}@{revision}#egg={project_name}-{version_identifier}
- """
- raise NotImplementedError
-
- def get_url(self, location):
- """
- Return the url used at location
- Used in get_info or check_destination
- """
- raise NotImplementedError
-
- def get_revision(self, location):
- """
- Return the current commit id of the files at the given location.
- """
- raise NotImplementedError
-
- def run_command(self, cmd, show_stdout=True, cwd=None,
- on_returncode='raise',
- command_desc=None,
- extra_environ=None, spinner=None):
- """
- Run a VCS subcommand
- This is simply a wrapper around call_subprocess that adds the VCS
- command name, and checks that the VCS is available
- """
- cmd = [self.name] + cmd
- try:
- return call_subprocess(cmd, show_stdout, cwd,
- on_returncode,
- command_desc, extra_environ,
- unset_environ=self.unset_environ,
- spinner=spinner)
- except OSError as e:
- # errno.ENOENT = no such file or directory
- # In other words, the VCS executable isn't available
- if e.errno == errno.ENOENT:
- raise BadCommand(
- 'Cannot find command %r - do you have '
- '%r installed and in your '
- 'PATH?' % (self.name, self.name))
- else:
- raise # re-raise exception if a different error occurred
-
- @classmethod
- def controls_location(cls, location):
- """
- Check if a location is controlled by the vcs.
- It is meant to be overridden to implement smarter detection
- mechanisms for specific vcs.
- """
- logger.debug('Checking in %s for %s (%s)...',
- location, cls.dirname, cls.name)
- path = os.path.join(location, cls.dirname)
- return os.path.exists(path)
-
-
-def get_src_requirement(dist, location):
- version_control = vcs.get_backend_from_location(location)
- if version_control:
- try:
- return version_control().get_src_requirement(dist,
- location)
- except BadCommand:
- logger.warning(
- 'cannot determine version of editable source in %s '
- '(%s command not found in path)',
- location,
- version_control.name,
- )
- return dist.as_requirement()
- logger.warning(
- 'cannot determine version of editable source in %s (is not SVN '
- 'checkout, Git clone, Mercurial clone or Bazaar branch)',
- location,
- )
- return dist.as_requirement()
+"""Handles all VCS (version control) support"""
+from __future__ import absolute_import
+
+import copy
+import errno
+import logging
+import os
+import shutil
+import sys
+
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+
+from pip._internal.exceptions import BadCommand
+from pip._internal.utils.misc import (
+ display_path, backup_dir, call_subprocess, rmtree, ask_path_exists,
+)
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Dict, Optional, Tuple
+ from pip._internal.basecommand import Command
+
+__all__ = ['vcs', 'get_src_requirement']
+
+
+logger = logging.getLogger(__name__)
+
+
+class RevOptions(object):
+
+ """
+ Encapsulates a VCS-specific revision to install, along with any VCS
+ install options.
+
+ Instances of this class should be treated as if immutable.
+ """
+
+ def __init__(self, vcs, rev=None, extra_args=None):
+ """
+ Args:
+ vcs: a VersionControl object.
+ rev: the name of the revision to install.
+ extra_args: a list of extra options.
+ """
+ if extra_args is None:
+ extra_args = []
+
+ self.extra_args = extra_args
+ self.rev = rev
+ self.vcs = vcs
+
+ def __repr__(self):
+ return '<RevOptions {}: rev={!r}>'.format(self.vcs.name, self.rev)
+
+ @property
+ def arg_rev(self):
+ if self.rev is None:
+ return self.vcs.default_arg_rev
+
+ return self.rev
+
+ def to_args(self):
+ """
+ Return the VCS-specific command arguments.
+ """
+ args = []
+ rev = self.arg_rev
+ if rev is not None:
+ args += self.vcs.get_base_rev_args(rev)
+ args += self.extra_args
+
+ return args
+
+ def to_display(self):
+ if not self.rev:
+ return ''
+
+ return ' (to revision {})'.format(self.rev)
+
+ def make_new(self, rev):
+ """
+ Make a copy of the current instance, but with a new rev.
+
+ Args:
+ rev: the name of the revision for the new object.
+ """
+ return self.vcs.make_rev_options(rev, extra_args=self.extra_args)
+
+
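+# A minimal usage sketch (illustrative, not part of pip itself), assuming
+# the Subversion backend registered later in this diff, whose
+# get_base_rev_args(rev) returns ['-r', rev]:
+#
+#     >>> opts = Subversion().make_rev_options('123', extra_args=['--quiet'])
+#     >>> opts.to_args()
+#     ['-r', '123', '--quiet']
+#     >>> opts.to_display()
+#     ' (to revision 123)'
+#
+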
+class VcsSupport(object):
+ _registry = {} # type: Dict[str, Command]
+ schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn']
+
+ def __init__(self):
+ # Register more schemes with urlparse for various version control
+ # systems
+ urllib_parse.uses_netloc.extend(self.schemes)
+ # Python >= 2.7.4, 3.3 doesn't have uses_fragment
+ if getattr(urllib_parse, 'uses_fragment', None):
+ urllib_parse.uses_fragment.extend(self.schemes)
+ super(VcsSupport, self).__init__()
+
+ def __iter__(self):
+ return self._registry.__iter__()
+
+ @property
+ def backends(self):
+ return list(self._registry.values())
+
+ @property
+ def dirnames(self):
+ return [backend.dirname for backend in self.backends]
+
+ @property
+ def all_schemes(self):
+ schemes = []
+ for backend in self.backends:
+ schemes.extend(backend.schemes)
+ return schemes
+
+ def register(self, cls):
+ if not hasattr(cls, 'name'):
+ logger.warning('Cannot register VCS %s', cls.__name__)
+ return
+ if cls.name not in self._registry:
+ self._registry[cls.name] = cls
+ logger.debug('Registered VCS backend: %s', cls.name)
+
+ def unregister(self, cls=None, name=None):
+ if name in self._registry:
+ del self._registry[name]
+ elif cls in self._registry.values():
+ del self._registry[cls.name]
+ else:
+ logger.warning('Cannot unregister because no class or name given')
+
+ def get_backend_name(self, location):
+ """
+ Return the name of the version control backend if found at given
+ location, e.g. vcs.get_backend_name('/path/to/vcs/checkout')
+ """
+ for vc_type in self._registry.values():
+ if vc_type.controls_location(location):
+ logger.debug('Determine that %s uses VCS: %s',
+ location, vc_type.name)
+ return vc_type.name
+ return None
+
+ def get_backend(self, name):
+ name = name.lower()
+ if name in self._registry:
+ return self._registry[name]
+
+ def get_backend_from_location(self, location):
+ vc_type = self.get_backend_name(location)
+ if vc_type:
+ return self.get_backend(vc_type)
+ return None
+
+
+vcs = VcsSupport()
+
+
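+# Registry sketch (illustrative): each backend module below calls
+# vcs.register(...) on import, after which:
+#
+#     >>> vcs.get_backend('git').name
+#     'git'
+#     >>> sorted(vcs.dirnames)
+#     ['.bzr', '.git', '.hg', '.svn']
+#     >>> vcs.get_backend_name('/path/to/checkout')  # looks for e.g. .git
+#     'git'
+#
+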
+class VersionControl(object):
+ name = ''
+ dirname = ''
+ # List of supported schemes for this Version Control
+ schemes = () # type: Tuple[str, ...]
+ # Iterable of environment variable names to pass to call_subprocess().
+ unset_environ = () # type: Tuple[str, ...]
+ default_arg_rev = None # type: Optional[str]
+
+ def __init__(self, url=None, *args, **kwargs):
+ self.url = url
+ super(VersionControl, self).__init__(*args, **kwargs)
+
+ def get_base_rev_args(self, rev):
+ """
+ Return the base revision arguments for a vcs command.
+
+ Args:
+ rev: the name of a revision to install. Cannot be None.
+ """
+ raise NotImplementedError
+
+ def make_rev_options(self, rev=None, extra_args=None):
+ """
+ Return a RevOptions object.
+
+ Args:
+ rev: the name of a revision to install.
+ extra_args: a list of extra options.
+ """
+ return RevOptions(self, rev, extra_args=extra_args)
+
+ def _is_local_repository(self, repo):
+ """
+ posix absolute paths start with os.path.sep,
+ win32 ones start with drive (like c:\\folder)
+ """
+ drive, tail = os.path.splitdrive(repo)
+ return repo.startswith(os.path.sep) or drive
+
+ # See issue #1083 for why this method was introduced:
+ # https://github.com/pypa/pip/issues/1083
+ def translate_egg_surname(self, surname):
+ # For example, Django has branches of the form "stable/1.7.x".
+ return surname.replace('/', '_')
+
+ def export(self, location):
+ """
+ Export the repository at the url to the destination location,
+ i.e. only download the files, without vcs information
+ """
+ raise NotImplementedError
+
+ def get_url_rev(self):
+ """
+ Returns the correct repository URL and revision by parsing the given
+ repository URL
+ """
+ error_message = (
+ "Sorry, '%s' is a malformed VCS url. "
+ "The format is +://, "
+ "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp"
+ )
+ assert '+' in self.url, error_message % self.url
+ url = self.url.split('+', 1)[1]
+ scheme, netloc, path, query, frag = urllib_parse.urlsplit(url)
+ rev = None
+ if '@' in path:
+ path, rev = path.rsplit('@', 1)
+ url = urllib_parse.urlunsplit((scheme, netloc, path, query, ''))
+ return url, rev
+
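+ # For example (illustrative, not in pip's docstring): with
+ # self.url = 'svn+https://host/repo@2019#egg=MyApp', get_url_rev()
+ # drops the 'svn+' prefix and splits off the revision, returning
+ # ('https://host/repo', '2019'); the fragment is discarded because
+ # urlunsplit is handed an empty fragment.
+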
+ def get_info(self, location):
+ """
+ Returns (url, revision), where both are strings
+ """
+ assert not location.rstrip('/').endswith(self.dirname), \
+ 'Bad directory: %s' % location
+ return self.get_url(location), self.get_revision(location)
+
+ def normalize_url(self, url):
+ """
+ Normalize a URL for comparison by unquoting it and removing any
+ trailing slash.
+ """
+ return urllib_parse.unquote(url).rstrip('/')
+
+ def compare_urls(self, url1, url2):
+ """
+ Compare two repo URLs for identity, ignoring incidental differences.
+ """
+ return (self.normalize_url(url1) == self.normalize_url(url2))
+
+ def obtain(self, dest):
+ """
+ Called when installing or updating an editable package, takes the
+ source path of the checkout.
+ """
+ raise NotImplementedError
+
+ def switch(self, dest, url, rev_options):
+ """
+ Switch the repo at ``dest`` to point to ``URL``.
+
+ Args:
+ rev_options: a RevOptions object.
+ """
+ raise NotImplementedError
+
+ def update(self, dest, rev_options):
+ """
+ Update an already-existing repo to the given ``rev_options``.
+
+ Args:
+ rev_options: a RevOptions object.
+ """
+ raise NotImplementedError
+
+ def is_commit_id_equal(self, dest, name):
+ """
+ Return whether the id of the current commit equals the given name.
+
+ Args:
+ dest: the repository directory.
+ name: a string name.
+ """
+ raise NotImplementedError
+
+ def check_destination(self, dest, url, rev_options):
+ """
+ Prepare a location to receive a checkout/clone.
+
+ Return True if the location is ready for (and requires) a
+ checkout/clone, False otherwise.
+
+ Args:
+ rev_options: a RevOptions object.
+ """
+ checkout = True
+ prompt = False
+ rev_display = rev_options.to_display()
+ if os.path.exists(dest):
+ checkout = False
+ if os.path.exists(os.path.join(dest, self.dirname)):
+ existing_url = self.get_url(dest)
+ if self.compare_urls(existing_url, url):
+ logger.debug(
+ '%s in %s exists, and has correct URL (%s)',
+ self.repo_name.title(),
+ display_path(dest),
+ url,
+ )
+ if not self.is_commit_id_equal(dest, rev_options.rev):
+ logger.info(
+ 'Updating %s %s%s',
+ display_path(dest),
+ self.repo_name,
+ rev_display,
+ )
+ self.update(dest, rev_options)
+ else:
+ logger.info(
+ 'Skipping because already up-to-date.')
+ else:
+ logger.warning(
+ '%s %s in %s exists with URL %s',
+ self.name,
+ self.repo_name,
+ display_path(dest),
+ existing_url,
+ )
+ prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
+ ('s', 'i', 'w', 'b'))
+ else:
+ logger.warning(
+ 'Directory %s already exists, and is not a %s %s.',
+ dest,
+ self.name,
+ self.repo_name,
+ )
+ prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
+ if prompt:
+ logger.warning(
+ 'The plan is to install the %s repository %s',
+ self.name,
+ url,
+ )
+ response = ask_path_exists('What to do? %s' % prompt[0],
+ prompt[1])
+
+ if response == 's':
+ logger.info(
+ 'Switching %s %s to %s%s',
+ self.repo_name,
+ display_path(dest),
+ url,
+ rev_display,
+ )
+ self.switch(dest, url, rev_options)
+ elif response == 'i':
+ # do nothing
+ pass
+ elif response == 'w':
+ logger.warning('Deleting %s', display_path(dest))
+ rmtree(dest)
+ checkout = True
+ elif response == 'b':
+ dest_dir = backup_dir(dest)
+ logger.warning(
+ 'Backing up %s to %s', display_path(dest), dest_dir,
+ )
+ shutil.move(dest, dest_dir)
+ checkout = True
+ elif response == 'a':
+ sys.exit(-1)
+ return checkout
+
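+ # Flow sketch (illustrative): a fresh path returns True (clone needed);
+ # an existing checkout whose URL matches is updated in place and returns
+ # False; on a URL mismatch the user is prompted to (s)witch, (i)gnore,
+ # (w)ipe or (b)ackup, and only wipe/backup leave a checkout to be done.
+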
+ def unpack(self, location):
+ """
+ Clean up current location and download the url repository
+ (and vcs info) into location
+ """
+ if os.path.exists(location):
+ rmtree(location)
+ self.obtain(location)
+
+ def get_src_requirement(self, dist, location):
+ """
+ Return a string representing the requirement needed to
+ redownload the files currently present in location, something
+ like:
+ {repository_url}@{revision}#egg={project_name}-{version_identifier}
+ """
+ raise NotImplementedError
+
+ def get_url(self, location):
+ """
+ Return the url used at location
+ Used in get_info or check_destination
+ """
+ raise NotImplementedError
+
+ def get_revision(self, location):
+ """
+ Return the current commit id of the files at the given location.
+ """
+ raise NotImplementedError
+
+ def run_command(self, cmd, show_stdout=True, cwd=None,
+ on_returncode='raise',
+ command_desc=None,
+ extra_environ=None, spinner=None):
+ """
+ Run a VCS subcommand
+ This is simply a wrapper around call_subprocess that adds the VCS
+ command name, and checks that the VCS is available
+ """
+ cmd = [self.name] + cmd
+ try:
+ return call_subprocess(cmd, show_stdout, cwd,
+ on_returncode,
+ command_desc, extra_environ,
+ unset_environ=self.unset_environ,
+ spinner=spinner)
+ except OSError as e:
+ # errno.ENOENT = no such file or directory
+ # In other words, the VCS executable isn't available
+ if e.errno == errno.ENOENT:
+ raise BadCommand(
+ 'Cannot find command %r - do you have '
+ '%r installed and in your '
+ 'PATH?' % (self.name, self.name))
+ else:
+ raise # re-raise exception if a different error occurred
+
+ @classmethod
+ def controls_location(cls, location):
+ """
+ Check if a location is controlled by the vcs.
+ It is meant to be overridden to implement smarter detection
+ mechanisms for specific vcs.
+ """
+ logger.debug('Checking in %s for %s (%s)...',
+ location, cls.dirname, cls.name)
+ path = os.path.join(location, cls.dirname)
+ return os.path.exists(path)
+
+
+def get_src_requirement(dist, location):
+ version_control = vcs.get_backend_from_location(location)
+ if version_control:
+ try:
+ return version_control().get_src_requirement(dist,
+ location)
+ except BadCommand:
+ logger.warning(
+ 'cannot determine version of editable source in %s '
+ '(%s command not found in path)',
+ location,
+ version_control.name,
+ )
+ return dist.as_requirement()
+ logger.warning(
+ 'cannot determine version of editable source in %s (is not SVN '
+ 'checkout, Git clone, Mercurial clone or Bazaar branch)',
+ location,
+ )
+ return dist.as_requirement()
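+
+# Usage sketch (illustrative, hypothetical URL): for an editable Git
+# checkout, get_src_requirement(dist, location) would typically return a
+# pinned requirement such as
+#     git+https://github.com/user/project@0123abc#egg=project
+# and falls back to dist.as_requirement() when no backend claims the path.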
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/vcs/bazaar.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/vcs/bazaar.py
index b4e46e0..6ed629a 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/vcs/bazaar.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/vcs/bazaar.py
@@ -1,113 +1,113 @@
-from __future__ import absolute_import
-
-import logging
-import os
-
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-
-from pip._internal.download import path_to_url
-from pip._internal.utils.misc import display_path, rmtree
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.vcs import VersionControl, vcs
-
-logger = logging.getLogger(__name__)
-
-
-class Bazaar(VersionControl):
- name = 'bzr'
- dirname = '.bzr'
- repo_name = 'branch'
- schemes = (
- 'bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp',
- 'bzr+lp',
- )
-
- def __init__(self, url=None, *args, **kwargs):
- super(Bazaar, self).__init__(url, *args, **kwargs)
- # This is only needed for python <2.7.5
- # Register lp but do not expose as a scheme to support bzr+lp.
- if getattr(urllib_parse, 'uses_fragment', None):
- urllib_parse.uses_fragment.extend(['lp'])
-
- def get_base_rev_args(self, rev):
- return ['-r', rev]
-
- def export(self, location):
- """
- Export the Bazaar repository at the url to the destination location
- """
- # Remove the location to make sure Bazaar can export it correctly
- if os.path.exists(location):
- rmtree(location)
-
- with TempDirectory(kind="export") as temp_dir:
- self.unpack(temp_dir.path)
-
- self.run_command(
- ['export', location],
- cwd=temp_dir.path, show_stdout=False,
- )
-
- def switch(self, dest, url, rev_options):
- self.run_command(['switch', url], cwd=dest)
-
- def update(self, dest, rev_options):
- cmd_args = ['pull', '-q'] + rev_options.to_args()
- self.run_command(cmd_args, cwd=dest)
-
- def obtain(self, dest):
- url, rev = self.get_url_rev()
- rev_options = self.make_rev_options(rev)
- if self.check_destination(dest, url, rev_options):
- rev_display = rev_options.to_display()
- logger.info(
- 'Checking out %s%s to %s',
- url,
- rev_display,
- display_path(dest),
- )
- cmd_args = ['branch', '-q'] + rev_options.to_args() + [url, dest]
- self.run_command(cmd_args)
-
- def get_url_rev(self):
- # hotfix the URL scheme after removing bzr+ from bzr+ssh://; re-add it
- url, rev = super(Bazaar, self).get_url_rev()
- if url.startswith('ssh://'):
- url = 'bzr+' + url
- return url, rev
-
- def get_url(self, location):
- urls = self.run_command(['info'], show_stdout=False, cwd=location)
- for line in urls.splitlines():
- line = line.strip()
- for x in ('checkout of branch: ',
- 'parent branch: '):
- if line.startswith(x):
- repo = line.split(x)[1]
- if self._is_local_repository(repo):
- return path_to_url(repo)
- return repo
- return None
-
- def get_revision(self, location):
- revision = self.run_command(
- ['revno'], show_stdout=False, cwd=location,
- )
- return revision.splitlines()[-1]
-
- def get_src_requirement(self, dist, location):
- repo = self.get_url(location)
- if not repo:
- return None
- if not repo.lower().startswith('bzr:'):
- repo = 'bzr+' + repo
- egg_project_name = dist.egg_name().split('-', 1)[0]
- current_rev = self.get_revision(location)
- return '%s@%s#egg=%s' % (repo, current_rev, egg_project_name)
-
- def is_commit_id_equal(self, dest, name):
- """Always assume the versions don't match"""
- return False
-
-
-vcs.register(Bazaar)
+from __future__ import absolute_import
+
+import logging
+import os
+
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+
+from pip._internal.download import path_to_url
+from pip._internal.utils.misc import display_path, rmtree
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.vcs import VersionControl, vcs
+
+logger = logging.getLogger(__name__)
+
+
+class Bazaar(VersionControl):
+ name = 'bzr'
+ dirname = '.bzr'
+ repo_name = 'branch'
+ schemes = (
+ 'bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp',
+ 'bzr+lp',
+ )
+
+ def __init__(self, url=None, *args, **kwargs):
+ super(Bazaar, self).__init__(url, *args, **kwargs)
+ # This is only needed for python <2.7.5
+ # Register lp but do not expose as a scheme to support bzr+lp.
+ if getattr(urllib_parse, 'uses_fragment', None):
+ urllib_parse.uses_fragment.extend(['lp'])
+
+ def get_base_rev_args(self, rev):
+ return ['-r', rev]
+
+ def export(self, location):
+ """
+ Export the Bazaar repository at the url to the destination location
+ """
+ # Remove the location to make sure Bazaar can export it correctly
+ if os.path.exists(location):
+ rmtree(location)
+
+ with TempDirectory(kind="export") as temp_dir:
+ self.unpack(temp_dir.path)
+
+ self.run_command(
+ ['export', location],
+ cwd=temp_dir.path, show_stdout=False,
+ )
+
+ def switch(self, dest, url, rev_options):
+ self.run_command(['switch', url], cwd=dest)
+
+ def update(self, dest, rev_options):
+ cmd_args = ['pull', '-q'] + rev_options.to_args()
+ self.run_command(cmd_args, cwd=dest)
+
+ def obtain(self, dest):
+ url, rev = self.get_url_rev()
+ rev_options = self.make_rev_options(rev)
+ if self.check_destination(dest, url, rev_options):
+ rev_display = rev_options.to_display()
+ logger.info(
+ 'Checking out %s%s to %s',
+ url,
+ rev_display,
+ display_path(dest),
+ )
+ cmd_args = ['branch', '-q'] + rev_options.to_args() + [url, dest]
+ self.run_command(cmd_args)
+
+ def get_url_rev(self):
+ # hotfix the URL scheme after removing bzr+ from bzr+ssh://; re-add it
+ url, rev = super(Bazaar, self).get_url_rev()
+ if url.startswith('ssh://'):
+ url = 'bzr+' + url
+ return url, rev
+
+ def get_url(self, location):
+ urls = self.run_command(['info'], show_stdout=False, cwd=location)
+ for line in urls.splitlines():
+ line = line.strip()
+ for x in ('checkout of branch: ',
+ 'parent branch: '):
+ if line.startswith(x):
+ repo = line.split(x)[1]
+ if self._is_local_repository(repo):
+ return path_to_url(repo)
+ return repo
+ return None
+
+ def get_revision(self, location):
+ revision = self.run_command(
+ ['revno'], show_stdout=False, cwd=location,
+ )
+ return revision.splitlines()[-1]
+
+ def get_src_requirement(self, dist, location):
+ repo = self.get_url(location)
+ if not repo:
+ return None
+ if not repo.lower().startswith('bzr:'):
+ repo = 'bzr+' + repo
+ egg_project_name = dist.egg_name().split('-', 1)[0]
+ current_rev = self.get_revision(location)
+ return '%s@%s#egg=%s' % (repo, current_rev, egg_project_name)
+
+ def is_commit_id_equal(self, dest, name):
+ """Always assume the versions don't match"""
+ return False
+
+
+vcs.register(Bazaar)
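+
+# Usage sketch (illustrative, hypothetical URL): for a branch of
+# https://host/proj, Bazaar.get_src_requirement() yields a string of the
+# form 'bzr+https://host/proj@42#egg=proj', where 42 comes from
+# `bzr revno` via get_revision() above.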
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/vcs/git.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/vcs/git.py
index 33c6806..7a63dfa 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/vcs/git.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/vcs/git.py
@@ -1,311 +1,311 @@
-from __future__ import absolute_import
-
-import logging
-import os.path
-import re
-
-from pip._vendor.packaging.version import parse as parse_version
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-from pip._vendor.six.moves.urllib import request as urllib_request
-
-from pip._internal.compat import samefile
-from pip._internal.exceptions import BadCommand
-from pip._internal.utils.misc import display_path
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.vcs import VersionControl, vcs
-
-urlsplit = urllib_parse.urlsplit
-urlunsplit = urllib_parse.urlunsplit
-
-
-logger = logging.getLogger(__name__)
-
-
-HASH_REGEX = re.compile('[a-fA-F0-9]{40}')
-
-
-def looks_like_hash(sha):
- return bool(HASH_REGEX.match(sha))
-
-
-class Git(VersionControl):
- name = 'git'
- dirname = '.git'
- repo_name = 'clone'
- schemes = (
- 'git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file',
- )
- # Prevent the user's environment variables from interfering with pip:
- # https://github.com/pypa/pip/issues/1130
- unset_environ = ('GIT_DIR', 'GIT_WORK_TREE')
- default_arg_rev = 'HEAD'
-
- def __init__(self, url=None, *args, **kwargs):
-
- # Works around an apparent Git bug
- # (see http://article.gmane.org/gmane.comp.version-control.git/146500)
- if url:
- scheme, netloc, path, query, fragment = urlsplit(url)
- if scheme.endswith('file'):
- initial_slashes = path[:-len(path.lstrip('/'))]
- newpath = (
- initial_slashes +
- urllib_request.url2pathname(path)
- .replace('\\', '/').lstrip('/')
- )
- url = urlunsplit((scheme, netloc, newpath, query, fragment))
- after_plus = scheme.find('+') + 1
- url = scheme[:after_plus] + urlunsplit(
- (scheme[after_plus:], netloc, newpath, query, fragment),
- )
-
- super(Git, self).__init__(url, *args, **kwargs)
-
- def get_base_rev_args(self, rev):
- return [rev]
-
- def get_git_version(self):
- VERSION_PFX = 'git version '
- version = self.run_command(['version'], show_stdout=False)
- if version.startswith(VERSION_PFX):
- version = version[len(VERSION_PFX):].split()[0]
- else:
- version = ''
- # get first 3 positions of the git version because
- # on windows it is x.y.z.windows.t, and this parses as
- # LegacyVersion, which is always smaller than a Version.
- version = '.'.join(version.split('.')[:3])
- return parse_version(version)
-
- def export(self, location):
- """Export the Git repository at the url to the destination location"""
- if not location.endswith('/'):
- location = location + '/'
-
- with TempDirectory(kind="export") as temp_dir:
- self.unpack(temp_dir.path)
- self.run_command(
- ['checkout-index', '-a', '-f', '--prefix', location],
- show_stdout=False, cwd=temp_dir.path
- )
-
- def get_revision_sha(self, dest, rev):
- """
- Return a commit hash for the given revision if it names a remote
- branch or tag. Otherwise, return None.
-
- Args:
- dest: the repository directory.
- rev: the revision name.
- """
- # Pass rev to pre-filter the list.
- output = self.run_command(['show-ref', rev], cwd=dest,
- show_stdout=False, on_returncode='ignore')
- refs = {}
- for line in output.strip().splitlines():
- try:
- sha, ref = line.split()
- except ValueError:
- # Include the offending line to simplify troubleshooting if
- # this error ever occurs.
- raise ValueError('unexpected show-ref line: {!r}'.format(line))
-
- refs[ref] = sha
-
- branch_ref = 'refs/remotes/origin/{}'.format(rev)
- tag_ref = 'refs/tags/{}'.format(rev)
-
- return refs.get(branch_ref) or refs.get(tag_ref)
-
- def check_rev_options(self, dest, rev_options):
- """Check the revision options before checkout.
-
- Returns a new RevOptions object for the SHA1 of the branch or tag
- if found.
-
- Args:
- rev_options: a RevOptions object.
- """
- rev = rev_options.arg_rev
- sha = self.get_revision_sha(dest, rev)
-
- if sha is not None:
- return rev_options.make_new(sha)
-
- # Do not show a warning for the common case of something that has
- # the form of a Git commit hash.
- if not looks_like_hash(rev):
- logger.warning(
- "Did not find branch or tag '%s', assuming revision or ref.",
- rev,
- )
- return rev_options
-
- def is_commit_id_equal(self, dest, name):
- """
- Return whether the current commit hash equals the given name.
-
- Args:
- dest: the repository directory.
- name: a string name.
- """
- if not name:
- # Then avoid an unnecessary subprocess call.
- return False
-
- return self.get_revision(dest) == name
-
- def switch(self, dest, url, rev_options):
- self.run_command(['config', 'remote.origin.url', url], cwd=dest)
- cmd_args = ['checkout', '-q'] + rev_options.to_args()
- self.run_command(cmd_args, cwd=dest)
-
- self.update_submodules(dest)
-
- def update(self, dest, rev_options):
- # First fetch changes from the default remote
- if self.get_git_version() >= parse_version('1.9.0'):
- # fetch tags in addition to everything else
- self.run_command(['fetch', '-q', '--tags'], cwd=dest)
- else:
- self.run_command(['fetch', '-q'], cwd=dest)
- # Then reset to wanted revision (maybe even origin/master)
- rev_options = self.check_rev_options(dest, rev_options)
- cmd_args = ['reset', '--hard', '-q'] + rev_options.to_args()
- self.run_command(cmd_args, cwd=dest)
- #: update submodules
- self.update_submodules(dest)
-
- def obtain(self, dest):
- url, rev = self.get_url_rev()
- rev_options = self.make_rev_options(rev)
- if self.check_destination(dest, url, rev_options):
- rev_display = rev_options.to_display()
- logger.info(
- 'Cloning %s%s to %s', url, rev_display, display_path(dest),
- )
- self.run_command(['clone', '-q', url, dest])
-
- if rev:
- rev_options = self.check_rev_options(dest, rev_options)
- # Only do a checkout if the current commit id doesn't match
- # the requested revision.
- if not self.is_commit_id_equal(dest, rev_options.rev):
- rev = rev_options.rev
- # Only fetch the revision if it's a ref
- if rev.startswith('refs/'):
- self.run_command(
- ['fetch', '-q', url] + rev_options.to_args(),
- cwd=dest,
- )
- # Change the revision to the SHA of the ref we fetched
- rev = 'FETCH_HEAD'
- self.run_command(['checkout', '-q', rev], cwd=dest)
-
- #: repo may contain submodules
- self.update_submodules(dest)
-
- def get_url(self, location):
- """Return URL of the first remote encountered."""
- remotes = self.run_command(
- ['config', '--get-regexp', r'remote\..*\.url'],
- show_stdout=False, cwd=location,
- )
- remotes = remotes.splitlines()
- found_remote = remotes[0]
- for remote in remotes:
- if remote.startswith('remote.origin.url '):
- found_remote = remote
- break
- url = found_remote.split(' ')[1]
- return url.strip()
-
- def get_revision(self, location):
- current_rev = self.run_command(
- ['rev-parse', 'HEAD'], show_stdout=False, cwd=location,
- )
- return current_rev.strip()
-
- def _get_subdirectory(self, location):
- """Return the relative path of setup.py to the git repo root."""
- # find the repo root
- git_dir = self.run_command(['rev-parse', '--git-dir'],
- show_stdout=False, cwd=location).strip()
- if not os.path.isabs(git_dir):
- git_dir = os.path.join(location, git_dir)
- root_dir = os.path.join(git_dir, '..')
- # find setup.py
- orig_location = location
- while not os.path.exists(os.path.join(location, 'setup.py')):
- last_location = location
- location = os.path.dirname(location)
- if location == last_location:
- # We've traversed up to the root of the filesystem without
- # finding setup.py
- logger.warning(
- "Could not find setup.py for directory %s (tried all "
- "parent directories)",
- orig_location,
- )
- return None
- # relative path of setup.py to repo root
- if samefile(root_dir, location):
- return None
- return os.path.relpath(location, root_dir)
-
- def get_src_requirement(self, dist, location):
- repo = self.get_url(location)
- if not repo.lower().startswith('git:'):
- repo = 'git+' + repo
- egg_project_name = dist.egg_name().split('-', 1)[0]
- if not repo:
- return None
- current_rev = self.get_revision(location)
- req = '%s@%s#egg=%s' % (repo, current_rev, egg_project_name)
- subdirectory = self._get_subdirectory(location)
- if subdirectory:
- req += '&subdirectory=' + subdirectory
- return req
-
- def get_url_rev(self):
- """
- Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
- That's required because although they use SSH they sometimes don't
- work with an ssh:// scheme (e.g. GitHub). But we need a scheme for
- parsing. Hence we remove it again afterwards and return it as a stub.
- """
- if '://' not in self.url:
- assert 'file:' not in self.url
- self.url = self.url.replace('git+', 'git+ssh://')
- url, rev = super(Git, self).get_url_rev()
- url = url.replace('ssh://', '')
- else:
- url, rev = super(Git, self).get_url_rev()
-
- return url, rev
-
- def update_submodules(self, location):
- if not os.path.exists(os.path.join(location, '.gitmodules')):
- return
- self.run_command(
- ['submodule', 'update', '--init', '--recursive', '-q'],
- cwd=location,
- )
-
- @classmethod
- def controls_location(cls, location):
- if super(Git, cls).controls_location(location):
- return True
- try:
- r = cls().run_command(['rev-parse'],
- cwd=location,
- show_stdout=False,
- on_returncode='ignore')
- return not r
- except BadCommand:
- logger.debug("could not determine if %s is under git control "
- "because git is not available", location)
- return False
-
-
-vcs.register(Git)
+from __future__ import absolute_import
+
+import logging
+import os.path
+import re
+
+from pip._vendor.packaging.version import parse as parse_version
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+from pip._vendor.six.moves.urllib import request as urllib_request
+
+from pip._internal.compat import samefile
+from pip._internal.exceptions import BadCommand
+from pip._internal.utils.misc import display_path
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.vcs import VersionControl, vcs
+
+urlsplit = urllib_parse.urlsplit
+urlunsplit = urllib_parse.urlunsplit
+
+
+logger = logging.getLogger(__name__)
+
+
+HASH_REGEX = re.compile('[a-fA-F0-9]{40}')
+
+
+def looks_like_hash(sha):
+ return bool(HASH_REGEX.match(sha))
+
+
+class Git(VersionControl):
+ name = 'git'
+ dirname = '.git'
+ repo_name = 'clone'
+ schemes = (
+ 'git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file',
+ )
+ # Prevent the user's environment variables from interfering with pip:
+ # https://github.com/pypa/pip/issues/1130
+ unset_environ = ('GIT_DIR', 'GIT_WORK_TREE')
+ default_arg_rev = 'HEAD'
+
+ def __init__(self, url=None, *args, **kwargs):
+
+ # Works around an apparent Git bug
+ # (see http://article.gmane.org/gmane.comp.version-control.git/146500)
+ if url:
+ scheme, netloc, path, query, fragment = urlsplit(url)
+ if scheme.endswith('file'):
+ initial_slashes = path[:-len(path.lstrip('/'))]
+ newpath = (
+ initial_slashes +
+ urllib_request.url2pathname(path)
+ .replace('\\', '/').lstrip('/')
+ )
+ url = urlunsplit((scheme, netloc, newpath, query, fragment))
+ after_plus = scheme.find('+') + 1
+ url = scheme[:after_plus] + urlunsplit(
+ (scheme[after_plus:], netloc, newpath, query, fragment),
+ )
+
+ super(Git, self).__init__(url, *args, **kwargs)
+
+ def get_base_rev_args(self, rev):
+ return [rev]
+
+ def get_git_version(self):
+ VERSION_PFX = 'git version '
+ version = self.run_command(['version'], show_stdout=False)
+ if version.startswith(VERSION_PFX):
+ version = version[len(VERSION_PFX):].split()[0]
+ else:
+ version = ''
+ # get first 3 positions of the git version because
+ # on windows it is x.y.z.windows.t, and this parses as
+ # LegacyVersion, which is always smaller than a Version.
+ version = '.'.join(version.split('.')[:3])
+ return parse_version(version)
+
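+ # For instance (illustrative): on Windows `git version` may report
+ # 'git version 2.20.1.windows.1'; keeping only the first three
+ # components gives parse_version('2.20.1'), a proper Version that
+ # compares correctly against parse_version('1.9.0') in update() below.
+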
+ def export(self, location):
+ """Export the Git repository at the url to the destination location"""
+ if not location.endswith('/'):
+ location = location + '/'
+
+ with TempDirectory(kind="export") as temp_dir:
+ self.unpack(temp_dir.path)
+ self.run_command(
+ ['checkout-index', '-a', '-f', '--prefix', location],
+ show_stdout=False, cwd=temp_dir.path
+ )
+
+ def get_revision_sha(self, dest, rev):
+ """
+ Return a commit hash for the given revision if it names a remote
+ branch or tag. Otherwise, return None.
+
+ Args:
+ dest: the repository directory.
+ rev: the revision name.
+ """
+ # Pass rev to pre-filter the list.
+ output = self.run_command(['show-ref', rev], cwd=dest,
+ show_stdout=False, on_returncode='ignore')
+ refs = {}
+ for line in output.strip().splitlines():
+ try:
+ sha, ref = line.split()
+ except ValueError:
+ # Include the offending line to simplify troubleshooting if
+ # this error ever occurs.
+ raise ValueError('unexpected show-ref line: {!r}'.format(line))
+
+ refs[ref] = sha
+
+ branch_ref = 'refs/remotes/origin/{}'.format(rev)
+ tag_ref = 'refs/tags/{}'.format(rev)
+
+ return refs.get(branch_ref) or refs.get(tag_ref)
+
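+ # Parsing sketch (illustrative): `git show-ref v1.0` can print a line
+ # like '0123abc refs/tags/v1.0', so refs maps 'refs/tags/v1.0' to
+ # '0123abc' and the tag_ref lookup returns that SHA; an unknown rev
+ # yields None.
+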
+ def check_rev_options(self, dest, rev_options):
+ """Check the revision options before checkout.
+
+ Returns a new RevOptions object for the SHA1 of the branch or tag
+ if found.
+
+ Args:
+ rev_options: a RevOptions object.
+ """
+ rev = rev_options.arg_rev
+ sha = self.get_revision_sha(dest, rev)
+
+ if sha is not None:
+ return rev_options.make_new(sha)
+
+ # Do not show a warning for the common case of something that has
+ # the form of a Git commit hash.
+ if not looks_like_hash(rev):
+ logger.warning(
+ "Did not find branch or tag '%s', assuming revision or ref.",
+ rev,
+ )
+ return rev_options
+
+ def is_commit_id_equal(self, dest, name):
+ """
+ Return whether the current commit hash equals the given name.
+
+ Args:
+ dest: the repository directory.
+ name: a string name.
+ """
+ if not name:
+ # Then avoid an unnecessary subprocess call.
+ return False
+
+ return self.get_revision(dest) == name
+
+ def switch(self, dest, url, rev_options):
+ self.run_command(['config', 'remote.origin.url', url], cwd=dest)
+ cmd_args = ['checkout', '-q'] + rev_options.to_args()
+ self.run_command(cmd_args, cwd=dest)
+
+ self.update_submodules(dest)
+
+ def update(self, dest, rev_options):
+ # First fetch changes from the default remote
+ if self.get_git_version() >= parse_version('1.9.0'):
+ # fetch tags in addition to everything else
+ self.run_command(['fetch', '-q', '--tags'], cwd=dest)
+ else:
+ self.run_command(['fetch', '-q'], cwd=dest)
+ # Then reset to wanted revision (maybe even origin/master)
+ rev_options = self.check_rev_options(dest, rev_options)
+ cmd_args = ['reset', '--hard', '-q'] + rev_options.to_args()
+ self.run_command(cmd_args, cwd=dest)
+ #: update submodules
+ self.update_submodules(dest)
+
+ def obtain(self, dest):
+ url, rev = self.get_url_rev()
+ rev_options = self.make_rev_options(rev)
+ if self.check_destination(dest, url, rev_options):
+ rev_display = rev_options.to_display()
+ logger.info(
+ 'Cloning %s%s to %s', url, rev_display, display_path(dest),
+ )
+ self.run_command(['clone', '-q', url, dest])
+
+ if rev:
+ rev_options = self.check_rev_options(dest, rev_options)
+ # Only do a checkout if the current commit id doesn't match
+ # the requested revision.
+ if not self.is_commit_id_equal(dest, rev_options.rev):
+ rev = rev_options.rev
+ # Only fetch the revision if it's a ref
+ if rev.startswith('refs/'):
+ self.run_command(
+ ['fetch', '-q', url] + rev_options.to_args(),
+ cwd=dest,
+ )
+ # Change the revision to the SHA of the ref we fetched
+ rev = 'FETCH_HEAD'
+ self.run_command(['checkout', '-q', rev], cwd=dest)
+
+ #: repo may contain submodules
+ self.update_submodules(dest)
+
+ def get_url(self, location):
+ """Return URL of the first remote encountered."""
+ remotes = self.run_command(
+ ['config', '--get-regexp', r'remote\..*\.url'],
+ show_stdout=False, cwd=location,
+ )
+ remotes = remotes.splitlines()
+ found_remote = remotes[0]
+ for remote in remotes:
+ if remote.startswith('remote.origin.url '):
+ found_remote = remote
+ break
+ url = found_remote.split(' ')[1]
+ return url.strip()
+
+ def get_revision(self, location):
+ current_rev = self.run_command(
+ ['rev-parse', 'HEAD'], show_stdout=False, cwd=location,
+ )
+ return current_rev.strip()
+
+ def _get_subdirectory(self, location):
+ """Return the relative path of setup.py to the git repo root."""
+ # find the repo root
+ git_dir = self.run_command(['rev-parse', '--git-dir'],
+ show_stdout=False, cwd=location).strip()
+ if not os.path.isabs(git_dir):
+ git_dir = os.path.join(location, git_dir)
+ root_dir = os.path.join(git_dir, '..')
+ # find setup.py
+ orig_location = location
+ while not os.path.exists(os.path.join(location, 'setup.py')):
+ last_location = location
+ location = os.path.dirname(location)
+ if location == last_location:
+ # We've traversed up to the root of the filesystem without
+ # finding setup.py
+ logger.warning(
+ "Could not find setup.py for directory %s (tried all "
+ "parent directories)",
+ orig_location,
+ )
+ return None
+ # relative path of setup.py to repo root
+ if samefile(root_dir, location):
+ return None
+ return os.path.relpath(location, root_dir)
+
+ def get_src_requirement(self, dist, location):
+ repo = self.get_url(location)
+ if not repo.lower().startswith('git:'):
+ repo = 'git+' + repo
+ egg_project_name = dist.egg_name().split('-', 1)[0]
+ if not repo:
+ return None
+ current_rev = self.get_revision(location)
+ req = '%s@%s#egg=%s' % (repo, current_rev, egg_project_name)
+ subdirectory = self._get_subdirectory(location)
+ if subdirectory:
+ req += '&subdirectory=' + subdirectory
+ return req
+
+ def get_url_rev(self):
+ """
+ Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
+ That's required because although they use SSH they sometimes don't
+ work with an ssh:// scheme (e.g. GitHub). But we need a scheme for
+ parsing. Hence we remove it again afterwards and return it as a stub.
+ """
+ if '://' not in self.url:
+ assert 'file:' not in self.url
+ self.url = self.url.replace('git+', 'git+ssh://')
+ url, rev = super(Git, self).get_url_rev()
+ url = url.replace('ssh://', '')
+ else:
+ url, rev = super(Git, self).get_url_rev()
+
+ return url, rev
+
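+ # For example (illustrative): 'git+git@github.com:user/repo.git' has no
+ # '://', so it is parsed as 'git+ssh://git@github.com:user/repo.git' and
+ # the 'ssh://' is stripped again, returning the stub
+ # ('git@github.com:user/repo.git', None).
+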
+ def update_submodules(self, location):
+ if not os.path.exists(os.path.join(location, '.gitmodules')):
+ return
+ self.run_command(
+ ['submodule', 'update', '--init', '--recursive', '-q'],
+ cwd=location,
+ )
+
+ @classmethod
+ def controls_location(cls, location):
+ if super(Git, cls).controls_location(location):
+ return True
+ try:
+ r = cls().run_command(['rev-parse'],
+ cwd=location,
+ show_stdout=False,
+ on_returncode='ignore')
+ return not r
+ except BadCommand:
+ logger.debug("could not determine if %s is under git control "
+ "because git is not available", location)
+ return False
+
+
+vcs.register(Git)
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/vcs/mercurial.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/vcs/mercurial.py
index 52a1cce..3936473 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/vcs/mercurial.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/vcs/mercurial.py
@@ -1,105 +1,105 @@
-from __future__ import absolute_import
-
-import logging
-import os
-
-from pip._vendor.six.moves import configparser
-
-from pip._internal.download import path_to_url
-from pip._internal.utils.misc import display_path
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.vcs import VersionControl, vcs
-
-logger = logging.getLogger(__name__)
-
-
-class Mercurial(VersionControl):
- name = 'hg'
- dirname = '.hg'
- repo_name = 'clone'
- schemes = ('hg', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http')
-
- def get_base_rev_args(self, rev):
- return [rev]
-
- def export(self, location):
- """Export the Hg repository at the url to the destination location"""
- with TempDirectory(kind="export") as temp_dir:
- self.unpack(temp_dir.path)
-
- self.run_command(
- ['archive', location], show_stdout=False, cwd=temp_dir.path
- )
-
- def switch(self, dest, url, rev_options):
- repo_config = os.path.join(dest, self.dirname, 'hgrc')
- config = configparser.SafeConfigParser()
- try:
- config.read(repo_config)
- config.set('paths', 'default', url)
- with open(repo_config, 'w') as config_file:
- config.write(config_file)
- except (OSError, configparser.NoSectionError) as exc:
- logger.warning(
- 'Could not switch Mercurial repository to %s: %s', url, exc,
- )
- else:
- cmd_args = ['update', '-q'] + rev_options.to_args()
- self.run_command(cmd_args, cwd=dest)
-
- def update(self, dest, rev_options):
- self.run_command(['pull', '-q'], cwd=dest)
- cmd_args = ['update', '-q'] + rev_options.to_args()
- self.run_command(cmd_args, cwd=dest)
-
- def obtain(self, dest):
- url, rev = self.get_url_rev()
- rev_options = self.make_rev_options(rev)
- if self.check_destination(dest, url, rev_options):
- rev_display = rev_options.to_display()
- logger.info(
- 'Cloning hg %s%s to %s',
- url,
- rev_display,
- display_path(dest),
- )
- self.run_command(['clone', '--noupdate', '-q', url, dest])
- cmd_args = ['update', '-q'] + rev_options.to_args()
- self.run_command(cmd_args, cwd=dest)
-
- def get_url(self, location):
- url = self.run_command(
- ['showconfig', 'paths.default'],
- show_stdout=False, cwd=location).strip()
- if self._is_local_repository(url):
- url = path_to_url(url)
- return url.strip()
-
- def get_revision(self, location):
- current_revision = self.run_command(
- ['parents', '--template={rev}'],
- show_stdout=False, cwd=location).strip()
- return current_revision
-
- def get_revision_hash(self, location):
- current_rev_hash = self.run_command(
- ['parents', '--template={node}'],
- show_stdout=False, cwd=location).strip()
- return current_rev_hash
-
- def get_src_requirement(self, dist, location):
- repo = self.get_url(location)
- if not repo.lower().startswith('hg:'):
- repo = 'hg+' + repo
- egg_project_name = dist.egg_name().split('-', 1)[0]
- if not repo:
- return None
- current_rev_hash = self.get_revision_hash(location)
- return '%s@%s#egg=%s' % (repo, current_rev_hash, egg_project_name)
-
- def is_commit_id_equal(self, dest, name):
- """Always assume the versions don't match"""
- return False
-
-
-vcs.register(Mercurial)
+from __future__ import absolute_import
+
+import logging
+import os
+
+from pip._vendor.six.moves import configparser
+
+from pip._internal.download import path_to_url
+from pip._internal.utils.misc import display_path
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.vcs import VersionControl, vcs
+
+logger = logging.getLogger(__name__)
+
+
+class Mercurial(VersionControl):
+ name = 'hg'
+ dirname = '.hg'
+ repo_name = 'clone'
+ schemes = ('hg', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http')
+
+ def get_base_rev_args(self, rev):
+ return [rev]
+
+ def export(self, location):
+ """Export the Hg repository at the url to the destination location"""
+ with TempDirectory(kind="export") as temp_dir:
+ self.unpack(temp_dir.path)
+
+ self.run_command(
+ ['archive', location], show_stdout=False, cwd=temp_dir.path
+ )
+
+ def switch(self, dest, url, rev_options):
+ repo_config = os.path.join(dest, self.dirname, 'hgrc')
+ config = configparser.SafeConfigParser()
+ try:
+ config.read(repo_config)
+ config.set('paths', 'default', url)
+ with open(repo_config, 'w') as config_file:
+ config.write(config_file)
+ except (OSError, configparser.NoSectionError) as exc:
+ logger.warning(
+ 'Could not switch Mercurial repository to %s: %s', url, exc,
+ )
+ else:
+ cmd_args = ['update', '-q'] + rev_options.to_args()
+ self.run_command(cmd_args, cwd=dest)
+
+ def update(self, dest, rev_options):
+ self.run_command(['pull', '-q'], cwd=dest)
+ cmd_args = ['update', '-q'] + rev_options.to_args()
+ self.run_command(cmd_args, cwd=dest)
+
+ def obtain(self, dest):
+ url, rev = self.get_url_rev()
+ rev_options = self.make_rev_options(rev)
+ if self.check_destination(dest, url, rev_options):
+ rev_display = rev_options.to_display()
+ logger.info(
+ 'Cloning hg %s%s to %s',
+ url,
+ rev_display,
+ display_path(dest),
+ )
+ self.run_command(['clone', '--noupdate', '-q', url, dest])
+ cmd_args = ['update', '-q'] + rev_options.to_args()
+ self.run_command(cmd_args, cwd=dest)
+
+ def get_url(self, location):
+ url = self.run_command(
+ ['showconfig', 'paths.default'],
+ show_stdout=False, cwd=location).strip()
+ if self._is_local_repository(url):
+ url = path_to_url(url)
+ return url.strip()
+
+ def get_revision(self, location):
+ current_revision = self.run_command(
+ ['parents', '--template={rev}'],
+ show_stdout=False, cwd=location).strip()
+ return current_revision
+
+ def get_revision_hash(self, location):
+ current_rev_hash = self.run_command(
+ ['parents', '--template={node}'],
+ show_stdout=False, cwd=location).strip()
+ return current_rev_hash
+
+ def get_src_requirement(self, dist, location):
+ repo = self.get_url(location)
+ if not repo.lower().startswith('hg:'):
+ repo = 'hg+' + repo
+ egg_project_name = dist.egg_name().split('-', 1)[0]
+ if not repo:
+ return None
+ current_rev_hash = self.get_revision_hash(location)
+ return '%s@%s#egg=%s' % (repo, current_rev_hash, egg_project_name)
+
+ def is_commit_id_equal(self, dest, name):
+ """Always assume the versions don't match"""
+ return False
+
+
+vcs.register(Mercurial)
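+
+# Usage sketch (illustrative, hypothetical URL): for a clone of
+# https://host/proj, Mercurial.get_src_requirement() pins the full
+# changeset hash from get_revision_hash(), e.g.
+#     hg+https://host/proj@a1b2c3d4e5f6#egg=proj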
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/vcs/subversion.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/vcs/subversion.py
index 7f369ef..95e5440 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/vcs/subversion.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/vcs/subversion.py
@@ -1,271 +1,271 @@
-from __future__ import absolute_import
-
-import logging
-import os
-import re
-
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-
-from pip._internal.index import Link
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import display_path, rmtree
-from pip._internal.vcs import VersionControl, vcs
-
-_svn_xml_url_re = re.compile('url="([^"]+)"')
-_svn_rev_re = re.compile(r'committed-rev="(\d+)"')
-_svn_url_re = re.compile(r'URL: (.+)')
-_svn_revision_re = re.compile(r'Revision: (.+)')
-_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
-_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>')
-
-
-logger = logging.getLogger(__name__)
-
-
-class Subversion(VersionControl):
- name = 'svn'
- dirname = '.svn'
- repo_name = 'checkout'
- schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn')
-
- def get_base_rev_args(self, rev):
- return ['-r', rev]
-
- def get_info(self, location):
- """Returns (url, revision), where both are strings"""
- assert not location.rstrip('/').endswith(self.dirname), \
- 'Bad directory: %s' % location
- output = self.run_command(
- ['info', location],
- show_stdout=False,
- extra_environ={'LANG': 'C'},
- )
- match = _svn_url_re.search(output)
- if not match:
- logger.warning(
- 'Cannot determine URL of svn checkout %s',
- display_path(location),
- )
- logger.debug('Output that cannot be parsed: \n%s', output)
- return None, None
- url = match.group(1).strip()
- match = _svn_revision_re.search(output)
- if not match:
- logger.warning(
- 'Cannot determine revision of svn checkout %s',
- display_path(location),
- )
- logger.debug('Output that cannot be parsed: \n%s', output)
- return url, None
- return url, match.group(1)
-
- def export(self, location):
- """Export the svn repository at the url to the destination location"""
- url, rev = self.get_url_rev()
- rev_options = get_rev_options(self, url, rev)
- url = self.remove_auth_from_url(url)
- logger.info('Exporting svn repository %s to %s', url, location)
- with indent_log():
- if os.path.exists(location):
- # Subversion doesn't like to check out over an existing
- # directory; --force fixes this, but was only added in svn 1.5
- rmtree(location)
- cmd_args = ['export'] + rev_options.to_args() + [url, location]
- self.run_command(cmd_args, show_stdout=False)
-
- def switch(self, dest, url, rev_options):
- cmd_args = ['switch'] + rev_options.to_args() + [url, dest]
- self.run_command(cmd_args)
-
- def update(self, dest, rev_options):
- cmd_args = ['update'] + rev_options.to_args() + [dest]
- self.run_command(cmd_args)
-
- def obtain(self, dest):
- url, rev = self.get_url_rev()
- rev_options = get_rev_options(self, url, rev)
- url = self.remove_auth_from_url(url)
- if self.check_destination(dest, url, rev_options):
- rev_display = rev_options.to_display()
- logger.info(
- 'Checking out %s%s to %s',
- url,
- rev_display,
- display_path(dest),
- )
- cmd_args = ['checkout', '-q'] + rev_options.to_args() + [url, dest]
- self.run_command(cmd_args)
-
- def get_location(self, dist, dependency_links):
- for url in dependency_links:
- egg_fragment = Link(url).egg_fragment
- if not egg_fragment:
- continue
- if '-' in egg_fragment:
- # FIXME: will this work when a package has - in the name?
- key = '-'.join(egg_fragment.split('-')[:-1]).lower()
- else:
- key = egg_fragment
- if key == dist.key:
- return url.split('#', 1)[0]
- return None
-
- def get_revision(self, location):
- """
- Return the maximum revision for all files under a given location
- """
- # Note: taken from setuptools.command.egg_info
- revision = 0
-
- for base, dirs, files in os.walk(location):
- if self.dirname not in dirs:
- dirs[:] = []
- continue # no sense walking uncontrolled subdirs
- dirs.remove(self.dirname)
- entries_fn = os.path.join(base, self.dirname, 'entries')
- if not os.path.exists(entries_fn):
- # FIXME: should we warn?
- continue
-
- dirurl, localrev = self._get_svn_url_rev(base)
-
- if base == location:
- base = dirurl + '/' # save the root url
- elif not dirurl or not dirurl.startswith(base):
- dirs[:] = []
- continue # not part of the same svn tree, skip it
- revision = max(revision, localrev)
- return revision
-
- def get_url_rev(self):
- # hotfix the URL scheme after removing svn+ from svn+ssh://; re-add it
- url, rev = super(Subversion, self).get_url_rev()
- if url.startswith('ssh://'):
- url = 'svn+' + url
- return url, rev
-
- def get_url(self, location):
- # In cases where the source is in a subdirectory, not alongside
- # setup.py we have to look up in the location until we find a real
- # setup.py
- orig_location = location
- while not os.path.exists(os.path.join(location, 'setup.py')):
- last_location = location
- location = os.path.dirname(location)
- if location == last_location:
- # We've traversed up to the root of the filesystem without
- # finding setup.py
- logger.warning(
- "Could not find setup.py for directory %s (tried all "
- "parent directories)",
- orig_location,
- )
- return None
-
- return self._get_svn_url_rev(location)[0]
-
- def _get_svn_url_rev(self, location):
- from pip._internal.exceptions import InstallationError
-
- entries_path = os.path.join(location, self.dirname, 'entries')
- if os.path.exists(entries_path):
- with open(entries_path) as f:
- data = f.read()
- else: # subversion >= 1.7 does not have the 'entries' file
- data = ''
-
- if (data.startswith('8') or
- data.startswith('9') or
- data.startswith('10')):
- data = list(map(str.splitlines, data.split('\n\x0c\n')))
- del data[0][0] # get rid of the '8'
- url = data[0][3]
- revs = [int(d[9]) for d in data if len(d) > 9 and d[9]] + [0]
- elif data.startswith('<?xml'):
- match = _svn_xml_url_re.search(data)
- if not match:
- raise ValueError('Badly formatted data: %r' % data)
- url = match.group(1) # get repository URL
- revs = [int(m.group(1))
- for m in _svn_rev_re.finditer(data)] + [0]
- else:
- try:
- # subversion >= 1.7
- xml = self.run_command(
- ['info', '--xml', location],
- show_stdout=False,
- )
- url = _svn_info_xml_url_re.search(xml).group(1)
- revs = [
- int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)
- ]
- except InstallationError:
- url, revs = None, []
-
- if revs:
- rev = max(revs)
- else:
- rev = 0
-
- return url, rev
-
- def get_src_requirement(self, dist, location):
- repo = self.get_url(location)
- if repo is None:
- return None
- # FIXME: why not project name?
- egg_project_name = dist.egg_name().split('-', 1)[0]
- rev = self.get_revision(location)
- return 'svn+%s@%s#egg=%s' % (repo, rev, egg_project_name)
-
- def is_commit_id_equal(self, dest, name):
- """Always assume the versions don't match"""
- return False
-
- @staticmethod
- def remove_auth_from_url(url):
- # Return a copy of url with 'username:password@' removed.
- # username/pass params are passed to subversion through flags
- # and are not recognized in the url.
-
- # parsed url
- purl = urllib_parse.urlsplit(url)
- stripped_netloc = \
- purl.netloc.split('@')[-1]
-
- # stripped url
- url_pieces = (
- purl.scheme, stripped_netloc, purl.path, purl.query, purl.fragment
- )
- surl = urllib_parse.urlunsplit(url_pieces)
- return surl
-
-
-def get_rev_options(vcs, url, rev):
- """
- Return a RevOptions object.
- """
- r = urllib_parse.urlsplit(url)
- if hasattr(r, 'username'):
- # >= Python-2.5
- username, password = r.username, r.password
- else:
- netloc = r[1]
- if '@' in netloc:
- auth = netloc.split('@')[0]
- if ':' in auth:
- username, password = auth.split(':', 1)
- else:
- username, password = auth, None
- else:
- username, password = None, None
-
- extra_args = []
- if username:
- extra_args += ['--username', username]
- if password:
- extra_args += ['--password', password]
-
- return vcs.make_rev_options(rev, extra_args=extra_args)
-
-
-vcs.register(Subversion)
+from __future__ import absolute_import
+
+import logging
+import os
+import re
+
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+
+from pip._internal.index import Link
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import display_path, rmtree
+from pip._internal.vcs import VersionControl, vcs
+
+_svn_xml_url_re = re.compile('url="([^"]+)"')
+_svn_rev_re = re.compile(r'committed-rev="(\d+)"')
+_svn_url_re = re.compile(r'URL: (.+)')
+_svn_revision_re = re.compile(r'Revision: (.+)')
+_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
+_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>')
+
+
+logger = logging.getLogger(__name__)
+
+
+class Subversion(VersionControl):
+ name = 'svn'
+ dirname = '.svn'
+ repo_name = 'checkout'
+ schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn')
+
+ def get_base_rev_args(self, rev):
+ return ['-r', rev]
+
+ def get_info(self, location):
+ """Returns (url, revision), where both are strings"""
+ assert not location.rstrip('/').endswith(self.dirname), \
+ 'Bad directory: %s' % location
+ output = self.run_command(
+ ['info', location],
+ show_stdout=False,
+ extra_environ={'LANG': 'C'},
+ )
+ match = _svn_url_re.search(output)
+ if not match:
+ logger.warning(
+ 'Cannot determine URL of svn checkout %s',
+ display_path(location),
+ )
+ logger.debug('Output that cannot be parsed: \n%s', output)
+ return None, None
+ url = match.group(1).strip()
+ match = _svn_revision_re.search(output)
+ if not match:
+ logger.warning(
+ 'Cannot determine revision of svn checkout %s',
+ display_path(location),
+ )
+ logger.debug('Output that cannot be parsed: \n%s', output)
+ return url, None
+ return url, match.group(1)
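+    # Illustrative sketch (editor's note, not part of pip): get_info() parses
+    # the English-locale output of `svn info` (hence LANG=C), matching lines
+    # shaped like:
+    #
+    #   URL: https://svn.example.com/repo/trunk
+    #   Revision: 1234
+    #
+    # and would return ('https://svn.example.com/repo/trunk', '1234') here;
+    # svn.example.com is a hypothetical host used only for illustration.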
+
+ def export(self, location):
+ """Export the svn repository at the url to the destination location"""
+ url, rev = self.get_url_rev()
+ rev_options = get_rev_options(self, url, rev)
+ url = self.remove_auth_from_url(url)
+ logger.info('Exporting svn repository %s to %s', url, location)
+ with indent_log():
+ if os.path.exists(location):
+                # Subversion doesn't like to check out over an existing
+                # directory; --force fixes this, but was only added in svn 1.5
+ rmtree(location)
+ cmd_args = ['export'] + rev_options.to_args() + [url, location]
+ self.run_command(cmd_args, show_stdout=False)
+
+ def switch(self, dest, url, rev_options):
+ cmd_args = ['switch'] + rev_options.to_args() + [url, dest]
+ self.run_command(cmd_args)
+
+ def update(self, dest, rev_options):
+ cmd_args = ['update'] + rev_options.to_args() + [dest]
+ self.run_command(cmd_args)
+
+ def obtain(self, dest):
+ url, rev = self.get_url_rev()
+ rev_options = get_rev_options(self, url, rev)
+ url = self.remove_auth_from_url(url)
+ if self.check_destination(dest, url, rev_options):
+ rev_display = rev_options.to_display()
+ logger.info(
+ 'Checking out %s%s to %s',
+ url,
+ rev_display,
+ display_path(dest),
+ )
+ cmd_args = ['checkout', '-q'] + rev_options.to_args() + [url, dest]
+ self.run_command(cmd_args)
+
+ def get_location(self, dist, dependency_links):
+ for url in dependency_links:
+ egg_fragment = Link(url).egg_fragment
+ if not egg_fragment:
+ continue
+ if '-' in egg_fragment:
+ # FIXME: will this work when a package has - in the name?
+ key = '-'.join(egg_fragment.split('-')[:-1]).lower()
+ else:
+ key = egg_fragment
+ if key == dist.key:
+ return url.split('#', 1)[0]
+ return None
+
+ def get_revision(self, location):
+ """
+ Return the maximum revision for all files under a given location
+ """
+ # Note: taken from setuptools.command.egg_info
+ revision = 0
+
+ for base, dirs, files in os.walk(location):
+ if self.dirname not in dirs:
+ dirs[:] = []
+ continue # no sense walking uncontrolled subdirs
+ dirs.remove(self.dirname)
+ entries_fn = os.path.join(base, self.dirname, 'entries')
+ if not os.path.exists(entries_fn):
+ # FIXME: should we warn?
+ continue
+
+ dirurl, localrev = self._get_svn_url_rev(base)
+
+ if base == location:
+ base = dirurl + '/' # save the root url
+ elif not dirurl or not dirurl.startswith(base):
+ dirs[:] = []
+ continue # not part of the same svn tree, skip it
+ revision = max(revision, localrev)
+ return revision
+
+ def get_url_rev(self):
+        # hotfix the URL scheme after removing svn+ from svn+ssh://; re-add it
+ url, rev = super(Subversion, self).get_url_rev()
+ if url.startswith('ssh://'):
+ url = 'svn+' + url
+ return url, rev
+
+ def get_url(self, location):
+ # In cases where the source is in a subdirectory, not alongside
+ # setup.py we have to look up in the location until we find a real
+ # setup.py
+ orig_location = location
+ while not os.path.exists(os.path.join(location, 'setup.py')):
+ last_location = location
+ location = os.path.dirname(location)
+ if location == last_location:
+ # We've traversed up to the root of the filesystem without
+ # finding setup.py
+ logger.warning(
+ "Could not find setup.py for directory %s (tried all "
+ "parent directories)",
+ orig_location,
+ )
+ return None
+
+ return self._get_svn_url_rev(location)[0]
+
+ def _get_svn_url_rev(self, location):
+ from pip._internal.exceptions import InstallationError
+
+ entries_path = os.path.join(location, self.dirname, 'entries')
+ if os.path.exists(entries_path):
+ with open(entries_path) as f:
+ data = f.read()
+ else: # subversion >= 1.7 does not have the 'entries' file
+ data = ''
+
+ if (data.startswith('8') or
+ data.startswith('9') or
+ data.startswith('10')):
+ data = list(map(str.splitlines, data.split('\n\x0c\n')))
+ del data[0][0] # get rid of the '8'
+ url = data[0][3]
+ revs = [int(d[9]) for d in data if len(d) > 9 and d[9]] + [0]
+        elif data.startswith('<?xml'):
+            match = _svn_xml_url_re.search(data)
+            if not match:
+                raise ValueError('Badly formatted data: %r' % data)
+            url = match.group(1)    # get repository URL
+            revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]
+        else:
+            try:
+                # subversion >= 1.7
+                xml = self.run_command(
+                    ['info', '--xml', location],
+                    show_stdout=False,
+                )
+                url = _svn_info_xml_url_re.search(xml).group(1)
+                revs = [
+                    int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)
+                ]
+            except InstallationError:
+                url, revs = None, []
+
+ if revs:
+ rev = max(revs)
+ else:
+ rev = 0
+
+ return url, rev
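+    # Illustrative sketch (editor's note, not part of pip): with svn >= 1.7 the
+    # fallback above runs `svn info --xml`, whose output contains fragments
+    # roughly like:
+    #
+    #   <entry revision="1234" ...>
+    #     <url>https://svn.example.com/repo/trunk</url>
+    #   </entry>
+    #
+    # _svn_info_xml_url_re extracts the <url> text, _svn_info_xml_rev_re
+    # collects every revision="..." attribute, and max(revs) wins.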
+
+ def get_src_requirement(self, dist, location):
+ repo = self.get_url(location)
+ if repo is None:
+ return None
+ # FIXME: why not project name?
+ egg_project_name = dist.egg_name().split('-', 1)[0]
+ rev = self.get_revision(location)
+ return 'svn+%s@%s#egg=%s' % (repo, rev, egg_project_name)
+
+ def is_commit_id_equal(self, dest, name):
+ """Always assume the versions don't match"""
+ return False
+
+ @staticmethod
+ def remove_auth_from_url(url):
+ # Return a copy of url with 'username:password@' removed.
+ # username/pass params are passed to subversion through flags
+ # and are not recognized in the url.
+
+ # parsed url
+ purl = urllib_parse.urlsplit(url)
+ stripped_netloc = \
+ purl.netloc.split('@')[-1]
+
+ # stripped url
+ url_pieces = (
+ purl.scheme, stripped_netloc, purl.path, purl.query, purl.fragment
+ )
+ surl = urllib_parse.urlunsplit(url_pieces)
+ return surl
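+    # Illustrative sketch (editor's note, not part of pip): with a hypothetical
+    # host and credentials, the helper above behaves like so:
+    #
+    #   >>> Subversion.remove_auth_from_url(
+    #   ...     'svn+https://user:secret@svn.example.com/repo/trunk')
+    #   'svn+https://svn.example.com/repo/trunk'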
+
+
+def get_rev_options(vcs, url, rev):
+ """
+ Return a RevOptions object.
+ """
+ r = urllib_parse.urlsplit(url)
+ if hasattr(r, 'username'):
+ # >= Python-2.5
+ username, password = r.username, r.password
+ else:
+ netloc = r[1]
+ if '@' in netloc:
+ auth = netloc.split('@')[0]
+ if ':' in auth:
+ username, password = auth.split(':', 1)
+ else:
+ username, password = auth, None
+ else:
+ username, password = None, None
+
+ extra_args = []
+ if username:
+ extra_args += ['--username', username]
+ if password:
+ extra_args += ['--password', password]
+
+ return vcs.make_rev_options(rev, extra_args=extra_args)
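+# Illustrative sketch (editor's note, not part of pip): for a URL such as
+# 'svn+https://user:secret@host/repo' (placeholder values), urlsplit() yields
+# username 'user' and password 'secret', so get_rev_options() builds
+# extra_args == ['--username', 'user', '--password', 'secret'] and the
+# credentials never need to appear in the checkout URL itself.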
+
+
+vcs.register(Subversion)
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/wheel.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/wheel.py
index c71f17d..36459dd 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/wheel.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/wheel.py
@@ -1,817 +1,817 @@
-"""
-Support for installing and building the "wheel" binary package format.
-"""
-from __future__ import absolute_import
-
-import collections
-import compileall
-import copy
-import csv
-import hashlib
-import logging
-import os.path
-import re
-import shutil
-import stat
-import sys
-import warnings
-from base64 import urlsafe_b64encode
-from email.parser import Parser
-
-from pip._vendor import pkg_resources
-from pip._vendor.distlib.scripts import ScriptMaker
-from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.six import StringIO
-
-from pip._internal import pep425tags
-from pip._internal.build_env import BuildEnvironment
-from pip._internal.download import path_to_url, unpack_url
-from pip._internal.exceptions import (
- InstallationError, InvalidWheelFilename, UnsupportedWheel,
-)
-from pip._internal.locations import (
- PIP_DELETE_MARKER_FILENAME, distutils_scheme,
-)
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import (
- call_subprocess, captured_stdout, ensure_dir, read_chunks,
-)
-from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.utils.ui import open_spinner
-
-if MYPY_CHECK_RUNNING:
- from typing import Dict, List, Optional
-
-wheel_ext = '.whl'
-
-VERSION_COMPATIBLE = (1, 0)
-
-
-logger = logging.getLogger(__name__)
-
-
-def rehash(path, algo='sha256', blocksize=1 << 20):
- """Return (hash, length) for path using hashlib.new(algo)"""
- h = hashlib.new(algo)
- length = 0
- with open(path, 'rb') as f:
- for block in read_chunks(f, size=blocksize):
- length += len(block)
- h.update(block)
- digest = 'sha256=' + urlsafe_b64encode(
- h.digest()
- ).decode('latin1').rstrip('=')
- return (digest, length)
-
-
-def open_for_csv(name, mode):
- if sys.version_info[0] < 3:
- nl = {}
- bin = 'b'
- else:
- nl = {'newline': ''}
- bin = ''
- return open(name, mode + bin, **nl)
-
-
-def fix_script(path):
- """Replace #!python with #!/path/to/python
- Return True if file was changed."""
- # XXX RECORD hashes will need to be updated
- if os.path.isfile(path):
- with open(path, 'rb') as script:
- firstline = script.readline()
- if not firstline.startswith(b'#!python'):
- return False
- exename = sys.executable.encode(sys.getfilesystemencoding())
- firstline = b'#!' + exename + os.linesep.encode("ascii")
- rest = script.read()
- with open(path, 'wb') as script:
- script.write(firstline)
- script.write(rest)
- return True
-
-
-dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>.+?))?)
-                           \.dist-info$""", re.VERBOSE)
-
-
-def root_is_purelib(name, wheeldir):
- """
- Return True if the extracted wheel in wheeldir should go into purelib.
- """
- name_folded = name.replace("-", "_")
- for item in os.listdir(wheeldir):
- match = dist_info_re.match(item)
- if match and match.group('name') == name_folded:
- with open(os.path.join(wheeldir, item, 'WHEEL')) as wheel:
- for line in wheel:
- line = line.lower().rstrip()
- if line == "root-is-purelib: true":
- return True
- return False
-
-
-def get_entrypoints(filename):
- if not os.path.exists(filename):
- return {}, {}
-
- # This is done because you can pass a string to entry_points wrappers which
- # means that they may or may not be valid INI files. The attempt here is to
- # strip leading and trailing whitespace in order to make them valid INI
- # files.
- with open(filename) as fp:
- data = StringIO()
- for line in fp:
- data.write(line.strip())
- data.write("\n")
- data.seek(0)
-
- # get the entry points and then the script names
- entry_points = pkg_resources.EntryPoint.parse_map(data)
- console = entry_points.get('console_scripts', {})
- gui = entry_points.get('gui_scripts', {})
-
- def _split_ep(s):
- """get the string representation of EntryPoint, remove space and split
- on '='"""
- return str(s).replace(" ", "").split("=")
-
- # convert the EntryPoint objects into strings with module:function
- console = dict(_split_ep(v) for v in console.values())
- gui = dict(_split_ep(v) for v in gui.values())
- return console, gui
-
-
-def message_about_scripts_not_on_PATH(scripts):
- # type: (List[str]) -> Optional[str]
- """Determine if any scripts are not on PATH and format a warning.
-
- Returns a warning message if one or more scripts are not on PATH,
- otherwise None.
- """
- if not scripts:
- return None
-
- # Group scripts by the path they were installed in
- grouped_by_dir = collections.defaultdict(set) # type: Dict[str, set]
- for destfile in scripts:
- parent_dir = os.path.dirname(destfile)
- script_name = os.path.basename(destfile)
- grouped_by_dir[parent_dir].add(script_name)
-
- # We don't want to warn for directories that are on PATH.
- not_warn_dirs = [
- os.path.normcase(i) for i in os.environ["PATH"].split(os.pathsep)
- ]
- # If an executable sits with sys.executable, we don't warn for it.
- # This covers the case of venv invocations without activating the venv.
- not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable)))
- warn_for = {
- parent_dir: scripts for parent_dir, scripts in grouped_by_dir.items()
- if os.path.normcase(parent_dir) not in not_warn_dirs
- }
- if not warn_for:
- return None
-
- # Format a message
- msg_lines = []
- for parent_dir, scripts in warn_for.items():
- scripts = sorted(scripts)
- if len(scripts) == 1:
- start_text = "script {} is".format(scripts[0])
- else:
- start_text = "scripts {} are".format(
- ", ".join(scripts[:-1]) + " and " + scripts[-1]
- )
-
- msg_lines.append(
- "The {} installed in '{}' which is not on PATH."
- .format(start_text, parent_dir)
- )
-
- last_line_fmt = (
- "Consider adding {} to PATH or, if you prefer "
- "to suppress this warning, use --no-warn-script-location."
- )
- if len(msg_lines) == 1:
- msg_lines.append(last_line_fmt.format("this directory"))
- else:
- msg_lines.append(last_line_fmt.format("these directories"))
-
- # Returns the formatted multiline message
- return "\n".join(msg_lines)
-
-
-def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None,
- pycompile=True, scheme=None, isolated=False, prefix=None,
- warn_script_location=True):
- """Install a wheel"""
-
- if not scheme:
- scheme = distutils_scheme(
- name, user=user, home=home, root=root, isolated=isolated,
- prefix=prefix,
- )
-
- if root_is_purelib(name, wheeldir):
- lib_dir = scheme['purelib']
- else:
- lib_dir = scheme['platlib']
-
- info_dir = []
- data_dirs = []
- source = wheeldir.rstrip(os.path.sep) + os.path.sep
-
- # Record details of the files moved
- # installed = files copied from the wheel to the destination
- # changed = files changed while installing (scripts #! line typically)
- # generated = files newly generated during the install (script wrappers)
- installed = {}
- changed = set()
- generated = []
-
- # Compile all of the pyc files that we're going to be installing
- if pycompile:
- with captured_stdout() as stdout:
- with warnings.catch_warnings():
- warnings.filterwarnings('ignore')
- compileall.compile_dir(source, force=True, quiet=True)
- logger.debug(stdout.getvalue())
-
- def normpath(src, p):
- return os.path.relpath(src, p).replace(os.path.sep, '/')
-
- def record_installed(srcfile, destfile, modified=False):
- """Map archive RECORD paths to installation RECORD paths."""
- oldpath = normpath(srcfile, wheeldir)
- newpath = normpath(destfile, lib_dir)
- installed[oldpath] = newpath
- if modified:
- changed.add(destfile)
-
- def clobber(source, dest, is_base, fixer=None, filter=None):
- ensure_dir(dest) # common for the 'include' path
-
- for dir, subdirs, files in os.walk(source):
- basedir = dir[len(source):].lstrip(os.path.sep)
- destdir = os.path.join(dest, basedir)
- if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):
- continue
- for s in subdirs:
- destsubdir = os.path.join(dest, basedir, s)
- if is_base and basedir == '' and destsubdir.endswith('.data'):
- data_dirs.append(s)
- continue
- elif (is_base and
- s.endswith('.dist-info') and
- canonicalize_name(s).startswith(
- canonicalize_name(req.name))):
- assert not info_dir, ('Multiple .dist-info directories: ' +
- destsubdir + ', ' +
- ', '.join(info_dir))
- info_dir.append(destsubdir)
- for f in files:
- # Skip unwanted files
- if filter and filter(f):
- continue
- srcfile = os.path.join(dir, f)
- destfile = os.path.join(dest, basedir, f)
- # directory creation is lazy and after the file filtering above
- # to ensure we don't install empty dirs; empty dirs can't be
- # uninstalled.
- ensure_dir(destdir)
-
- # We use copyfile (not move, copy, or copy2) to be extra sure
- # that we are not moving directories over (copyfile fails for
- # directories) as well as to ensure that we are not copying
- # over any metadata because we want more control over what
- # metadata we actually copy over.
- shutil.copyfile(srcfile, destfile)
-
- # Copy over the metadata for the file, currently this only
- # includes the atime and mtime.
- st = os.stat(srcfile)
- if hasattr(os, "utime"):
- os.utime(destfile, (st.st_atime, st.st_mtime))
-
- # If our file is executable, then make our destination file
- # executable.
- if os.access(srcfile, os.X_OK):
- st = os.stat(srcfile)
- permissions = (
- st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
- )
- os.chmod(destfile, permissions)
-
- changed = False
- if fixer:
- changed = fixer(destfile)
- record_installed(srcfile, destfile, changed)
-
- clobber(source, lib_dir, True)
-
- assert info_dir, "%s .dist-info directory not found" % req
-
- # Get the defined entry points
- ep_file = os.path.join(info_dir[0], 'entry_points.txt')
- console, gui = get_entrypoints(ep_file)
-
- def is_entrypoint_wrapper(name):
- # EP, EP.exe and EP-script.py are scripts generated for
- # entry point EP by setuptools
- if name.lower().endswith('.exe'):
- matchname = name[:-4]
- elif name.lower().endswith('-script.py'):
- matchname = name[:-10]
- elif name.lower().endswith(".pya"):
- matchname = name[:-4]
- else:
- matchname = name
- # Ignore setuptools-generated scripts
- return (matchname in console or matchname in gui)
-
- for datadir in data_dirs:
- fixer = None
- filter = None
- for subdir in os.listdir(os.path.join(wheeldir, datadir)):
- fixer = None
- if subdir == 'scripts':
- fixer = fix_script
- filter = is_entrypoint_wrapper
- source = os.path.join(wheeldir, datadir, subdir)
- dest = scheme[subdir]
- clobber(source, dest, False, fixer=fixer, filter=filter)
-
- maker = ScriptMaker(None, scheme['scripts'])
-
- # Ensure old scripts are overwritten.
- # See https://github.com/pypa/pip/issues/1800
- maker.clobber = True
-
- # Ensure we don't generate any variants for scripts because this is almost
- # never what somebody wants.
- # See https://bitbucket.org/pypa/distlib/issue/35/
- maker.variants = {''}
-
- # This is required because otherwise distlib creates scripts that are not
- # executable.
- # See https://bitbucket.org/pypa/distlib/issue/32/
- maker.set_mode = True
-
- # Simplify the script and fix the fact that the default script swallows
- # every single stack trace.
- # See https://bitbucket.org/pypa/distlib/issue/34/
- # See https://bitbucket.org/pypa/distlib/issue/33/
- def _get_script_text(entry):
- if entry.suffix is None:
- raise InstallationError(
- "Invalid script entry point: %s for req: %s - A callable "
- "suffix is required. Cf https://packaging.python.org/en/"
- "latest/distributing.html#console-scripts for more "
- "information." % (entry, req)
- )
- return maker.script_template % {
- "module": entry.prefix,
- "import_name": entry.suffix.split(".")[0],
- "func": entry.suffix,
- }
-
- maker._get_script_text = _get_script_text
- maker.script_template = r"""# -*- coding: utf-8 -*-
-import re
-import sys
-
-from %(module)s import %(import_name)s
-
-if __name__ == '__main__':
- sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
- sys.exit(%(func)s())
-"""
-
- # Special case pip and setuptools to generate versioned wrappers
- #
- # The issue is that some projects (specifically, pip and setuptools) use
- # code in setup.py to create "versioned" entry points - pip2.7 on Python
- # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
- # the wheel metadata at build time, and so if the wheel is installed with
- # a *different* version of Python the entry points will be wrong. The
- # correct fix for this is to enhance the metadata to be able to describe
- # such versioned entry points, but that won't happen till Metadata 2.0 is
- # available.
- # In the meantime, projects using versioned entry points will either have
- # incorrect versioned entry points, or they will not be able to distribute
- # "universal" wheels (i.e., they will need a wheel per Python version).
- #
- # Because setuptools and pip are bundled with _ensurepip and virtualenv,
- # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
- # override the versioned entry points in the wheel and generate the
- # correct ones. This code is purely a short-term measure until Metadata 2.0
- # is available.
- #
- # To add the level of hack in this section of code, in order to support
- # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment
- # variable which will control which version scripts get installed.
- #
- # ENSUREPIP_OPTIONS=altinstall
- # - Only pipX.Y and easy_install-X.Y will be generated and installed
- # ENSUREPIP_OPTIONS=install
- # - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
-    #     that this is the behavior whenever ENSUREPIP_OPTIONS is set and is
-    #     not altinstall
- # DEFAULT
- # - The default behavior is to install pip, pipX, pipX.Y, easy_install
- # and easy_install-X.Y.
- pip_script = console.pop('pip', None)
- if pip_script:
- if "ENSUREPIP_OPTIONS" not in os.environ:
- spec = 'pip = ' + pip_script
- generated.extend(maker.make(spec))
-
- if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
- spec = 'pip%s = %s' % (sys.version[:1], pip_script)
- generated.extend(maker.make(spec))
-
- spec = 'pip%s = %s' % (sys.version[:3], pip_script)
- generated.extend(maker.make(spec))
- # Delete any other versioned pip entry points
- pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
- for k in pip_ep:
- del console[k]
- easy_install_script = console.pop('easy_install', None)
- if easy_install_script:
- if "ENSUREPIP_OPTIONS" not in os.environ:
- spec = 'easy_install = ' + easy_install_script
- generated.extend(maker.make(spec))
-
- spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script)
- generated.extend(maker.make(spec))
- # Delete any other versioned easy_install entry points
- easy_install_ep = [
- k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
- ]
- for k in easy_install_ep:
- del console[k]
-
- # Generate the console and GUI entry points specified in the wheel
- if len(console) > 0:
- generated_console_scripts = maker.make_multiple(
- ['%s = %s' % kv for kv in console.items()]
- )
- generated.extend(generated_console_scripts)
-
- if warn_script_location:
- msg = message_about_scripts_not_on_PATH(generated_console_scripts)
- if msg is not None:
- logger.warn(msg)
-
- if len(gui) > 0:
- generated.extend(
- maker.make_multiple(
- ['%s = %s' % kv for kv in gui.items()],
- {'gui': True}
- )
- )
-
- # Record pip as the installer
- installer = os.path.join(info_dir[0], 'INSTALLER')
- temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip')
- with open(temp_installer, 'wb') as installer_file:
- installer_file.write(b'pip\n')
- shutil.move(temp_installer, installer)
- generated.append(installer)
-
- # Record details of all files installed
- record = os.path.join(info_dir[0], 'RECORD')
- temp_record = os.path.join(info_dir[0], 'RECORD.pip')
- with open_for_csv(record, 'r') as record_in:
- with open_for_csv(temp_record, 'w+') as record_out:
- reader = csv.reader(record_in)
- writer = csv.writer(record_out)
- for row in reader:
- row[0] = installed.pop(row[0], row[0])
- if row[0] in changed:
- row[1], row[2] = rehash(row[0])
- writer.writerow(row)
- for f in generated:
- h, l = rehash(f)
- writer.writerow((normpath(f, lib_dir), h, l))
- for f in installed:
- writer.writerow((installed[f], '', ''))
- shutil.move(temp_record, record)
-
-
-def wheel_version(source_dir):
- """
- Return the Wheel-Version of an extracted wheel, if possible.
-
- Otherwise, return False if we couldn't parse / extract it.
- """
- try:
- dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0]
-
- wheel_data = dist.get_metadata('WHEEL')
- wheel_data = Parser().parsestr(wheel_data)
-
- version = wheel_data['Wheel-Version'].strip()
- version = tuple(map(int, version.split('.')))
- return version
- except:
- return False
-
-
-def check_compatibility(version, name):
- """
- Raises errors or warns if called with an incompatible Wheel-Version.
-
- Pip should refuse to install a Wheel-Version that's a major series
-    ahead of what it's compatible with (e.g. 2.0 > 1.1); and warn when
-    installing a version only a minor version ahead (e.g. 1.2 > 1.1).
-
- version: a 2-tuple representing a Wheel-Version (Major, Minor)
- name: name of wheel or package to raise exception about
-
- :raises UnsupportedWheel: when an incompatible Wheel-Version is given
- """
- if not version:
- raise UnsupportedWheel(
- "%s is in an unsupported or invalid wheel" % name
- )
- if version[0] > VERSION_COMPATIBLE[0]:
- raise UnsupportedWheel(
- "%s's Wheel-Version (%s) is not compatible with this version "
- "of pip" % (name, '.'.join(map(str, version)))
- )
- elif version > VERSION_COMPATIBLE:
- logger.warning(
- 'Installing from a newer Wheel-Version (%s)',
- '.'.join(map(str, version)),
- )
-
-
-class Wheel(object):
- """A wheel file"""
-
- # TODO: maybe move the install code into this class
-
- wheel_file_re = re.compile(
-        r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.*?))
-        ((-(?P<build>\d[^-]*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
- \.whl|\.dist-info)$""",
- re.VERBOSE
- )
-
- def __init__(self, filename):
- """
- :raises InvalidWheelFilename: when the filename is invalid for a wheel
- """
- wheel_info = self.wheel_file_re.match(filename)
- if not wheel_info:
- raise InvalidWheelFilename(
- "%s is not a valid wheel filename." % filename
- )
- self.filename = filename
- self.name = wheel_info.group('name').replace('_', '-')
- # we'll assume "_" means "-" due to wheel naming scheme
- # (https://github.com/pypa/pip/issues/1150)
- self.version = wheel_info.group('ver').replace('_', '-')
- self.build_tag = wheel_info.group('build')
- self.pyversions = wheel_info.group('pyver').split('.')
- self.abis = wheel_info.group('abi').split('.')
- self.plats = wheel_info.group('plat').split('.')
-
- # All the tag combinations from this file
- self.file_tags = {
- (x, y, z) for x in self.pyversions
- for y in self.abis for z in self.plats
- }
-
- def support_index_min(self, tags=None):
- """
- Return the lowest index that one of the wheel's file_tag combinations
- achieves in the supported_tags list e.g. if there are 8 supported tags,
- and one of the file tags is first in the list, then return 0. Returns
-        None if the wheel is not supported.
- """
- if tags is None: # for mock
- tags = pep425tags.get_supported()
- indexes = [tags.index(c) for c in self.file_tags if c in tags]
- return min(indexes) if indexes else None
-
- def supported(self, tags=None):
- """Is this wheel supported on this system?"""
- if tags is None: # for mock
- tags = pep425tags.get_supported()
- return bool(set(tags).intersection(self.file_tags))
-
-
-class WheelBuilder(object):
- """Build wheels from a RequirementSet."""
-
- def __init__(self, finder, preparer, wheel_cache,
- build_options=None, global_options=None, no_clean=False):
- self.finder = finder
- self.preparer = preparer
- self.wheel_cache = wheel_cache
-
- self._wheel_dir = preparer.wheel_download_dir
-
- self.build_options = build_options or []
- self.global_options = global_options or []
- self.no_clean = no_clean
-
- def _build_one(self, req, output_dir, python_tag=None):
- """Build one wheel.
-
- :return: The filename of the built wheel, or None if the build failed.
- """
- # Install build deps into temporary directory (PEP 518)
- with req.build_env:
- return self._build_one_inside_env(req, output_dir,
- python_tag=python_tag)
-
- def _build_one_inside_env(self, req, output_dir, python_tag=None):
- with TempDirectory(kind="wheel") as temp_dir:
- if self.__build_one(req, temp_dir.path, python_tag=python_tag):
- try:
- wheel_name = os.listdir(temp_dir.path)[0]
- wheel_path = os.path.join(output_dir, wheel_name)
- shutil.move(
- os.path.join(temp_dir.path, wheel_name), wheel_path
- )
- logger.info('Stored in directory: %s', output_dir)
- return wheel_path
- except:
- pass
- # Ignore return, we can't do anything else useful.
- self._clean_one(req)
- return None
-
- def _base_setup_args(self, req):
-        # NOTE: Eventually, we'd want to also add -S to the flags here, when
- # isolating. Currently, it breaks Python in virtualenvs, because it
- # relies on site.py to find parts of the standard library outside the
- # virtualenv.
- return [
- sys.executable, '-u', '-c',
- SETUPTOOLS_SHIM % req.setup_py
- ] + list(self.global_options)
-
- def __build_one(self, req, tempd, python_tag=None):
- base_args = self._base_setup_args(req)
-
- spin_message = 'Running setup.py bdist_wheel for %s' % (req.name,)
- with open_spinner(spin_message) as spinner:
- logger.debug('Destination directory: %s', tempd)
- wheel_args = base_args + ['bdist_wheel', '-d', tempd] \
- + self.build_options
-
- if python_tag is not None:
- wheel_args += ["--python-tag", python_tag]
-
- try:
- call_subprocess(wheel_args, cwd=req.setup_py_dir,
- show_stdout=False, spinner=spinner)
- return True
- except:
- spinner.finish("error")
- logger.error('Failed building wheel for %s', req.name)
- return False
-
- def _clean_one(self, req):
- base_args = self._base_setup_args(req)
-
- logger.info('Running setup.py clean for %s', req.name)
- clean_args = base_args + ['clean', '--all']
- try:
- call_subprocess(clean_args, cwd=req.source_dir, show_stdout=False)
- return True
- except:
- logger.error('Failed cleaning build dir for %s', req.name)
- return False
-
- def build(self, requirements, session, autobuilding=False):
- """Build wheels.
-
-        :param autobuilding: If True, replace the sdist we built from with the
-            newly built wheel, in preparation for installation.
- :return: True if all the wheels built correctly.
- """
- from pip._internal import index
-
- building_is_possible = self._wheel_dir or (
- autobuilding and self.wheel_cache.cache_dir
- )
- assert building_is_possible
-
- buildset = []
- for req in requirements:
- if req.constraint:
- continue
- if req.is_wheel:
- if not autobuilding:
- logger.info(
- 'Skipping %s, due to already being wheel.', req.name,
- )
- elif autobuilding and req.editable:
- pass
- elif autobuilding and not req.source_dir:
- pass
- elif autobuilding and req.link and not req.link.is_artifact:
- # VCS checkout. Build wheel just for this run.
- buildset.append((req, True))
- else:
- ephem_cache = False
- if autobuilding:
- link = req.link
- base, ext = link.splitext()
- if index.egg_info_matches(base, None, link) is None:
- # E.g. local directory. Build wheel just for this run.
- ephem_cache = True
- if "binary" not in index.fmt_ctl_formats(
- self.finder.format_control,
- canonicalize_name(req.name)):
- logger.info(
- "Skipping bdist_wheel for %s, due to binaries "
- "being disabled for it.", req.name,
- )
- continue
- buildset.append((req, ephem_cache))
-
- if not buildset:
- return True
-
- # Build the wheels.
- logger.info(
- 'Building wheels for collected packages: %s',
- ', '.join([req.name for (req, _) in buildset]),
- )
- _cache = self.wheel_cache # shorter name
- with indent_log():
- build_success, build_failure = [], []
- for req, ephem in buildset:
- python_tag = None
- if autobuilding:
- python_tag = pep425tags.implementation_tag
- if ephem:
- output_dir = _cache.get_ephem_path_for_link(req.link)
- else:
- output_dir = _cache.get_path_for_link(req.link)
- try:
- ensure_dir(output_dir)
- except OSError as e:
- logger.warning("Building wheel for %s failed: %s",
- req.name, e)
- build_failure.append(req)
- continue
- else:
- output_dir = self._wheel_dir
- wheel_file = self._build_one(
- req, output_dir,
- python_tag=python_tag,
- )
- if wheel_file:
- build_success.append(req)
- if autobuilding:
- # XXX: This is mildly duplicative with prepare_files,
- # but not close enough to pull out to a single common
- # method.
- # The code below assumes temporary source dirs -
- # prevent it doing bad things.
- if req.source_dir and not os.path.exists(os.path.join(
- req.source_dir, PIP_DELETE_MARKER_FILENAME)):
- raise AssertionError(
- "bad source dir - missing marker")
- # Delete the source we built the wheel from
- req.remove_temporary_source()
- # set the build directory again - name is known from
- # the work prepare_files did.
- req.source_dir = req.build_location(
- self.preparer.build_dir
- )
- # Update the link for this.
- req.link = index.Link(path_to_url(wheel_file))
- assert req.link.is_wheel
- # extract the wheel into the dir
- unpack_url(
- req.link, req.source_dir, None, False,
- session=session,
- )
- else:
- build_failure.append(req)
-
- # notify success/failure
- if build_success:
- logger.info(
- 'Successfully built %s',
- ' '.join([req.name for req in build_success]),
- )
- if build_failure:
- logger.info(
- 'Failed to build %s',
- ' '.join([req.name for req in build_failure]),
- )
- # Return True if all builds were successful
- return len(build_failure) == 0
+"""
+Support for installing and building the "wheel" binary package format.
+"""
+from __future__ import absolute_import
+
+import collections
+import compileall
+import copy
+import csv
+import hashlib
+import logging
+import os.path
+import re
+import shutil
+import stat
+import sys
+import warnings
+from base64 import urlsafe_b64encode
+from email.parser import Parser
+
+from pip._vendor import pkg_resources
+from pip._vendor.distlib.scripts import ScriptMaker
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.six import StringIO
+
+from pip._internal import pep425tags
+from pip._internal.build_env import BuildEnvironment
+from pip._internal.download import path_to_url, unpack_url
+from pip._internal.exceptions import (
+ InstallationError, InvalidWheelFilename, UnsupportedWheel,
+)
+from pip._internal.locations import (
+ PIP_DELETE_MARKER_FILENAME, distutils_scheme,
+)
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import (
+ call_subprocess, captured_stdout, ensure_dir, read_chunks,
+)
+from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.ui import open_spinner
+
+if MYPY_CHECK_RUNNING:
+ from typing import Dict, List, Optional
+
+wheel_ext = '.whl'
+
+VERSION_COMPATIBLE = (1, 0)
+
+
+logger = logging.getLogger(__name__)
+
+
+def rehash(path, algo='sha256', blocksize=1 << 20):
+ """Return (hash, length) for path using hashlib.new(algo)"""
+ h = hashlib.new(algo)
+ length = 0
+ with open(path, 'rb') as f:
+ for block in read_chunks(f, size=blocksize):
+ length += len(block)
+ h.update(block)
+ digest = 'sha256=' + urlsafe_b64encode(
+ h.digest()
+ ).decode('latin1').rstrip('=')
+ return (digest, length)
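+# Illustrative sketch (editor's note, not part of pip): rehash() emits the
+# unpadded urlsafe-base64 hash format that wheel RECORD files use. For a file
+# whose contents are b'hello':
+#
+#   >>> import base64, hashlib
+#   >>> d = hashlib.sha256(b'hello').digest()
+#   >>> 'sha256=' + base64.urlsafe_b64encode(d).decode('latin1').rstrip('=')
+#   'sha256=LPJNul-wow4m6DsqxbninhsWHlwfp0JecwQzYpOLmCQ'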
+
+
+def open_for_csv(name, mode):
+ if sys.version_info[0] < 3:
+ nl = {}
+ bin = 'b'
+ else:
+ nl = {'newline': ''}
+ bin = ''
+ return open(name, mode + bin, **nl)
+
+
+def fix_script(path):
+ """Replace #!python with #!/path/to/python
+ Return True if file was changed."""
+ # XXX RECORD hashes will need to be updated
+ if os.path.isfile(path):
+ with open(path, 'rb') as script:
+ firstline = script.readline()
+ if not firstline.startswith(b'#!python'):
+ return False
+ exename = sys.executable.encode(sys.getfilesystemencoding())
+ firstline = b'#!' + exename + os.linesep.encode("ascii")
+ rest = script.read()
+ with open(path, 'wb') as script:
+ script.write(firstline)
+ script.write(rest)
+ return True
+
+
+dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>.+?))?)
+                           \.dist-info$""", re.VERBOSE)
+
+
+def root_is_purelib(name, wheeldir):
+ """
+ Return True if the extracted wheel in wheeldir should go into purelib.
+ """
+ name_folded = name.replace("-", "_")
+ for item in os.listdir(wheeldir):
+ match = dist_info_re.match(item)
+ if match and match.group('name') == name_folded:
+ with open(os.path.join(wheeldir, item, 'WHEEL')) as wheel:
+ for line in wheel:
+ line = line.lower().rstrip()
+ if line == "root-is-purelib: true":
+ return True
+ return False
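+# Illustrative sketch (editor's note, not part of pip): for a hypothetical
+# distribution 'foo-bar' unpacked from a wheel, the function above looks for a
+# 'foo_bar-1.0.dist-info/WHEEL' entry (note the folded underscore) and returns
+# True only when some line in it lowercases to 'root-is-purelib: true'.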
+
+
+def get_entrypoints(filename):
+ if not os.path.exists(filename):
+ return {}, {}
+
+ # This is done because you can pass a string to entry_points wrappers which
+ # means that they may or may not be valid INI files. The attempt here is to
+ # strip leading and trailing whitespace in order to make them valid INI
+ # files.
+ with open(filename) as fp:
+ data = StringIO()
+ for line in fp:
+ data.write(line.strip())
+ data.write("\n")
+ data.seek(0)
+
+ # get the entry points and then the script names
+ entry_points = pkg_resources.EntryPoint.parse_map(data)
+ console = entry_points.get('console_scripts', {})
+ gui = entry_points.get('gui_scripts', {})
+
+ def _split_ep(s):
+ """get the string representation of EntryPoint, remove space and split
+ on '='"""
+ return str(s).replace(" ", "").split("=")
+
+ # convert the EntryPoint objects into strings with module:function
+ console = dict(_split_ep(v) for v in console.values())
+ gui = dict(_split_ep(v) for v in gui.values())
+ return console, gui
+
+
+def message_about_scripts_not_on_PATH(scripts):
+ # type: (List[str]) -> Optional[str]
+ """Determine if any scripts are not on PATH and format a warning.
+
+ Returns a warning message if one or more scripts are not on PATH,
+ otherwise None.
+ """
+ if not scripts:
+ return None
+
+ # Group scripts by the path they were installed in
+ grouped_by_dir = collections.defaultdict(set) # type: Dict[str, set]
+ for destfile in scripts:
+ parent_dir = os.path.dirname(destfile)
+ script_name = os.path.basename(destfile)
+ grouped_by_dir[parent_dir].add(script_name)
+
+ # We don't want to warn for directories that are on PATH.
+ not_warn_dirs = [
+ os.path.normcase(i) for i in os.environ["PATH"].split(os.pathsep)
+ ]
+ # If an executable sits with sys.executable, we don't warn for it.
+ # This covers the case of venv invocations without activating the venv.
+ not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable)))
+ warn_for = {
+ parent_dir: scripts for parent_dir, scripts in grouped_by_dir.items()
+ if os.path.normcase(parent_dir) not in not_warn_dirs
+ }
+ if not warn_for:
+ return None
+
+ # Format a message
+ msg_lines = []
+ for parent_dir, scripts in warn_for.items():
+ scripts = sorted(scripts)
+ if len(scripts) == 1:
+ start_text = "script {} is".format(scripts[0])
+ else:
+ start_text = "scripts {} are".format(
+ ", ".join(scripts[:-1]) + " and " + scripts[-1]
+ )
+
+ msg_lines.append(
+ "The {} installed in '{}' which is not on PATH."
+ .format(start_text, parent_dir)
+ )
+
+ last_line_fmt = (
+ "Consider adding {} to PATH or, if you prefer "
+ "to suppress this warning, use --no-warn-script-location."
+ )
+ if len(msg_lines) == 1:
+ msg_lines.append(last_line_fmt.format("this directory"))
+ else:
+ msg_lines.append(last_line_fmt.format("these directories"))
+
+ # Returns the formatted multiline message
+ return "\n".join(msg_lines)
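+# Illustrative sketch (editor's note, not part of pip): for a single script at
+# the hypothetical path '/home/user/.local/bin/foo' with that directory absent
+# from PATH, the function above would return (wrapped here for readability):
+#
+#   The script foo is installed in '/home/user/.local/bin' which is not on PATH.
+#   Consider adding this directory to PATH or, if you prefer to suppress this
+#   warning, use --no-warn-script-location.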
+
+
+def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None,
+ pycompile=True, scheme=None, isolated=False, prefix=None,
+ warn_script_location=True):
+ """Install a wheel"""
+
+ if not scheme:
+ scheme = distutils_scheme(
+ name, user=user, home=home, root=root, isolated=isolated,
+ prefix=prefix,
+ )
+
+ if root_is_purelib(name, wheeldir):
+ lib_dir = scheme['purelib']
+ else:
+ lib_dir = scheme['platlib']
+
+ info_dir = []
+ data_dirs = []
+ source = wheeldir.rstrip(os.path.sep) + os.path.sep
+
+ # Record details of the files moved
+ # installed = files copied from the wheel to the destination
+ # changed = files changed while installing (scripts #! line typically)
+ # generated = files newly generated during the install (script wrappers)
+ installed = {}
+ changed = set()
+ generated = []
+
+ # Compile all of the pyc files that we're going to be installing
+ if pycompile:
+ with captured_stdout() as stdout:
+ with warnings.catch_warnings():
+ warnings.filterwarnings('ignore')
+ compileall.compile_dir(source, force=True, quiet=True)
+ logger.debug(stdout.getvalue())
+
+ def normpath(src, p):
+ return os.path.relpath(src, p).replace(os.path.sep, '/')
+
+ def record_installed(srcfile, destfile, modified=False):
+ """Map archive RECORD paths to installation RECORD paths."""
+ oldpath = normpath(srcfile, wheeldir)
+ newpath = normpath(destfile, lib_dir)
+ installed[oldpath] = newpath
+ if modified:
+ changed.add(destfile)
+
+ def clobber(source, dest, is_base, fixer=None, filter=None):
+ ensure_dir(dest) # common for the 'include' path
+
+ for dir, subdirs, files in os.walk(source):
+ basedir = dir[len(source):].lstrip(os.path.sep)
+ destdir = os.path.join(dest, basedir)
+ if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):
+ continue
+ for s in subdirs:
+ destsubdir = os.path.join(dest, basedir, s)
+ if is_base and basedir == '' and destsubdir.endswith('.data'):
+ data_dirs.append(s)
+ continue
+ elif (is_base and
+ s.endswith('.dist-info') and
+ canonicalize_name(s).startswith(
+ canonicalize_name(req.name))):
+ assert not info_dir, ('Multiple .dist-info directories: ' +
+ destsubdir + ', ' +
+ ', '.join(info_dir))
+ info_dir.append(destsubdir)
+ for f in files:
+ # Skip unwanted files
+ if filter and filter(f):
+ continue
+ srcfile = os.path.join(dir, f)
+ destfile = os.path.join(dest, basedir, f)
+ # directory creation is lazy and after the file filtering above
+ # to ensure we don't install empty dirs; empty dirs can't be
+ # uninstalled.
+ ensure_dir(destdir)
+
+ # We use copyfile (not move, copy, or copy2) to be extra sure
+ # that we are not moving directories over (copyfile fails for
+ # directories) as well as to ensure that we are not copying
+ # over any metadata because we want more control over what
+ # metadata we actually copy over.
+ shutil.copyfile(srcfile, destfile)
+
+ # Copy over the metadata for the file, currently this only
+ # includes the atime and mtime.
+ st = os.stat(srcfile)
+ if hasattr(os, "utime"):
+ os.utime(destfile, (st.st_atime, st.st_mtime))
+
+ # If our file is executable, then make our destination file
+ # executable.
+ if os.access(srcfile, os.X_OK):
+ st = os.stat(srcfile)
+ permissions = (
+ st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
+ )
+ os.chmod(destfile, permissions)
+
+ changed = False
+ if fixer:
+ changed = fixer(destfile)
+ record_installed(srcfile, destfile, changed)
+
+ clobber(source, lib_dir, True)
+
+ assert info_dir, "%s .dist-info directory not found" % req
+
+ # Get the defined entry points
+ ep_file = os.path.join(info_dir[0], 'entry_points.txt')
+ console, gui = get_entrypoints(ep_file)
+
+ def is_entrypoint_wrapper(name):
+ # EP, EP.exe and EP-script.py are scripts generated for
+ # entry point EP by setuptools
+ if name.lower().endswith('.exe'):
+ matchname = name[:-4]
+ elif name.lower().endswith('-script.py'):
+ matchname = name[:-10]
+ elif name.lower().endswith(".pya"):
+ matchname = name[:-4]
+ else:
+ matchname = name
+ # Ignore setuptools-generated scripts
+ return (matchname in console or matchname in gui)
+
+ for datadir in data_dirs:
+ fixer = None
+ filter = None
+ for subdir in os.listdir(os.path.join(wheeldir, datadir)):
+ fixer = None
+ if subdir == 'scripts':
+ fixer = fix_script
+ filter = is_entrypoint_wrapper
+ source = os.path.join(wheeldir, datadir, subdir)
+ dest = scheme[subdir]
+ clobber(source, dest, False, fixer=fixer, filter=filter)
+
+ maker = ScriptMaker(None, scheme['scripts'])
+
+ # Ensure old scripts are overwritten.
+ # See https://github.com/pypa/pip/issues/1800
+ maker.clobber = True
+
+ # Ensure we don't generate any variants for scripts because this is almost
+ # never what somebody wants.
+ # See https://bitbucket.org/pypa/distlib/issue/35/
+ maker.variants = {''}
+
+ # This is required because otherwise distlib creates scripts that are not
+ # executable.
+ # See https://bitbucket.org/pypa/distlib/issue/32/
+ maker.set_mode = True
+
+ # Simplify the script and fix the fact that the default script swallows
+ # every single stack trace.
+ # See https://bitbucket.org/pypa/distlib/issue/34/
+ # See https://bitbucket.org/pypa/distlib/issue/33/
+ def _get_script_text(entry):
+ if entry.suffix is None:
+ raise InstallationError(
+ "Invalid script entry point: %s for req: %s - A callable "
+ "suffix is required. Cf https://packaging.python.org/en/"
+ "latest/distributing.html#console-scripts for more "
+ "information." % (entry, req)
+ )
+ return maker.script_template % {
+ "module": entry.prefix,
+ "import_name": entry.suffix.split(".")[0],
+ "func": entry.suffix,
+ }
+
+ maker._get_script_text = _get_script_text
+ maker.script_template = r"""# -*- coding: utf-8 -*-
+import re
+import sys
+
+from %(module)s import %(import_name)s
+
+if __name__ == '__main__':
+ sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
+ sys.exit(%(func)s())
+"""
+
+ # Special case pip and setuptools to generate versioned wrappers
+ #
+ # The issue is that some projects (specifically, pip and setuptools) use
+ # code in setup.py to create "versioned" entry points - pip2.7 on Python
+ # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
+ # the wheel metadata at build time, and so if the wheel is installed with
+ # a *different* version of Python the entry points will be wrong. The
+ # correct fix for this is to enhance the metadata to be able to describe
+ # such versioned entry points, but that won't happen till Metadata 2.0 is
+ # available.
+ # In the meantime, projects using versioned entry points will either have
+ # incorrect versioned entry points, or they will not be able to distribute
+ # "universal" wheels (i.e., they will need a wheel per Python version).
+ #
+ # Because setuptools and pip are bundled with _ensurepip and virtualenv,
+ # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
+ # override the versioned entry points in the wheel and generate the
+ # correct ones. This code is purely a short-term measure until Metadata 2.0
+ # is available.
+ #
+ # To add the level of hack in this section of code, in order to support
+ # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment
+ # variable which will control which version scripts get installed.
+ #
+ # ENSUREPIP_OPTIONS=altinstall
+ # - Only pipX.Y and easy_install-X.Y will be generated and installed
+ # ENSUREPIP_OPTIONS=install
+ # - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
+    #     that this is the behavior whenever ENSUREPIP_OPTIONS is set and is
+    #     not altinstall
+ # DEFAULT
+ # - The default behavior is to install pip, pipX, pipX.Y, easy_install
+ # and easy_install-X.Y.
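+    #
+    # Illustrative summary (editor's note, not part of pip): on a hypothetical
+    # Python 3.6 interpreter, where sys.version[:1] == '3' and
+    # sys.version[:3] == '3.6', the rules above produce:
+    #
+    #   DEFAULT                      -> pip, pip3, pip3.6
+    #   ENSUREPIP_OPTIONS=install    -> pip3, pip3.6
+    #   ENSUREPIP_OPTIONS=altinstall -> pip3.6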
+ pip_script = console.pop('pip', None)
+ if pip_script:
+ if "ENSUREPIP_OPTIONS" not in os.environ:
+ spec = 'pip = ' + pip_script
+ generated.extend(maker.make(spec))
+
+ if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
+ spec = 'pip%s = %s' % (sys.version[:1], pip_script)
+ generated.extend(maker.make(spec))
+
+ spec = 'pip%s = %s' % (sys.version[:3], pip_script)
+ generated.extend(maker.make(spec))
+ # Delete any other versioned pip entry points
+ pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
+ for k in pip_ep:
+ del console[k]
+ easy_install_script = console.pop('easy_install', None)
+ if easy_install_script:
+ if "ENSUREPIP_OPTIONS" not in os.environ:
+ spec = 'easy_install = ' + easy_install_script
+ generated.extend(maker.make(spec))
+
+ spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script)
+ generated.extend(maker.make(spec))
+ # Delete any other versioned easy_install entry points
+ easy_install_ep = [
+ k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
+ ]
+ for k in easy_install_ep:
+ del console[k]
+
+ # Generate the console and GUI entry points specified in the wheel
+ if len(console) > 0:
+ generated_console_scripts = maker.make_multiple(
+ ['%s = %s' % kv for kv in console.items()]
+ )
+ generated.extend(generated_console_scripts)
+
+ if warn_script_location:
+ msg = message_about_scripts_not_on_PATH(generated_console_scripts)
+ if msg is not None:
+ logger.warn(msg)
+
+ if len(gui) > 0:
+ generated.extend(
+ maker.make_multiple(
+ ['%s = %s' % kv for kv in gui.items()],
+ {'gui': True}
+ )
+ )
+
+ # Record pip as the installer
+ installer = os.path.join(info_dir[0], 'INSTALLER')
+ temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip')
+ with open(temp_installer, 'wb') as installer_file:
+ installer_file.write(b'pip\n')
+ shutil.move(temp_installer, installer)
+ generated.append(installer)
+
+ # Record details of all files installed
+ record = os.path.join(info_dir[0], 'RECORD')
+ temp_record = os.path.join(info_dir[0], 'RECORD.pip')
+ with open_for_csv(record, 'r') as record_in:
+ with open_for_csv(temp_record, 'w+') as record_out:
+ reader = csv.reader(record_in)
+ writer = csv.writer(record_out)
+ for row in reader:
+ row[0] = installed.pop(row[0], row[0])
+ if row[0] in changed:
+ row[1], row[2] = rehash(row[0])
+ writer.writerow(row)
+ for f in generated:
+ h, l = rehash(f)
+ writer.writerow((normpath(f, lib_dir), h, l))
+ for f in installed:
+ writer.writerow((installed[f], '', ''))
+ shutil.move(temp_record, record)
+
+
+def wheel_version(source_dir):
+ """
+ Return the Wheel-Version of an extracted wheel, if possible.
+
+ Otherwise, return False if we couldn't parse / extract it.
+ """
+ try:
+ dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0]
+
+ wheel_data = dist.get_metadata('WHEEL')
+ wheel_data = Parser().parsestr(wheel_data)
+
+ version = wheel_data['Wheel-Version'].strip()
+ version = tuple(map(int, version.split('.')))
+ return version
+ except:
+ return False
+
+
+def check_compatibility(version, name):
+ """
+ Raises errors or warns if called with an incompatible Wheel-Version.
+
+ Pip should refuse to install a Wheel-Version that's a major series
+    ahead of what it's compatible with (e.g. 2.0 > 1.1); and warn when
+    installing a version only a minor version ahead (e.g. 1.2 > 1.1).
+
+ version: a 2-tuple representing a Wheel-Version (Major, Minor)
+ name: name of wheel or package to raise exception about
+
+ :raises UnsupportedWheel: when an incompatible Wheel-Version is given
+ """
+ if not version:
+ raise UnsupportedWheel(
+ "%s is in an unsupported or invalid wheel" % name
+ )
+ if version[0] > VERSION_COMPATIBLE[0]:
+ raise UnsupportedWheel(
+ "%s's Wheel-Version (%s) is not compatible with this version "
+ "of pip" % (name, '.'.join(map(str, version)))
+ )
+ elif version > VERSION_COMPATIBLE:
+ logger.warning(
+ 'Installing from a newer Wheel-Version (%s)',
+ '.'.join(map(str, version)),
+ )
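+# Illustrative sketch (editor's note, not part of pip): with VERSION_COMPATIBLE
+# fixed at (1, 0) above,
+#
+#   check_compatibility((1, 0), 'foo')  # passes silently
+#   check_compatibility((1, 2), 'foo')  # warns about the newer Wheel-Version
+#   check_compatibility((2, 0), 'foo')  # raises UnsupportedWheel
+#
+# where 'foo' stands in for the wheel or package name.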
+
+
+class Wheel(object):
+ """A wheel file"""
+
+ # TODO: maybe move the install code into this class
+
+ wheel_file_re = re.compile(
+        r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.*?))
+        ((-(?P<build>\d[^-]*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
+ \.whl|\.dist-info)$""",
+ re.VERBOSE
+ )
+
+ def __init__(self, filename):
+ """
+ :raises InvalidWheelFilename: when the filename is invalid for a wheel
+ """
+ wheel_info = self.wheel_file_re.match(filename)
+ if not wheel_info:
+ raise InvalidWheelFilename(
+ "%s is not a valid wheel filename." % filename
+ )
+ self.filename = filename
+ self.name = wheel_info.group('name').replace('_', '-')
+ # we'll assume "_" means "-" due to wheel naming scheme
+ # (https://github.com/pypa/pip/issues/1150)
+ self.version = wheel_info.group('ver').replace('_', '-')
+ self.build_tag = wheel_info.group('build')
+ self.pyversions = wheel_info.group('pyver').split('.')
+ self.abis = wheel_info.group('abi').split('.')
+ self.plats = wheel_info.group('plat').split('.')
+
+ # All the tag combinations from this file
+ self.file_tags = {
+ (x, y, z) for x in self.pyversions
+ for y in self.abis for z in self.plats
+ }
+
+ def support_index_min(self, tags=None):
+ """
+ Return the lowest index that one of the wheel's file_tag combinations
+ achieves in the supported_tags list e.g. if there are 8 supported tags,
+ and one of the file tags is first in the list, then return 0. Returns
+        None if the wheel is not supported.
+ """
+ if tags is None: # for mock
+ tags = pep425tags.get_supported()
+ indexes = [tags.index(c) for c in self.file_tags if c in tags]
+ return min(indexes) if indexes else None
+
+ def supported(self, tags=None):
+ """Is this wheel supported on this system?"""
+ if tags is None: # for mock
+ tags = pep425tags.get_supported()
+ return bool(set(tags).intersection(self.file_tags))
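+    # Illustrative sketch (editor's note, not part of pip): parsing a typical
+    # universal wheel filename with the class above:
+    #
+    #   >>> w = Wheel('pip-10.0.1-py2.py3-none-any.whl')
+    #   >>> w.name, w.version
+    #   ('pip', '10.0.1')
+    #   >>> sorted(w.file_tags)
+    #   [('py2', 'none', 'any'), ('py3', 'none', 'any')]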
+
+
+class WheelBuilder(object):
+ """Build wheels from a RequirementSet."""
+
+ def __init__(self, finder, preparer, wheel_cache,
+ build_options=None, global_options=None, no_clean=False):
+ self.finder = finder
+ self.preparer = preparer
+ self.wheel_cache = wheel_cache
+
+ self._wheel_dir = preparer.wheel_download_dir
+
+ self.build_options = build_options or []
+ self.global_options = global_options or []
+ self.no_clean = no_clean
+
+ def _build_one(self, req, output_dir, python_tag=None):
+ """Build one wheel.
+
+ :return: The filename of the built wheel, or None if the build failed.
+ """
+ # Install build deps into temporary directory (PEP 518)
+ with req.build_env:
+ return self._build_one_inside_env(req, output_dir,
+ python_tag=python_tag)
+
+ def _build_one_inside_env(self, req, output_dir, python_tag=None):
+ with TempDirectory(kind="wheel") as temp_dir:
+ if self.__build_one(req, temp_dir.path, python_tag=python_tag):
+ try:
+ wheel_name = os.listdir(temp_dir.path)[0]
+ wheel_path = os.path.join(output_dir, wheel_name)
+ shutil.move(
+ os.path.join(temp_dir.path, wheel_name), wheel_path
+ )
+ logger.info('Stored in directory: %s', output_dir)
+ return wheel_path
+ except:
+ pass
+ # Ignore return, we can't do anything else useful.
+ self._clean_one(req)
+ return None
+
+ def _base_setup_args(self, req):
+        # NOTE: Eventually, we'd want to also add -S to the flags here, when
+ # isolating. Currently, it breaks Python in virtualenvs, because it
+ # relies on site.py to find parts of the standard library outside the
+ # virtualenv.
+ return [
+ sys.executable, '-u', '-c',
+ SETUPTOOLS_SHIM % req.setup_py
+ ] + list(self.global_options)
+
+ def __build_one(self, req, tempd, python_tag=None):
+ base_args = self._base_setup_args(req)
+
+ spin_message = 'Running setup.py bdist_wheel for %s' % (req.name,)
+ with open_spinner(spin_message) as spinner:
+ logger.debug('Destination directory: %s', tempd)
+ wheel_args = base_args + ['bdist_wheel', '-d', tempd] \
+ + self.build_options
+
+ if python_tag is not None:
+ wheel_args += ["--python-tag", python_tag]
+
+ try:
+ call_subprocess(wheel_args, cwd=req.setup_py_dir,
+ show_stdout=False, spinner=spinner)
+ return True
+ except:
+ spinner.finish("error")
+ logger.error('Failed building wheel for %s', req.name)
+ return False
+
+ def _clean_one(self, req):
+ base_args = self._base_setup_args(req)
+
+ logger.info('Running setup.py clean for %s', req.name)
+ clean_args = base_args + ['clean', '--all']
+ try:
+ call_subprocess(clean_args, cwd=req.source_dir, show_stdout=False)
+ return True
+ except Exception:
+ logger.error('Failed cleaning build dir for %s', req.name)
+ return False
+
+ def build(self, requirements, session, autobuilding=False):
+ """Build wheels.
+
+ :param autobuilding: If True, build the wheels into the wheel cache
+ and replace each sdist we built from with its newly built wheel,
+ in preparation for installation.
+ :return: True if all the wheels built correctly.
+ """
+ from pip._internal import index
+
+ building_is_possible = self._wheel_dir or (
+ autobuilding and self.wheel_cache.cache_dir
+ )
+ assert building_is_possible
+
+ buildset = []
+ for req in requirements:
+ if req.constraint:
+ continue
+ if req.is_wheel:
+ if not autobuilding:
+ logger.info(
+ 'Skipping %s, due to already being a wheel.', req.name,
+ )
+ elif autobuilding and req.editable:
+ pass
+ elif autobuilding and not req.source_dir:
+ pass
+ elif autobuilding and req.link and not req.link.is_artifact:
+ # VCS checkout. Build wheel just for this run.
+ buildset.append((req, True))
+ else:
+ ephem_cache = False
+ if autobuilding:
+ link = req.link
+ base, ext = link.splitext()
+ if index.egg_info_matches(base, None, link) is None:
+ # E.g. local directory. Build wheel just for this run.
+ ephem_cache = True
+ if "binary" not in index.fmt_ctl_formats(
+ self.finder.format_control,
+ canonicalize_name(req.name)):
+ logger.info(
+ "Skipping bdist_wheel for %s, due to binaries "
+ "being disabled for it.", req.name,
+ )
+ continue
+ buildset.append((req, ephem_cache))
+
+ if not buildset:
+ return True
+
+ # Build the wheels.
+ logger.info(
+ 'Building wheels for collected packages: %s',
+ ', '.join([req.name for (req, _) in buildset]),
+ )
+ _cache = self.wheel_cache # shorter name
+ with indent_log():
+ build_success, build_failure = [], []
+ for req, ephem in buildset:
+ python_tag = None
+ if autobuilding:
+ python_tag = pep425tags.implementation_tag
+ if ephem:
+ output_dir = _cache.get_ephem_path_for_link(req.link)
+ else:
+ output_dir = _cache.get_path_for_link(req.link)
+ try:
+ ensure_dir(output_dir)
+ except OSError as e:
+ logger.warning("Building wheel for %s failed: %s",
+ req.name, e)
+ build_failure.append(req)
+ continue
+ else:
+ output_dir = self._wheel_dir
+ wheel_file = self._build_one(
+ req, output_dir,
+ python_tag=python_tag,
+ )
+ if wheel_file:
+ build_success.append(req)
+ if autobuilding:
+ # XXX: This is mildly duplicative with prepare_files,
+ # but not close enough to pull out to a single common
+ # method.
+ # The code below assumes temporary source dirs -
+ # prevent it doing bad things.
+ if req.source_dir and not os.path.exists(os.path.join(
+ req.source_dir, PIP_DELETE_MARKER_FILENAME)):
+ raise AssertionError(
+ "bad source dir - missing marker")
+ # Delete the source we built the wheel from
+ req.remove_temporary_source()
+ # set the build directory again - name is known from
+ # the work prepare_files did.
+ req.source_dir = req.build_location(
+ self.preparer.build_dir
+ )
+ # Update the link for this.
+ req.link = index.Link(path_to_url(wheel_file))
+ assert req.link.is_wheel
+ # extract the wheel into the dir
+ unpack_url(
+ req.link, req.source_dir, None, False,
+ session=session,
+ )
+ else:
+ build_failure.append(req)
+
+ # notify success/failure
+ if build_success:
+ logger.info(
+ 'Successfully built %s',
+ ' '.join([req.name for req in build_success]),
+ )
+ if build_failure:
+ logger.info(
+ 'Failed to build %s',
+ ' '.join([req.name for req in build_failure]),
+ )
+ # Return True if all builds were successful
+ return len(build_failure) == 0
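+ # Hypothetical usage sketch (not part of pip itself): a caller would wire
+ # the collaborators together roughly like this:
+ #
+ # builder = WheelBuilder(finder, preparer, wheel_cache,
+ # build_options=[], global_options=[])
+ # all_built = builder.build(requirements, session=session)
+ # if not all_built:
+ # logger.warning('Some wheels failed to build; falling back to sdists.')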
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/__init__.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/__init__.py
index 1387dba..607757f 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/__init__.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/__init__.py
@@ -1,109 +1,109 @@
-"""
-pip._vendor is for vendoring dependencies of pip to prevent needing pip to
-depend on something external.
-
-Files inside of pip._vendor should be considered immutable and should only be
-updated to versions from upstream.
-"""
-from __future__ import absolute_import
-
-import glob
-import os.path
-import sys
-
-# Downstream redistributors which have debundled our dependencies should also
-# patch this value to be true. This will trigger the additional patching
-# to cause things like "six" to be available as pip.
-DEBUNDLED = False
-
-# By default, look in this directory for a bunch of .whl files which we will
-# add to the beginning of sys.path before attempting to import anything. This
-# is done to support downstream re-distributors like Debian and Fedora who
-# wish to create their own Wheels for our dependencies to aid in debundling.
-WHEEL_DIR = os.path.abspath(os.path.dirname(__file__))
-
-
-# Define a small helper function to alias our vendored modules to the real ones
-# if the vendored ones do not exist. This idea of this was taken from
-# https://github.com/kennethreitz/requests/pull/2567.
-def vendored(modulename):
- vendored_name = "{0}.{1}".format(__name__, modulename)
-
- try:
- __import__(vendored_name, globals(), locals(), level=0)
- except ImportError:
- try:
- __import__(modulename, globals(), locals(), level=0)
- except ImportError:
- # We can just silently allow import failures to pass here. If we
- # got to this point it means that ``import pip._vendor.whatever``
- # failed and so did ``import whatever``. Since we're importing this
- # upfront in an attempt to alias imports, not erroring here will
- # just mean we get a regular import error whenever pip *actually*
- # tries to import one of these modules to use it, which actually
- # gives us a better error message than we would have otherwise
- # gotten.
- pass
- else:
- sys.modules[vendored_name] = sys.modules[modulename]
- base, head = vendored_name.rsplit(".", 1)
- setattr(sys.modules[base], head, sys.modules[modulename])
-
-
-# If we're operating in a debundled setup, then we want to go ahead and trigger
-# the aliasing of our vendored libraries as well as looking for wheels to add
-# to our sys.path. This will cause all of this code to be a no-op typically
-# however downstream redistributors can enable it in a consistent way across
-# all platforms.
-if DEBUNDLED:
- # Actually look inside of WHEEL_DIR to find .whl files and add them to the
- # front of our sys.path.
- sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path
-
- # Actually alias all of our vendored dependencies.
- vendored("cachecontrol")
- vendored("colorama")
- vendored("distlib")
- vendored("distro")
- vendored("html5lib")
- vendored("lockfile")
- vendored("six")
- vendored("six.moves")
- vendored("six.moves.urllib")
- vendored("six.moves.urllib.parse")
- vendored("packaging")
- vendored("packaging.version")
- vendored("packaging.specifiers")
- vendored("pkg_resources")
- vendored("progress")
- vendored("pytoml")
- vendored("retrying")
- vendored("requests")
- vendored("requests.packages")
- vendored("requests.packages.urllib3")
- vendored("requests.packages.urllib3._collections")
- vendored("requests.packages.urllib3.connection")
- vendored("requests.packages.urllib3.connectionpool")
- vendored("requests.packages.urllib3.contrib")
- vendored("requests.packages.urllib3.contrib.ntlmpool")
- vendored("requests.packages.urllib3.contrib.pyopenssl")
- vendored("requests.packages.urllib3.exceptions")
- vendored("requests.packages.urllib3.fields")
- vendored("requests.packages.urllib3.filepost")
- vendored("requests.packages.urllib3.packages")
- vendored("requests.packages.urllib3.packages.ordered_dict")
- vendored("requests.packages.urllib3.packages.six")
- vendored("requests.packages.urllib3.packages.ssl_match_hostname")
- vendored("requests.packages.urllib3.packages.ssl_match_hostname."
- "_implementation")
- vendored("requests.packages.urllib3.poolmanager")
- vendored("requests.packages.urllib3.request")
- vendored("requests.packages.urllib3.response")
- vendored("requests.packages.urllib3.util")
- vendored("requests.packages.urllib3.util.connection")
- vendored("requests.packages.urllib3.util.request")
- vendored("requests.packages.urllib3.util.response")
- vendored("requests.packages.urllib3.util.retry")
- vendored("requests.packages.urllib3.util.ssl_")
- vendored("requests.packages.urllib3.util.timeout")
- vendored("requests.packages.urllib3.util.url")
+"""
+pip._vendor is for vendoring dependencies of pip to prevent needing pip to
+depend on something external.
+
+Files inside of pip._vendor should be considered immutable and should only be
+updated to versions from upstream.
+"""
+from __future__ import absolute_import
+
+import glob
+import os.path
+import sys
+
+# Downstream redistributors which have debundled our dependencies should also
+# patch this value to be true. This will trigger the additional patching
+# to cause things like "six" to be available as pip.
+DEBUNDLED = False
+
+# By default, look in this directory for a bunch of .whl files which we will
+# add to the beginning of sys.path before attempting to import anything. This
+# is done to support downstream re-distributors like Debian and Fedora who
+# wish to create their own Wheels for our dependencies to aid in debundling.
+WHEEL_DIR = os.path.abspath(os.path.dirname(__file__))
+
+
+# Define a small helper function to alias our vendored modules to the real ones
+ # if the vendored ones do not exist. The idea for this was taken from
+# https://github.com/kennethreitz/requests/pull/2567.
+def vendored(modulename):
+ vendored_name = "{0}.{1}".format(__name__, modulename)
+
+ try:
+ __import__(vendored_name, globals(), locals(), level=0)
+ except ImportError:
+ try:
+ __import__(modulename, globals(), locals(), level=0)
+ except ImportError:
+ # We can just silently allow import failures to pass here. If we
+ # got to this point it means that ``import pip._vendor.whatever``
+ # failed and so did ``import whatever``. Since we're importing this
+ # upfront in an attempt to alias imports, not erroring here will
+ # just mean we get a regular import error whenever pip *actually*
+ # tries to import one of these modules to use it, which actually
+ # gives us a better error message than we would have otherwise
+ # gotten.
+ pass
+ else:
+ sys.modules[vendored_name] = sys.modules[modulename]
+ base, head = vendored_name.rsplit(".", 1)
+ setattr(sys.modules[base], head, sys.modules[modulename])
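+
+ # Illustrative example (not part of this module): on a debundled system
+ # where only the real "six" is installed, vendored("six") registers it
+ # under the vendored name, so
+ #
+ # from pip._vendor import six
+ #
+ # resolves to the system-wide six module rather than a bundled copy.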
+
+
+# If we're operating in a debundled setup, then we want to go ahead and trigger
+# the aliasing of our vendored libraries as well as looking for wheels to add
+# to our sys.path. This will cause all of this code to be a no-op typically;
+# however, downstream redistributors can enable it in a consistent way across
+# all platforms.
+if DEBUNDLED:
+ # Actually look inside of WHEEL_DIR to find .whl files and add them to the
+ # front of our sys.path.
+ sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path
+
+ # Actually alias all of our vendored dependencies.
+ vendored("cachecontrol")
+ vendored("colorama")
+ vendored("distlib")
+ vendored("distro")
+ vendored("html5lib")
+ vendored("lockfile")
+ vendored("six")
+ vendored("six.moves")
+ vendored("six.moves.urllib")
+ vendored("six.moves.urllib.parse")
+ vendored("packaging")
+ vendored("packaging.version")
+ vendored("packaging.specifiers")
+ vendored("pkg_resources")
+ vendored("progress")
+ vendored("pytoml")
+ vendored("retrying")
+ vendored("requests")
+ vendored("requests.packages")
+ vendored("requests.packages.urllib3")
+ vendored("requests.packages.urllib3._collections")
+ vendored("requests.packages.urllib3.connection")
+ vendored("requests.packages.urllib3.connectionpool")
+ vendored("requests.packages.urllib3.contrib")
+ vendored("requests.packages.urllib3.contrib.ntlmpool")
+ vendored("requests.packages.urllib3.contrib.pyopenssl")
+ vendored("requests.packages.urllib3.exceptions")
+ vendored("requests.packages.urllib3.fields")
+ vendored("requests.packages.urllib3.filepost")
+ vendored("requests.packages.urllib3.packages")
+ vendored("requests.packages.urllib3.packages.ordered_dict")
+ vendored("requests.packages.urllib3.packages.six")
+ vendored("requests.packages.urllib3.packages.ssl_match_hostname")
+ vendored("requests.packages.urllib3.packages.ssl_match_hostname."
+ "_implementation")
+ vendored("requests.packages.urllib3.poolmanager")
+ vendored("requests.packages.urllib3.request")
+ vendored("requests.packages.urllib3.response")
+ vendored("requests.packages.urllib3.util")
+ vendored("requests.packages.urllib3.util.connection")
+ vendored("requests.packages.urllib3.util.request")
+ vendored("requests.packages.urllib3.util.response")
+ vendored("requests.packages.urllib3.util.retry")
+ vendored("requests.packages.urllib3.util.ssl_")
+ vendored("requests.packages.urllib3.util.timeout")
+ vendored("requests.packages.urllib3.util.url")
diff --git a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/appdirs.py b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/appdirs.py
index 2bd3911..7ff6a07 100644
--- a/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/appdirs.py
+++ b/json/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/appdirs.py
@@ -1,604 +1,604 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# Copyright (c) 2005-2010 ActiveState Software Inc.
-# Copyright (c) 2013 Eddy Petrișor
-
-"""Utilities for determining application-specific dirs.
-
-See <http://github.com/ActiveState/appdirs> for details and usage.
-"""
-# Dev Notes:
-# - MSDN on where to store app data files:
-# http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120
-# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html
-# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html
-
-__version_info__ = (1, 4, 3)
-__version__ = '.'.join(map(str, __version_info__))
-
-
-import sys
-import os
-
-PY3 = sys.version_info[0] == 3
-
-if PY3:
- unicode = str
-
-if sys.platform.startswith('java'):
- import platform
- os_name = platform.java_ver()[3][0]
- if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc.
- system = 'win32'
- elif os_name.startswith('Mac'): # "Mac OS X", etc.
- system = 'darwin'
- else: # "Linux", "SunOS", "FreeBSD", etc.
- # Setting this to "linux2" is not ideal, but only Windows or Mac
- # are actually checked for and the rest of the module expects
- # *sys.platform* style strings.
- system = 'linux2'
-else:
- system = sys.platform
-
-
-
-def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
- r"""Return full path to the user-specific data dir for this application.
-
- "appname" is the name of application.
- If None, just the system directory is returned.
- "appauthor" (only used on Windows) is the name of the
- appauthor or distributing body for this application. Typically
- it is the owning company name. This falls back to appname. You may
- pass False to disable it.
- "version" is an optional version path element to append to the
- path. You might want to use this if you want multiple versions
- of your app to be able to run independently. If used, this
- would typically be "<major>.<minor>".
- Only applied when appname is present.
- "roaming" (boolean, default False) can be set True to use the Windows
- roaming appdata directory. That means that for users on a Windows
- network setup for roaming profiles, this user data will be
- sync'd on login. See
- <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
- for a discussion of issues.
-
- Typical user data directories are:
- Mac OS X: ~/Library/Application Support/<AppName>
- Unix: ~/.local/share/<AppName> # or in $XDG_DATA_HOME, if defined
- Win XP (not roaming): C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName>
- Win XP (roaming): C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>
- Win 7 (not roaming): C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>
- Win 7 (roaming): C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName>
-
- For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
- That means, by default "~/.local/share/<AppName>".
- """
- if system == "win32":
- if appauthor is None:
- appauthor = appname
- const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"
- path = os.path.normpath(_get_win_folder(const))
- if appname:
- if appauthor is not False:
- path = os.path.join(path, appauthor, appname)
- else:
- path = os.path.join(path, appname)
- elif system == 'darwin':
- path = os.path.expanduser('~/Library/Application Support/')
- if appname:
- path = os.path.join(path, appname)
- else:
- path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share"))
- if appname:
- path = os.path.join(path, appname)
- if appname and version:
- path = os.path.join(path, version)
- return path
-
-
-def site_data_dir(appname=None, appauthor=None, version=None, multipath=False):
- r"""Return full path to the user-shared data dir for this application.
-
- "appname" is the name of application.
- If None, just the system directory is returned.
- "appauthor" (only used on Windows) is the name of the
- appauthor or distributing body for this application. Typically
- it is the owning company name. This falls back to appname. You may
- pass False to disable it.
- "version" is an optional version path element to append to the
- path. You might want to use this if you want multiple versions
- of your app to be able to run independently. If used, this
- would typically be "<major>.<minor>".
- Only applied when appname is present.
- "multipath" is an optional parameter only applicable to *nix
- which indicates that the entire list of data dirs should be
- returned. By default, the first item from XDG_DATA_DIRS is
- returned, or '/usr/local/share/<AppName>',
- if XDG_DATA_DIRS is not set
-
- Typical site data directories are:
- Mac OS X: /Library/Application Support/<AppName>
- Unix: /usr/local/share/<AppName> or /usr/share/<AppName>
- Win XP: C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName>
- Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
- Win 7: C:\ProgramData\<AppAuthor>\<AppName> # Hidden, but writeable on Win 7.
-
- For Unix, this is using the $XDG_DATA_DIRS[0] default.
-
- WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
- """
- if system == "win32":
- if appauthor is None:
- appauthor = appname
- path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
- if appname:
- if appauthor is not False:
- path = os.path.join(path, appauthor, appname)
- else:
- path = os.path.join(path, appname)
- elif system == 'darwin':
- path = os.path.expanduser('/Library/Application Support')
- if appname:
- path = os.path.join(path, appname)
- else:
- # XDG default for $XDG_DATA_DIRS
- # only first, if multipath is False
- path = os.getenv('XDG_DATA_DIRS',
- os.pathsep.join(['/usr/local/share', '/usr/share']))
- pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
- if appname:
- if version:
- appname = os.path.join(appname, version)
- pathlist = [os.sep.join([x, appname]) for x in pathlist]
-
- if multipath:
- path = os.pathsep.join(pathlist)
- else:
- path = pathlist[0]
- return path
-
- if appname and version:
- path = os.path.join(path, version)
- return path
-
-
-def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
- r"""Return full path to the user-specific config dir for this application.
-
- "appname" is the name of application.
- If None, just the system directory is returned.
- "appauthor" (only used on Windows) is the name of the
- appauthor or distributing body for this application. Typically
- it is the owning company name. This falls back to appname. You may
- pass False to disable it.
- "version" is an optional version path element to append to the
- path. You might want to use this if you want multiple versions
- of your app to be able to run independently. If used, this
- would typically be "<major>.<minor>".
- Only applied when appname is present.
- "roaming" (boolean, default False) can be set True to use the Windows
- roaming appdata directory. That means that for users on a Windows
- network setup for roaming profiles, this user data will be
- sync'd on login. See
-
- <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
-
- Typical user config directories are:
- Mac OS X: same as user_data_dir
- Unix: ~/.config/<AppName> # or in $XDG_CONFIG_HOME, if defined
- Win *: same as user_data_dir
-
- For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
- That means, by default "~/.config/<AppName>".
- """
- if system in ["win32", "darwin"]:
- path = user_data_dir(appname, appauthor, None, roaming)
- else:
- path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config"))
- if appname:
- path = os.path.join(path, appname)
- if appname and version:
- path = os.path.join(path, version)
- return path
-
-
-def site_config_dir(appname=None, appauthor=None, version=None, multipath=False):
- r"""Return full path to the user-shared data dir for this application.
-
- "appname" is the name of application.
- If None, just the system directory is returned.
- "appauthor" (only used on Windows) is the name of the
- appauthor or distributing body for this application. Typically
- it is the owning company name. This falls back to appname. You may
- pass False to disable it.
- "version" is an optional version path element to append to the
- path. You might want to use this if you want multiple versions
- of your app to be able to run independently. If used, this
- would typically be "<major>.<minor>".
- Only applied when appname is present.
- "multipath" is an optional parameter only applicable to *nix
- which indicates that the entire list of config dirs should be
- returned. By default, the first item from XDG_CONFIG_DIRS is
- returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set
-
- Typical site config directories are:
- Mac OS X: same as site_data_dir
- Unix: /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in
- $XDG_CONFIG_DIRS
- Win *: same as site_data_dir
- Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
-
- For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False
-
- WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
- """
- if system in ["win32", "darwin"]:
- path = site_data_dir(appname, appauthor)
- if appname and version:
- path = os.path.join(path, version)
- else:
- # XDG default for $XDG_CONFIG_DIRS
- # only first, if multipath is False
- path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
- pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
- if appname:
- if version:
- appname = os.path.join(appname, version)
- pathlist = [os.sep.join([x, appname]) for x in pathlist]
-
- if multipath:
- path = os.pathsep.join(pathlist)
- else:
- path = pathlist[0]
- return path
-
-
-def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True):
- r"""Return full path to the user-specific cache dir for this application.
-
- "appname" is the name of application.
- If None, just the system directory is returned.
- "appauthor" (only used on Windows) is the name of the
- appauthor or distributing body for this application. Typically
- it is the owning company name. This falls back to appname. You may
- pass False to disable it.
- "version" is an optional version path element to append to the
- path. You might want to use this if you want multiple versions
- of your app to be able to run independently. If used, this
- would typically be "<major>.<minor>".
- Only applied when appname is present.
- "opinion" (boolean) can be False to disable the appending of
- "Cache" to the base app data dir for Windows. See
- discussion below.
-
- Typical user cache directories are:
- Mac OS X: ~/Library/Caches/<AppName>
- Unix: ~/.cache/<AppName> (XDG default)
- Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache
- Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache
-
- On Windows the only suggestion in the MSDN docs is that local settings go in
- the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming
- app data dir (the default returned by `user_data_dir` above). Apps typically
- put cache data somewhere *under* the given dir here. Some examples:
- ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
- ...\Acme\SuperApp\Cache\1.0
- OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
- This can be disabled with the `opinion=False` option.
- """
- if system == "win32":
- if appauthor is None:
- appauthor = appname
- path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
- if appname:
- if appauthor is not False:
- path = os.path.join(path, appauthor, appname)
- else:
- path = os.path.join(path, appname)
- if opinion:
- path = os.path.join(path, "Cache")
- elif system == 'darwin':
- path = os.path.expanduser('~/Library/Caches')
- if appname:
- path = os.path.join(path, appname)
- else:
- path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
- if appname:
- path = os.path.join(path, appname)
- if appname and version:
- path = os.path.join(path, version)
- return path
-
-
-def user_state_dir(appname=None, appauthor=None, version=None, roaming=False):
- r"""Return full path to the user-specific state dir for this application.
-
- "appname" is the name of application.
- If None, just the system directory is returned.
- "appauthor" (only used on Windows) is the name of the
- appauthor or distributing body for this application. Typically
- it is the owning company name. This falls back to appname. You may
- pass False to disable it.
- "version" is an optional version path element to append to the
- path. You might want to use this if you want multiple versions
- of your app to be able to run independently. If used, this
- would typically be "<major>.<minor>".
- Only applied when appname is present.
- "roaming" (boolean, default False) can be set True to use the Windows
- roaming appdata directory. That means that for users on a Windows
- network setup for roaming profiles, this user data will be
- sync'd on login. See
- <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
- for a discussion of issues.
-
- Typical user state directories are:
- Mac OS X: same as user_data_dir
- Unix: ~/.local/state/<AppName> # or in $XDG_STATE_HOME, if defined
- Win *: same as user_data_dir
-
- For Unix, we follow this Debian proposal <https://wiki.debian.org/XDGBaseDirectorySpecification#state>
- to extend the XDG spec and support $XDG_STATE_HOME.
-
- That means, by default "~/.local/state/<AppName>".
- """
- if system in ["win32", "darwin"]:
- path = user_data_dir(appname, appauthor, None, roaming)
- else:
- path = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state"))
- if appname:
- path = os.path.join(path, appname)
- if appname and version:
- path = os.path.join(path, version)
- return path
-
-
-def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
- r"""Return full path to the user-specific log dir for this application.
-
- "appname" is the name of application.
- If None, just the system directory is returned.
- "appauthor" (only used on Windows) is the name of the
- appauthor or distributing body for this application. Typically
- it is the owning company name. This falls back to appname. You may
- pass False to disable it.
- "version" is an optional version path element to append to the
- path. You might want to use this if you want multiple versions
- of your app to be able to run independently. If used, this
- would typically be "<major>.<minor>".
- Only applied when appname is present.
- "opinion" (boolean) can be False to disable the appending of
- "Logs" to the base app data dir for Windows, and "log" to the
- base cache dir for Unix. See discussion below.
-
- Typical user log directories are:
- Mac OS X: ~/Library/Logs/<AppName>
- Unix: ~/.cache/<AppName>/log # or under $XDG_CACHE_HOME if defined
- Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs
- Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs
-
- On Windows the only suggestion in the MSDN docs is that local settings
- go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in
- examples of what some windows apps use for a logs dir.)
-
- OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA`
- value for Windows and appends "log" to the user cache dir for Unix.
- This can be disabled with the `opinion=False` option.
- """
- if system == "darwin":
- path = os.path.join(
- os.path.expanduser('~/Library/Logs'),
- appname)
- elif system == "win32":
- path = user_data_dir(appname, appauthor, version)
- version = False
- if opinion:
- path = os.path.join(path, "Logs")
- else:
- path = user_cache_dir(appname, appauthor, version)
- version = False
- if opinion:
- path = os.path.join(path, "log")
- if appname and version:
- path = os.path.join(path, version)
- return path
-
-
-class AppDirs(object):
- """Convenience wrapper for getting application dirs."""
- def __init__(self, appname=None, appauthor=None, version=None,
- roaming=False, multipath=False):
- self.appname = appname
- self.appauthor = appauthor
- self.version = version
- self.roaming = roaming
- self.multipath = multipath
-
- @property
- def user_data_dir(self):
- return user_data_dir(self.appname, self.appauthor,
- version=self.version, roaming=self.roaming)
-
- @property
- def site_data_dir(self):
- return site_data_dir(self.appname, self.appauthor,
- version=self.version, multipath=self.multipath)
-
- @property
- def user_config_dir(self):
- return user_config_dir(self.appname, self.appauthor,
- version=self.version, roaming=self.roaming)
-
- @property
- def site_config_dir(self):
- return site_config_dir(self.appname, self.appauthor,
- version=self.version, multipath=self.multipath)
-
- @property
- def user_cache_dir(self):
- return user_cache_dir(self.appname, self.appauthor,
- version=self.version)
-
- @property
- def user_state_dir(self):
- return user_state_dir(self.appname, self.appauthor,
- version=self.version)
-
- @property
- def user_log_dir(self):
- return user_log_dir(self.appname, self.appauthor,
- version=self.version)
-
-
-#---- internal support stuff
-
-def _get_win_folder_from_registry(csidl_name):
- """This is a fallback technique at best. I'm not sure if using the
- registry for this guarantees us the correct answer for all CSIDL_*
- names.
- """
- if PY3:
- import winreg as _winreg
- else:
- import _winreg
-
- shell_folder_name = {
- "CSIDL_APPDATA": "AppData",
- "CSIDL_COMMON_APPDATA": "Common AppData",
- "CSIDL_LOCAL_APPDATA": "Local AppData",
- }[csidl_name]
-
- key = _winreg.OpenKey(
- _winreg.HKEY_CURRENT_USER,
- r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
- )
- dir, type = _winreg.QueryValueEx(key, shell_folder_name)
- return dir
-
-
-def _get_win_folder_with_pywin32(csidl_name):
- from win32com.shell import shellcon, shell
- dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0)
- # Try to make this a unicode path because SHGetFolderPath does
- # not return unicode strings when there is unicode data in the
- # path.
- try:
- dir = unicode(dir)
-
- # Downgrade to short path name if have highbit chars. See
- # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
- has_high_char = False
- for c in dir:
- if ord(c) > 255:
- has_high_char = True
- break
- if has_high_char:
- try:
- import win32api
- dir = win32api.GetShortPathName(dir)
- except ImportError:
- pass
- except UnicodeError:
- pass
- return dir
-
-
-def _get_win_folder_with_ctypes(csidl_name):
- import ctypes
-
- csidl_const = {
- "CSIDL_APPDATA": 26,
- "CSIDL_COMMON_APPDATA": 35,
- "CSIDL_LOCAL_APPDATA": 28,
- }[csidl_name]
-
- buf = ctypes.create_unicode_buffer(1024)
- ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
-
- # Downgrade to short path name if have highbit chars. See
- # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
- has_high_char = False
- for c in buf:
- if ord(c) > 255:
- has_high_char = True
- break
- if has_high_char:
- buf2 = ctypes.create_unicode_buffer(1024)
- if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
- buf = buf2
-
- return buf.value
-
-def _get_win_folder_with_jna(csidl_name):
- import array
- from com.sun import jna
- from com.sun.jna.platform import win32
-
- buf_size = win32.WinDef.MAX_PATH * 2
- buf = array.zeros('c', buf_size)
- shell = win32.Shell32.INSTANCE
- shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf)
- dir = jna.Native.toString(buf.tostring()).rstrip("\0")
-
- # Downgrade to short path name if have highbit chars. See
- # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
- has_high_char = False
- for c in dir:
- if ord(c) > 255:
- has_high_char = True
- break
- if has_high_char:
- buf = array.zeros('c', buf_size)
- kernel = win32.Kernel32.INSTANCE
- if kernel.GetShortPathName(dir, buf, buf_size):
- dir = jna.Native.toString(buf.tostring()).rstrip("\0")
-
- return dir
-
-if system == "win32":
- try:
- from ctypes import windll
- _get_win_folder = _get_win_folder_with_ctypes
- except ImportError:
- try:
- import com.sun.jna
- _get_win_folder = _get_win_folder_with_jna
- except ImportError:
- _get_win_folder = _get_win_folder_from_registry
-
-
-#---- self test code
-
-if __name__ == "__main__":
- appname = "MyApp"
- appauthor = "MyCompany"
-
- props = ("user_data_dir",
- "user_config_dir",
- "user_cache_dir",
- "user_state_dir",
- "user_log_dir",
- "site_data_dir",
- "site_config_dir")
-
- print("-- app dirs %s --" % __version__)
-
- print("-- app dirs (with optional 'version')")
- dirs = AppDirs(appname, appauthor, version="1.0")
- for prop in props:
- print("%s: %s" % (prop, getattr(dirs, prop)))
-
- print("\n-- app dirs (without optional 'version')")
- dirs = AppDirs(appname, appauthor)
- for prop in props:
- print("%s: %s" % (prop, getattr(dirs, prop)))
-
- print("\n-- app dirs (without optional 'appauthor')")
- dirs = AppDirs(appname)
- for prop in props:
- print("%s: %s" % (prop, getattr(dirs, prop)))
-
- print("\n-- app dirs (with disabled 'appauthor')")
- dirs = AppDirs(appname, appauthor=False)
- for prop in props:
- print("%s: %s" % (prop, getattr(dirs, prop)))
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2005-2010 ActiveState Software Inc.
+# Copyright (c) 2013 Eddy Petrișor
+
+"""Utilities for determining application-specific dirs.
+
+See <http://github.com/ActiveState/appdirs> for details and usage.
+"""
+# Dev Notes:
+# - MSDN on where to store app data files:
+# http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120
+# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html
+# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html
+
+__version_info__ = (1, 4, 3)
+__version__ = '.'.join(map(str, __version_info__))
+
+
+import sys
+import os
+
+PY3 = sys.version_info[0] == 3
+
+if PY3:
+ unicode = str
+
+if sys.platform.startswith('java'):
+ import platform
+ os_name = platform.java_ver()[3][0]
+ if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc.
+ system = 'win32'
+ elif os_name.startswith('Mac'): # "Mac OS X", etc.
+ system = 'darwin'
+ else: # "Linux", "SunOS", "FreeBSD", etc.
+ # Setting this to "linux2" is not ideal, but only Windows or Mac
+ # are actually checked for and the rest of the module expects
+ # *sys.platform* style strings.
+ system = 'linux2'
+else:
+ system = sys.platform
+
+
+
+def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
+ r"""Return full path to the user-specific data dir for this application.
+
+ "appname" is the name of application.
+ If None, just the system directory is returned.
+ "appauthor" (only used on Windows) is the name of the
+ appauthor or distributing body for this application. Typically
+ it is the owning company name. This falls back to appname. You may
+ pass False to disable it.
+ "version" is an optional version path element to append to the
+ path. You might want to use this if you want multiple versions
+ of your app to be able to run independently. If used, this
+ would typically be "<major>.<minor>".
+ Only applied when appname is present.
+ "roaming" (boolean, default False) can be set True to use the Windows
+ roaming appdata directory. That means that for users on a Windows
+ network setup for roaming profiles, this user data will be
+ sync'd on login. See
+ <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>