From 947cba18bd2531fe28a002b6d41344e794f1d96f Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 13 Aug 2018 18:30:05 -0400 Subject: [PATCH 01/26] Vendor new libraries: vistir and pip_shims - Vendors new libraries for maintainability - Separates responsibilities and adds coverage Signed-off-by: Dan Ryan --- news/2639.vendor | 1 + pipenv/vendor/pip_shims/__init__.py | 75 +++++ pipenv/vendor/pip_shims/shims.py | 207 ++++++++++++ pipenv/vendor/pip_shims/utils.py | 35 ++ pipenv/vendor/vendor.txt | 2 + pipenv/vendor/vistir/__init__.py | 34 ++ pipenv/vendor/vistir/backports/__init__.py | 11 + pipenv/vendor/vistir/backports/functools.py | 84 +++++ pipenv/vendor/vistir/backports/tempfile.py | 211 +++++++++++++ pipenv/vendor/vistir/cmdparse.py | 78 +++++ pipenv/vendor/vistir/compat.py | 136 ++++++++ pipenv/vendor/vistir/contextmanagers.py | 208 ++++++++++++ pipenv/vendor/vistir/misc.py | 134 ++++++++ pipenv/vendor/vistir/path.py | 333 ++++++++++++++++++++ 14 files changed, 1549 insertions(+) create mode 100644 news/2639.vendor create mode 100644 pipenv/vendor/pip_shims/__init__.py create mode 100644 pipenv/vendor/pip_shims/shims.py create mode 100644 pipenv/vendor/pip_shims/utils.py create mode 100644 pipenv/vendor/vistir/__init__.py create mode 100644 pipenv/vendor/vistir/backports/__init__.py create mode 100644 pipenv/vendor/vistir/backports/functools.py create mode 100644 pipenv/vendor/vistir/backports/tempfile.py create mode 100644 pipenv/vendor/vistir/cmdparse.py create mode 100644 pipenv/vendor/vistir/compat.py create mode 100644 pipenv/vendor/vistir/contextmanagers.py create mode 100644 pipenv/vendor/vistir/misc.py create mode 100644 pipenv/vendor/vistir/path.py diff --git a/news/2639.vendor b/news/2639.vendor new file mode 100644 index 0000000000..777350aacb --- /dev/null +++ b/news/2639.vendor @@ -0,0 +1 @@ +Vendored new libraries ``vistir`` and ``pip-shims``. 
diff --git a/pipenv/vendor/pip_shims/__init__.py b/pipenv/vendor/pip_shims/__init__.py new file mode 100644 index 0000000000..5f6ba2c4fb --- /dev/null +++ b/pipenv/vendor/pip_shims/__init__.py @@ -0,0 +1,75 @@ +# -*- coding=utf-8 -*- +from __future__ import absolute_import + +__version__ = "0.1.0" + +__all__ = [ + "__version__", + "_strip_extras", + "cmdoptions", + "Command", + "ConfigOptionParser", + "DistributionNotFound", + "FAVORITE_HASH", + "FormatControl", + "get_installed_distributions", + "index_group", + "InstallRequirement", + "is_archive_file", + "is_file_url", + "is_installable_dir", + "Link", + "make_abstract_dist", + "make_option_group", + "PackageFinder", + "parse_requirements", + "parse_version", + "path_to_url", + "pip_version", + "PipError", + "RequirementPreparer", + "RequirementSet", + "RequirementTracker", + "Resolver", + "SafeFileCache", + "url_to_path", + "USER_CACHE_DIR", + "VcsSupport", + "Wheel", + "WheelCache", +] + +from .shims import ( + _strip_extras, + cmdoptions, + Command, + ConfigOptionParser, + DistributionNotFound, + FAVORITE_HASH, + FormatControl, + get_installed_distributions, + index_group, + InstallRequirement, + is_archive_file, + is_file_url, + is_installable_dir, + Link, + make_abstract_dist, + make_option_group, + PackageFinder, + parse_requirements, + parse_version, + path_to_url, + pip_version, + PipError, + RequirementPreparer, + RequirementSet, + RequirementTracker, + Resolver, + SafeFileCache, + url_to_path, + USER_CACHE_DIR, + VcsSupport, + Wheel, + WheelCache, +) diff --git a/pipenv/vendor/pip_shims/shims.py b/pipenv/vendor/pip_shims/shims.py new file mode 100644 index 0000000000..159b22236c --- /dev/null +++ b/pipenv/vendor/pip_shims/shims.py @@ -0,0 +1,207 @@ +# -*- coding=utf-8 -*- +from collections import namedtuple +from contextlib import contextmanager +from .utils import _parse, get_package, STRING_TYPES +import importlib +import os +from pip import __version__ as pip_version +import sys + + +has_modutil = 
False +if sys.version_info[:2] >= (3, 7): + try: + import modutil + except ImportError: + has_modutil = False + else: + has_modutil = True + + +BASE_IMPORT_PATH = os.environ.get("PIP_SHIMS_BASE_MODULE", "pip") +path_info = namedtuple("PathInfo", "path start_version end_version") +parsed_pip_version = _parse(pip_version) + + +def is_valid(path_info_tuple): + if ( + path_info_tuple.start_version >= parsed_pip_version + and path_info_tuple.end_version <= parsed_pip_version + ): + return 1 + return 0 + + +def do_import(module_paths, base_path=BASE_IMPORT_PATH): + if not isinstance(module_paths, list): + module_paths = [module_paths] + prefix_order = [pth.format(base_path) for pth in ["{0}._internal", "{0}"]] + if _parse(pip_version) < _parse("10.0.0"): + prefix_order = reversed(prefix_order) + paths = sorted(module_paths, key=is_valid, reverse=True) + search_order = [ + "{0}.{1}".format(p, pth.path) + for p in prefix_order + for pth in paths + if pth is not None + ] + imported = None + if has_modutil: + pkgs = [get_package(pkg) for pkg in search_order] + imports = [ + modutil.lazy_import(__name__, {to_import}) for to_import, pkg in pkgs + ] + imp_getattrs = [imp_getattr for mod, imp_getattr in imports] + chained = modutil.chained___getattr__(__name__, *imp_getattrs) + imported = None + for to_import, pkg in pkgs: + _, _, module_name = to_import.rpartition(".") + try: + imported = chained(module_name) + except (modutil.ModuleAttributeError, ImportError): + continue + else: + if not imported: + continue + return getattr(imported, pkg) + if not imported: + return + return imported + for to_import in search_order: + to_import, package = get_package(to_import) + try: + imported = importlib.import_module(to_import) + except ImportError: + continue + else: + return getattr(imported, package) + return imported + + +parse_version = do_import( + [path_info("index.parse_version", _parse("7.0.0"), _parse("9999"))] +) +_strip_extras = do_import( + 
[path_info("req.req_install._strip_extras", _parse("7.0.0"), _parse("9999"))] +) +cmdoptions = do_import( + [ + path_info("cli.cmdoptions", _parse("18.1"), _parse("9999")), + path_info("cmdoptions", _parse("7.0.0"), _parse("18.0")), + ] +) +Command = do_import( + [ + path_info("cli.base_command.Command", _parse("18.1"), _parse("9999")), + path_info("basecommand.Command", _parse("7.0.0"), _parse("18.0")), + ] +) +ConfigOptionParser = do_import( + [ + path_info("cli.parser.ConfigOptionParser", _parse("18.1"), _parse("9999")), + path_info("baseparser.ConfigOptionParser", _parse("7.0.0"), _parse("18.0")), + ] +) +DistributionNotFound = do_import( + [path_info("exceptions.DistributionNotFound", _parse("7.0.0"), _parse("9999"))] +) +FAVORITE_HASH = do_import( + [path_info("utils.hashes.FAVORITE_HASH", _parse("7.0.0"), _parse("9999"))] +) +FormatControl = do_import( + [path_info("index.FormatControl", _parse("7.0.0"), _parse("9999"))] +) +get_installed_distributions = do_import( + [ + path_info( + "utils.misc.get_installed_distributions", _parse("10.0.0"), _parse("9999") + ), + path_info( + "utils.get_installed_distributions", _parse("7.0.0"), _parse("9.0.3") + ), + ] +) +index_group = do_import( + [ + path_info("cli.cmdoptions.index_group", _parse("18.1"), _parse("9999")), + path_info("cmdoptions.index_group", _parse("7.0.0"), _parse("18.0")), + ] +) +InstallRequirement = do_import( + [path_info("req.req_install.InstallRequirement", _parse("7.0.0"), _parse("9999"))] +) +is_archive_file = do_import( + [path_info("download.is_archive_file", _parse("7.0.0"), _parse("9999"))] +) +is_file_url = do_import( + [path_info("download.is_file_url", _parse("7.0.0"), _parse("9999"))] +) +is_installable_dir = do_import( + [ + path_info("utils.misc.is_installable_dir", _parse("10.0.0"), _parse("9999")), + path_info("utils.is_installable_dir", _parse("7.0.0"), _parse("9.0.3")), + ] +) +Link = do_import([path_info("index.Link", _parse("7.0.0"), _parse("9999"))]) +make_abstract_dist = 
do_import( + [ + path_info( + "operations.prepare.make_abstract_dist", _parse("10.0.0"), _parse("9999") + ), + path_info("req.req_set.make_abstract_dist", _parse("7.0.0"), _parse("9.0.3")), + ] +) +make_option_group = do_import( + [ + path_info("cli.cmdoptions.make_option_group", _parse("18.1"), _parse("9999")), + path_info("cmdoptions.make_option_group", _parse("7.0.0"), _parse("18.0")), + ] +) +PackageFinder = do_import( + [path_info("index.PackageFinder", _parse("7.0.0"), _parse("9999"))] +) +parse_requirements = do_import( + [path_info("req.req_file.parse_requirements", _parse("7.0.0"), _parse("9999"))] +) +parse_version = do_import( + [path_info("index.parse_version", _parse("7.0.0"), _parse("9999"))] +) +path_to_url = do_import( + [path_info("download.path_to_url", _parse("7.0.0"), _parse("9999"))] +) +PipError = do_import( + [path_info("exceptions.PipError", _parse("7.0.0"), _parse("9999"))] +) +RequirementPreparer = do_import( + [ + path_info( + "operations.prepare.RequirementPreparer", _parse("7.0.0"), _parse("9999") + ) + ] +) +RequirementSet = do_import( + [path_info("req.req_set.RequirementSet", _parse("7.0.0"), _parse("9999"))] +) +RequirementTracker = do_import( + [path_info("req.req_tracker.RequirementTracker", _parse("7.0.0"), _parse("9999"))] +) +Resolver = do_import([path_info("resolve.Resolver", _parse("7.0.0"), _parse("9999"))]) +SafeFileCache = do_import( + [path_info("download.SafeFileCache", _parse("7.0.0"), _parse("9999"))] +) +url_to_path = do_import( + [path_info("download.url_to_path", _parse("7.0.0"), _parse("9999"))] +) +USER_CACHE_DIR = do_import( + [path_info("locations.USER_CACHE_DIR", _parse("7.0.0"), _parse("9999"))] +) +VcsSupport = do_import([path_info("vcs.VcsSupport", _parse("7.0.0"), _parse("9999"))]) +Wheel = do_import([path_info("wheel.Wheel", _parse("7.0.0"), _parse("9999"))]) +WheelCache = do_import([path_info("cache.WheelCache", _parse("7.0.0"), _parse("9999"))]) + + +if not RequirementTracker: + + @contextmanager + def 
RequirementTracker(): + yield diff --git a/pipenv/vendor/pip_shims/utils.py b/pipenv/vendor/pip_shims/utils.py new file mode 100644 index 0000000000..a8389bbe6c --- /dev/null +++ b/pipenv/vendor/pip_shims/utils.py @@ -0,0 +1,35 @@ +# -*- coding=utf-8 -*- +from functools import wraps +import sys + +STRING_TYPES = (str,) +if sys.version_info < (3, 0): + STRING_TYPES = STRING_TYPES + (unicode,) + + +def memoize(obj): + cache = obj.cache = {} + + @wraps(obj) + def memoizer(*args, **kwargs): + key = str(args) + str(kwargs) + if key not in cache: + cache[key] = obj(*args, **kwargs) + return cache[key] + return memoizer + + +@memoize +def _parse(version): + if isinstance(version, STRING_TYPES): + return tuple(version.split(".")) + return version + + +def get_package(module, subimport=None): + package = None + if subimport: + package = subimport + else: + module, _, package = module.rpartition(".") + return module, package diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index 47253dd375..dd6df701f2 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -40,3 +40,5 @@ semver==2.8.0 shutilwhich==1.1.0 toml==0.9.4 cached-property==1.4.3 +vistir==0.1.0 +pip-shims==0.1.0 diff --git a/pipenv/vendor/vistir/__init__.py b/pipenv/vendor/vistir/__init__.py new file mode 100644 index 0000000000..c7f776d55d --- /dev/null +++ b/pipenv/vendor/vistir/__init__.py @@ -0,0 +1,34 @@ +# -*- coding=utf-8 -*- +from __future__ import absolute_import, unicode_literals + +from .compat import NamedTemporaryFile, TemporaryDirectory, partialmethod +from .contextmanagers import ( + atomic_open_for_write, + cd, + open_file, + temp_environ, + temp_path, +) +from .misc import load_path, partialclass, run, shell_escape +from .path import mkdir_p, rmtree + + +__version__ = '0.1.0' + + +__all__ = [ + "shell_escape", + "load_path", + "run", + "partialclass", + "temp_environ", + "temp_path", + "cd", + "atomic_open_for_write", + "open_file", + "rmtree", + "mkdir_p", + 
"TemporaryDirectory", + "NamedTemporaryFile", + "partialmethod", +] diff --git a/pipenv/vendor/vistir/backports/__init__.py b/pipenv/vendor/vistir/backports/__init__.py new file mode 100644 index 0000000000..0bdac1eaac --- /dev/null +++ b/pipenv/vendor/vistir/backports/__init__.py @@ -0,0 +1,11 @@ +# -*- coding=utf-8 -*- +from __future__ import absolute_import, unicode_literals + +from .functools import partialmethod +from .tempfile import NamedTemporaryFile + + +__all__ = [ + "NamedTemporaryFile", + "partialmethod" +] diff --git a/pipenv/vendor/vistir/backports/functools.py b/pipenv/vendor/vistir/backports/functools.py new file mode 100644 index 0000000000..8060d1836c --- /dev/null +++ b/pipenv/vendor/vistir/backports/functools.py @@ -0,0 +1,84 @@ +# -*- coding=utf-8 -*- +from __future__ import absolute_import, unicode_literals + +from functools import partial + + +__all__ = ["partialmethod",] + + +class partialmethod(object): + """Method descriptor with partial application of the given arguments + and keywords. + Supports wrapping existing descriptors and handles non-descriptor + callables as instance methods. 
+ """ + + def __init__(self, func, *args, **keywords): + if not callable(func) and not hasattr(func, "__get__"): + raise TypeError("{!r} is not callable or a descriptor" + .format(func)) + + # func could be a descriptor like classmethod which isn't callable, + # so we can't inherit from partial (it verifies func is callable) + if isinstance(func, partialmethod): + # flattening is mandatory in order to place cls/self before all + # other arguments + # it's also more efficient since only one function will be called + self.func = func.func + self.args = func.args + args + self.keywords = func.keywords.copy() + self.keywords.update(keywords) + else: + self.func = func + self.args = args + self.keywords = keywords + + def __repr__(self): + args = ", ".join(map(repr, self.args)) + keywords = ", ".join("{}={!r}".format(k, v) + for k, v in self.keywords.items()) + format_string = "{module}.{cls}({func}, {args}, {keywords})" + return format_string.format(module=self.__class__.__module__, + cls=self.__class__.__qualname__, + func=self.func, + args=args, + keywords=keywords) + + def _make_unbound_method(self): + def _method(*args, **keywords): + call_keywords = self.keywords.copy() + call_keywords.update(keywords) + if len(args) > 1: + cls_or_self, rest = args[0], tuple(args[1:],) + else: + cls_or_self = args[0] + rest = tuple() + call_args = (cls_or_self,) + self.args + tuple(rest) + return self.func(*call_args, **call_keywords) + _method.__isabstractmethod__ = self.__isabstractmethod__ + _method._partialmethod = self + return _method + + def __get__(self, obj, cls): + get = getattr(self.func, "__get__", None) + result = None + if get is not None: + new_func = get(obj, cls) + if new_func is not self.func: + # Assume __get__ returning something new indicates the + # creation of an appropriate callable + result = partial(new_func, *self.args, **self.keywords) + try: + result.__self__ = new_func.__self__ + except AttributeError: + pass + if result is None: + # If the underlying 
descriptor didn't do anything, treat this + # like an instance method + result = self._make_unbound_method().__get__(obj, cls) + return result + + @property + def __isabstractmethod__(self): + return getattr(self.func, "__isabstractmethod__", False) diff --git a/pipenv/vendor/vistir/backports/tempfile.py b/pipenv/vendor/vistir/backports/tempfile.py new file mode 100644 index 0000000000..483a479a71 --- /dev/null +++ b/pipenv/vendor/vistir/backports/tempfile.py @@ -0,0 +1,211 @@ +# -*- coding=utf-8 -*- +from __future__ import absolute_import, unicode_literals + +import functools +import io +import os +import sys + +from tempfile import _bin_openflags, _mkstemp_inner, gettempdir + +import six + +try: + from weakref import finalize +except ImportError: + from backports.weakref import finalize + + +__all__ = ["finalize", "NamedTemporaryFile"] + + +try: + from tempfile import _infer_return_type +except ImportError: + + def _infer_return_type(*args): + _types = set() + for arg in args: + if isinstance(type(arg), six.string_types): + _types.add(str) + elif isinstance(type(arg), bytes): + _types.add(bytes) + elif arg: + _types.add(type(arg)) + return _types.pop() + + +def _sanitize_params(prefix, suffix, dir): + """Common parameter processing for most APIs in this module.""" + output_type = _infer_return_type(prefix, suffix, dir) + if suffix is None: + suffix = output_type() + if prefix is None: + if output_type is str: + prefix = "tmp" + else: + prefix = os.fsencode("tmp") + if dir is None: + if output_type is str: + dir = gettempdir() + else: + dir = os.fsencode(gettempdir()) + return prefix, suffix, dir, output_type + + +class _TemporaryFileCloser: + """A separate object allowing proper closing of a temporary file's + underlying file object, without adding a __del__ method to the + temporary file.""" + + file = None # Set here since __del__ checks it + close_called = False + + def __init__(self, file, name, delete=True): + self.file = file + self.name = name + 
self.delete = delete + + # NT provides delete-on-close as a primitive, so we don't need + # the wrapper to do anything special. We still use it so that + # file.name is useful (i.e. not "(fdopen)") with NamedTemporaryFile. + if os.name != "nt": + + # Cache the unlinker so we don't get spurious errors at + # shutdown when the module-level "os" is None'd out. Note + # that this must be referenced as self.unlink, because the + # name TemporaryFileWrapper may also get None'd out before + # __del__ is called. + + def close(self, unlink=os.unlink): + if not self.close_called and self.file is not None: + self.close_called = True + try: + self.file.close() + finally: + if self.delete: + unlink(self.name) + + # Need to ensure the file is deleted on __del__ + + def __del__(self): + self.close() + + else: + + def close(self): + if not self.close_called: + self.close_called = True + self.file.close() + + +class _TemporaryFileWrapper: + """Temporary file wrapper + This class provides a wrapper around files opened for + temporary use. In particular, it seeks to automatically + remove the file when it is no longer needed. + """ + + def __init__(self, file, name, delete=True): + self.file = file + self.name = name + self.delete = delete + self._closer = _TemporaryFileCloser(file, name, delete) + + def __getattr__(self, name): + # Attribute lookups are delegated to the underlying file + # and cached for non-numeric results + # (i.e. methods are cached, closed and friends are not) + file = self.__dict__["file"] + a = getattr(file, name) + if hasattr(a, "__call__"): + func = a + + @functools.wraps(func) + def func_wrapper(*args, **kwargs): + return func(*args, **kwargs) + + # Avoid closing the file as long as the wrapper is alive, + # see issue #18879. 
+ func_wrapper._closer = self._closer + a = func_wrapper + if not isinstance(a, int): + setattr(self, name, a) + return a + + # The underlying __enter__ method returns the wrong object + # (self.file) so override it to return the wrapper + + def __enter__(self): + self.file.__enter__() + return self + + # Need to trap __exit__ as well to ensure the file gets + # deleted when used in a with statement + + def __exit__(self, exc, value, tb): + result = self.file.__exit__(exc, value, tb) + self.close() + return result + + def close(self): + """ + Close the temporary file, possibly deleting it. + """ + self._closer.close() + + # iter() doesn't use __getattr__ to find the __iter__ method + + def __iter__(self): + # Don't return iter(self.file), but yield from it to avoid closing + # file as long as it's being used as iterator (see issue #23700). We + # can't use 'yield from' here because iter(file) returns the file + # object itself, which has a close method, and thus the file would get + # closed when the generator is finalized, due to PEP380 semantics. + for line in self.file: + yield line + + +def NamedTemporaryFile( + mode="w+b", + buffering=-1, + encoding=None, + newline=None, + suffix=None, + prefix=None, + dir=None, + delete=True, +): + """Create and return a temporary file. + Arguments: + 'prefix', 'suffix', 'dir' -- as for mkstemp. + 'mode' -- the mode argument to io.open (default "w+b"). + 'buffering' -- the buffer size argument to io.open (default -1). + 'encoding' -- the encoding argument to io.open (default None) + 'newline' -- the newline argument to io.open (default None) + 'delete' -- whether the file is deleted on close (default True). + The file is created as mkstemp() would do it. + Returns an object with a file-like interface; the name of the file + is accessible as its 'name' attribute. The file will be automatically + deleted when it is closed unless the 'delete' argument is set to False. 
+ """ + prefix, suffix, dir, output_type = _sanitize_params(prefix, suffix, dir) + flags = _bin_openflags + # Setting O_TEMPORARY in the flags causes the OS to delete + # the file when it is closed. This is only supported by Windows. + if os.name == "nt" and delete: + flags |= os.O_TEMPORARY + if sys.version_info < (3, 5): + (fd, name) = _mkstemp_inner(dir, prefix, suffix, flags) + else: + (fd, name) = _mkstemp_inner(dir, prefix, suffix, flags, output_type) + try: + file = io.open( + fd, mode, buffering=buffering, newline=newline, encoding=encoding + ) + return _TemporaryFileWrapper(file, name, delete) + + except BaseException: + os.unlink(name) + os.close(fd) + raise diff --git a/pipenv/vendor/vistir/cmdparse.py b/pipenv/vendor/vistir/cmdparse.py new file mode 100644 index 0000000000..07326c937f --- /dev/null +++ b/pipenv/vendor/vistir/cmdparse.py @@ -0,0 +1,78 @@ +# -*- coding=utf-8 -*- +from __future__ import absolute_import, unicode_literals + +import re +import shlex + +import six + + +__all__ = ["ScriptEmptyError", "Script"] + + +class ScriptEmptyError(ValueError): + pass + + +class Script(object): + """Parse a script line (in Pipfile's [scripts] section). + + This always works in POSIX mode, even on Windows. + """ + + def __init__(self, command, args=None): + self._parts = [command] + if args: + self._parts.extend(args) + + @classmethod + def parse(cls, value): + if isinstance(value, six.string_types): + value = shlex.split(value) + if not value: + raise ScriptEmptyError(value) + return cls(value[0], value[1:]) + + def __repr__(self): + return "Script({0!r})".format(self._parts) + + @property + def command(self): + return self._parts[0] + + @property + def args(self): + return self._parts[1:] + + def extend(self, extra_args): + self._parts.extend(extra_args) + + def cmdify(self): + """Encode into a cmd-executable string. + + This re-implements CreateProcess's quoting logic to turn a list of + arguments into one single string for the shell to interpret. 
+ + * All double quotes are escaped with a backslash. + * Existing backslashes before a quote are doubled, so they are all + escaped properly. + * Backslashes elsewhere are left as-is; cmd will interpret them + literally. + + The result is then quoted into a pair of double quotes to be grouped. + + An argument is intentionally not quoted if it does not contain + whitespaces. This is done to be compatible with Windows built-in + commands that don't work well with quotes, e.g. everything with `echo`, + and DOS-style (forward slash) switches. + + The intended use of this function is to pre-process an argument list + before passing it into ``subprocess.Popen(..., shell=True)``. + + See also: https://docs.python.org/3/library/subprocess.html#converting-argument-sequence + """ + return " ".join( + arg if not next(re.finditer(r'\s', arg), None) + else '"{0}"'.format(re.sub(r'(\\*)"', r'\1\1\\"', arg)) + for arg in self._parts + ) diff --git a/pipenv/vendor/vistir/compat.py b/pipenv/vendor/vistir/compat.py new file mode 100644 index 0000000000..454cc3c85a --- /dev/null +++ b/pipenv/vendor/vistir/compat.py @@ -0,0 +1,136 @@ +# -*- coding=utf-8 -*- +from __future__ import absolute_import, unicode_literals + +import os +import sys + +import six +import warnings +from tempfile import mkdtemp + + +__all__ = [ + "Path", + "get_terminal_size", + "finalize", + "partialmethod", + "JSONDecodeError", + "ResourceWarning", + "FileNotFoundError", + "fs_str", + "TemporaryDirectory", + "NamedTemporaryFile", +] + +if sys.version_info >= (3, 5): + from pathlib import Path + +else: + from pathlib2 import Path + +if sys.version_info < (3, 3): + from backports.shutil_get_terminal_size import get_terminal_size + from .backports.tempfile import NamedTemporaryFile +else: + from tempfile import NamedTemporaryFile + from shutil import get_terminal_size + +try: + from weakref import finalize +except ImportError: + from backports.weakref import finalize + +try: + from functools import partialmethod 
+except Exception: + from .backports.functools import partialmethod + +try: + from json import JSONDecodeError +except ImportError: # Old Pythons. + JSONDecodeError = ValueError + +if six.PY2: + + class ResourceWarning(Warning): + pass + + class FileNotFoundError(IOError): + pass + +else: + from builtins import ResourceWarning, FileNotFoundError + + class ResourceWarning(ResourceWarning): + pass + + class FileNotFoundError(FileNotFoundError): + pass + + +class TemporaryDirectory(object): + """Create and return a temporary directory. This has the same + behavior as mkdtemp but can be used as a context manager. For + example: + + with TemporaryDirectory() as tmpdir: + ... + + Upon exiting the context, the directory and everything contained + in it are removed. + """ + + def __init__(self, suffix="", prefix=None, dir=None): + if "RAM_DISK" in os.environ: + import uuid + + name = uuid.uuid4().hex + dir_name = os.path.join(os.environ["RAM_DISK"].strip(), name) + os.mkdir(dir_name) + self.name = dir_name + else: + suffix = suffix if suffix else "" + if not prefix: + self.name = mkdtemp(suffix=suffix, dir=dir) + else: + self.name = mkdtemp(suffix, prefix, dir) + self._finalizer = finalize( + self, + self._cleanup, + self.name, + warn_message="Implicitly cleaning up {!r}".format(self), + ) + + @classmethod + def _cleanup(cls, name, warn_message): + from .path import rmtree + rmtree(name) + warnings.warn(warn_message, ResourceWarning) + + def __repr__(self): + return "<{} {!r}>".format(self.__class__.__name__, self.name) + + def __enter__(self): + return self + + def __exit__(self, exc, value, tb): + self.cleanup() + + def cleanup(self): + from .path import rmtree + if self._finalizer.detach(): + rmtree(self.name) + + +def fs_str(string): + """Encodes a string into the proper filesystem encoding + + Borrowed from pip-tools + """ + if isinstance(string, str): + return string + assert not isinstance(string, bytes) + return string.encode(_fs_encoding) + + +_fs_encoding = 
sys.getfilesystemencoding() or sys.getdefaultencoding() diff --git a/pipenv/vendor/vistir/contextmanagers.py b/pipenv/vendor/vistir/contextmanagers.py new file mode 100644 index 0000000000..80f1f897d3 --- /dev/null +++ b/pipenv/vendor/vistir/contextmanagers.py @@ -0,0 +1,208 @@ +# -*- coding=utf-8 -*- +from __future__ import absolute_import, unicode_literals + +import os +import stat +import sys + +from contextlib import contextmanager + +import six + +from .compat import NamedTemporaryFile, Path +from .path import is_file_url, is_valid_url, path_to_url, url_to_path + + +__all__ = ["temp_environ", "temp_path", "cd", "atomic_open_for_write", "open_file"] + + +# Borrowed from Pew. +# See https://github.com/berdario/pew/blob/master/pew/_utils.py#L82 +@contextmanager +def temp_environ(): + """Allow the ability to set os.environ temporarily""" + environ = dict(os.environ) + try: + yield + finally: + os.environ.clear() + os.environ.update(environ) + + +@contextmanager +def temp_path(): + """A context manager which allows the ability to set sys.path temporarily + + >>> path_from_virtualenv = load_path("/path/to/venv/bin/python") + >>> print(sys.path) + ['/home/user/.pyenv/versions/3.7.0/bin', '/home/user/.pyenv/versions/3.7.0/lib/python37.zip', '/home/user/.pyenv/versions/3.7.0/lib/python3.7', '/home/user/.pyenv/versions/3.7.0/lib/python3.7/lib-dynload', '/home/user/.pyenv/versions/3.7.0/lib/python3.7/site-packages'] + >>> with temp_path(): + sys.path = path_from_virtualenv + # Running in the context of the path above + run(["pip", "install", "stuff"]) + >>> print(sys.path) + ['/home/user/.pyenv/versions/3.7.0/bin', '/home/user/.pyenv/versions/3.7.0/lib/python37.zip', '/home/user/.pyenv/versions/3.7.0/lib/python3.7', '/home/user/.pyenv/versions/3.7.0/lib/python3.7/lib-dynload', '/home/user/.pyenv/versions/3.7.0/lib/python3.7/site-packages'] + + """ + path = [p for p in sys.path] + try: + yield + finally: + sys.path = [p for p in path] + + +@contextmanager +def cd(path): + 
"""Context manager to temporarily change working directories + + :param str path: The directory to move into + + >>> print(os.path.abspath(os.curdir)) + '/home/user/code/myrepo' + >>> with cd("/home/user/code/otherdir/subdir"): + print("Changed directory: %s" % os.path.abspath(os.curdir)) + Changed directory: /home/user/code/otherdir/subdir + >>> print(os.path.abspath(os.curdir)) + '/home/user/code/myrepo' + """ + if not path: + return + prev_cwd = Path.cwd().as_posix() + if isinstance(path, Path): + path = path.as_posix() + os.chdir(str(path)) + try: + yield + finally: + os.chdir(prev_cwd) + + +@contextmanager +def atomic_open_for_write(target, binary=False, newline=None, encoding=None): + """Atomically open `target` for writing. + + This is based on Lektor's `atomic_open()` utility, but simplified a lot + to handle only writing, and skip many multi-process/thread edge cases + handled by Werkzeug. + + :param str target: Target filename to write + :param bool binary: Whether to open in binary mode, default False + :param str newline: The newline character to use when writing, determined from system if not supplied + :param str encoding: The encoding to use when writing, defaults to system encoding + + How this works: + + * Create a temp file (in the same directory of the actual target), and + yield for surrounding code to write to it. + * If some thing goes wrong, try to remove the temp file. The actual target + is not touched whatsoever. + * If everything goes well, close the temp file, and replace the actual + target with this new file. + + .. 
code:: python + + >>> fn = "test_file.txt" + >>> def read_test_file(filename=fn): + with open(filename, 'r') as fh: + print(fh.read().strip()) + + >>> with open(fn, "w") as fh: + fh.write("this is some test text") + >>> read_test_file() + this is some test text + + >>> def raise_exception_while_writing(filename): + with open(filename, "w") as fh: + fh.write("writing some new text") + raise RuntimeError("Uh oh, hope your file didn't get overwritten") + + >>> raise_exception_while_writing(fn) + Traceback (most recent call last): + ... + RuntimeError: Uh oh, hope your file didn't get overwritten + >>> read_test_file() + writing some new text + + # Now try with vistir + >>> def raise_exception_while_writing(filename): + with vistir.contextmanagers.atomic_open_for_write(filename) as fh: + fh.write("Overwriting all the text from before with even newer text") + raise RuntimeError("But did it get overwritten now?") + + >>> raise_exception_while_writing(fn) + Traceback (most recent call last): + ... + RuntimeError: But did it get overwritten now? + + >>> read_test_file() + writing some new text + """ + + mode = "w+b" if binary else "w" + f = NamedTemporaryFile( + dir=os.path.dirname(target), + prefix=".__atomic-write", + mode=mode, + encoding=encoding, + newline=newline, + delete=False, + ) + # set permissions to 0644 + os.chmod(f.name, stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH) + try: + yield f + except BaseException: + f.close() + try: + os.remove(f.name) + except OSError: + pass + raise + else: + f.close() + try: + os.remove(target) # This is needed on Windows. + except OSError: + pass + os.rename(f.name, target) # No os.replace() on Python 2. + + +@contextmanager +def open_file(link, session=None): + """ + Open local or remote file for reading. + + :type link: pip._internal.index.Link or str + :type session: requests.Session + :raises ValueError: If link points to a local directory. 
+ :return: a context manager to the opened file-like object + """ + if not isinstance(link, six.string_types): + try: + link = link.url_without_fragment + except AttributeError: + raise ValueError("Cannot parse url from unkown type: {0!r}".format(link)) + + if not is_valid_url(link) and os.path.exists(link): + link = path_to_url(link) + + if is_file_url(link): + # Local URL + local_path = url_to_path(link) + if os.path.isdir(local_path): + raise ValueError("Cannot open directory for read: {}".format(link)) + else: + with open(local_path, "rb") as local_file: + yield local_file + else: + # Remote URL + headers = {"Accept-Encoding": "identity"} + if not session: + from requests import Session + + session = Session() + response = session.get(link, headers=headers, stream=True) + try: + yield response.raw + finally: + response.close() diff --git a/pipenv/vendor/vistir/misc.py b/pipenv/vendor/vistir/misc.py new file mode 100644 index 0000000000..5d7855aaad --- /dev/null +++ b/pipenv/vendor/vistir/misc.py @@ -0,0 +1,134 @@ +# -*- coding=utf-8 -*- +from __future__ import absolute_import, unicode_literals + +import json +import locale +import os +import subprocess +import sys + +from collections import OrderedDict + +from .cmdparse import Script +from .compat import Path, partialmethod + + +__all__ = [ + "shell_escape", "unnest", "dedup", "run", "load_path", "partialclass" +] + + +def shell_escape(cmd): + """Escape strings for use in :func:`~subprocess.Popen` and :func:`run`. + + This is a passthrough method for instantiating a :class:`~vistir.cmdparse.Script` + object which can be used to escape commands to output as a single string. 
+ """ + cmd = Script.parse(cmd) + return cmd.cmdify() + + +def unnest(elem): + """Flatten an arbitrarily nested iterable + + :param elem: An iterable to flatten + :type elem: :class:`~collections.Iterable` + + >>> nested_iterable = (1234, (3456, 4398345, (234234)), (2396, (23895750, 9283798, 29384, (289375983275, 293759, 2347, (2098, 7987, 27599))))) + >>> list(vistir.misc.unnest(nested_iterable)) + [1234, 3456, 4398345, 234234, 2396, 23895750, 9283798, 29384, 289375983275, 293759, 2347, 2098, 7987, 27599] + """ + + if _is_iterable(elem): + for item in elem: + if _is_iterable(item): + for sub_item in unnest(item): + yield sub_item + else: + yield item + else: + raise ValueError("Expecting an iterable, got %r" % elem) + + +def _is_iterable(elem): + if getattr(elem, "__iter__", False): + return True + return False + + +def dedup(iterable): + """Deduplicate an iterable object like iter(set(iterable)) but + order-reserved. + """ + return iter(OrderedDict.fromkeys(iterable)) + + +def run(cmd): + """Use `subprocess.Popen` to get the output of a command and decode it. + + :param list cmd: A list representing the command you want to run. + :returns: A 2-tuple of (output, error) + """ + encoding = locale.getdefaultlocale()[1] or "utf-8" + c = subprocess.Popen( + cmd, env=os.environ.copy(), stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) + out, err = c.communicate() + return out.decode(encoding).strip(), err.decode(encoding).strip() + + +def load_path(python): + """Load the :mod:`sys.path` from the given python executable's environment as json + + :param str python: Path to a valid python executable + :return: A python representation of the `sys.path` value of the given python executable. 
+ :rtype: list + + >>> load_path("/home/user/.virtualenvs/requirementslib-5MhGuG3C/bin/python") + ['', '/home/user/.virtualenvs/requirementslib-5MhGuG3C/lib/python37.zip', '/home/user/.virtualenvs/requirementslib-5MhGuG3C/lib/python3.7', '/home/user/.virtualenvs/requirementslib-5MhGuG3C/lib/python3.7/lib-dynload', '/home/user/.pyenv/versions/3.7.0/lib/python3.7', '/home/user/.virtualenvs/requirementslib-5MhGuG3C/lib/python3.7/site-packages', '/home/user/git/requirementslib/src'] + """ + + python = Path(python).as_posix() + out, err = run([python, "-c", "import json, sys; print(json.dumps(sys.path))"]) + if out: + return json.loads(out) + else: + return [] + + +def partialclass(cls, *args, **kwargs): + """Returns a partially instantiated class + + :return: A partial class instance + :rtype: cls + + >>> source = partialclass(Source, url="https://pypi.org/simple") + >>> source + + >>> source(name="pypi") + >>> source.__dict__ + mappingproxy({'__module__': '__main__', '__dict__': , '__weakref__': , '__doc__': None, '__init__': functools.partialmethod(, , url='https://pypi.org/simple')}) + >>> new_source = source(name="pypi") + >>> new_source + <__main__.Source object at 0x7f23af189b38> + >>> new_source.__dict__ + {'url': 'https://pypi.org/simple', 'verify_ssl': True, 'name': 'pypi'} + """ + + name_attrs = [n for n in (getattr(cls, name, str(cls)) for name in ("__name__", "__qualname__")) if n is not None] + name_attrs = name_attrs[0] + type_ = type( + name_attrs, + (cls,), + { + "__init__": partialmethod(cls.__init__, *args, **kwargs), + } + ) + # Swiped from attrs.make_class + try: + type_.__module__ = sys._getframe(1).f_globals.get( + "__name__", "__main__", + ) + except (AttributeError, ValueError): + pass + return type_ diff --git a/pipenv/vendor/vistir/path.py b/pipenv/vendor/vistir/path.py new file mode 100644 index 0000000000..857902449b --- /dev/null +++ b/pipenv/vendor/vistir/path.py @@ -0,0 +1,333 @@ +# -*- coding=utf-8 -*- +from __future__ import 
def _decode_path(path):
    """Decode *path* to text using the filesystem encoding, falling back to utf-8."""
    if not isinstance(path, six.text_type):
        try:
            return path.decode(_fs_encoding, 'ignore')
        except (UnicodeError, LookupError):
            return path.decode('utf-8', 'ignore')
    return path


def _encode_path(path):
    """Transform the provided path to a posix-style text path.

    str/bytes values pass straight through; otherwise the ``__fspath__``
    protocol is tried first, then a pathlib-style ``as_posix()`` method.

    :raises RuntimeError: if *path* cannot be interpreted as a path
    """
    if not isinstance(path, six.string_types + (six.binary_type,)):
        fspath = getattr(path, "__fspath__", None)
        if fspath is not None:
            path = fspath()
        else:
            as_posix = getattr(path, "as_posix", None)
            if as_posix is None:
                raise RuntimeError("Failed encoding path, unknown object type: %r" % path)
            # BUG FIX: the previous implementation called ``path()`` here but
            # discarded the result, leaving ``path`` bound to the method itself.
            path = as_posix()
    path = Path(_decode_path(path))
    return _decode_path(path.as_posix())


def normalize_drive(path):
    """Normalize drive in path so they stay consistent.

    This currently only affects local drives on Windows, which can be
    identified with either upper or lower cased drive names. The case is
    always converted to uppercase because it seems to be preferred.
    """
    if os.name != "nt" or not isinstance(path, six.string_types):
        return path

    drive, tail = os.path.splitdrive(path)
    # Only match (lower cased) local drives (e.g. 'c:'), not UNC mounts.
    if drive.islower() and len(drive) == 2 and drive[1] == ":":
        return "{}{}".format(drive.upper(), tail)

    return path


def path_to_url(path):
    """Convert the supplied local path to a file uri.

    :param str path: A string pointing to or representing a local path
    :return: A `file://` uri for the same location
    :rtype: str

    >>> path_to_url("/home/user/code/myrepo/myfile.zip")
    'file:///home/user/code/myrepo/myfile.zip'
    """

    if not path:
        return path
    path = _encode_path(path)
    return Path(normalize_drive(os.path.abspath(path))).as_uri()


def url_to_path(url):
    """Convert a valid file url to a local filesystem path.

    Follows logic taken from pip's equivalent function.
    """
    assert is_file_url(url), "Only file: urls can be converted to local paths"
    _, netloc, path, _, _ = urllib_parse.urlsplit(url)
    # Netlocs are UNC paths
    if netloc:
        netloc = "\\\\" + netloc

    path = urllib_request.url2pathname(netloc + path)
    return path


def is_valid_url(url):
    """Check if a given string is a url.

    Falsy input is returned unchanged; otherwise returns True only when both
    a scheme and a netloc are present.
    """
    if not url:
        return url
    pieces = urllib_parse.urlparse(url)
    return all([pieces.scheme, pieces.netloc])


def is_file_url(url):
    """Return True if the given url (or Link-like object) is a ``file:`` url."""
    if not url:
        return False
    if not isinstance(url, six.string_types):
        try:
            url = getattr(url, "url")
        except AttributeError:
            raise ValueError("Cannot parse url from unknown type: {0!r}".format(url))
    return urllib_parse.urlparse(url.lower()).scheme == "file"


def is_readonly_path(fn):
    """Check if a provided path exists and is readonly.

    Permissions check is `bool(path.stat & stat.S_IREAD)` or
    `not os.access(path, os.W_OK)`; nonexistent paths are not readonly.
    """
    fn = _encode_path(fn)
    if os.path.exists(fn):
        return bool(os.stat(fn).st_mode & stat.S_IREAD) and not os.access(fn, os.W_OK)
    return False
def mkdir_p(newdir):
    """Recursively create the target directory and all of its parents if they
    do not already exist.  Fail silently if they do.

    :param str newdir: The directory path to ensure
    :raises: OSError if a file is encountered along the way
    """
    # http://code.activestate.com/recipes/82465-a-friendly-mkdir/
    if os.path.exists(newdir):
        if not os.path.isdir(newdir):
            raise OSError(
                "a file with the same name as the desired dir, '{0}', already exists.".format(
                    newdir
                )
            )
    else:
        head, tail = os.path.split(newdir)
        if head and not os.path.isdir(head):
            mkdir_p(head)
        if tail and not os.path.isdir(newdir):
            os.mkdir(newdir)


def set_write_bit(fn):
    """Set read-write permissions for the current user on the target path.
    Fail silently if the path doesn't exist.

    :param str fn: The target filename or path
    """

    fn = _encode_path(fn)
    if isinstance(fn, six.string_types) and not os.path.exists(fn):
        return
    os.chmod(fn, stat.S_IWRITE | stat.S_IWUSR | stat.S_IRUSR)


def rmtree(directory, ignore_errors=False):
    """Stand-in for :func:`~shutil.rmtree` with additional error-handling.

    This version of `rmtree` handles read-only paths, especially in the case
    of index files written by certain source control systems.

    :param str directory: The target directory to remove
    :param bool ignore_errors: Whether to ignore errors, defaults to False

    .. note::

        Setting `ignore_errors=True` may cause this to silently fail to
        delete the path
    """

    directory = _encode_path(directory)
    shutil.rmtree(
        directory, ignore_errors=ignore_errors, onerror=handle_remove_readonly
    )


def handle_remove_readonly(func, path, exc):
    """Error handler for shutil.rmtree.

    Windows source repo folders are read-only by default, so this error
    handler attempts to set them as writeable and then proceed with deletion.

    :param function func: The caller function
    :param str path: The target path for removal
    :param Exception exc: The raised exception

    This function will check :func:`is_readonly_path` before attempting to
    call :func:`set_write_bit` on the target path and try again.
    """
    # Check for read-only attribute
    from .compat import ResourceWarning
    default_warning_message = (
        "Unable to remove file due to permissions restriction: {!r}"
    )
    # split the initial exception out into its type, exception, and traceback
    exc_type, exc_exception, exc_tb = exc
    if is_readonly_path(path):
        # Apply write permission and call original function
        set_write_bit(path)
        try:
            func(path)
        except (OSError, IOError) as e:
            if e.errno in [errno.EACCES, errno.EPERM]:
                warnings.warn(default_warning_message.format(path), ResourceWarning)
                return
        else:
            # BUG FIX: return after a successful retry.  Previously control
            # fell through and could warn spuriously or reach the bare
            # ``raise`` below even though the path had been removed.
            return

    if exc_exception.errno in [errno.EACCES, errno.EPERM]:
        warnings.warn(default_warning_message.format(path), ResourceWarning)
        return

    raise


def walk_up(bottom):
    """Mimic os.walk, but walk 'up' instead of down the directory tree.
    From: https://gist.github.com/zdavkeos/1098474
    """
    bottom = os.path.realpath(bottom)
    # Get files in current dir.
    try:
        names = os.listdir(bottom)
    except Exception:
        return

    dirs, nondirs = [], []
    for name in names:
        if os.path.isdir(os.path.join(bottom, name)):
            dirs.append(name)
        else:
            nondirs.append(name)
    yield bottom, dirs, nondirs

    new_path = os.path.realpath(os.path.join(bottom, ".."))
    # See if we are at the top.
    if new_path == bottom:
        return

    for x in walk_up(new_path):
        yield x


def check_for_unc_path(path):
    """Check whether a pathlib `Path` object is a UNC path (Windows only)."""
    if (
        os.name == "nt"
        and len(path.drive) > 2
        and not path.drive[0].isalpha()
        and path.drive[1] != ":"
    ):
        return True
    else:
        return False
def get_converted_relative_path(path, relative_to=os.curdir):
    """Convert *path* to a path relative to *relative_to*.

    :param str path: The location of a target path
    :param str relative_to: The starting path to build against, optional
    :returns: A relative posix-style path with a leading `./`

    The result is always POSIX-form and either exactly ``.`` or prefixed
    with ``./``.

    >>> os.chdir('/home/user/code/myrepo/myfolder')
    >>> vistir.path.get_converted_relative_path('/home/user/code/myrepo/myfolder/mysubfolder')
    './mysubfolder'
    >>> vistir.path.get_converted_relative_path('/home/user/code/myrepo/myfolder')
    '.'
    """

    path = _encode_path(path)
    relative_to = _encode_path(relative_to)
    anchor = Path(relative_to)
    try:
        resolved_anchor = anchor.resolve()
    except OSError:
        resolved_anchor = anchor.absolute()

    # If the anchor resolved to a UNC mount point, prefer the plain absolute
    # path over the UNC form.
    if check_for_unc_path(resolved_anchor):
        resolved_anchor = anchor.absolute()

    relative = resolved_anchor.joinpath(path).relative_to(resolved_anchor)

    # The *path* argument itself must not be a UNC path.
    if check_for_unc_path(relative):
        raise ValueError("The path argument does not currently accept UNC paths")

    rendered = _encode_path(posixpath.normpath(relative.as_posix()))
    if rendered != "." and not rendered.startswith("./"):
        rendered = posixpath.join(".", rendered)
    return rendered


def safe_expandvars(value):
    """Call os.path.expandvars on string input; return anything else unchanged."""
    if isinstance(value, six.string_types):
        return os.path.expandvars(value)
    return value
a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index 943a1b4408..582ec3aa0d 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -27,7 +27,7 @@ requests==2.19.1 idna==2.7 urllib3==1.23 certifi==2018.8.13 -requirementslib==1.0.1 +requirementslib==1.1.2 attrs==18.1.0 distlib==0.2.7 packaging==17.1 @@ -35,7 +35,7 @@ requirementslib==1.0.1 pytoml==0.1.18 requirements-parser==0.2.0 shellingham==1.2.4 -six==1.1.2 +six==1.11.0 semver==2.8.1 shutilwhich==1.1.0 toml==0.9.4 From 993c452fdc5e05ebeea63546526369bd6a93af5a Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 25 Aug 2018 02:33:23 -0400 Subject: [PATCH 04/26] Update scandir patch Signed-off-by: Dan Ryan --- tasks/vendoring/patches/vendor/drop_scandir_import.patch | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tasks/vendoring/patches/vendor/drop_scandir_import.patch b/tasks/vendoring/patches/vendor/drop_scandir_import.patch index 66799efc6e..e80b314b71 100644 --- a/tasks/vendoring/patches/vendor/drop_scandir_import.patch +++ b/tasks/vendoring/patches/vendor/drop_scandir_import.patch @@ -1,9 +1,9 @@ diff --git a/pipenv/vendor/scandir.py b/pipenv/vendor/scandir.py -index 2e47a397..2b2ade30 100644 +index aac7208..8bbae2c 100644 --- a/pipenv/vendor/scandir.py +++ b/pipenv/vendor/scandir.py -@@ -26,10 +26,7 @@ import collections - import os +@@ -25,10 +25,7 @@ from stat import S_IFDIR, S_IFLNK, S_IFREG + import collections import sys -try: From 637b0abb6a0ee3c50d9fc18b141636bbb1c86850 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 25 Aug 2018 13:00:00 -0400 Subject: [PATCH 05/26] Update pip-shims vendoring Signed-off-by: Dan Ryan --- pipenv/vendor/pip_shims/LICENSE | 13 ++ pipenv/vendor/pip_shims/__init__.py | 6 +- pipenv/vendor/pip_shims/shims.py | 204 +++++++++++----------------- pipenv/vendor/pip_shims/utils.py | 2 +- 4 files changed, 99 insertions(+), 126 deletions(-) create mode 100644 pipenv/vendor/pip_shims/LICENSE diff --git a/pipenv/vendor/pip_shims/LICENSE 
b/pipenv/vendor/pip_shims/LICENSE new file mode 100644 index 0000000000..e1a278e7b3 --- /dev/null +++ b/pipenv/vendor/pip_shims/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2018, Dan Ryan + +Permission to use, copy, modify, and distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/pipenv/vendor/pip_shims/__init__.py b/pipenv/vendor/pip_shims/__init__.py index 5f6ba2c4fb..acaeee25f6 100644 --- a/pipenv/vendor/pip_shims/__init__.py +++ b/pipenv/vendor/pip_shims/__init__.py @@ -1,7 +1,7 @@ # -*- coding=utf-8 -*- from __future__ import absolute_import -__version__ = "0.1.0" +__version__ = '0.1.2' __all__ = [ "__version__", @@ -18,6 +18,7 @@ "is_archive_file", "is_file_url", "is_installable_dir", + "unpack_url", "Link", "make_abstract_dist", "make_option_group", @@ -37,6 +38,7 @@ "VcsSupport", "Wheel", "WheelCache", + "WheelBuilder" ] from .shims import ( @@ -52,6 +54,7 @@ InstallRequirement, is_archive_file, is_file_url, + unpack_url, is_installable_dir, Link, make_abstract_dist, @@ -72,4 +75,5 @@ VcsSupport, Wheel, WheelCache, + WheelBuilder, ) diff --git a/pipenv/vendor/pip_shims/shims.py b/pipenv/vendor/pip_shims/shims.py index 159b22236c..5fc58beb1d 100644 --- a/pipenv/vendor/pip_shims/shims.py +++ b/pipenv/vendor/pip_shims/shims.py @@ -4,7 +4,7 @@ from .utils import _parse, get_package, STRING_TYPES import importlib import os -from pip import __version__ 
as pip_version +from pipenv.patched.notpip import __version__ as pip_version import sys @@ -25,14 +25,14 @@ def is_valid(path_info_tuple): if ( - path_info_tuple.start_version >= parsed_pip_version - and path_info_tuple.end_version <= parsed_pip_version + path_info_tuple.start_version <= parsed_pip_version + and path_info_tuple.end_version >= parsed_pip_version ): return 1 return 0 -def do_import(module_paths, base_path=BASE_IMPORT_PATH): +def get_ordered_paths(module_paths, base_path): if not isinstance(module_paths, list): module_paths = [module_paths] prefix_order = [pth.format(base_path) for pth in ["{0}._internal", "{0}"]] @@ -45,6 +45,11 @@ def do_import(module_paths, base_path=BASE_IMPORT_PATH): for pth in paths if pth is not None ] + return search_order + + +def do_import(module_paths, base_path=BASE_IMPORT_PATH): + search_order = get_ordered_paths(module_paths, base_path) imported = None if has_modutil: pkgs = [get_package(pkg) for pkg in search_order] @@ -78,126 +83,77 @@ def do_import(module_paths, base_path=BASE_IMPORT_PATH): return imported -parse_version = do_import( - [path_info("index.parse_version", _parse("7.0.0"), _parse("9999"))] -) -_strip_extras = do_import( - [path_info("req.req_install._strip_extras", _parse("7.0.0"), _parse("9999"))] -) -cmdoptions = do_import( - [ - path_info("cli.cmdoptions", _parse("18.1"), _parse("9999")), - path_info("cmdoptions", _parse("7.0.0"), _parse("18.0")), - ] -) -Command = do_import( - [ - path_info("cli.base_command.Command", _parse("18.1"), _parse("9999")), - path_info("basecommand.Command", _parse("7.0.0"), _parse("18.0")), - ] -) -ConfigOptionParser = do_import( - [ - path_info("cli.parser.ConfigOptionParser", _parse("18.1"), _parse("9999")), - path_info("baseparser.ConfigOptionParser", _parse("7.0.0"), _parse("18.0")), - ] -) -DistributionNotFound = do_import( - [path_info("exceptions.DistributionNotFound", _parse("7.0.0"), _parse("9999"))] -) -FAVORITE_HASH = do_import( - 
[path_info("utils.hashes.FAVORITE_HASH", _parse("7.0.0"), _parse("9999"))] -) -FormatControl = do_import( - [path_info("index.FormatControl", _parse("7.0.0"), _parse("9999"))] -) -get_installed_distributions = do_import( - [ - path_info( - "utils.misc.get_installed_distributions", _parse("10.0.0"), _parse("9999") - ), - path_info( - "utils.get_installed_distributions", _parse("7.0.0"), _parse("9.0.3") - ), - ] -) -index_group = do_import( - [ - path_info("cli.cmdoptions.index_group", _parse("18.1"), _parse("9999")), - path_info("cmdoptions.index_group", _parse("7.0.0"), _parse("18.0")), - ] -) -InstallRequirement = do_import( - [path_info("req.req_install.InstallRequirement", _parse("7.0.0"), _parse("9999"))] -) -is_archive_file = do_import( - [path_info("download.is_archive_file", _parse("7.0.0"), _parse("9999"))] -) -is_file_url = do_import( - [path_info("download.is_file_url", _parse("7.0.0"), _parse("9999"))] -) -is_installable_dir = do_import( - [ - path_info("utils.misc.is_installable_dir", _parse("10.0.0"), _parse("9999")), - path_info("utils.is_installable_dir", _parse("7.0.0"), _parse("9.0.3")), - ] -) -Link = do_import([path_info("index.Link", _parse("7.0.0"), _parse("9999"))]) -make_abstract_dist = do_import( - [ - path_info( - "operations.prepare.make_abstract_dist", _parse("10.0.0"), _parse("9999") - ), - path_info("req.req_set.make_abstract_dist", _parse("7.0.0"), _parse("9.0.3")), - ] -) -make_option_group = do_import( - [ - path_info("cli.cmdoptions.make_option_group", _parse("18.1"), _parse("9999")), - path_info("cmdoptions.make_option_group", _parse("7.0.0"), _parse("18.0")), - ] -) -PackageFinder = do_import( - [path_info("index.PackageFinder", _parse("7.0.0"), _parse("9999"))] -) -parse_requirements = do_import( - [path_info("req.req_file.parse_requirements", _parse("7.0.0"), _parse("9999"))] -) -parse_version = do_import( - [path_info("index.parse_version", _parse("7.0.0"), _parse("9999"))] -) -path_to_url = do_import( - 
[path_info("download.path_to_url", _parse("7.0.0"), _parse("9999"))] -) -PipError = do_import( - [path_info("exceptions.PipError", _parse("7.0.0"), _parse("9999"))] -) -RequirementPreparer = do_import( - [ - path_info( - "operations.prepare.RequirementPreparer", _parse("7.0.0"), _parse("9999") - ) - ] -) -RequirementSet = do_import( - [path_info("req.req_set.RequirementSet", _parse("7.0.0"), _parse("9999"))] -) -RequirementTracker = do_import( - [path_info("req.req_tracker.RequirementTracker", _parse("7.0.0"), _parse("9999"))] -) -Resolver = do_import([path_info("resolve.Resolver", _parse("7.0.0"), _parse("9999"))]) -SafeFileCache = do_import( - [path_info("download.SafeFileCache", _parse("7.0.0"), _parse("9999"))] -) -url_to_path = do_import( - [path_info("download.url_to_path", _parse("7.0.0"), _parse("9999"))] -) -USER_CACHE_DIR = do_import( - [path_info("locations.USER_CACHE_DIR", _parse("7.0.0"), _parse("9999"))] -) -VcsSupport = do_import([path_info("vcs.VcsSupport", _parse("7.0.0"), _parse("9999"))]) -Wheel = do_import([path_info("wheel.Wheel", _parse("7.0.0"), _parse("9999"))]) -WheelCache = do_import([path_info("cache.WheelCache", _parse("7.0.0"), _parse("9999"))]) +def pip_import(import_name, *module_paths): + paths = [] + for pip_path in module_paths: + if not isinstance(pip_path, (list, tuple)): + module_path, start_version, end_version = module_paths + new_path = path_info(module_path, _parse(start_version), _parse(end_version)) + paths.append(new_path) + break + else: + module_path, start_version, end_version = pip_path + paths.append(path_info(module_path, _parse(start_version), _parse(end_version))) + return do_import(paths) + + +parse_version = pip_import("parse_version", "index.parse_version", "7", "9999") +_strip_extras = pip_import("_strip_extras", "req.req_install._strip_extras", "7", "9999") +cmdoptions = pip_import( + "", ("cli.cmdoptions", "18.1", "9999"), ("cmdoptions", "7.0.0", "18.0"), +) +Command = pip_import("Command", + 
("cli.base_command.Command", "18.1", "9999"), + ("basecommand.Command", "7.0.0", "18.0"), +) +ConfigOptionParser = pip_import("ConfigOptionParser", + ("cli.parser.ConfigOptionParser", "18.1", "9999"), + ("baseparser.ConfigOptionParser", "7.0.0", "18.0"), +) +DistributionNotFound = pip_import("DistributionNotFound", "exceptions.DistributionNotFound", "7.0.0", "9999") +FAVORITE_HASH = pip_import("FAVORITE_HASH", "utils.hashes.FAVORITE_HASH", "7.0.0", "9999") +FormatControl = pip_import("FormatControl", "index.FormatControl", "7.0.0", "9999") +get_installed_distributions = pip_import("get_installed_distributions", + ("utils.misc.get_installed_distributions", "10", "9999"), + ("utils.get_installed_distributions", "7", "9.0.3") +) +index_group = pip_import("index_group", + ("cli.cmdoptions.index_group", "18.1", "9999"), + ("cmdoptions.index_group", "7.0.0", "18.0"), +) +InstallRequirement = pip_import("InstallRequirement", "req.req_install.InstallRequirement", "7.0.0", "9999") +is_archive_file = pip_import("is_archive_file", "download.is_archive_file", "7.0.0", "9999") +is_file_url = pip_import("is_file_url", "download.is_file_url", "7.0.0", "9999") +unpack_url = pip_import("unpack_url", "download.unpack_url", "7.0.0", "9999") +is_installable_dir = pip_import("is_installable_dir", + ("utils.misc.is_installable_dir", "10.0.0", "9999"), + ("utils.is_installable_dir", "7.0.0", "9.0.3"), +) +Link = pip_import("Link", "index.Link", "7.0.0", "9999") +make_abstract_dist = pip_import("make_abstract_dist", + ("operations.prepare.make_abstract_dist", "10.0.0", "9999"), + ("req.req_set.make_abstract_dist", "7.0.0", "9.0.3"), +) +make_option_group = pip_import("make_option_group", + ("cli.cmdoptions.make_option_group", "18.1", "9999"), + ("cmdoptions.make_option_group", "7.0.0", "18.0"), +) +PackageFinder = pip_import("PackageFinder", "index.PackageFinder", "7.0.0", "9999") +parse_requirements = pip_import("parse_requirements", "req.req_file.parse_requirements", "7.0.0", "9999") 
+parse_version = pip_import("parse_version", "index.parse_version", "7.0.0", "9999") +path_to_url = pip_import("path_to_url", "download.path_to_url", "7.0.0", "9999") +PipError = pip_import("PipError", "exceptions.PipError", "7.0.0", "9999") +RequirementPreparer = pip_import("RequirementPreparer", "operations.prepare.RequirementPreparer", "7", "9999") +RequirementSet = pip_import("RequirementSet", "req.req_set.RequirementSet", "7.0.0", "9999") +RequirementTracker = pip_import("RequirementTracker", "req.req_tracker.RequirementTracker", "7.0.0", "9999") +Resolver = pip_import("Resolver", "resolve.Resolver", "7.0.0", "9999") +SafeFileCache = pip_import("SafeFileCache", "download.SafeFileCache", "7.0.0", "9999") +url_to_path = pip_import("url_to_path", "download.url_to_path", "7.0.0", "9999") +USER_CACHE_DIR = pip_import("USER_CACHE_DIR", "locations.USER_CACHE_DIR", "7.0.0", "9999") +VcsSupport = pip_import("VcsSupport", "vcs.VcsSupport", "7.0.0", "9999") +Wheel = pip_import("Wheel", "wheel.Wheel", "7.0.0", "9999") +WheelCache = pip_import("WheelCache", ("cache.WheelCache", "10.0.0", "9999"), ("wheel.WheelCache", "7", "9.0.3")) +WheelBuilder = pip_import("WheelBuilder", "wheel.WheelBuilder", "7.0.0", "9999") if not RequirementTracker: diff --git a/pipenv/vendor/pip_shims/utils.py b/pipenv/vendor/pip_shims/utils.py index a8389bbe6c..d6101e21a2 100644 --- a/pipenv/vendor/pip_shims/utils.py +++ b/pipenv/vendor/pip_shims/utils.py @@ -22,7 +22,7 @@ def memoizer(*args, **kwargs): @memoize def _parse(version): if isinstance(version, STRING_TYPES): - return tuple(version.split(".")) + return tuple((int(i) for i in version.split("."))) return version From bed2990f40528be6cad88291ad80a6ef86897433 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 25 Aug 2018 13:00:21 -0400 Subject: [PATCH 06/26] Update vistir vendoring Signed-off-by: Dan Ryan --- pipenv/vendor/vistir/LICENSE | 13 +++ pipenv/vendor/vistir/__init__.py | 2 +- pipenv/vendor/vistir/compat.py | 5 +- 
pipenv/vendor/vistir/misc.py | 140 ++++++++++++++++++++++++++-- pipenv/vendor/vistir/path.py | 154 ++++++++++++++++++++++--------- 5 files changed, 258 insertions(+), 56 deletions(-) create mode 100644 pipenv/vendor/vistir/LICENSE diff --git a/pipenv/vendor/vistir/LICENSE b/pipenv/vendor/vistir/LICENSE new file mode 100644 index 0000000000..e1a278e7b3 --- /dev/null +++ b/pipenv/vendor/vistir/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2018, Dan Ryan + +Permission to use, copy, modify, and distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/pipenv/vendor/vistir/__init__.py b/pipenv/vendor/vistir/__init__.py index c7f776d55d..eeb8344cb0 100644 --- a/pipenv/vendor/vistir/__init__.py +++ b/pipenv/vendor/vistir/__init__.py @@ -13,7 +13,7 @@ from .path import mkdir_p, rmtree -__version__ = '0.1.0' +__version__ = '0.1.4' __all__ = [ diff --git a/pipenv/vendor/vistir/compat.py b/pipenv/vendor/vistir/compat.py index 454cc3c85a..1f1b7a96d5 100644 --- a/pipenv/vendor/vistir/compat.py +++ b/pipenv/vendor/vistir/compat.py @@ -3,11 +3,12 @@ import os import sys - -import six import warnings + from tempfile import mkdtemp +import six + __all__ = [ "Path", diff --git a/pipenv/vendor/vistir/misc.py b/pipenv/vendor/vistir/misc.py index 5d7855aaad..82bdf5ed47 100644 --- a/pipenv/vendor/vistir/misc.py +++ b/pipenv/vendor/vistir/misc.py @@ -8,13 +8,17 @@ import sys from collections import OrderedDict +from functools import partial + +import six from .cmdparse import Script -from .compat import Path, partialmethod +from .compat import Path, fs_str, partialmethod __all__ = [ - "shell_escape", "unnest", "dedup", "run", "load_path", "partialclass" + "shell_escape", "unnest", "dedup", "run", "load_path", "partialclass", "to_text", + "to_bytes", "locale_encoding" ] @@ -63,18 +67,57 @@ def dedup(iterable): return iter(OrderedDict.fromkeys(iterable)) -def run(cmd): +def _spawn_subprocess(script, env={}): + from distutils.spawn import find_executable + command = find_executable(script.command) + options = { + "env": env, + "universal_newlines": True, + "stdout": subprocess.PIPE, + "stderr": subprocess.PIPE, + } + # Command not found, maybe this is a shell built-in? + if not command: # Try to use CreateProcess directly if possible. + return subprocess.Popen(script.cmdify(), shell=True, **options) + # Try to use CreateProcess directly if possible. Specifically catch + # Windows error 193 "Command is not a valid Win32 application" to handle + # a "command" that is non-executable. See pypa/pipenv#2727. 
+ try: + return subprocess.Popen([command] + script.args, **options) + except WindowsError as e: + if e.winerror != 193: + raise + # Try shell mode to use Windows's file association for file launch. + return subprocess.Popen(script.cmdify(), shell=True, **options) + + +def run(cmd, env={}, return_object=False): """Use `subprocess.Popen` to get the output of a command and decode it. :param list cmd: A list representing the command you want to run. + :param dict env: Additional environment settings to pass through to the subprocess. + :param bool return_object: When True, returns the whole subprocess instance :returns: A 2-tuple of (output, error) """ - encoding = locale.getdefaultlocale()[1] or "utf-8" - c = subprocess.Popen( - cmd, env=os.environ.copy(), stdout=subprocess.PIPE, stderr=subprocess.PIPE - ) + if six.PY2: + fs_encode = partial(to_bytes, encoding=locale_encoding) + _env = {fs_encode(k): fs_encode(v) for k, v in os.environ.items()} + for key, val in env.items(): + _env[fs_encode(key)] = fs_encode(val) + else: + _env = {k: fs_str(v) for k, v in os.environ.items()} + if six.PY2: + if isinstance(cmd, six.string_types): + cmd = cmd.encode("utf-8") + elif isinstance(cmd, (list, tuple)): + cmd = [c.encode("utf-8") for c in cmd] + if not isinstance(cmd, Script): + cmd = Script.parse(cmd) + c = _spawn_subprocess(cmd, env=_env) out, err = c.communicate() - return out.decode(encoding).strip(), err.decode(encoding).strip() + if not return_object: + return out.strip(), err.strip() + return c def load_path(python): @@ -132,3 +175,84 @@ def partialclass(cls, *args, **kwargs): except (AttributeError, ValueError): pass return type_ + + +# Borrowed from django -- force bytes and decode -- see link for details: +# https://github.com/django/django/blob/fc6b90b/django/utils/encoding.py#L112 +def to_bytes(string, encoding="utf-8", errors="ignore"): + """Force a value to bytes. + + :param string: Some input that can be converted to a bytes. 
+ :type string: str or bytes unicode or a memoryview subclass + :param encoding: The encoding to use for conversions, defaults to "utf-8" + :param encoding: str, optional + :return: Corresponding byte representation (for use in filesystem operations) + :rtype: bytes + """ + + if not errors: + if encoding.lower() == "utf-8": + errors = "surrogateescape" if six.PY3 else "ignore" + else: + errors = "strict" + if isinstance(string, bytes): + if encoding.lower() == "utf-8": + return string + else: + return string.decode('utf-8').encode(encoding, errors) + elif isinstance(string, memoryview): + return bytes(string) + elif not isinstance(string, six.string_types): + try: + if six.PY3: + return six.text_type(string).encode(encoding, errors) + else: + return bytes(string) + except UnicodeEncodeError: + if isinstance(string, Exception): + return b' '.join(to_bytes(arg, encoding, errors) for arg in string) + return six.text_type(string).encode(encoding, errors) + else: + return string.encode(encoding, errors) + + +def to_text(string, encoding="utf-8", errors=None): + """Force a value to a text-type. + + :param string: Some input that can be converted to a unicode representation. 
+ :type string: str or bytes unicode + :param encoding: The encoding to use for conversions, defaults to "utf-8" + :param encoding: str, optional + :return: The unicode representation of the string + :rtype: str + """ + + if not errors: + if encoding.lower() == "utf-8": + errors = "surrogateescape" if six.PY3 else "ignore" + else: + errors = "strict" + if issubclass(type(string), six.text_type): + return string + try: + if not issubclass(type(string), six.string_types): + if six.PY3: + if isinstance(string, bytes): + string = six.text_type(string, encoding, errors) + else: + string = six.text_type(string) + elif hasattr(string, '__unicode__'): + string = six.text_type(string) + else: + string = six.text_type(bytes(string), encoding, errors) + else: + string = string.decode(encoding, errors) + except UnicodeDecodeError as e: + string = ' '.join(to_text(arg, encoding, errors) for arg in string) + return string + + +try: + locale_encoding = locale.getdefaultencoding()[1] or 'ascii' +except Exception: + locale_encoding = 'ascii' diff --git a/pipenv/vendor/vistir/path.py b/pipenv/vendor/vistir/path.py index 857902449b..166282e864 100644 --- a/pipenv/vendor/vistir/path.py +++ b/pipenv/vendor/vistir/path.py @@ -1,7 +1,9 @@ # -*- coding=utf-8 -*- from __future__ import absolute_import, unicode_literals +import atexit import errno +import functools import os import posixpath import shutil @@ -10,10 +12,11 @@ import six -from six.moves.urllib import request as urllib_request from six.moves import urllib_parse +from six.moves.urllib import request as urllib_request -from .compat import Path, _fs_encoding +from .compat import Path, _fs_encoding, TemporaryDirectory +from .misc import locale_encoding, to_bytes, to_text __all__ = [ @@ -24,6 +27,8 @@ "is_readonly_path", "is_valid_url", "mkdir_p", + "ensure_mkdir_p", + "create_tracked_tempdir", "path_to_url", "rmtree", "safe_expandvars", @@ -33,31 +38,33 @@ ] -def _decode_path(path): - if not isinstance(path, six.text_type): - try: 
- return path.decode(_fs_encoding, 'ignore') - except (UnicodeError, LookupError): - return path.decode('utf-8', 'ignore') +def unicode_path(path): + # Paths are supposed to be represented as unicode here + if six.PY2 and not isinstance(path, six.text_type): + return path.decode(_fs_encoding) return path -def _encode_path(path): - """Transform the provided path to a text encoding.""" - if not isinstance(path, six.string_types + (six.binary_type,)): - try: - path = getattr(path, "__fspath__") - except AttributeError: - try: - path = getattr(path, "as_posix") - except AttributeError: - raise RuntimeError("Failed encoding path, unknown object type: %r" % path) - else: - path() - else: - path = path() - path = Path(_decode_path(path)) - return _decode_path(path.as_posix()) +def native_path(path): + if six.PY2 and not isinstance(path, bytes): + return path.encode(_fs_encoding) + return path + + +# once again thank you django... +# https://github.com/django/django/blob/fc6b90b/django/utils/_os.py +if six.PY3 or os.name == 'nt': + abspathu = os.path.abspath +else: + def abspathu(path): + """ + Version of os.path.abspath that uses the unicode representation + of the current working directory, thus avoiding a UnicodeDecodeError + in join when the cwd has non-ASCII characters. 
+ """ + if not os.path.isabs(path): + path = os.path.join(os.getcwdu(), path) + return os.path.normpath(path) def normalize_drive(path): @@ -75,7 +82,7 @@ def normalize_drive(path): if drive.islower() and len(drive) == 2 and drive[1] == ":": return "{}{}".format(drive.upper(), tail) - return path + return to_text(path, encoding="utf-8") def path_to_url(path): @@ -91,8 +98,9 @@ def path_to_url(path): if not path: return path - path = _encode_path(path) - return Path(normalize_drive(os.path.abspath(path))).as_uri() + path = to_bytes(path, encoding="utf-8") + normalized_path = to_text(normalize_drive(os.path.abspath(path)), encoding="utf-8") + return to_text(Path(normalized_path).as_uri(), encoding="utf-8") def url_to_path(url): @@ -107,7 +115,7 @@ def url_to_path(url): netloc = "\\\\" + netloc path = urllib_request.url2pathname(netloc + path) - return path + return to_bytes(path, encoding="utf-8") def is_valid_url(url): @@ -127,6 +135,7 @@ def is_file_url(url): url = getattr(url, "url") except AttributeError: raise ValueError("Cannot parse url from unknown type: {0!r}".format(url)) + url = to_text(url, encoding="utf-8") return urllib_parse.urlparse(url.lower()).scheme == "file" @@ -135,13 +144,13 @@ def is_readonly_path(fn): Permissions check is `bool(path.stat & stat.S_IREAD)` or `not os.access(path, os.W_OK)` """ - fn = _encode_path(fn) + fn = to_bytes(fn, encoding="utf-8") if os.path.exists(fn): return bool(os.stat(fn).st_mode & stat.S_IREAD) and not os.access(fn, os.W_OK) return False -def mkdir_p(newdir): +def mkdir_p(newdir, mode=0o777): """Recursively creates the target directory and all of its parents if they do not already exist. Fails silently if they do. 
@@ -149,6 +158,7 @@ def mkdir_p(newdir): :raises: OSError if a file is encountered along the way """ # http://code.activestate.com/recipes/82465-a-friendly-mkdir/ + newdir = abspathu(to_bytes(newdir, "utf-8")) if os.path.exists(newdir): if not os.path.isdir(newdir): raise OSError( @@ -159,10 +169,51 @@ def mkdir_p(newdir): pass else: head, tail = os.path.split(newdir) - if head and not os.path.isdir(head): - mkdir_p(head) - if tail and not os.path.isdir(newdir): - os.mkdir(newdir) + # Make sure the tail doesn't point to the same place as the head + tail_and_head_match = os.path.relpath(tail, start=os.path.basename(head)) == "." + if tail and not tail_and_head_match and not os.path.isdir(newdir): + target = os.path.join(head, tail) + if os.path.exists(target) and os.path.isfile(target): + raise OSError( + "A file with the same name as the desired dir, '{0}', already exists.".format( + newdir + ) + ) + os.makedirs(os.path.join(head, tail), mode) + + +def ensure_mkdir_p(mode=0o777): + """Decorator to ensure `mkdir_p` is called on the function's return value. + """ + def decorator(f): + + @functools.wraps(f) + def decorated(*args, **kwargs): + path = f(*args, **kwargs) + mkdir_p(path, mode=mode) + return path + + return decorated + + return decorator + + +TRACKED_TEMPORARY_DIRECTORIES = [] + + +def create_tracked_tempdir(*args, **kwargs): + """Create a tracked temporary directory. + + This uses `TemporaryDirectory`, but does not remove the directory when + the return value goes out of scope, instead registers a handler to cleanup + on program exit. + + The return value is the path to the created directory. 
+ """ + tempdir = TemporaryDirectory(*args, **kwargs) + TRACKED_TEMPORARY_DIRECTORIES.append(tempdir) + atexit.register(tempdir.cleanup) + return tempdir.name def set_write_bit(fn): @@ -172,8 +223,8 @@ def set_write_bit(fn): :param str fn: The target filename or path """ - fn = _encode_path(fn) - if isinstance(fn, six.string_types) and not os.path.exists(fn): + fn = to_bytes(fn, encoding=locale_encoding) + if not os.path.exists(fn): return os.chmod(fn, stat.S_IWRITE | stat.S_IWUSR | stat.S_IRUSR) @@ -192,7 +243,7 @@ def rmtree(directory, ignore_errors=False): Setting `ignore_errors=True` may cause this to silently fail to delete the path """ - directory = _encode_path(directory) + directory = to_bytes(directory, encoding=locale_encoding) shutil.rmtree( directory, ignore_errors=ignore_errors, onerror=handle_remove_readonly ) @@ -218,6 +269,7 @@ def handle_remove_readonly(func, path, exc): ) # split the initial exception out into its type, exception, and traceback exc_type, exc_exception, exc_tb = exc + path = to_bytes(path) if is_readonly_path(path): # Apply write permission and call original function set_write_bit(path) @@ -225,11 +277,18 @@ def handle_remove_readonly(func, path, exc): func(path) except (OSError, IOError) as e: if e.errno in [errno.EACCES, errno.EPERM]: - warnings.warn(default_warning_message.format(path), ResourceWarning) + warnings.warn( + default_warning_message.format( + to_text(path, encoding=locale_encoding) + ), ResourceWarning + ) return if exc_exception.errno in [errno.EACCES, errno.EPERM]: - warnings.warn(default_warning_message.format(path), ResourceWarning) + warnings.warn( + default_warning_message.format(to_text(path)), + ResourceWarning + ) return raise @@ -276,7 +335,7 @@ def check_for_unc_path(path): return False -def get_converted_relative_path(path, relative_to=os.curdir): +def get_converted_relative_path(path, relative_to=None): """Convert `path` to be relative. 
Given a vague relative path, return the path relative to the given @@ -298,8 +357,13 @@ def get_converted_relative_path(path, relative_to=os.curdir): '.' """ - path = _encode_path(path) - relative_to = _encode_path(relative_to) + if not relative_to: + relative_to = os.getcwdu() if six.PY2 else os.getcwd() + if six.PY2: + path = to_bytes(path, encoding="utf-8") + else: + path = to_text(path, encoding="utf-8") + relative_to = to_text(relative_to, encoding="utf-8") start_path = Path(relative_to) try: start = start_path.resolve() @@ -319,9 +383,9 @@ def get_converted_relative_path(path, relative_to=os.curdir): if check_for_unc_path(path): raise ValueError("The path argument does not currently accept UNC paths") - relpath_s = _encode_path(posixpath.normpath(path.as_posix())) - if not (relpath_s == "." or relpath_s.startswith("./")): - relpath_s = posixpath.join(".", relpath_s) + relpath_s = to_text(posixpath.normpath(path.as_posix())) + if not (relpath_s == u"." or relpath_s.startswith(u"./")): + relpath_s = posixpath.join(u".", relpath_s) return relpath_s From 30a3c541c6bdc09ef072aa8d940fdaefb00e0002 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 25 Aug 2018 13:09:00 -0400 Subject: [PATCH 07/26] Update dotenv, semver, and scandir Signed-off-by: Dan Ryan --- pipenv/vendor/dotenv/cli.py | 18 +++- pipenv/vendor/dotenv/main.py | 52 ++++++++++- pipenv/vendor/dotenv/version.py | 1 + pipenv/vendor/scandir.py | 17 +++- pipenv/vendor/semver.py | 155 +++++++++++++++++++++++++++++--- 5 files changed, 226 insertions(+), 17 deletions(-) create mode 100644 pipenv/vendor/dotenv/version.py diff --git a/pipenv/vendor/dotenv/cli.py b/pipenv/vendor/dotenv/cli.py index dd7c2418f6..4e03c12a5b 100644 --- a/pipenv/vendor/dotenv/cli.py +++ b/pipenv/vendor/dotenv/cli.py @@ -8,7 +8,8 @@ 'Run pip install "python-dotenv[cli]" to fix this.') sys.exit(1) -from .main import dotenv_values, get_key, set_key, unset_key +from .main import dotenv_values, get_key, set_key, unset_key, run_command +from 
.version import __version__ @click.group() @@ -18,6 +19,7 @@ @click.option('-q', '--quote', default='always', type=click.Choice(['always', 'never', 'auto']), help="Whether to quote or not the variable values. Default mode is always. This does not affect parsing.") +@click.version_option(version=__version__) @click.pass_context def cli(ctx, file, quote): '''This script is used to set, get or unset values from a .env file.''' @@ -78,5 +80,19 @@ def unset(ctx, key): exit(1) +@cli.command(context_settings={'ignore_unknown_options': True}) +@click.pass_context +@click.argument('commandline', nargs=-1, type=click.UNPROCESSED) +def run(ctx, commandline): + """Run command with environment variables present.""" + file = ctx.obj['FILE'] + dotenv_as_dict = dotenv_values(file) + if not commandline: + click.echo('No command given.') + exit(1) + ret = run_command(commandline, dotenv_as_dict) + exit(ret) + + if __name__ == "__main__": cli() diff --git a/pipenv/vendor/dotenv/main.py b/pipenv/vendor/dotenv/main.py index 75f49c4acd..6ba28bbbc3 100644 --- a/pipenv/vendor/dotenv/main.py +++ b/pipenv/vendor/dotenv/main.py @@ -7,6 +7,7 @@ import os import re import sys +from subprocess import Popen, PIPE, STDOUT import warnings from collections import OrderedDict @@ -30,7 +31,7 @@ def parse_line(line): k, v = line.split('=', 1) if k.startswith('export '): - k = k.lstrip('export ') + (_, _, k) = k.partition('export ') # Remove any leading and trailing spaces in key, value k, v = k.strip(), v.strip() @@ -238,7 +239,11 @@ def find_dotenv(filename='.env', raise_error_if_not_found=False, usecwd=False): path = os.getcwd() else: # will work for .py files - frame_filename = sys._getframe().f_back.f_code.co_filename + frame = sys._getframe() + # find first frame that is outside of this file + while frame.f_code.co_filename == __file__: + frame = frame.f_back + frame_filename = frame.f_code.co_filename path = os.path.dirname(os.path.abspath(frame_filename)) for dirname in _walk_to_root(path): @@ 
-260,3 +265,46 @@ def load_dotenv(dotenv_path=None, stream=None, verbose=False, override=False): def dotenv_values(dotenv_path=None, stream=None, verbose=False): f = dotenv_path or stream or find_dotenv() return DotEnv(f, verbose=verbose).dict() + + +def run_command(command, env): + """Run command in sub process. + + Runs the command in a sub process with the variables from `env` + added in the current environment variables. + + Parameters + ---------- + command: List[str] + The command and its parameters + env: Dict + The additional environment variables + + Returns + ------- + int + The return code of the command + + """ + # copy the current environment variables and add the values from + # `env` + cmd_env = os.environ.copy() + cmd_env.update(env) + + p = Popen(command, + stdin=PIPE, + stdout=PIPE, + stderr=STDOUT, + universal_newlines=True, + bufsize=0, + shell=False, + env=cmd_env) + try: + out, _ = p.communicate() + print(out) + except Exception: + warnings.warn('An error occured, running the command:') + out, _ = p.communicate() + warnings.warn(out) + + return p.returncode diff --git a/pipenv/vendor/dotenv/version.py b/pipenv/vendor/dotenv/version.py new file mode 100644 index 0000000000..d69d16e980 --- /dev/null +++ b/pipenv/vendor/dotenv/version.py @@ -0,0 +1 @@ +__version__ = "0.9.1" diff --git a/pipenv/vendor/scandir.py b/pipenv/vendor/scandir.py index 2b2ade30d0..8bbae2c5cc 100644 --- a/pipenv/vendor/scandir.py +++ b/pipenv/vendor/scandir.py @@ -23,7 +23,6 @@ from os.path import join, islink from stat import S_IFDIR, S_IFLNK, S_IFREG import collections -import os import sys _scandir = None @@ -38,7 +37,7 @@ warnings.warn("scandir can't find the compiled _scandir C module " "or ctypes, using slow generic fallback") -__version__ = '1.7' +__version__ = '1.9.0' __all__ = ['scandir', 'walk'] # Windows FILE_ATTRIBUTE constants for interpreting the @@ -93,6 +92,10 @@ def stat(self, follow_symlinks=True): self._lstat = lstat(self.path) return self._lstat + # The 
code duplication below is intentional: this is for slightly + # better performance on systems that fall back to GenericDirEntry. + # It avoids an additional attribute lookup and method call, which + # are relatively slow on CPython. def is_dir(self, follow_symlinks=True): try: st = self.stat(follow_symlinks=follow_symlinks) @@ -416,6 +419,16 @@ class Dirent(ctypes.Structure): ('d_type', ctypes.c_byte), ('d_name', ctypes.c_char * 256), ) + elif 'openbsd' in sys.platform: + _fields_ = ( + ('d_ino', ctypes.c_uint64), + ('d_off', ctypes.c_uint64), + ('d_reclen', ctypes.c_uint16), + ('d_type', ctypes.c_uint8), + ('d_namlen', ctypes.c_uint8), + ('__d_padding', ctypes.c_uint8 * 4), + ('d_name', ctypes.c_char * 256), + ) else: _fields_ = ( ('d_ino', ctypes.c_uint32), # must be uint32, not ulong diff --git a/pipenv/vendor/semver.py b/pipenv/vendor/semver.py index dfeb431df5..5f5be2c272 100644 --- a/pipenv/vendor/semver.py +++ b/pipenv/vendor/semver.py @@ -6,7 +6,7 @@ import re -__version__ = '2.8.0' +__version__ = '2.8.1' __author__ = 'Kostiantyn Rybnikov' __author_email__ = 'k-bx@k-bx.com' __maintainer__ = 'Sebastien Celles' @@ -46,6 +46,19 @@ def parse(version): and 'prerelease'. The prerelease or build keys can be None if not provided :rtype: dict + + >>> import semver + >>> ver = semver.parse('3.4.5-pre.2+build.4') + >>> ver['major'] + 3 + >>> ver['minor'] + 4 + >>> ver['patch'] + 5 + >>> ver['prerelease'] + 'pre.2' + >>> ver['build'] + 'build.4' """ match = _REGEX.match(version) if match is None: @@ -60,8 +73,7 @@ def parse(version): return version_parts -class VersionInfo(collections.namedtuple( - 'VersionInfo', 'major minor patch prerelease build')): +class VersionInfo(object): """ :param int major: version when you make incompatible API changes. :param int minor: version when you add functionality in @@ -69,14 +81,48 @@ class VersionInfo(collections.namedtuple( :param int patch: version when you make backwards-compatible bug fixes. 
:param str prerelease: an optional prerelease string :param str build: an optional build string - - >>> import semver - >>> ver = semver.parse('3.4.5-pre.2+build.4') - >>> ver - {'build': 'build.4', 'major': 3, 'minor': 4, 'patch': 5, - 'prerelease': 'pre.2'} """ - __slots__ = () + __slots__ = ('_major', '_minor', '_patch', '_prerelease', '_build') + + def __init__(self, major, minor, patch, prerelease=None, build=None): + self._major = major + self._minor = minor + self._patch = patch + self._prerelease = prerelease + self._build = build + + @property + def major(self): + return self._major + + @property + def minor(self): + return self._minor + + @property + def patch(self): + return self._patch + + @property + def prerelease(self): + return self._prerelease + + @property + def build(self): + return self._build + + def _astuple(self): + return (self.major, self.minor, self.patch, + self.prerelease, self.build) + + def _asdict(self): + return collections.OrderedDict(( + ("major", self.major), + ("minor", self.minor), + ("patch", self.patch), + ("prerelease", self.prerelease), + ("build", self.build) + )) def __eq__(self, other): if not isinstance(other, (VersionInfo, dict)): @@ -108,11 +154,31 @@ def __ge__(self, other): return NotImplemented return _compare_by_keys(self._asdict(), _to_dict(other)) >= 0 + def __repr__(self): + s = ", ".join("%s=%r" % (key, val) + for key, val in self._asdict().items()) + return "VersionInfo(%s)" % s + def __str__(self): - return format_version(*self) + return format_version(*(self._astuple())) def __hash__(self): - return hash(tuple(self)) + return hash(self._astuple()) + + @staticmethod + def parse(version): + """Parse version string to a VersionInfo instance. 
+ + >>> from semver import VersionInfo + >>> VersionInfo.parse('3.4.5-pre.2+build.4') + VersionInfo(major=3, minor=4, patch=5, \ +prerelease='pre.2', build='build.4') + + :param version: version string + :return: a :class:`VersionInfo` instance + :rtype: :class:`VersionInfo` + """ + return parse_version_info(version) def _to_dict(obj): @@ -127,6 +193,19 @@ def parse_version_info(version): :param version: version string :return: a :class:`VersionInfo` instance :rtype: :class:`VersionInfo` + + >>> import semver + >>> version_info = semver.parse_version_info("3.4.5-pre.2+build.4") + >>> version_info.major + 3 + >>> version_info.minor + 4 + >>> version_info.patch + 5 + >>> version_info.prerelease + 'pre.2' + >>> version_info.build + 'build.4' """ parts = parse(version) version_info = VersionInfo( @@ -190,6 +269,14 @@ def compare(ver1, ver2): :return: The return value is negative if ver1 < ver2, zero if ver1 == ver2 and strictly positive if ver1 > ver2 :rtype: int + + >>> import semver + >>> semver.compare("1.0.0", "2.0.0") + -1 + >>> semver.compare("2.0.0", "1.0.0") + 1 + >>> semver.compare("2.0.0", "2.0.0") + 0 """ v1, v2 = parse(ver1), parse(ver2) @@ -210,6 +297,12 @@ def match(version, match_expr): != not equal :return: True if the expression matches the version, otherwise False :rtype: bool + + >>> import semver + >>> semver.match("2.0.0", ">=1.0.0") + True + >>> semver.match("1.0.0", ">1.0.0") + False """ prefix = match_expr[:2] if prefix in ('>=', '<=', '==', '!='): @@ -245,6 +338,10 @@ def max_ver(ver1, ver2): :param ver2: version string 2 :return: the greater version of the two :rtype: :class:`VersionInfo` + + >>> import semver + >>> semver.max_ver("1.0.0", "2.0.0") + '2.0.0' """ cmp_res = compare(ver1, ver2) if cmp_res == 0 or cmp_res == 1: @@ -260,6 +357,10 @@ def min_ver(ver1, ver2): :param ver2: version string 2 :return: the smaller version of the two :rtype: :class:`VersionInfo` + + >>> import semver + >>> semver.min_ver("1.0.0", "2.0.0") + '1.0.0' """ 
cmp_res = compare(ver1, ver2) if cmp_res == 0 or cmp_res == -1: @@ -278,6 +379,10 @@ def format_version(major, minor, patch, prerelease=None, build=None): :param str build: the optional build part of a version :return: the formatted string :rtype: str + + >>> import semver + >>> semver.format_version(3, 4, 5, 'pre.2', 'build.4') + '3.4.5-pre.2+build.4' """ version = "%d.%d.%d" % (major, minor, patch) if prerelease is not None: @@ -308,6 +413,10 @@ def bump_major(version): :param: version string :return: the raised version string :rtype: str + + >>> import semver + >>> semver.bump_major("3.4.5") + '4.0.0' """ verinfo = parse(version) return format_version(verinfo['major'] + 1, 0, 0) @@ -319,6 +428,10 @@ def bump_minor(version): :param: version string :return: the raised version string :rtype: str + + >>> import semver + >>> semver.bump_minor("3.4.5") + '3.5.0' """ verinfo = parse(version) return format_version(verinfo['major'], verinfo['minor'] + 1, 0) @@ -330,6 +443,10 @@ def bump_patch(version): :param: version string :return: the raised version string :rtype: str + + >>> import semver + >>> semver.bump_patch("3.4.5") + '3.4.6' """ verinfo = parse(version) return format_version(verinfo['major'], verinfo['minor'], @@ -343,6 +460,9 @@ def bump_prerelease(version, token='rc'): :param token: defaults to 'rc' :return: the raised version string :rtype: str + + >>> bump_prerelease('3.4.5', 'dev') + '3.4.5-dev.1' """ verinfo = parse(version) verinfo['prerelease'] = _increment_string( @@ -359,6 +479,9 @@ def bump_build(version, token='build'): :param token: defaults to 'build' :return: the raised version string :rtype: str + + >>> bump_build('3.4.5-rc.1+build.9') + '3.4.5-rc.1+build.10' """ verinfo = parse(version) verinfo['build'] = _increment_string( @@ -374,6 +497,14 @@ def finalize_version(version): :param version: version string :return: the finalized version string :rtype: str + + >>> finalize_version('1.2.3-rc.5') + '1.2.3' """ verinfo = parse(version) return 
format_version(verinfo['major'], verinfo['minor'], verinfo['patch']) + + +if __name__ == "__main__": + import doctest + doctest.testmod() From 8356a3e1828fceab40c2c07ae624dfb5aa83fc2e Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 25 Aug 2018 13:13:13 -0400 Subject: [PATCH 08/26] Update click_completion which now uses shellingham! Signed-off-by: Dan Ryan --- pipenv/vendor/click_completion/__init__.py | 391 ++------------------- pipenv/vendor/click_completion/core.py | 384 ++++++++++++++++++++ pipenv/vendor/click_completion/lib.py | 122 +++++++ pipenv/vendor/click_completion/patch.py | 142 ++++++++ pipenv/vendor/click_completion/zsh.j2 | 2 +- 5 files changed, 678 insertions(+), 363 deletions(-) create mode 100644 pipenv/vendor/click_completion/core.py create mode 100644 pipenv/vendor/click_completion/lib.py create mode 100644 pipenv/vendor/click_completion/patch.py diff --git a/pipenv/vendor/click_completion/__init__.py b/pipenv/vendor/click_completion/__init__.py index 3049ea860b..1443c8f27c 100644 --- a/pipenv/vendor/click_completion/__init__.py +++ b/pipenv/vendor/click_completion/__init__.py @@ -3,300 +3,38 @@ from __future__ import print_function, absolute_import -import os -import platform -import re -import sys -import shlex -import subprocess - -import click import six -from click import echo, MultiCommand, Option, Argument, ParamType - -__version__ = '0.3.1' - -_invalid_ident_char_re = re.compile(r'[^a-zA-Z0-9_]') - - -class CompletionConfiguration(object): - def __init__(self): - self.complete_options = False - - -completion_configuration = CompletionConfiguration() - - -def resolve_ctx(cli, prog_name, args): - ctx = cli.make_context(prog_name, list(args), resilient_parsing=True) - while ctx.args + ctx.protected_args and isinstance(ctx.command, MultiCommand): - a = ctx.protected_args + ctx.args - cmd = ctx.command.get_command(ctx, a[0]) - if cmd is None: - return None - ctx = cmd.make_context(a[0], a[1:], parent=ctx, resilient_parsing=True) - return 
ctx - - -def startswith(string, incomplete): - """Returns True when string starts with incomplete - - It might be overridden with a fuzzier version - for example a case insensitive version""" - return string.startswith(incomplete) - - -def get_choices(cli, prog_name, args, incomplete): - ctx = resolve_ctx(cli, prog_name, args) - if ctx is None: - return - - optctx = None - if args: - for param in ctx.command.get_params(ctx): - if isinstance(param, Option) and not param.is_flag and args[-1] in param.opts + param.secondary_opts: - optctx = param - - choices = [] - if optctx: - choices += [c if isinstance(c, tuple) else (c, None) for c in optctx.type.complete(ctx, incomplete)] - else: - for param in ctx.command.get_params(ctx): - if isinstance(param, Argument): - choices += [c if isinstance(c, tuple) else (c, None) for c in param.type.complete(ctx, incomplete)] - if (completion_configuration.complete_options or incomplete and not incomplete[:1].isalnum()) and isinstance(param, Option): - for opt in param.opts: - if startswith(opt, incomplete): - choices.append((opt, param.help)) - for opt in param.secondary_opts: - if startswith(opt, incomplete): - # don't put the doc so fish won't group the primary and - # and secondary options - choices.append((opt, None)) - if isinstance(ctx.command, MultiCommand): - for name in ctx.command.list_commands(ctx): - if startswith(name, incomplete): - choices.append((name, ctx.command.get_command_short_help(ctx, name))) - - for item, help in choices: - yield (item, help) - - -def split_args(line): - """Version of shlex.split that silently accept incomplete strings.""" - lex = shlex.shlex(line, posix=True) - lex.whitespace_split = True - lex.commenters = '' - res = [] - try: - while True: - res.append(next(lex)) - except ValueError: # No closing quotation - pass - except StopIteration: # End of loop - pass - if lex.token: - res.append(lex.token) - return res - - -def decode_args(strings): - res = [] - for s in strings: - s = 
split_args(s) - s = s[0] if s else '' - res.append(s) - return res - - -def do_bash_complete(cli, prog_name): - comp_words = os.environ['COMP_WORDS'] - try: - cwords = shlex.split(comp_words) - quoted = False - except ValueError: # No closing quotation - cwords = split_args(comp_words) - quoted = True - cword = int(os.environ['COMP_CWORD']) - args = cwords[1:cword] - try: - incomplete = cwords[cword] - except IndexError: - incomplete = '' - choices = get_choices(cli, prog_name, args, incomplete) - - if quoted: - echo('\t'.join(opt for opt, _ in choices), nl=False) - else: - echo('\t'.join(re.sub(r"""([\s\\"'])""", r'\\\1', opt) for opt, _ in choices), nl=False) - - return True - - -def do_fish_complete(cli, prog_name): - commandline = os.environ['COMMANDLINE'] - args = split_args(commandline)[1:] - if args and not commandline.endswith(' '): - incomplete = args[-1] - args = args[:-1] - else: - incomplete = '' - - for item, help in get_choices(cli, prog_name, args, incomplete): - if help: - echo("%s\t%s" % (item, re.sub('\s', ' ', help))) - else: - echo(item) - - return True - - -def do_zsh_complete(cli, prog_name): - commandline = os.environ['COMMANDLINE'] - args = split_args(commandline)[1:] - if args and not commandline.endswith(' '): - incomplete = args[-1] - args = args[:-1] - else: - incomplete = '' - - def escape(s): - return s.replace('"', '""').replace("'", "''").replace('$', '\\$') - res = [] - for item, help in get_choices(cli, prog_name, args, incomplete): - if help: - res.append('"%s"\:"%s"' % (escape(item), escape(help))) - else: - res.append('"%s"' % escape(item)) - if res: - echo("_arguments '*: :((%s))'" % '\n'.join(res)) - else: - echo("_files") - - return True +from click import ParamType +from enum import Enum +from click_completion.core import completion_configuration, get_code, install, shells, resolve_ctx, get_choices, \ + startswith, Shell +from click_completion.lib import get_auto_shell +from click_completion.patch import patch as _patch -def 
do_powershell_complete(cli, prog_name): - commandline = os.environ['COMMANDLINE'] - args = split_args(commandline)[1:] - quote = single_quote - incomplete = '' - if args and not commandline.endswith(' '): - incomplete = args[-1] - args = args[:-1] - quote_pos = commandline.rfind(incomplete) - 1 - if quote_pos >= 0 and commandline[quote_pos] == '"': - quote = double_quote - - for item, help in get_choices(cli, prog_name, args, incomplete): - echo(quote(item)) - - return True - - -find_unsafe = re.compile(r'[^\w@%+=:,./-]').search - - -def single_quote(s): - """Return a shell-escaped version of the string *s*.""" - if not s: - return "''" - if find_unsafe(s) is None: - return s - - # use single quotes, and put single quotes into double quotes - # the string $'b is then quoted as '$'"'"'b' - return "'" + s.replace("'", "'\"'\"'") + "'" - - -def double_quote(s): - '''Return a shell-escaped version of the string *s*.''' - if not s: - return '""' - if find_unsafe(s) is None: - return s - - # use double quotes, and put double quotes into single quotes - # the string $"b is then quoted as "$"'"'"b" - return '"' + s.replace('"', '"\'"\'"') + '"' - - -# extend click completion features - -def param_type_complete(self, ctx, incomplete): - return [] - - -def choice_complete(self, ctx, incomplete): - return [c for c in self.choices if c.startswith(incomplete)] - - -def multicommand_get_command_short_help(self, ctx, cmd_name): - return self.get_command(ctx, cmd_name).short_help - - -def _shellcomplete(cli, prog_name, complete_var=None): - """Internal handler for the bash completion support.""" - if complete_var is None: - complete_var = '_%s_COMPLETE' % (prog_name.replace('-', '_')).upper() - complete_instr = os.environ.get(complete_var) - if not complete_instr: - return - - if complete_instr == 'source': - echo(get_code(prog_name=prog_name, env_name=complete_var)) - elif complete_instr == 'source-bash': - echo(get_code('bash', prog_name, complete_var)) - elif complete_instr == 
'source-fish': - echo(get_code('fish', prog_name, complete_var)) - elif complete_instr == 'source-powershell': - echo(get_code('powershell', prog_name, complete_var)) - elif complete_instr == 'source-zsh': - echo(get_code('zsh', prog_name, complete_var)) - elif complete_instr in ['complete', 'complete-bash']: - # keep 'complete' for bash for backward compatibility - do_bash_complete(cli, prog_name) - elif complete_instr == 'complete-fish': - do_fish_complete(cli, prog_name) - elif complete_instr == 'complete-powershell': - do_powershell_complete(cli, prog_name) - elif complete_instr == 'complete-zsh': - do_zsh_complete(cli, prog_name) - elif complete_instr == 'install': - shell, path = install(prog_name=prog_name, env_name=complete_var) - click.echo('%s completion installed in %s' % (shell, path)) - elif complete_instr == 'install-bash': - shell, path = install(shell='bash', prog_name=prog_name, env_name=complete_var) - click.echo('%s completion installed in %s' % (shell, path)) - elif complete_instr == 'install-fish': - shell, path = install(shell='fish', prog_name=prog_name, env_name=complete_var) - click.echo('%s completion installed in %s' % (shell, path)) - elif complete_instr == 'install-zsh': - shell, path = install(shell='zsh', prog_name=prog_name, env_name=complete_var) - click.echo('%s completion installed in %s' % (shell, path)) - elif complete_instr == 'install-powershell': - shell, path = install(shell='powershell', prog_name=prog_name, env_name=complete_var) - click.echo('%s completion installed in %s' % (shell, path)) - sys.exit() - +__version__ = '0.4.1' _initialized = False -def init(complete_options=False): +def init(complete_options=False, match_incomplete=None): """Initialize the enhanced click completion - Args: - complete_options (bool): always complete the options, even when the user hasn't typed a first dash + Parameters + ---------- + complete_options : bool + always complete the options, even when the user hasn't typed a first dash 
(Default value = False) + match_incomplete : func + a function with two parameters choice and incomplete. Must return True + if incomplete is a correct match for choice, False otherwise. """ global _initialized if not _initialized: - import click - click.types.ParamType.complete = param_type_complete - click.types.Choice.complete = choice_complete - click.core.MultiCommand.get_command_short_help = multicommand_get_command_short_help - click.core._bashcomplete = _shellcomplete + _patch() completion_configuration.complete_options = complete_options + if match_incomplete is not None: + completion_configuration.match_incomplete = match_incomplete _initialized = True @@ -305,11 +43,19 @@ class DocumentedChoice(ParamType): supported values. All of these values have to be strings. Each value may be associated to a help message that will be display in the error message and during the completion. + + Parameters + ---------- + choices : dict or Enum + A dictionary with the possible choice as key, and the corresponding help string as value """ name = 'choice' def __init__(self, choices): - self.choices = dict(choices) + if isinstance(choices, Enum): + self.choices = dict((choice.name, choice.value) for choice in choices) + else: + self.choices = dict(choices) def get_metavar(self, param): return '[%s]' % '|'.join(self.choices.keys()) @@ -338,84 +84,5 @@ def __repr__(self): return 'DocumentedChoice(%r)' % list(self.choices.keys()) def complete(self, ctx, incomplete): - return [(c, v) for c, v in six.iteritems(self.choices) if startswith(c, incomplete)] - - -def get_code(shell=None, prog_name=None, env_name=None, extra_env=None): - """Return the specified completion code""" - from jinja2 import Environment, FileSystemLoader - if shell in [None, 'auto']: - shell = get_auto_shell() - prog_name = prog_name or click.get_current_context().find_root().info_name - env_name = env_name or '_%s_COMPLETE' % prog_name.upper().replace('-', '_') - extra_env = extra_env if extra_env else {} - 
env = Environment(loader=FileSystemLoader(os.path.dirname(__file__))) - template = env.get_template('%s.j2' % shell) - return template.render(prog_name=prog_name, complete_var=env_name, extra_env=extra_env) - - -def get_auto_shell(): - """Return the shell that is calling this process""" - try: - import psutil - parent = psutil.Process(os.getpid()).parent() - if platform.system() == 'Windows': - parent = parent.parent() or parent - return parent.name().replace('.exe', '') - except ImportError: - raise click.UsageError("Please explicitly give the shell type or install the psutil package to activate the" - " automatic shell detection.") - - -def install(shell=None, prog_name=None, env_name=None, path=None, append=None, extra_env=None): - """Install the completion""" - prog_name = prog_name or click.get_current_context().find_root().info_name - shell = shell or get_auto_shell() - if append is None and path is not None: - append = True - if append is not None: - mode = 'a' if append else 'w' - else: - mode = None - - if shell == 'fish': - path = path or os.path.expanduser('~') + '/.config/fish/completions/%s.fish' % prog_name - mode = mode or 'w' - elif shell == 'bash': - path = path or os.path.expanduser('~') + '/.bash_completion' - mode = mode or 'a' - elif shell == 'zsh': - ohmyzsh = os.path.expanduser('~') + '/.oh-my-zsh' - if os.path.exists(ohmyzsh): - path = path or ohmyzsh + '/completions/_%s' % prog_name - mode = mode or 'w' - else: - path = path or os.path.expanduser('~') + '/.zshrc' - mode = mode or 'a' - elif shell == 'powershell': - subprocess.check_call(['powershell', 'Set-ExecutionPolicy Unrestricted -Scope CurrentUser']) - path = path or subprocess.check_output(['powershell', '-NoProfile', 'echo $profile']).strip() if install else '' - mode = mode or 'a' - else: - raise click.ClickException('%s is not supported.' 
% shell) - - if append is not None: - mode = 'a' if append else 'w' - else: - mode = mode - d = os.path.dirname(path) - if not os.path.exists(d): - os.makedirs(d) - f = open(path, mode) - f.write(get_code(shell, prog_name, env_name, extra_env)) - f.write("\n") - f.close() - return shell, path - - -shells = { - 'bash': 'Bourne again shell', - 'fish': 'Friendly interactive shell', - 'zsh': 'Z shell', - 'powershell': 'Windows PowerShell' -} + match = completion_configuration.match_incomplete + return [(c, v) for c, v in six.iteritems(self.choices) if match(c, incomplete)] diff --git a/pipenv/vendor/click_completion/core.py b/pipenv/vendor/click_completion/core.py new file mode 100644 index 0000000000..2ede6eff41 --- /dev/null +++ b/pipenv/vendor/click_completion/core.py @@ -0,0 +1,384 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- + +from __future__ import print_function, absolute_import + +import os +import re +import shlex +import subprocess + +import click +from click import Option, Argument, MultiCommand, echo +from enum import Enum + +from click_completion.lib import resolve_ctx, split_args, single_quote, double_quote, get_auto_shell + + +def startswith(string, incomplete): + """Returns True when string starts with incomplete + + It might be overridden with a fuzzier version - for example a case insensitive version + + Parameters + ---------- + string : str + The string to check + incomplete : str + The incomplete string to compare to the begining of string + + Returns + ------- + bool + True if string starts with incomplete, False otherwise + """ + return string.startswith(incomplete) + + +class CompletionConfiguration(object): + """A class to hold the completion configuration + + Attributes + ---------- + + complete_options : bool + Wether to complete the options or not. By default, the options are only completed after the user has entered + a first dash '-'. Change this value to True to always complete the options, even without first typing any + character. 
+ match_incomplete : func + A function use to check whether a parameter match an incomplete argument typed by the user + """ + def __init__(self): + self.complete_options = False + self.match_incomplete = startswith + + +def match(string, incomplete): + import click_completion + # backward compatibility handling + if click_completion.startswith != startswith: + fn = click_completion.startswith + else: + fn = completion_configuration.match_incomplete + return fn(string, incomplete) + + +def get_choices(cli, prog_name, args, incomplete): + """ + + Parameters + ---------- + cli : click.Command + The main click Command of the program + prog_name : str + The program name on the command line + args : [str] + The arguments already written by the user on the command line + incomplete : str + The partial argument to complete + + Returns + ------- + [(str, str)] + A list of completion results. The first element of each tuple is actually the argument to complete, the second + element is an help string for this argument. 
+ """ + ctx = resolve_ctx(cli, prog_name, args) + if ctx is None: + return + optctx = None + if args: + options = [param + for param in ctx.command.get_params(ctx) + if isinstance(param, Option)] + arguments = [param + for param in ctx.command.get_params(ctx) + if isinstance(param, Argument)] + for param in options: + if not param.is_flag and args[-1] in param.opts + param.secondary_opts: + optctx = param + if optctx is None: + for param in arguments: + if ( + not incomplete.startswith("-") + and ( + ctx.params.get(param.name) in (None, ()) + or param.nargs == -1 + ) + ): + optctx = param + break + choices = [] + if optctx: + choices += [c if isinstance(c, tuple) else (c, None) for c in optctx.type.complete(ctx, incomplete)] + else: + for param in ctx.command.get_params(ctx): + if (completion_configuration.complete_options or incomplete and not incomplete[:1].isalnum()) and isinstance(param, Option): + for opt in param.opts: + if match(opt, incomplete): + choices.append((opt, param.help)) + for opt in param.secondary_opts: + if match(opt, incomplete): + # don't put the doc so fish won't group the primary and + # and secondary options + choices.append((opt, None)) + if isinstance(ctx.command, MultiCommand): + for name in ctx.command.list_commands(ctx): + if match(name, incomplete): + choices.append((name, ctx.command.get_command_short_help(ctx, name))) + + for item, help in choices: + yield (item, help) + + +def do_bash_complete(cli, prog_name): + """Do the completion for bash + + Parameters + ---------- + cli : click.Command + The main click Command of the program + prog_name : str + The program name on the command line + + Returns + ------- + bool + True if the completion was successful, False otherwise + """ + comp_words = os.environ['COMP_WORDS'] + try: + cwords = shlex.split(comp_words) + quoted = False + except ValueError: # No closing quotation + cwords = split_args(comp_words) + quoted = True + cword = int(os.environ['COMP_CWORD']) + args = cwords[1:cword] + 
try: + incomplete = cwords[cword] + except IndexError: + incomplete = '' + choices = get_choices(cli, prog_name, args, incomplete) + + if quoted: + echo('\t'.join(opt for opt, _ in choices), nl=False) + else: + echo('\t'.join(re.sub(r"""([\s\\"'()])""", r'\\\1', opt) for opt, _ in choices), nl=False) + + return True + + +def do_fish_complete(cli, prog_name): + """Do the fish completion + + Parameters + ---------- + cli : click.Command + The main click Command of the program + prog_name : str + The program name on the command line + + Returns + ------- + bool + True if the completion was successful, False otherwise + """ + commandline = os.environ['COMMANDLINE'] + args = split_args(commandline)[1:] + if args and not commandline.endswith(' '): + incomplete = args[-1] + args = args[:-1] + else: + incomplete = '' + + for item, help in get_choices(cli, prog_name, args, incomplete): + if help: + echo("%s\t%s" % (item, re.sub('\s', ' ', help))) + else: + echo(item) + + return True + + +def do_zsh_complete(cli, prog_name): + """Do the zsh completion + + Parameters + ---------- + cli : click.Command + The main click Command of the program + prog_name : str + The program name on the command line + + Returns + ------- + bool + True if the completion was successful, False otherwise + """ + commandline = os.environ['COMMANDLINE'] + args = split_args(commandline)[1:] + if args and not commandline.endswith(' '): + incomplete = args[-1] + args = args[:-1] + else: + incomplete = '' + + def escape(s): + return s.replace('"', '""').replace("'", "''").replace('$', '\\$') + res = [] + for item, help in get_choices(cli, prog_name, args, incomplete): + if help: + res.append('"%s"\:"%s"' % (escape(item), escape(help))) + else: + res.append('"%s"' % escape(item)) + if res: + echo("_arguments '*: :((%s))'" % '\n'.join(res)) + else: + echo("_files") + + return True + + +def do_powershell_complete(cli, prog_name): + """Do the powershell completion + + Parameters + ---------- + cli : 
click.Command + The main click Command of the program + prog_name : str + The program name on the command line + + Returns + ------- + bool + True if the completion was successful, False otherwise + """ + commandline = os.environ['COMMANDLINE'] + args = split_args(commandline)[1:] + quote = single_quote + incomplete = '' + if args and not commandline.endswith(' '): + incomplete = args[-1] + args = args[:-1] + quote_pos = commandline.rfind(incomplete) - 1 + if quote_pos >= 0 and commandline[quote_pos] == '"': + quote = double_quote + + for item, help in get_choices(cli, prog_name, args, incomplete): + echo(quote(item)) + + return True + + +def get_code(shell=None, prog_name=None, env_name=None, extra_env=None): + """Returns the completion code to be evaluated by the shell + + Parameters + ---------- + shell : Shell + The shell type (Default value = None) + prog_name : str + The program name on the command line (Default value = None) + env_name : str + The environment variable used to control the completion (Default value = None) + extra_env : dict + Some extra environment variables to be added to the generated code (Default value = None) + + Returns + ------- + str + The code to be evaluated by the shell + """ + from jinja2 import Environment, FileSystemLoader + if shell in [None, 'auto']: + shell = get_auto_shell() + if not isinstance(shell, Shell): + shell = Shell[shell] + prog_name = prog_name or click.get_current_context().find_root().info_name + env_name = env_name or '_%s_COMPLETE' % prog_name.upper().replace('-', '_') + extra_env = extra_env if extra_env else {} + env = Environment(loader=FileSystemLoader(os.path.dirname(__file__))) + template = env.get_template('%s.j2' % shell.name) + return template.render(prog_name=prog_name, complete_var=env_name, extra_env=extra_env) + + +def install(shell=None, prog_name=None, env_name=None, path=None, append=None, extra_env=None): + """Install the completion + + Parameters + ---------- + shell : Shell + The shell type 
targeted. It will be guessed with get_auto_shell() if the value is None (Default value = None) + prog_name : str + The program name on the command line. It will be automatically computed if the value is None + (Default value = None) + env_name : str + The environment variable name used to control the completion. It will be automatically computed if the value is + None (Default value = None) + path : str + The installation path of the code to be evaluated by the shell. The standard installation path is used if the + value is None (Default value = None) + append : bool + Whether to append the content to the file or to override it. The default behavior depends on the shell type + (Default value = None) + extra_env : dict + A set of environment variables and their values to be added to the generated code (Default value = None) + """ + prog_name = prog_name or click.get_current_context().find_root().info_name + shell = shell or get_auto_shell() + if append is None and path is not None: + append = True + if append is not None: + mode = 'a' if append else 'w' + else: + mode = None + + if shell == 'fish': + path = path or os.path.expanduser('~') + '/.config/fish/completions/%s.fish' % prog_name + mode = mode or 'w' + elif shell == 'bash': + path = path or os.path.expanduser('~') + '/.bash_completion' + mode = mode or 'a' + elif shell == 'zsh': + ohmyzsh = os.path.expanduser('~') + '/.oh-my-zsh' + if os.path.exists(ohmyzsh): + path = path or ohmyzsh + '/completions/_%s' % prog_name + mode = mode or 'w' + else: + path = path or os.path.expanduser('~') + '/.zshrc' + mode = mode or 'a' + elif shell == 'powershell': + subprocess.check_call(['powershell', 'Set-ExecutionPolicy Unrestricted -Scope CurrentUser']) + path = path or subprocess.check_output(['powershell', '-NoProfile', 'echo $profile']).strip() if install else '' + mode = mode or 'a' + else: + raise click.ClickException('%s is not supported.' 
% shell) + + if append is not None: + mode = 'a' if append else 'w' + else: + mode = mode + d = os.path.dirname(path) + if not os.path.exists(d): + os.makedirs(d) + f = open(path, mode) + f.write(get_code(shell, prog_name, env_name, extra_env)) + f.write("\n") + f.close() + return shell, path + + +class Shell(Enum): + bash = 'Bourne again shell' + fish = 'Friendly interactive shell' + zsh = 'Z shell' + powershell = 'Windows PowerShell' + + +# deprecated - use Shell instead +shells = dict((shell.name, shell.value) for shell in Shell) + + +completion_configuration = CompletionConfiguration() diff --git a/pipenv/vendor/click_completion/lib.py b/pipenv/vendor/click_completion/lib.py new file mode 100644 index 0000000000..cd53bc03c0 --- /dev/null +++ b/pipenv/vendor/click_completion/lib.py @@ -0,0 +1,122 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- + +from __future__ import print_function, absolute_import + +import re +import shlex + +import click +import shellingham +from click import MultiCommand + +find_unsafe = re.compile(r'[^\w@%+=:,./-]').search + + +def single_quote(s): + """Escape a string with single quotes in order to be parsed as a single element by shlex + + Parameters + ---------- + s : str + The string to quote + + Returns + ------- + str + The quoted string + """ + if not s: + return "''" + if find_unsafe(s) is None: + return s + + # use single quotes, and put single quotes into double quotes + # the string $'b is then quoted as '$'"'"'b' + return "'" + s.replace("'", "'\"'\"'") + "'" + + +def double_quote(s): + """Escape a string with double quotes in order to be parsed as a single element by shlex + + Parameters + ---------- + s : str + The string to quote + + Returns + ------- + str + The quoted string + """ + if not s: + return '""' + if find_unsafe(s) is None: + return s + + # use double quotes, and put double quotes into single quotes + # the string $"b is then quoted as "$"'"'"b" + return '"' + s.replace('"', '"\'"\'"') + '"' + + +def 
resolve_ctx(cli, prog_name, args): + """ + + Parameters + ---------- + cli : click.Command + The main click Command of the program + prog_name : str + The program name on the command line + args : [str] + The arguments already written by the user on the command line + + Returns + ------- + click.core.Context + A new context corresponding to the current command + """ + ctx = cli.make_context(prog_name, list(args), resilient_parsing=True) + while ctx.args + ctx.protected_args and isinstance(ctx.command, MultiCommand): + a = ctx.protected_args + ctx.args + cmd = ctx.command.get_command(ctx, a[0]) + if cmd is None: + return None + ctx = cmd.make_context(a[0], a[1:], parent=ctx, resilient_parsing=True) + return ctx + + +def split_args(line): + """Version of shlex.split that silently accept incomplete strings. + + Parameters + ---------- + line : str + The string to split + + Returns + ------- + [str] + The line split in separated arguments + """ + lex = shlex.shlex(line, posix=True) + lex.whitespace_split = True + lex.commenters = '' + res = [] + try: + while True: + res.append(next(lex)) + except ValueError: # No closing quotation + pass + except StopIteration: # End of loop + pass + if lex.token: + res.append(lex.token) + return res + + +def get_auto_shell(): + """Returns the current shell + + This feature depends on psutil and will not work if it is not available""" + return shellingham.detect_shell()[0] diff --git a/pipenv/vendor/click_completion/patch.py b/pipenv/vendor/click_completion/patch.py new file mode 100644 index 0000000000..ab351f45f5 --- /dev/null +++ b/pipenv/vendor/click_completion/patch.py @@ -0,0 +1,142 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- + +from __future__ import print_function, absolute_import + +import os +import sys + +import click +from click import echo + +from click_completion.core import do_bash_complete, do_fish_complete, do_zsh_complete, do_powershell_complete,\ + get_code, install, completion_configuration + +"""All the code 
used to monkey patch click""" + + +def param_type_complete(self, ctx, incomplete): + """Returns a set of possible completions values, along with their documentation string + + Default implementation of the complete method for click.types.ParamType just returns an empty list + + Parameters + ---------- + ctx : click.core.Context + The current context + incomplete : + The string to complete + + Returns + ------- + [(str, str)] + A list of completion results. The first element of each tuple is actually the argument to complete, the second + element is an help string for this argument. + """ + return [] + + +def choice_complete(self, ctx, incomplete): + """Returns the completion results for click.core.Choice + + Parameters + ---------- + ctx : click.core.Context + The current context + incomplete : + The string to complete + + Returns + ------- + [(str, str)] + A list of completion results + """ + return [ + (c, None) for c in self.choices + if completion_configuration.match_incomplete(c, incomplete) + ] + + +def multicommand_get_command_short_help(self, ctx, cmd_name): + """Returns the short help of a subcommand + + It allows MultiCommand subclasses to implement more efficient ways to provide the subcommand short help, for + example by leveraging some caching. + + Parameters + ---------- + ctx : click.core.Context + The current context + cmd_name : + The sub command name + + Returns + ------- + str + The sub command short help + """ + return self.get_command(ctx, cmd_name).short_help + + +def _shellcomplete(cli, prog_name, complete_var=None): + """Internal handler for the bash completion support. 
+ + Parameters + ---------- + cli : click.Command + The main click Command of the program + prog_name : str + The program name on the command line + complete_var : str + The environment variable name used to control the completion behavior (Default value = None) + """ + if complete_var is None: + complete_var = '_%s_COMPLETE' % (prog_name.replace('-', '_')).upper() + complete_instr = os.environ.get(complete_var) + if not complete_instr: + return + + if complete_instr == 'source': + echo(get_code(prog_name=prog_name, env_name=complete_var)) + elif complete_instr == 'source-bash': + echo(get_code('bash', prog_name, complete_var)) + elif complete_instr == 'source-fish': + echo(get_code('fish', prog_name, complete_var)) + elif complete_instr == 'source-powershell': + echo(get_code('powershell', prog_name, complete_var)) + elif complete_instr == 'source-zsh': + echo(get_code('zsh', prog_name, complete_var)) + elif complete_instr in ['complete', 'complete-bash']: + # keep 'complete' for bash for backward compatibility + do_bash_complete(cli, prog_name) + elif complete_instr == 'complete-fish': + do_fish_complete(cli, prog_name) + elif complete_instr == 'complete-powershell': + do_powershell_complete(cli, prog_name) + elif complete_instr == 'complete-zsh': + do_zsh_complete(cli, prog_name) + elif complete_instr == 'install': + shell, path = install(prog_name=prog_name, env_name=complete_var) + click.echo('%s completion installed in %s' % (shell, path)) + elif complete_instr == 'install-bash': + shell, path = install(shell='bash', prog_name=prog_name, env_name=complete_var) + click.echo('%s completion installed in %s' % (shell, path)) + elif complete_instr == 'install-fish': + shell, path = install(shell='fish', prog_name=prog_name, env_name=complete_var) + click.echo('%s completion installed in %s' % (shell, path)) + elif complete_instr == 'install-zsh': + shell, path = install(shell='zsh', prog_name=prog_name, env_name=complete_var) + click.echo('%s completion installed 
in %s' % (shell, path)) + elif complete_instr == 'install-powershell': + shell, path = install(shell='powershell', prog_name=prog_name, env_name=complete_var) + click.echo('%s completion installed in %s' % (shell, path)) + sys.exit() + + +def patch(): + """Patch click""" + import click + click.types.ParamType.complete = param_type_complete + click.types.Choice.complete = choice_complete + click.core.MultiCommand.get_command_short_help = multicommand_get_command_short_help + click.core._bashcomplete = _shellcomplete diff --git a/pipenv/vendor/click_completion/zsh.j2 b/pipenv/vendor/click_completion/zsh.j2 index e8dd437d22..9e1024a81c 100644 --- a/pipenv/vendor/click_completion/zsh.j2 +++ b/pipenv/vendor/click_completion/zsh.j2 @@ -2,7 +2,7 @@ _{{prog_name}}() { eval $(env COMMANDLINE="${words[1,$CURRENT]}" {{complete_var}}=complete-zsh {% for k, v in extra_env.items() %} {{k}}={{v}}{% endfor %} {{prog_name}}) } -if [[ "$(basename ${(%):-%x})" != "_{{prog_name}}" ]]; then +if [[ "$(basename -- ${(%):-%x})" != "_{{prog_name}}" ]]; then autoload -U compinit && compinit compdef _{{prog_name}} {{prog_name}} fi From 3cb3f594a1a5780ceabcfb9d0310f786f58e6188 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 25 Aug 2018 13:15:20 -0400 Subject: [PATCH 09/26] Update shellingham to 1.24 Signed-off-by: Dan Ryan --- pipenv/vendor/shellingham/LICENSE | 13 +++ pipenv/vendor/shellingham/__init__.py | 2 +- pipenv/vendor/shellingham/_consts.py | 7 ++ pipenv/vendor/shellingham/nt.py | 89 +++++++++------------ pipenv/vendor/shellingham/posix.py | 56 +++++++++++++ pipenv/vendor/shellingham/posix/_default.py | 27 +++++++ pipenv/vendor/shellingham/posix/_proc.py | 41 ++++++++++ pipenv/vendor/shellingham/posix/_ps.py | 25 ++++++ pipenv/vendor/shellingham/posix/linux.py | 35 ++++++++ 9 files changed, 241 insertions(+), 54 deletions(-) create mode 100644 pipenv/vendor/shellingham/LICENSE create mode 100644 pipenv/vendor/shellingham/_consts.py create mode 100644 
pipenv/vendor/shellingham/posix.py create mode 100644 pipenv/vendor/shellingham/posix/_default.py create mode 100644 pipenv/vendor/shellingham/posix/_proc.py create mode 100644 pipenv/vendor/shellingham/posix/_ps.py create mode 100644 pipenv/vendor/shellingham/posix/linux.py diff --git a/pipenv/vendor/shellingham/LICENSE b/pipenv/vendor/shellingham/LICENSE new file mode 100644 index 0000000000..b9077766e9 --- /dev/null +++ b/pipenv/vendor/shellingham/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2018, Tzu-ping Chung + +Permission to use, copy, modify, and distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/pipenv/vendor/shellingham/__init__.py b/pipenv/vendor/shellingham/__init__.py index 164f4593c7..a415c4a967 100644 --- a/pipenv/vendor/shellingham/__init__.py +++ b/pipenv/vendor/shellingham/__init__.py @@ -4,7 +4,7 @@ from ._core import ShellDetectionFailure -__version__ = '1.2.3.dev0' +__version__ = '1.2.4' def detect_shell(pid=None, max_depth=6): diff --git a/pipenv/vendor/shellingham/_consts.py b/pipenv/vendor/shellingham/_consts.py new file mode 100644 index 0000000000..2c788fecbe --- /dev/null +++ b/pipenv/vendor/shellingham/_consts.py @@ -0,0 +1,7 @@ +SHELL_NAMES = { + 'sh', 'bash', 'dash', # Bourne. + 'csh', 'tcsh', # C. + 'ksh', 'zsh', 'fish', # Common alternatives. + 'cmd', 'powershell', 'pwsh', # Microsoft. + 'elvish', 'xonsh', # More exotic. 
+} diff --git a/pipenv/vendor/shellingham/nt.py b/pipenv/vendor/shellingham/nt.py index 757d47b47a..7b3cc6b4dc 100644 --- a/pipenv/vendor/shellingham/nt.py +++ b/pipenv/vendor/shellingham/nt.py @@ -6,7 +6,7 @@ import sys from ctypes import ( - byref, sizeof, windll, Structure, WinError, POINTER, + byref, sizeof, windll, Structure, WinError, c_size_t, c_char, c_void_p ) from ctypes.wintypes import DWORD, LONG @@ -15,6 +15,8 @@ ERROR_NO_MORE_FILES = 18 +ERROR_INSUFFICIENT_BUFFER = 122 + INVALID_HANDLE_VALUE = c_void_p(-1).value @@ -39,64 +41,45 @@ class PROCESSENTRY32(Structure): ] -LPPROCESSENTRY32 = POINTER(PROCESSENTRY32) +def _iter_process(): + """Iterate through processes, yielding process ID and properties of each. + Example usage:: -def CreateToolhelp32Snapshot(dwFlags=2, th32ProcessID=0): - hSnapshot = windll.kernel32.CreateToolhelp32Snapshot( - dwFlags, - th32ProcessID + >>> for pid, info in _iter_process(): + ... print(pid, '->', info) + 1509 -> {'parent_pid': 1201, 'executable': 'python.exe'} + """ + # TODO: Process32{First,Next} does not return full executable path, only + # the name. To get the full path, Module32{First,Next} is needed, but that + # does not contain parent process information. We probably need to call + # BOTH to build the correct process tree. + h_process = windll.kernel32.CreateToolhelp32Snapshot( + 2, # dwFlags=TH32CS_SNAPPROCESS (include all processes). + 0, # th32ProcessID=0 (the current process). 
) - if hSnapshot == INVALID_HANDLE_VALUE: + if h_process == INVALID_HANDLE_VALUE: raise WinError() - return hSnapshot - - -def Process32First(hSnapshot): pe = PROCESSENTRY32() pe.dwSize = sizeof(PROCESSENTRY32) - success = windll.kernel32.Process32First(hSnapshot, byref(pe)) - if not success: - if windll.kernel32.GetLastError() == ERROR_NO_MORE_FILES: - return - raise WinError() - return pe - - -def Process32Next(hSnapshot, pe=None): - if pe is None: - pe = PROCESSENTRY32() - pe.dwSize = sizeof(PROCESSENTRY32) - success = windll.kernel32.Process32Next(hSnapshot, byref(pe)) - if not success: - if windll.kernel32.GetLastError() == ERROR_NO_MORE_FILES: - return - raise WinError() - return pe - - -def get_all_processes(): - """Return a dictionary of properties about all processes. - >>> get_all_processes() - { - 1509: { - 'parent_pid': 1201, - 'executable': 'C:\\Program\\\\ Files\\Python36\\python.exe' - } - } - """ - h_process = CreateToolhelp32Snapshot() - pids = {} - pe = Process32First(h_process) - while pe: - pids[pe.th32ProcessID] = { - 'executable': str(pe.szExeFile.decode('utf-8')) - } + success = windll.kernel32.Process32First(h_process, byref(pe)) + while True: + if not success: + errcode = windll.kernel32.GetLastError() + if errcode == ERROR_NO_MORE_FILES: + # No more processes to iterate through, we're done here. + return + elif errcode == ERROR_INSUFFICIENT_BUFFER: + # This is likely because the file path is longer than the + # Windows limit. Just ignore it, it's likely not what we're + # looking for. We can fix this when it actually matters. 
(#8) + continue + raise WinError() + info = {'executable': str(pe.szExeFile.decode('utf-8'))} if pe.th32ParentProcessID: - pids[pe.th32ProcessID]['parent_pid'] = pe.th32ParentProcessID - pe = Process32Next(h_process, pe) - - return pids + info['parent_pid'] = pe.th32ParentProcessID + yield pe.th32ProcessID, info + success = windll.kernel32.Process32Next(h_process, byref(pe)) def _get_executable(process_dict): @@ -114,7 +97,7 @@ def get_shell(pid=None, max_depth=6): """ if not pid: pid = os.getpid() - processes = get_all_processes() + processes = dict(_iter_process()) def check_parent(pid, lvl=0): ppid = processes[pid].get('parent_pid') diff --git a/pipenv/vendor/shellingham/posix.py b/pipenv/vendor/shellingham/posix.py new file mode 100644 index 0000000000..b25dd87456 --- /dev/null +++ b/pipenv/vendor/shellingham/posix.py @@ -0,0 +1,56 @@ +import collections +import os +import shlex +import subprocess +import sys + +from ._consts import SHELL_NAMES + + +Process = collections.namedtuple('Process', 'args pid ppid') + + +def _get_process_mapping(): + """Try to look up the process tree via the output of `ps`. + """ + output = subprocess.check_output([ + 'ps', '-ww', '-o', 'pid=', '-o', 'ppid=', '-o', 'args=', + ]) + if not isinstance(output, str): + output = output.decode(sys.stdout.encoding) + processes = {} + for line in output.split('\n'): + try: + pid, ppid, args = line.strip().split(maxsplit=2) + except ValueError: + continue + processes[pid] = Process( + args=tuple(shlex.split(args)), pid=pid, ppid=ppid, + ) + return processes + + +def get_shell(pid=None, max_depth=6): + """Get the shell that the supplied pid or os.getpid() is running in. 
+ """ + pid = str(pid or os.getpid()) + mapping = _get_process_mapping() + login_shell = os.environ.get('SHELL', '') + for _ in range(max_depth): + try: + proc = mapping[pid] + except KeyError: + break + name = os.path.basename(proc.args[0]).lower() + if name in SHELL_NAMES: + return (name, proc.args[0]) + elif proc.args[0].startswith('-'): + # This is the login shell. Use the SHELL environ if possible + # because it provides better information. + if login_shell: + name = login_shell.lower() + else: + name = proc.args[0][1:].lower() + return (os.path.basename(name), name) + pid = proc.ppid # Go up one level. + return None diff --git a/pipenv/vendor/shellingham/posix/_default.py b/pipenv/vendor/shellingham/posix/_default.py new file mode 100644 index 0000000000..8694427611 --- /dev/null +++ b/pipenv/vendor/shellingham/posix/_default.py @@ -0,0 +1,27 @@ +import collections +import shlex +import subprocess +import sys + + +Process = collections.namedtuple('Process', 'args pid ppid') + + +def get_process_mapping(): + """Try to look up the process tree via the output of `ps`. 
+ """ + output = subprocess.check_output([ + 'ps', '-ww', '-o', 'pid=', '-o', 'ppid=', '-o', 'args=', + ]) + if not isinstance(output, str): + output = output.decode(sys.stdout.encoding) + processes = {} + for line in output.split('\n'): + try: + pid, ppid, args = line.strip().split(None, 2) + except ValueError: + continue + processes[pid] = Process( + args=tuple(shlex.split(args)), pid=pid, ppid=ppid, + ) + return processes diff --git a/pipenv/vendor/shellingham/posix/_proc.py b/pipenv/vendor/shellingham/posix/_proc.py new file mode 100644 index 0000000000..921f250819 --- /dev/null +++ b/pipenv/vendor/shellingham/posix/_proc.py @@ -0,0 +1,41 @@ +import os +import re + +from ._core import Process + + +STAT_PPID = 3 +STAT_TTY = 6 + +STAT_PATTERN = re.compile(r'\(.+\)|\S+') + + +def _get_stat(pid): + with open(os.path.join('/proc', str(pid), 'stat')) as f: + parts = STAT_PATTERN.findall(f.read()) + return parts[STAT_TTY], parts[STAT_PPID] + + +def _get_cmdline(pid): + with open(os.path.join('/proc', str(pid), 'cmdline')) as f: + return tuple(f.read().split('\0')[:-1]) + + +def get_process_mapping(): + """Try to look up the process tree via the /proc interface. + """ + self_tty = _get_stat(os.getpid())[0] + processes = {} + for pid in os.listdir('/proc'): + if not pid.isdigit(): + continue + try: + tty, ppid = _get_stat(pid) + if tty != self_tty: + continue + args = _get_cmdline(pid) + processes[pid] = Process(args=args, pid=pid, ppid=ppid) + except IOError: + # Process has disappeared - just ignore it. + continue + return processes diff --git a/pipenv/vendor/shellingham/posix/_ps.py b/pipenv/vendor/shellingham/posix/_ps.py new file mode 100644 index 0000000000..e96278cf5f --- /dev/null +++ b/pipenv/vendor/shellingham/posix/_ps.py @@ -0,0 +1,25 @@ +import shlex +import subprocess +import sys + +from ._core import Process + + +def get_process_mapping(): + """Try to look up the process tree via the output of `ps`. 
+ """ + output = subprocess.check_output([ + 'ps', '-ww', '-o', 'pid=', '-o', 'ppid=', '-o', 'args=', + ]) + if not isinstance(output, str): + output = output.decode(sys.stdout.encoding) + processes = {} + for line in output.split('\n'): + try: + pid, ppid, args = line.strip().split(None, 2) + except ValueError: + continue + processes[pid] = Process( + args=tuple(shlex.split(args)), pid=pid, ppid=ppid, + ) + return processes diff --git a/pipenv/vendor/shellingham/posix/linux.py b/pipenv/vendor/shellingham/posix/linux.py new file mode 100644 index 0000000000..6db9783481 --- /dev/null +++ b/pipenv/vendor/shellingham/posix/linux.py @@ -0,0 +1,35 @@ +import os +import re + +from ._default import Process + + +STAT_PPID = 3 +STAT_TTY = 6 + + +def get_process_mapping(): + """Try to look up the process tree via Linux's /proc + """ + with open('/proc/{0}/stat'.format(os.getpid())) as f: + self_tty = f.read().split()[STAT_TTY] + processes = {} + for pid in os.listdir('/proc'): + if not pid.isdigit(): + continue + try: + stat = '/proc/{0}/stat'.format(pid) + cmdline = '/proc/{0}/cmdline'.format(pid) + with open(stat) as fstat, open(cmdline) as fcmdline: + stat = re.findall(r'\(.+\)|\S+', fstat.read()) + cmd = fcmdline.read().split('\x00')[:-1] + ppid = stat[STAT_PPID] + tty = stat[STAT_TTY] + if tty == self_tty: + processes[pid] = Process( + args=tuple(cmd), pid=pid, ppid=ppid, + ) + except IOError: + # Process has disappeared - just ignore it. 
+ continue + return processes From b626a111502e73d34b4bef022d439a9061eaa7a7 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 25 Aug 2018 13:15:47 -0400 Subject: [PATCH 10/26] Update requirementslib to 1.1.2 Signed-off-by: Dan Ryan --- pipenv/vendor/requirementslib/LICENSE | 24 +- pipenv/vendor/requirementslib/LICENSE.APACHE | 177 ----- pipenv/vendor/requirementslib/LICENSE.BSD | 23 - pipenv/vendor/requirementslib/__init__.py | 2 +- pipenv/vendor/requirementslib/_compat.py | 56 -- .../requirementslib/_vendor/__init__.py | 1 - .../requirementslib/_vendor/pipfile/LICENSE | 3 - .../_vendor/pipfile/LICENSE.APACHE | 177 ----- .../_vendor/pipfile/LICENSE.BSD | 23 - .../_vendor/pipfile/__about__.py | 21 - .../_vendor/pipfile/__init__.py | 11 - .../requirementslib/_vendor/pipfile/api.py | 230 ------- pipenv/vendor/requirementslib/exceptions.py | 9 + .../vendor/requirementslib/models/__init__.py | 3 +- pipenv/vendor/requirementslib/models/cache.py | 266 +++++++ .../requirementslib/models/dependencies.py | 650 ++++++++++++++++++ .../vendor/requirementslib/models/lockfile.py | 82 ++- .../vendor/requirementslib/models/markers.py | 15 +- .../vendor/requirementslib/models/pipfile.py | 204 ++---- .../requirementslib/models/requirements.py | 425 +++++++++--- .../requirementslib/models/resolvers.py | 239 +++++++ pipenv/vendor/requirementslib/models/utils.py | 350 +++++++++- pipenv/vendor/requirementslib/models/vcs.py | 48 ++ pipenv/vendor/requirementslib/utils.py | 137 ++-- 24 files changed, 2038 insertions(+), 1138 deletions(-) delete mode 100644 pipenv/vendor/requirementslib/LICENSE.APACHE delete mode 100644 pipenv/vendor/requirementslib/LICENSE.BSD delete mode 100644 pipenv/vendor/requirementslib/_compat.py delete mode 100644 pipenv/vendor/requirementslib/_vendor/__init__.py delete mode 100644 pipenv/vendor/requirementslib/_vendor/pipfile/LICENSE delete mode 100644 pipenv/vendor/requirementslib/_vendor/pipfile/LICENSE.APACHE delete mode 100644 
pipenv/vendor/requirementslib/_vendor/pipfile/LICENSE.BSD delete mode 100644 pipenv/vendor/requirementslib/_vendor/pipfile/__about__.py delete mode 100644 pipenv/vendor/requirementslib/_vendor/pipfile/__init__.py delete mode 100644 pipenv/vendor/requirementslib/_vendor/pipfile/api.py create mode 100644 pipenv/vendor/requirementslib/models/cache.py create mode 100644 pipenv/vendor/requirementslib/models/dependencies.py create mode 100644 pipenv/vendor/requirementslib/models/resolvers.py create mode 100644 pipenv/vendor/requirementslib/models/vcs.py diff --git a/pipenv/vendor/requirementslib/LICENSE b/pipenv/vendor/requirementslib/LICENSE index 6f62d44e4e..8c731e2798 100644 --- a/pipenv/vendor/requirementslib/LICENSE +++ b/pipenv/vendor/requirementslib/LICENSE @@ -1,3 +1,21 @@ -This software is made available under the terms of *either* of the licenses -found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made -under the terms of *both* these licenses. +The MIT License (MIT) + +Copyright 2018 Dan Ryan. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/pipenv/vendor/requirementslib/LICENSE.APACHE b/pipenv/vendor/requirementslib/LICENSE.APACHE deleted file mode 100644 index 4947287f7b..0000000000 --- a/pipenv/vendor/requirementslib/LICENSE.APACHE +++ /dev/null @@ -1,177 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS \ No newline at end of file diff --git a/pipenv/vendor/requirementslib/LICENSE.BSD b/pipenv/vendor/requirementslib/LICENSE.BSD deleted file mode 100644 index 698fc43e20..0000000000 --- a/pipenv/vendor/requirementslib/LICENSE.BSD +++ /dev/null @@ -1,23 +0,0 @@ -Copyright (c) Kenneth Reitz and individual contributors. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - 1. 
Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - - 2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/pipenv/vendor/requirementslib/__init__.py b/pipenv/vendor/requirementslib/__init__.py index eeb970c40d..faba3b44ce 100644 --- a/pipenv/vendor/requirementslib/__init__.py +++ b/pipenv/vendor/requirementslib/__init__.py @@ -1,5 +1,5 @@ # -*- coding=utf-8 -*- -__version__ = "1.0.11" +__version__ = '1.1.2' from .exceptions import RequirementError diff --git a/pipenv/vendor/requirementslib/_compat.py b/pipenv/vendor/requirementslib/_compat.py deleted file mode 100644 index 52b35565fe..0000000000 --- a/pipenv/vendor/requirementslib/_compat.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding=utf-8 -*- -import importlib -import six - -# Use these imports as compatibility imports -try: - from pathlib import Path -except ImportError: - from pathlib2 import Path - -try: - from urllib.parse import urlparse, unquote -except ImportError: - from urlparse import urlparse, unquote - -if six.PY2: - - class FileNotFoundError(IOError): - pass - - -else: - - class FileNotFoundError(FileNotFoundError): - pass - - -def do_import(module_path, subimport=None, old_path=None): - internal = "pip._internal.{0}".format(module_path) - old_path = old_path or module_path - pip9 = "pip.{0}".format(old_path) - try: - _tmp = importlib.import_module(internal) - except ImportError: - _tmp = importlib.import_module(pip9) - if subimport: - return getattr(_tmp, subimport, _tmp) - return _tmp - - -InstallRequirement = do_import("req.req_install", "InstallRequirement") -user_cache_dir = do_import("utils.appdirs", "user_cache_dir") -FAVORITE_HASH = do_import("utils.hashes", "FAVORITE_HASH") -is_file_url = do_import("download", "is_file_url") -url_to_path = do_import("download", "url_to_path") -path_to_url = do_import("download", "path_to_url") -is_archive_file = do_import("download", "is_archive_file") -_strip_extras = do_import("req.req_install", "_strip_extras") -Link = do_import("index", "Link") -Wheel = do_import("wheel", "Wheel") -is_installable_file = do_import("utils.misc", 
"is_installable_file", old_path="utils") -is_installable_dir = do_import("utils.misc", "is_installable_dir", old_path="utils") -make_abstract_dist = do_import( - "operations.prepare", "make_abstract_dist", old_path="req.req_set" -) -VcsSupport = do_import("vcs", "VcsSupport") diff --git a/pipenv/vendor/requirementslib/_vendor/__init__.py b/pipenv/vendor/requirementslib/_vendor/__init__.py deleted file mode 100644 index a6bcbda15d..0000000000 --- a/pipenv/vendor/requirementslib/_vendor/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# -*- coding=utf-8 -*- diff --git a/pipenv/vendor/requirementslib/_vendor/pipfile/LICENSE b/pipenv/vendor/requirementslib/_vendor/pipfile/LICENSE deleted file mode 100644 index 6f62d44e4e..0000000000 --- a/pipenv/vendor/requirementslib/_vendor/pipfile/LICENSE +++ /dev/null @@ -1,3 +0,0 @@ -This software is made available under the terms of *either* of the licenses -found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made -under the terms of *both* these licenses. diff --git a/pipenv/vendor/requirementslib/_vendor/pipfile/LICENSE.APACHE b/pipenv/vendor/requirementslib/_vendor/pipfile/LICENSE.APACHE deleted file mode 100644 index 4947287f7b..0000000000 --- a/pipenv/vendor/requirementslib/_vendor/pipfile/LICENSE.APACHE +++ /dev/null @@ -1,177 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS \ No newline at end of file diff --git a/pipenv/vendor/requirementslib/_vendor/pipfile/LICENSE.BSD b/pipenv/vendor/requirementslib/_vendor/pipfile/LICENSE.BSD deleted file mode 100644 index 698fc43e20..0000000000 --- a/pipenv/vendor/requirementslib/_vendor/pipfile/LICENSE.BSD +++ /dev/null @@ -1,23 +0,0 @@ -Copyright (c) Kenneth Reitz and individual contributors. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - 1. 
Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - - 2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/pipenv/vendor/requirementslib/_vendor/pipfile/__about__.py b/pipenv/vendor/requirementslib/_vendor/pipfile/__about__.py deleted file mode 100644 index 3ba7219186..0000000000 --- a/pipenv/vendor/requirementslib/_vendor/pipfile/__about__.py +++ /dev/null @@ -1,21 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
-from __future__ import absolute_import, division, print_function - -__all__ = [ - "__title__", "__summary__", "__uri__", "__version__", "__author__", - "__email__", "__license__", "__copyright__", -] - -__title__ = "pipfile" -__summary__ = "" -__uri__ = "https://github.com/pypa/pipfile" - -__version__ = "0.0.2" - -__author__ = "Kenneth Reitz and individual contributors" -__email__ = "me@kennethreitz.org" - -__license__ = "BSD or Apache License, Version 2.0" -__copyright__ = "Copyright 2017 %s" % __author__ diff --git a/pipenv/vendor/requirementslib/_vendor/pipfile/__init__.py b/pipenv/vendor/requirementslib/_vendor/pipfile/__init__.py deleted file mode 100644 index fddd4f90b9..0000000000 --- a/pipenv/vendor/requirementslib/_vendor/pipfile/__init__.py +++ /dev/null @@ -1,11 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import, division, print_function - -from .__about__ import ( - __author__, __copyright__, __email__, __license__, __summary__, __title__, - __uri__, __version__ -) - -from .api import load, Pipfile diff --git a/pipenv/vendor/requirementslib/_vendor/pipfile/api.py b/pipenv/vendor/requirementslib/_vendor/pipfile/api.py deleted file mode 100644 index e8fa0277a3..0000000000 --- a/pipenv/vendor/requirementslib/_vendor/pipfile/api.py +++ /dev/null @@ -1,230 +0,0 @@ -import toml - -import codecs -import json -import hashlib -import platform -import six -import sys -import os - - -DEFAULT_SOURCE = { - u'url': u'https://pypi.org/simple', - u'verify_ssl': True, - u'name': u'pypi', -} - - -def format_full_version(info): - version = '{0.major}.{0.minor}.{0.micro}'.format(info) - kind = info.releaselevel - if kind != 'final': - version += kind[0] + str(info.serial) - return version - - -def walk_up(bottom): - """mimic os.walk, but walk 'up' instead of down the directory tree. 
- From: https://gist.github.com/zdavkeos/1098474 - """ - - bottom = os.path.realpath(bottom) - - # get files in current dir - try: - names = os.listdir(bottom) - except Exception: - return - - dirs, nondirs = [], [] - for name in names: - if os.path.isdir(os.path.join(bottom, name)): - dirs.append(name) - else: - nondirs.append(name) - - yield bottom, dirs, nondirs - - new_path = os.path.realpath(os.path.join(bottom, '..')) - - # see if we are at the top - if new_path == bottom: - return - - for x in walk_up(new_path): - yield x - - -class PipfileParser(object): - def __init__(self, filename='Pipfile'): - self.filename = filename - self.sources = [] - self.groups = { - 'default': [], - 'develop': [] - } - self.group_stack = ['default'] - self.requirements = [] - - def __repr__(self): - return '= packaging.version.parse('18'): + with RequirementTracker() as req_tracker: + preparer = preparer(req_tracker=req_tracker) + yield resolver(preparer=preparer) + else: + preparer = preparer() + yield resolver(preparer=preparer) + + +def get_grouped_dependencies(constraints): + # We need to track what contributed a specifierset + # as well as which specifiers were required by the root node + # in order to resolve any conflicts when we are deciding which thing to backtrack on + # then we take the loose match (which _is_ flexible) and start moving backwards in + # versions by popping them off of a stack and checking for the conflicting package + for _, ireqs in full_groupby(constraints, key=key_from_ireq): + ireqs = list(ireqs) + editable_ireq = first(ireqs, key=lambda ireq: ireq.editable) + if editable_ireq: + yield editable_ireq # ignore all the other specs: the editable one is the one that counts + continue + ireqs = iter(ireqs) + # deepcopy the accumulator so as to not modify the self.our_constraints invariant + combined_ireq = copy.deepcopy(next(ireqs)) + for ireq in ireqs: + # NOTE we may be losing some info on dropped reqs here + try: + combined_ireq.req.specifier &= 
ireq.req.specifier + except TypeError: + if ireq.req.specifier._specs and not combined_ireq.req.specifier._specs: + combined_ireq.req.specifier._specs = ireq.req.specifier._specs + combined_ireq.constraint &= ireq.constraint + if not combined_ireq.markers: + combined_ireq.markers = ireq.markers + else: + _markers = combined_ireq.markers._markers + if not isinstance(_markers[0], (tuple, list)): + combined_ireq.markers._markers = [ + _markers, + "and", + ireq.markers._markers, + ] + # Return a sorted, de-duped tuple of extras + combined_ireq.extras = tuple( + sorted(set(tuple(combined_ireq.extras) + tuple(ireq.extras))) + ) + yield combined_ireq diff --git a/pipenv/vendor/requirementslib/models/lockfile.py b/pipenv/vendor/requirementslib/models/lockfile.py index c04ae021fd..92055d6e2c 100644 --- a/pipenv/vendor/requirementslib/models/lockfile.py +++ b/pipenv/vendor/requirementslib/models/lockfile.py @@ -1,24 +1,54 @@ # -*- coding: utf-8 -*- from __future__ import absolute_import -import attr + import json +import os + +import plette.lockfiles +import six + +from vistir.compat import Path +from vistir.contextmanagers import atomic_open_for_write + from .requirements import Requirement -from .utils import optional_instance_of -from .._compat import Path, FileNotFoundError -@attr.s -class Lockfile(object): - dev_requirements = attr.ib(default=attr.Factory(list)) - requirements = attr.ib(default=attr.Factory(list)) - path = attr.ib(default=None, validator=optional_instance_of(Path)) - pipfile_hash = attr.ib(default=None) +DEFAULT_NEWLINES = u"\n" + + +def preferred_newlines(f): + if isinstance(f.newlines, six.text_type): + return f.newlines + return DEFAULT_NEWLINES + + +class Lockfile(plette.lockfiles.Lockfile): + def __init__(self, *args, **kwargs): + path = kwargs.pop("path", None) + self.requirements = kwargs.pop("requirements", []) + self.dev_requirements = kwargs.pop("dev_requirements", []) + self.path = Path(path) if path else None + self.newlines = u"\n" + 
super(Lockfile, self).__init__(*args, **kwargs) + + @classmethod + def load(cls, path): + if not path: + path = os.curdir + path = Path(path).absolute() + if path.is_dir(): + path = path / "Pipfile.lock" + elif path.name == "Pipfile": + path = path.parent / "Pipfile.lock" + if not path.exists(): + raise OSError("Path does not exist: %s" % path) + return cls.create(path.parent, lockfile_name=path.name) @classmethod def create(cls, project_path, lockfile_name="Pipfile.lock"): """Create a new lockfile instance - :param project_path: Path to the project root + :param project_path: Path to project root :type project_path: str or :class:`~pathlib.Path` :returns: List[:class:`~requirementslib.Requirement`] objects """ @@ -28,20 +58,30 @@ def create(cls, project_path, lockfile_name="Pipfile.lock"): lockfile_path = project_path / lockfile_name requirements = [] dev_requirements = [] - if not lockfile_path.exists(): - raise FileNotFoundError("No such lockfile: %s" % lockfile_path) - with lockfile_path.open(encoding="utf-8") as f: - lockfile = json.loads(f.read()) + lockfile = super(Lockfile, cls).load(f) + lockfile.newlines = preferred_newlines(f) for k in lockfile["develop"].keys(): - dev_requirements.append(Requirement.from_pipfile(k, lockfile["develop"][k])) + dev_requirements.append(Requirement.from_pipfile(k, lockfile.develop[k]._data)) for k in lockfile["default"].keys(): - requirements.append(Requirement.from_pipfile(k, lockfile["default"][k])) - return cls( - path=lockfile_path, - requirements=requirements, - dev_requirements=dev_requirements, - ) + requirements.append(Requirement.from_pipfile(k, lockfile.default[k]._data)) + lockfile.requirements = requirements + lockfile.dev_requirements = dev_requirements + lockfile.path = lockfile_path + return lockfile + + @property + def dev_requirements_list(self): + return [r.as_pipfile() for r in self.dev_requirements] + + @property + def requirements_list(self): + return [r.as_pipfile() for r in self.requirements] + + def 
write(self): + open_kwargs = {"newline": self.newlines} + with atomic_open_for_write(self.path.as_posix(), **open_kwargs) as f: + super(Lockfile, self).dump(f, encoding="utf-8") def as_requirements(self, include_hashes=False, dev=False): """Returns a list of requirements in pip-style format""" diff --git a/pipenv/vendor/requirementslib/models/markers.py b/pipenv/vendor/requirementslib/models/markers.py index 70adc21f8c..534978ed40 100644 --- a/pipenv/vendor/requirementslib/models/markers.py +++ b/pipenv/vendor/requirementslib/models/markers.py @@ -1,10 +1,11 @@ # -*- coding: utf-8 -*- import attr -import six -from packaging.markers import Marker, InvalidMarker -from .baserequirement import BaseRequirement -from .utils import validate_markers, filter_none + +from packaging.markers import InvalidMarker, Marker + from ..exceptions import RequirementError +from .baserequirement import BaseRequirement +from .utils import filter_none, validate_markers @attr.s @@ -84,10 +85,10 @@ def from_pipfile(cls, name, pipfile): markers = [] for marker in marker_strings: markers.append(marker) - marker = '' + combined_marker = None try: - marker = cls.make_marker(" and ".join(markers)) + combined_marker = cls.make_marker(" and ".join(markers)) except RequirementError: pass else: - return marker + return combined_marker diff --git a/pipenv/vendor/requirementslib/models/pipfile.py b/pipenv/vendor/requirementslib/models/pipfile.py index af67752a29..f952252498 100644 --- a/pipenv/vendor/requirementslib/models/pipfile.py +++ b/pipenv/vendor/requirementslib/models/pipfile.py @@ -1,100 +1,13 @@ # -*- coding: utf-8 -*- -import attr -import contoml import os -import toml -from .._vendor import pipfile -from .requirements import Requirement -from .utils import optional_instance_of, filter_none -from .._compat import Path, FileNotFoundError -from ..exceptions import RequirementError - - -@attr.s -class Source(object): - #: URL to PyPI instance - url = attr.ib(default="pypi") - #: If False, skip 
SSL checks - verify_ssl = attr.ib(default=True, validator=optional_instance_of(bool)) - #: human name to refer to this source (can be referenced in packages or dev-packages) - name = attr.ib(default="") - - def get_dict(self): - return attr.asdict(self) - - @property - def expanded(self): - source_dict = attr.asdict(self).copy() - source_dict["url"] = os.path.expandvars(source_dict.get("url")) - return source_dict - - -@attr.s -class Section(object): - ALLOWED_NAMES = ("packages", "dev-packages") - #: Name of the pipfile section - name = attr.ib(default="packages") - #: A list of requirements that are contained by the section - requirements = attr.ib(default=list) - - def get_dict(self): - _dict = {} - for req in self.requirements: - _dict.update(req.as_pipfile()) - return {self.name: _dict} - - @property - def vcs_requirements(self): - return [req for req in self.requirements if req.is_vcs] - - @property - def editable_requirements(self): - return [req for req in self.requirements if req.editable] - -@attr.s -class RequiresSection(object): - python_version = attr.ib(default=None) - python_full_version = attr.ib(default=None) - - def get_dict(self): - requires = attr.asdict(self, filter=filter_none) - if not requires: - return {} - return {"requires": requires} - - -@attr.s -class PipenvSection(object): - allow_prereleases = attr.ib(default=False) - - def get_dict(self): - if self.allow_prereleases: - return {"pipenv": attr.asdict(self)} - return {} +from vistir.compat import Path +from .requirements import Requirement +import plette.pipfiles -@attr.s -class Pipfile(object): - #: Path to the pipfile - path = attr.ib(default=None, converter=Path, validator=optional_instance_of(Path)) - #: Sources listed in the pipfile - sources = attr.ib(default=attr.Factory(list)) - #: Sections contained by the pipfile - sections = attr.ib(default=attr.Factory(list)) - #: Scripts found in the pipfile - scripts = attr.ib(default=attr.Factory(dict)) - #: This section stores 
information about what python version is required - requires = attr.ib(default=attr.Factory(RequiresSection)) - #: This section stores information about pipenv such as prerelease requirements - pipenv = attr.ib(default=attr.Factory(PipenvSection)) - #: This is the sha256 hash of the pipfile (without environment interpolation) - pipfile_hash = attr.ib() - @pipfile_hash.default - def get_hash(self): - p = pipfile.load(self.path.as_posix(), inject_env=False) - return p.hash +class Pipfile(plette.pipfiles.Pipfile): @property def requires_python(self): @@ -102,50 +15,7 @@ def requires_python(self): @property def allow_prereleases(self): - return self.pipenv.allow_prereleases - - def get_sources(self): - """Return a dictionary with a list of dictionaries of pipfile sources""" - _dict = {} - for src in self.sources: - _dict.update(src.get_dict()) - return {"source": _dict} if _dict else {} - - def get_sections(self): - """Return a dictionary with both pipfile sections and requirements""" - _dict = {} - for section in self.sections: - _dict.update(section.get_dict()) - return _dict - - def get_pipenv(self): - pipenv_dict = self.pipenv.get_dict() - if pipenv_dict: - return pipenv_dict - - def get_requires(self): - req_dict = self.requires.get_dict() - return req_dict if req_dict else {} - - def get_dict(self): - _dict = attr.asdict(self, recurse=False) - for k in ["path", "pipfile_hash", "sources", "sections", "requires", "pipenv"]: - if k in _dict: - _dict.pop(k) - return _dict - - def dump(self, to_dict=False): - """Dumps the pipfile to a toml string - """ - - _dict = self.get_sources() - _dict.update(self.get_sections()) - _dict.update(self.get_dict()) - _dict.update(self.get_pipenv()) - _dict.update(self.get_requires()) - if to_dict: - return _dict - return contoml.dumps(_dict) + return self.get("pipenv", {}).get("allow_prereleases", False) @classmethod def load(cls, path): @@ -156,34 +26,40 @@ def load(cls, path): raise FileNotFoundError("%s is not a valid project 
path!" % path) elif not pipfile_path.exists() or not pipfile_path.is_file(): raise RequirementError("%s is not a valid Pipfile" % pipfile_path) - pipfile_dict = toml.load(pipfile_path.as_posix()) - sections = [cls.get_section(pipfile_dict, s) for s in Section.ALLOWED_NAMES] - pipenv = pipfile_dict.get("pipenv", {}) - requires = pipfile_dict.get("requires", {}) - creation_dict = { - "path": pipfile_path, - "sources": [Source(**src) for src in pipfile_dict.get("source", [])], - "sections": sections, - "scripts": pipfile_dict.get("scripts"), - } - if requires: - creation_dict["requires"] = RequiresSection(**requires) - if pipenv: - creation_dict["pipenv"] = PipenvSection(**pipenv) - return cls(**creation_dict) + with pipfile_path.open(encoding="utf-8") as fp: + pipfile = super(Pipfile, cls).load(fp) + pipfile.dev_requirements = [ + Requirement.from_pipfile(k, v) for k, v in pipfile.dev_packages.items() + ] + pipfile.requirements = [ + Requirement.from_pipfile(k, v) for k, v in pipfile.packages.items() + ] + pipfile.path = pipfile_path + return pipfile + + # def resolve(self): + # It would be nice to still use this api someday + # option_sources = [s.expanded for s in self.sources] + # pip_args = [] + # if self.pipenv.allow_prereleases: + # pip_args.append('--pre') + # pip_options = get_pip_options(pip_args, sources=option_sources) + # finder = get_finder(sources=option_sources, pip_options=pip_options) + # resolver = DependencyResolver.create(finder=finder, allow_prereleases=self.pipenv.allow_prereleases) + # pkg_dict = {} + # for pkg in self.dev_packages.requirements + self.packages.requirements: + # pkg_dict[pkg.name] = pkg + # resolver.resolve(list(pkg_dict.values())) + # return resolver - @staticmethod - def get_section(pf_dict, section): - """Get section objects from a pipfile dictionary + @property + def dev_packages(self, as_requirements=True): + if as_requirements: + return self.dev_requirements + return self.dev_packages - :param pf_dict: A toml loaded 
pipfile dictionary - :type pf_dict: dict - :returns: Section objects - """ - sect = pf_dict.get(section) - requirements = [] - if section not in Section.ALLOWED_NAMES: - raise ValueError("Not a valid pipfile section name: %s" % section) - for name, pf_entry in sect.items(): - requirements.append(Requirement.from_pipfile(name, pf_entry)) - return Section(name=section, requirements=requirements) + @property + def packages(self, as_requirements=True): + if as_requirements: + return self.requirements + return self.packages diff --git a/pipenv/vendor/requirementslib/models/requirements.py b/pipenv/vendor/requirementslib/models/requirements.py index ff3bba73f5..1baaa265ac 100644 --- a/pipenv/vendor/requirementslib/models/requirements.py +++ b/pipenv/vendor/requirementslib/models/requirements.py @@ -1,55 +1,46 @@ # -*- coding: utf-8 -*- from __future__ import absolute_import -import attr import collections import hashlib import os -import requirements + +import attr +import atexit from first import first +from packaging.markers import Marker +from packaging.specifiers import Specifier, SpecifierSet +from packaging.utils import canonicalize_name from six.moves.urllib import parse as urllib_parse +from six.moves.urllib.parse import unquote + +from pip_shims.shims import ( + InstallRequirement, Link, Wheel, _strip_extras, parse_version, path_to_url, + url_to_path +) +from vistir.compat import FileNotFoundError, Path, TemporaryDirectory +from vistir.misc import dedup +from vistir.path import get_converted_relative_path, is_valid_url, is_file_url, mkdir_p +from ..exceptions import RequirementError +from ..utils import VCS_LIST, is_vcs, is_installable_file from .baserequirement import BaseRequirement +from .dependencies import ( + AbstractDependency, find_all_matches, get_abstract_dependencies, + get_dependencies, get_finder +) from .markers import PipenvMarkers from .utils import ( - HASH_STRING, - extras_to_string, - get_version, - specs_to_string, - validate_specifiers, - 
validate_path, - validate_vcs, - build_vcs_link, - add_ssh_scheme_to_git_uri, - strip_ssh_from_git_uri, - split_vcs_method_from_uri, - filter_none, - optional_instance_of, - split_markers_from_line, - parse_extras, -) -from .._compat import ( - Link, - path_to_url, - url_to_path, - _strip_extras, - InstallRequirement, - Path, - urlparse, - unquote, - Wheel, - FileNotFoundError, -) -from ..exceptions import RequirementError -from ..utils import ( - VCS_LIST, - is_installable_file, - is_vcs, - is_valid_url, - pep423_name, - get_converted_relative_path, + HASH_STRING, add_ssh_scheme_to_git_uri, build_vcs_link, filter_none, + format_requirement, get_version, init_requirement, + is_pinned_requirement, make_install_requirement, optional_instance_of, + parse_extras, specs_to_string, split_markers_from_line, + split_vcs_method_from_uri, strip_ssh_from_git_uri, validate_path, + validate_specifiers, validate_vcs, extras_to_string ) +from .vcs import VCSRepository +from packaging.requirements import Requirement as PackagingRequirement @attr.s @@ -58,25 +49,31 @@ class NamedRequirement(BaseRequirement): version = attr.ib(validator=attr.validators.optional(validate_specifiers)) req = attr.ib() extras = attr.ib(default=attr.Factory(list)) + editable = attr.ib(default=False) @req.default def get_requirement(self): - from pkg_resources import RequirementParseError - try: - req = first(requirements.parse("{0}{1}".format(self.name, self.version))) - except RequirementParseError: - raise RequirementError( - "Error parsing requirement: %s%s" % (self.name, self.version) - ) + req = init_requirement("{0}{1}".format(canonicalize_name(self.name), self.version)) return req @classmethod def from_line(cls, line): - req = first(requirements.parse(line)) + req = init_requirement(line) specifiers = None if req.specifier: - specifiers = specs_to_string(req.specs) - return cls(name=req.name, version=specifiers, req=req) + specifiers = specs_to_string(req.specifier) + req.line = line + name = 
getattr(req, "name", None) + if not name: + name = getattr(req, "project_name", None) + req.name = name + if not name: + name = getattr(req, "key", line) + req.name = name + extras = None + if req.extras: + extras = list(req.extras) + return cls(name=name, version=specifiers, req=req, extras=extras) @classmethod def from_pipfile(cls, name, pipfile): @@ -85,13 +82,17 @@ def from_pipfile(cls, name, pipfile): creation_args = {k: v for k, v in pipfile.items() if k in cls.attr_fields()} creation_args["name"] = name version = get_version(pipfile) + extras = creation_args.get("extras", None) creation_args["version"] = version - creation_args["req"] = first(requirements.parse("{0}{1}".format(name, version))) + req = init_requirement("{0}{1}".format(name, version)) + if extras: + req.extras += tuple(extras,) + creation_args["req"] = req return cls(**creation_args) @property def line_part(self): - return "{self.name}".format(self=self) + return "{0}".format(canonicalize_name(self.name)) @property def pipfile_part(self): @@ -243,17 +244,17 @@ def get_name(self): and self.setup_path and self.setup_path.exists() ): - from distutils.core import run_setup + from setuptools.dist import distutils old_curdir = os.path.abspath(os.getcwd()) try: os.chdir(str(self.setup_path.parent)) - dist = run_setup(self.setup_path.as_posix(), stop_after="init") + dist = distutils.core.run_setup(self.setup_path.as_posix()) name = dist.get_name() except (FileNotFoundError, IOError) as e: dist = None except Exception as e: - from .._compat import InstallRequirement, make_abstract_dist + from pip_shims.shims import InstallRequirement, make_abstract_dist try: if not isinstance(Path, self.path): @@ -289,14 +290,18 @@ def get_link(self): @req.default def get_requirement(self): - prefix = "-e " if self.editable else "" - line = "{0}{1}".format(prefix, self.link.url) - req = first(requirements.parse(line)) + req = init_requirement(canonicalize_name(self.name)) + req.editable = False + req.line = 
self.link.url_without_fragment if self.path and self.link and self.link.scheme.startswith("file"): req.local_file = True req.path = self.path - req.uri = None + req.url = None self._uri_scheme = "file" + else: + req.local_file = False + req.path = None + req.url = self.link.url_without_fragment if self.editable: req.editable = True req.link = self.link @@ -330,7 +335,7 @@ def from_line(cls, line): editable = line.startswith("-e ") line = line.split(" ", 1)[1] if editable else line setup_path = None - if not any([is_installable_file(line), is_valid_url(line)]): + if not any([is_installable_file(line), is_valid_url(line), is_file_url(line)]): raise RequirementError( "Supplied requirement is not installable: {0!r}".format(line) ) @@ -473,17 +478,6 @@ class VCSRequirement(FileRequirement): name = attr.ib() link = attr.ib() req = attr.ib() - _INCLUDE_FIELDS = ( - "editable", - "uri", - "path", - "vcs", - "ref", - "subdirectory", - "name", - "link", - "req", - ) def __attrs_post_init__(self): split = urllib_parse.urlsplit(self.uri) @@ -522,16 +516,66 @@ def vcs_uri(self): uri = "{0}+{1}".format(self.vcs, uri) return uri + def get_commit_hash(self, src_dir=None): + src_dir = os.environ.get('SRC_DIR', None) if not src_dir else src_dir + if not src_dir: + _src_dir = TemporaryDirectory() + atexit.register(_src_dir.cleanup) + src_dir = _src_dir.name + checkout_dir = Path(src_dir).joinpath(self.name).as_posix() + vcsrepo = VCSRepository( + url=self.link.url, + name=self.name, + ref=self.ref if self.ref else None, + checkout_directory=checkout_dir, + vcs_type=self.vcs + ) + vcsrepo.obtain() + return vcsrepo.get_commit_hash() + + def update_repo(self, src_dir=None, ref=None): + src_dir = os.environ.get('SRC_DIR', None) if not src_dir else src_dir + if not src_dir: + _src_dir = TemporaryDirectory() + atexit.register(_src_dir.cleanup) + src_dir = _src_dir.name + checkout_dir = Path(src_dir).joinpath(self.name).as_posix() + ref = self.ref if not ref else ref + vcsrepo = 
VCSRepository( + url=self.link.url, + name=self.name, + ref=ref if ref else None, + checkout_directory=checkout_dir, + vcs_type=self.vcs + ) + if not os.path.exists(checkout_dir): + vcsrepo.obtain() + else: + vcsrepo.update() + return vcsrepo.get_commit_hash() + @req.default def get_requirement(self): - prefix = "-e " if self.editable else "" - line = "{0}{1}".format(prefix, self.link.url) - req = first(requirements.parse(line)) + name = self.name or self.link.egg_fragment + url = self.uri or self.link.url_without_fragment + if not name: + raise ValueError( + "pipenv requires an #egg fragment for version controlled " + "dependencies. Please install remote dependency " + "in the form {0}#egg=.".format(url) + ) + req = init_requirement(canonicalize_name(self.name)) + req.editable = self.editable + req.url = self.uri + req.line = self.link.url + if self.ref: + req.revision = self.ref + if self.extras: + req.extras = self.extras + req.vcs = self.vcs if self.path and self.link and self.link.scheme.startswith("file"): req.local_file = True req.path = self.path - if self.editable: - req.editable = True req.link = self.link if ( self.uri != unquote(self.link.url_without_fragment) @@ -539,19 +583,7 @@ def get_requirement(self): and "git+git@" in self.uri ): req.line = self.uri - req.uri = self.uri - if not req.name: - raise ValueError( - "pipenv requires an #egg fragment for version controlled " - "dependencies. 
Please install remote dependency " - "in the form {0}#egg=.".format(req.uri) - ) - if self.vcs and not req.vcs: - req.vcs = self.vcs - if self.ref and not req.revision: - req.revision = self.ref - if self.extras and not req.extras: - req.extras = self.extras + req.url = self.uri return req @classmethod @@ -564,6 +596,10 @@ def from_pipfile(cls, name, pipfile): if k in pipfile ] for key in pipfile_keys: + if key == "extras": + extras = pipfile.get(key, None) + if extras: + pipfile[key] = sorted(dedup([extra.lower() for extra in extras])) if key in VCS_LIST: creation_args["vcs"] = key composed_uri = add_ssh_scheme_to_git_uri( @@ -612,8 +648,12 @@ def from_line(cls, line, editable=None, extras=None): def line_part(self): """requirements.txt compatible line part sans-extras""" if self.req: - return self.req.line - base = "{0}".format(self.link) + base = self.req.line + if base and self.extras and not extras_to_string(self.extras) in base: + if self.subdirectory: + base = "{0}".format(self.get_link().url) + else: + base = "{0}{1}".format(base, extras_to_string(sorted(self.extras))) if self.editable: base = "-e {0}".format(base) return base @@ -650,8 +690,8 @@ class Requirement(object): editable = attr.ib(default=None) hashes = attr.ib(default=attr.Factory(list), converter=list) extras = attr.ib(default=attr.Factory(list)) + abstract_dep = attr.ib(default=None) _ireq = None - _INCLUDE_FIELDS = ("name", "markers", "index", "editable", "hashes", "extras") @name.default def get_name(self): @@ -678,14 +718,20 @@ def markers_as_pip(self): @property def extras_as_pip(self): if self.extras: - return "[{0}]".format(",".join(self.extras)) + return "[{0}]".format(",".join(sorted([extra.lower() for extra in self.extras]))) return "" + @property + def commit_hash(self): + if not self.is_vcs: + return None + return self.req.get_commit_hash() + @specifiers.default def get_specifiers(self): if self.req and self.req.req.specifier: - return specs_to_string(self.req.req.specs) + return 
specs_to_string(self.req.req.specifier) return @property @@ -702,10 +748,15 @@ def is_named(self): @property def normalized_name(self): - return pep423_name(self.name) + return canonicalize_name(self.name) + + def copy(self): + return attr.evolve(self) @classmethod def from_line(cls, line): + if isinstance(line, InstallRequirement): + line = format_requirement(line) hashes = None if "--hash=" in line: hashes = line.split(" --hash=") @@ -714,6 +765,7 @@ def from_line(cls, line): line = line.split(" ", 1)[1] if editable else line line, markers = split_markers_from_line(line) line, extras = _strip_extras(line) + specifiers = '' if extras: extras = parse_extras(extras) line = line.strip('"').strip("'").strip() @@ -721,9 +773,10 @@ def from_line(cls, line): vcs = None # Installable local files and installable non-vcs urls are handled # as files, generally speaking - if is_installable_file(line) or (is_valid_url(line) and not is_vcs(line)): + line_is_vcs = is_vcs(line) + if is_installable_file(line) or ((is_file_url(line) or is_valid_url(line)) and not line_is_vcs): r = FileRequirement.from_line(line_with_prefix) - elif is_vcs(line): + elif line_is_vcs: r = VCSRequirement.from_line(line_with_prefix, extras=extras) vcs = r.vcs elif line == "." 
and not is_installable_file(line): @@ -739,6 +792,7 @@ def from_line(cls, line): spec_idx = min((line.index(match) for match in spec_matches)) name = line[:spec_idx] version = line[spec_idx:] + specifiers = version if not extras: name, extras = _strip_extras(name) if extras: @@ -746,8 +800,19 @@ def from_line(cls, line): if version: name = "{0}{1}".format(name, version) r = NamedRequirement.from_line(line) + req_markers = None if markers: - r.req.markers = markers + req_markers = PackagingRequirement("fakepkg; {0}".format(markers)) + r.req.marker = getattr(req_markers, "marker", None) + r.req.local_file = getattr(r.req, "local_file", False) + name = getattr(r.req, "name", None) + if not name: + name = getattr(r.req, "project_name", None) + r.req.name = name + if not name: + name = getattr(r.req, "key", None) + if name: + r.req.name = name args = { "name": r.name, "vcs": vcs, @@ -756,15 +821,26 @@ def from_line(cls, line): "editable": editable, } if extras: + extras = sorted(dedup([extra.lower() for extra in extras])) args["extras"] = extras r.req.extras = extras r.extras = extras elif r.extras: - args["extras"] = r.extras + args["extras"] = sorted(dedup([extra.lower() for extra in r.extras])) if hashes: args["hashes"] = hashes return cls(**args) + @classmethod + def from_ireq(cls, ireq): + return cls.from_line(format_requirement(ireq)) + + @classmethod + def from_metadata(cls, name, version, extras, markers): + return cls.from_ireq(make_install_requirement( + name, version, extras=extras, markers=markers, + )) + @classmethod def from_pipfile(cls, name, pipfile): _pipfile = {} @@ -780,8 +856,14 @@ def from_pipfile(cls, name, pipfile): else: r = NamedRequirement.from_pipfile(name, pipfile) markers = PipenvMarkers.from_pipfile(name, _pipfile) + req_markers = None if markers: markers = str(markers) + req_markers = PackagingRequirement("fakepkg; {0}".format(markers)) + r.req.marker = getattr(req_markers, "marker", None) + r.req.specifier = 
SpecifierSet(_pipfile["version"]) + extras = _pipfile.get("extras") + r.req.extras = sorted(dedup([extra.lower() for extra in extras])) if extras else [] args = { "name": r.name, "vcs": vcs, @@ -793,9 +875,12 @@ def from_pipfile(cls, name, pipfile): } if any(key in _pipfile for key in ["hash", "hashes"]): args["hashes"] = _pipfile.get("hashes", [pipfile.get("hash")]) - return cls(**args) + cls_inst = cls(**args) + if cls_inst.is_named: + cls_inst.req.req.line = cls_inst.as_line() + return cls_inst - def as_line(self, sources=None): + def as_line(self, sources=None, include_hashes=True, include_extras=True): """Format this requirement as a line in requirements.txt. If `sources` provided, it should be an sequence of mappings, containing @@ -804,22 +889,52 @@ def as_line(self, sources=None): If `sources` is omitted or falsy, no index information will be included in the requirement line. """ - line = "{0}{1}{2}{3}{4}".format( + if self.is_vcs: + include_extras = False + parts = [ self.req.line_part, - self.extras_as_pip if not self.is_vcs else "", + self.extras_as_pip if include_extras else "", self.specifiers if self.specifiers else "", self.markers_as_pip, - self.hashes_as_pip, - ) + ] + if include_hashes: + parts.append(self.hashes_as_pip) if sources and not (self.requirement.local_file or self.vcs): from ..utils import prepare_pip_source_args if self.index: sources = [s for s in sources if s.get("name") == self.index] index_string = " ".join(prepare_pip_source_args(sources)) - line = "{0} {1}".format(line, index_string) + parts.extend([" ", index_string]) + line = "".join(parts) return line + def get_markers(self): + markers = self.markers + if markers: + fake_pkg = PackagingRequirement('fakepkg; {0}'.format(markers)) + markers = fake_pkg.markers + return markers + + def get_specifier(self): + return Specifier(self.specifiers) + + def get_version(self): + return parse_version(self.get_specifier().version) + + def get_requirement(self): + req_line = 
self.req.req.line + if req_line.startswith('-e '): + _, req_line = req_line.split(" ", 1) + req = init_requirement(self.name) + req.line = req_line + req.specifier = SpecifierSet(self.specifiers if self.specifiers else '') + if self.is_vcs or self.is_file_or_url: + req.url = self.req.link.url_without_fragment + req.marker = self.get_markers() + req.extras = set(self.extras) if self.extras else set() + return req + @property def constraint_line(self): return self.as_line() @@ -839,6 +954,8 @@ def as_pipfile(self): if k in good_keys } name = self.name + if 'markers' in req_dict and req_dict['markers']: + req_dict['markers'] = req_dict['markers'].replace('"', "'") base_dict = { k: v for k, v in self.req.pipfile_part[name].items() @@ -850,23 +967,103 @@ def as_pipfile(self): conflicts = [k for k in (conflicting_keys[1:],) if k in base_dict] for k in conflicts: base_dict.pop(k) - if "hashes" in base_dict and len(base_dict["hashes"]) == 1: - base_dict["hash"] = base_dict.pop("hashes")[0] + if "hashes" in base_dict: + _hashes = base_dict.pop("hashes") + hashes = [] + for _hash in _hashes: + try: + hashes.append(_hash.as_line()) + except AttributeError: + hashes.append(_hash) + base_dict["hashes"] = sorted(hashes) if len(base_dict.keys()) == 1 and "version" in base_dict: base_dict = base_dict.get("version") return {name: base_dict} + def as_ireq(self): + ireq_line = self.as_line(include_hashes=False) + if self.editable or self.req.editable: + if ireq_line.startswith("-e "): + ireq_line = ireq_line[len("-e "):] + ireq = InstallRequirement.from_editable(ireq_line) + else: + ireq = InstallRequirement.from_line(ireq_line) + if not getattr(ireq, "req", None): + ireq.req = self.req.req + else: + ireq.req.extras = self.req.req.extras + ireq.req.marker = self.req.req.marker + return ireq + @property def pipfile_entry(self): return self.as_pipfile().copy().popitem() @property def ireq(self): - if not self._ireq: - ireq_line = self.as_line() - if ireq_line.startswith("-e "): - 
ireq_line = ireq_line[len("-e ") :] - self._ireq = InstallRequirement.from_editable(ireq_line) - else: - self._ireq = InstallRequirement.from_line(ireq_line) - return self._ireq + return self.as_ireq() + + def get_dependencies(self, sources=None): + """Retrieve the dependencies of the current requirement. + + Retrieves dependencies of the current requirement. This only works on pinned + requirements. + + :param sources: Pipfile-formatted sources, defaults to None + :param sources: list[dict], optional + :return: A set of requirement strings of the dependencies of this requirement. + :rtype: set(str) + """ + if not sources: + sources = [{ + 'name': 'pypi', + 'url': 'https://pypi.org/simple', + 'verify_ssl': True, + }] + return get_dependencies(self.as_ireq(), sources=sources) + + def get_abstract_dependencies(self, sources=None): + """Retrieve the abstract dependencies of this requirement. + + Returns the abstract dependencies of the current requirement in order to resolve. + + :param sources: A list of sources (pipfile format), defaults to None + :param sources: list, optional + :return: A list of abstract (unpinned) dependencies + :rtype: list[ :class:`~requirementslib.models.dependency.AbstractDependency` ] + """ + + if not self.abstract_dep: + parent = getattr(self, 'parent', None) + self.abstract_dep = AbstractDependency.from_requirement(self, parent=parent) + if not sources: + sources = [{'url': 'https://pypi.org/simple', 'name': 'pypi', 'verify_ssl': True},] + if is_pinned_requirement(self.ireq): + deps = self.get_dependencies() + else: + ireq = sorted(self.find_all_matches(), key=lambda k: k.version) + deps = get_dependencies(ireq.pop(), sources=sources) + return get_abstract_dependencies(deps, sources=sources, parent=self.abstract_dep) + + def find_all_matches(self, sources=None, finder=None): + """Find all matching candidates for the current requirement. + + Consults a finder to find all matching candidates. 
+ + :param sources: Pipfile-formatted sources, defaults to None + :param sources: list[dict], optional + :return: A list of Installation Candidates + :rtype: list[ :class:`~pip._internal.index.InstallationCandidate` ] + """ + if not finder: + finder = get_finder(sources=sources) + return find_all_matches(finder, self.as_ireq()) + + def merge_markers(self, markers): + if not isinstance(markers, Marker): + markers = Marker(markers) + _markers = set(Marker(self.ireq.markers)) if self.ireq.markers else set(markers) + _markers.add(markers) + new_markers = Marker(" or ".join([str(m) for m in sorted(_markers)])) + self.markers = str(new_markers) + self.req.req.marker = new_markers diff --git a/pipenv/vendor/requirementslib/models/resolvers.py b/pipenv/vendor/requirementslib/models/resolvers.py new file mode 100644 index 0000000000..da6d0dda5f --- /dev/null +++ b/pipenv/vendor/requirementslib/models/resolvers.py @@ -0,0 +1,239 @@ +# -*- coding=utf-8 -*- +from contextlib import contextmanager + +import attr +import six + +from pip_shims.shims import VcsSupport, Wheel + +from ..utils import log +from .cache import HashCache +from .dependencies import AbstractDependency, find_all_matches, get_finder +from .utils import format_requirement, is_pinned_requirement, version_from_ireq + + +class ResolutionError(Exception): + pass + + +@attr.s +class DependencyResolver(object): + pinned_deps = attr.ib(default=attr.Factory(dict)) + #: A dictionary of abstract dependencies by name + dep_dict = attr.ib(default=attr.Factory(dict)) + #: A dictionary of sets of version numbers that are valid for a candidate currently + candidate_dict = attr.ib(default=attr.Factory(dict)) + #: A historical record of pins + pin_history = attr.ib(default=attr.Factory(dict)) + #: Whether to allow prerelease dependencies + allow_prereleases = attr.ib(default=False) + #: Stores hashes for each dependency + hashes = attr.ib(default=attr.Factory(dict)) + #: A hash cache + hash_cache = 
attr.ib(default=attr.Factory(HashCache)) + #: A finder for searching the index + finder = attr.ib(default=None) + #: Whether to include hashes even from incompatible wheels + include_incompatible_hashes = attr.ib(default=True) + #: A cache for storing available canddiates when using all wheels + _available_candidates_cache = attr.ib(default=attr.Factory(dict)) + + @classmethod + def create(cls, finder=None, allow_prereleases=False, get_all_hashes=True): + if not finder: + finder_args = [] + if allow_prereleases: + finder_args.append('--pre') + finder = get_finder(*finder_args) + creation_kwargs = { + 'allow_prereleases': allow_prereleases, + 'include_incompatible_hashes': get_all_hashes, + 'finder': finder, + 'hash_cache': HashCache(), + } + resolver = cls(**creation_kwargs) + return resolver + + @property + def dependencies(self): + return list(self.dep_dict.values()) + + @property + def resolution(self): + return list(self.pinned_deps.values()) + + def add_abstract_dep(self, dep): + """Add an abstract dependency by either creating a new entry or + merging with an old one. + + :param dep: An abstract dependency to add + :type dep: :class:`~requirementslib.models.dependency.AbstractDependency` + :raises ResolutionError: Raised when the given dependency is not compatible with + an existing abstract dependency. + """ + + if dep.name in self.dep_dict: + compatible_versions = self.dep_dict[dep.name].compatible_versions(dep) + if compatible_versions: + self.candidate_dict[dep.name] = compatible_versions + self.dep_dict[dep.name] = self.dep_dict[ + dep.name + ].compatible_abstract_dep(dep) + else: + raise ResolutionError + else: + self.candidate_dict[dep.name] = dep.version_set + self.dep_dict[dep.name] = dep + + def pin_deps(self): + """Pins the current abstract dependencies and adds them to the history dict. + + Adds any new dependencies to the abstract dependencies already present by + merging them together to form new, compatible abstract dependencies. 
+ """ + + for name in list(self.dep_dict.keys()): + candidates = self.dep_dict[name].candidates[:] + abs_dep = self.dep_dict[name] + while candidates: + pin = candidates.pop() + # Move on from existing pins if the new pin isn't compatible + if name in self.pinned_deps: + if self.pinned_deps[name].editable: + continue + old_version = version_from_ireq(self.pinned_deps[name]) + if not pin.editable: + new_version = version_from_ireq(pin) + if (new_version != old_version and + new_version not in self.candidate_dict[name]): + continue + pin.parent = abs_dep.parent + pin_subdeps = self.dep_dict[name].get_deps(pin) + backup = self.dep_dict.copy(), self.candidate_dict.copy() + try: + for pin_dep in pin_subdeps: + self.add_abstract_dep(pin_dep) + except ResolutionError: + self.dep_dict, self.candidate_dict = backup + continue + else: + self.pinned_deps[name] = pin + break + + def resolve(self, root_nodes, max_rounds=20): + """Resolves dependencies using a backtracking resolver and multiple endpoints. + + Note: this resolver caches aggressively. + Runs for *max_rounds* or until any two pinning rounds yield the same outcome. + + :param root_nodes: A list of the root requirements. + :type root_nodes: list[:class:`~requirementslib.models.requirements.Requirement`] + :param max_rounds: The max number of resolution rounds, defaults to 20 + :param max_rounds: int, optional + :raises RuntimeError: Raised when max rounds is exceeded without a resolution. + """ + if self.dep_dict: + raise RuntimeError("Do not use the same resolver more than once") + + if not self.hash_cache: + self.hash_cache = HashCache() + + # Coerce input into AbstractDependency instances. + # We accept str, Requirement, and AbstractDependency as input. 
+ for dep in root_nodes: + if isinstance(dep, six.string_types): + dep = AbstractDependency.from_string(dep) + elif not isinstance(dep, AbstractDependency): + dep = AbstractDependency.from_requirement(dep) + self.add_abstract_dep(dep) + + for round_ in range(max_rounds): + self.pin_deps() + self.pin_history[round_] = self.pinned_deps.copy() + + if round_ > 0: + previous_round = set(self.pin_history[round_ - 1].values()) + current_values = set(self.pin_history[round_].values()) + difference = current_values - previous_round + else: + difference = set(self.pin_history[round_].values()) + + log.debug("\n") + log.debug("{:=^30}".format(" Round {0} ".format(round_))) + log.debug("\n") + if difference: + log.debug("New Packages: ") + for d in difference: + log.debug("{:>30}".format(format_requirement(d))) + elif round_ >= 3: + log.debug("Stable Pins: ") + for d in current_values: + log.debug("{:>30}".format(format_requirement(d))) + return + else: + log.debug("No New Packages.") + # TODO: Raise a better error. 
+ raise RuntimeError("cannot resolve after {} rounds".format(max_rounds)) + + def get_hashes(self): + for dep in self.pinned_deps.values(): + if dep.name not in self.hashes: + self.hashes[dep.name] = self.get_hashes_for_one(dep) + return self.hashes.copy() + + def get_hashes_for_one(self, ireq): + if not self.finder: + finder_args = [] + if self.allow_prereleases: + finder_args.append('--pre') + self.finder = get_finder(*finder_args) + + if ireq.editable: + return set() + + vcs = VcsSupport() + if ireq.link and ireq.link.scheme in vcs.all_schemes and 'ssh' in ireq.link.scheme: + return set() + + if not is_pinned_requirement(ireq): + raise TypeError( + "Expected pinned requirement, got {}".format(ireq)) + + matching_candidates = set() + with self.allow_all_wheels(): + matching_candidates = ( + find_all_matches(self.finder, ireq, pre=self.allow_prereleases) + ) + + return { + self.hash_cache.get_hash(candidate.location) + for candidate in matching_candidates + } + + @contextmanager + def allow_all_wheels(self): + """ + Monkey patches pip.Wheel to allow wheels from all platforms and Python versions. + + This also saves the candidate cache and set a new one, or else the results from the + previous non-patched calls will interfere. + """ + def _wheel_supported(self, tags=None): + # Ignore current platform. Support everything. + return True + + def _wheel_support_index_min(self, tags=None): + # All wheels are equal priority for sorting. 
+ return 0 + + original_wheel_supported = Wheel.supported + original_support_index_min = Wheel.support_index_min + + Wheel.supported = _wheel_supported + Wheel.support_index_min = _wheel_support_index_min + + try: + yield + finally: + Wheel.supported = original_wheel_supported + Wheel.support_index_min = original_support_index_min diff --git a/pipenv/vendor/requirementslib/models/utils.py b/pipenv/vendor/requirementslib/models/utils.py index 44692399c7..6fd55b6ff2 100644 --- a/pipenv/vendor/requirementslib/models/utils.py +++ b/pipenv/vendor/requirementslib/models/utils.py @@ -1,10 +1,26 @@ # -*- coding: utf-8 -*- from __future__ import absolute_import + import os +import sys + +from collections import defaultdict +from itertools import chain, groupby +from operator import attrgetter + import six + from attr import validators from first import first -from .._compat import Link +from packaging.markers import InvalidMarker, Marker, Op, Value, Variable +from packaging.specifiers import InvalidSpecifier, Specifier, SpecifierSet +from packaging.version import parse as parse_version +from packaging.requirements import Requirement as PackagingRequirement +from pkg_resources import Requirement + +from vistir.misc import dedup +from pip_shims.shims import InstallRequirement, Link + from ..utils import SCHEME_LIST, VCS_LIST, is_star @@ -21,6 +37,15 @@ def optional_instance_of(cls): return validators.optional(validators.instance_of(cls)) +def init_requirement(name): + req = Requirement.parse(name) + req.vcs = None + req.local_file = None + req.revision = None + req.path = None + return req + + def extras_to_string(extras): """Turn a list of extras into a string""" if isinstance(extras, six.string_types): @@ -29,16 +54,13 @@ def extras_to_string(extras): else: extras = [extras] - return "[{0}]".format(",".join(extras)) + return "[{0}]".format(",".join(sorted(extras))) def parse_extras(extras_str): """Turn a string of extras into a parsed extras list""" - import requirements - 
extras = first( - requirements.parse("fakepkg{0}".format(extras_to_string(extras_str))) - ).extras - return extras + extras = Requirement.parse("fakepkg{0}".format(extras_to_string(extras_str))).extras + return sorted(dedup([extra.lower() for extra in extras])) def specs_to_string(specs): @@ -46,7 +68,11 @@ def specs_to_string(specs): if specs: if isinstance(specs, six.string_types): return specs - return ",".join(["".join(spec) for spec in specs]) + try: + extras = ",".join(["".join(spec) for spec in specs]) + except TypeError: + extras = ",".join(["".join(spec._spec) for spec in specs]) + return extras return "" @@ -91,7 +117,7 @@ def strip_ssh_from_git_uri(uri): def add_ssh_scheme_to_git_uri(uri): - """Cleans VCS uris from pip format""" + """Cleans VCS uris from pipenv.patched.notpip format""" if isinstance(uri, six.string_types): # Add scheme for parsing purposes, this is also what pip does if uri.startswith("git+") and "://" not in uri: @@ -101,7 +127,6 @@ def add_ssh_scheme_to_git_uri(uri): def split_markers_from_line(line): """Split markers from a dependency""" - from packaging.markers import Marker, InvalidMarker if not any(line.startswith(uri_prefix) for uri_prefix in SCHEME_LIST): marker_sep = ";" else: @@ -133,7 +158,6 @@ def validate_path(instance, attr_, value): def validate_markers(instance, attr_, value): - from packaging.markers import Marker, InvalidMarker try: Marker("{0}{1}".format(attr_.name, value)) except InvalidMarker: @@ -141,11 +165,311 @@ def validate_markers(instance, attr_, value): def validate_specifiers(instance, attr_, value): - from packaging.specifiers import SpecifierSet, InvalidSpecifier - from packaging.markers import InvalidMarker if value == "": return True try: SpecifierSet(value) except (InvalidMarker, InvalidSpecifier): raise ValueError("Invalid Specifiers {0}".format(value)) + + +def key_from_ireq(ireq): + """Get a standardized key for an InstallRequirement.""" + if ireq.req is None and ireq.link is not None: + return 
str(ireq.link) + else: + return key_from_req(ireq.req) + + +def key_from_req(req): + """Get an all-lowercase version of the requirement's name.""" + if hasattr(req, 'key'): + # from pkg_resources, such as installed dists for pip-sync + key = req.key + else: + # from packaging, such as install requirements from requirements.txt + key = req.name + + key = key.replace('_', '-').lower() + return key + + +def _requirement_to_str_lowercase_name(requirement): + """ + Formats a packaging.requirements.Requirement with a lowercase name. + + This is simply a copy of + https://github.com/pypa/packaging/blob/16.8/packaging/requirements.py#L109-L124 + modified to lowercase the dependency name. + + Previously, we were invoking the original Requirement.__str__ method and + lowercasing the entire result, which would lowercase the name, *and* other, + important stuff that should not be lowercased (such as the marker). See + this issue for more information: https://github.com/pypa/pipenv/issues/2113. + """ + parts = [requirement.name.lower()] + + if requirement.extras: + parts.append("[{0}]".format(",".join(sorted(requirement.extras)))) + + if requirement.specifier: + parts.append(str(requirement.specifier)) + + if requirement.url: + parts.append("@ {0}".format(requirement.url)) + + if requirement.marker: + parts.append("; {0}".format(requirement.marker)) + + return "".join(parts) + + +def format_requirement(ireq): + """ + Generic formatter for pretty printing InstallRequirements to the terminal + in a less verbose way than using its `__str__` method. 
+ """ + if ireq.editable: + line = '-e {}'.format(ireq.link) + else: + line = _requirement_to_str_lowercase_name(ireq.req) + + if str(ireq.req.marker) != str(ireq.markers): + if not ireq.req.marker: + line = '{}; {}'.format(line, ireq.markers) + else: + name, markers = line.split(";", 1) + markers = markers.strip() + line = '{}; ({}) and ({})'.format(name, markers, ireq.markers) + + return line + + +def format_specifier(ireq): + """ + Generic formatter for pretty printing the specifier part of + InstallRequirements to the terminal. + """ + # TODO: Ideally, this is carried over to the pip library itself + specs = ireq.specifier._specs if ireq.req is not None else [] + specs = sorted(specs, key=lambda x: x._spec[1]) + return ','.join(str(s) for s in specs) or '' + + +def is_pinned_requirement(ireq): + """ + Returns whether an InstallRequirement is a "pinned" requirement. + + An InstallRequirement is considered pinned if: + + - Is not editable + - It has exactly one specifier + - That specifier is "==" + - The version does not contain a wildcard + + Examples: + django==1.8 # pinned + django>1.8 # NOT pinned + django~=1.8 # NOT pinned + django==1.* # NOT pinned + """ + if ireq.editable: + return False + + specifier = getattr(ireq, "specifier", None) + if not specifier: + return False + if len(specifier._specs) != 1: + return False + + op, version = first(specifier._specs)._spec + return (op == '==' or op == '===') and not version.endswith('.*') + + +def as_tuple(ireq): + """ + Pulls out the (name: str, version:str, extras:(str)) tuple from the pinned InstallRequirement. 
+ """ + if not is_pinned_requirement(ireq): + raise TypeError('Expected a pinned InstallRequirement, got {}'.format(ireq)) + + name = key_from_req(ireq.req) + version = first(ireq.specifier._specs)._spec[1] + extras = tuple(sorted(ireq.extras)) + return name, version, extras + + +def full_groupby(iterable, key=None): + """Like groupby(), but sorts the input on the group key first.""" + return groupby(sorted(iterable, key=key), key=key) + + +def flat_map(fn, collection): + """Map a function over a collection and flatten the result by one-level""" + return chain.from_iterable(map(fn, collection)) + + +def lookup_table(values, key=None, keyval=None, unique=False, use_lists=False): + """ + Builds a dict-based lookup table (index) elegantly. + + Supports building normal and unique lookup tables. For example: + + >>> assert lookup_table( + ... ['foo', 'bar', 'baz', 'qux', 'quux'], lambda s: s[0]) == { + ... 'b': {'bar', 'baz'}, + ... 'f': {'foo'}, + ... 'q': {'quux', 'qux'} + ... } + + For key functions that uniquely identify values, set unique=True: + + >>> assert lookup_table( + ... ['foo', 'bar', 'baz', 'qux', 'quux'], lambda s: s[0], + ... unique=True) == { + ... 'b': 'baz', + ... 'f': 'foo', + ... 'q': 'quux' + ... } + + The values of the resulting lookup table will be values, not sets. + + For extra power, you can even change the values while building up the LUT. + To do so, use the `keyval` function instead of the `key` arg: + + >>> assert lookup_table( + ... ['foo', 'bar', 'baz', 'qux', 'quux'], + ... keyval=lambda s: (s[0], s[1:])) == { + ... 'b': {'ar', 'az'}, + ... 'f': {'oo'}, + ... 'q': {'uux', 'ux'} + ... 
} + + """ + if keyval is None: + if key is None: + keyval = (lambda v: v) + else: + keyval = (lambda v: (key(v), v)) + + if unique: + return dict(keyval(v) for v in values) + + lut = {} + for value in values: + k, v = keyval(value) + try: + s = lut[k] + except KeyError: + if use_lists: + s = lut[k] = list() + else: + s = lut[k] = set() + if use_lists: + s.append(v) + else: + s.add(v) + return dict(lut) + + +def name_from_req(req): + """Get the name of the requirement""" + if hasattr(req, 'project_name'): + # from pkg_resources, such as installed dists for pip-sync + return req.project_name + else: + # from packaging, such as install requirements from requirements.txt + return req.name + + +def make_install_requirement(name, version, extras, markers, constraint=False): + """make_install_requirement Generates an :class:`~pip._internal.req.req_install.InstallRequirement`. + + Create an InstallRequirement from the supplied metadata. + + :param name: The requirement's name. + :type name: str + :param version: The requirement version (must be pinned). + :type version: str. + :param extras: The desired extras. + :type extras: list[str] + :param markers: The desired markers, without a preceding semicolon. + :type markers: str + :param constraint: Whether to flag the requirement as a constraint, defaults to False. 
+ :param constraint: bool, optional + :return: A generated InstallRequirement + :rtype: :class:`~pip._internal.req.req_install.InstallRequirement` + """ + + # If no extras are specified, the extras string is blank + extras_string = "" + if extras: + # Sort extras for stability + extras_string = "[{}]".format(",".join(sorted(extras))) + + if not markers: + return InstallRequirement.from_line( + str('{}{}=={}'.format(name, extras_string, version)), + constraint=constraint) + else: + return InstallRequirement.from_line( + str('{}{}=={}; {}'.format(name, extras_string, version, str(markers))), + constraint=constraint) + + +def version_from_ireq(ireq): + """version_from_ireq Extract the version from a supplied :class:`~pip._internal.req.req_install.InstallRequirement` + + :param ireq: An InstallRequirement + :type ireq: :class:`~pip._internal.req.req_install.InstallRequirement` + :return: The version of the InstallRequirement. + :rtype: str + """ + + return first(ireq.specifier._specs).version + + +def clean_requires_python(candidates): + """Get a cleaned list of all the candidates with valid specifiers in the `requires_python` attributes.""" + all_candidates = [] + sys_version = '.'.join(map(str, sys.version_info[:3])) + py_version = parse_version(os.environ.get('PIP_PYTHON_VERSION', sys_version)) + for c in candidates: + from_location = attrgetter("location.requires_python") + requires_python = getattr(c, "requires_python", from_location(c)) + if requires_python: + # Old specifications had people setting this to single digits + # which is effectively the same as '>=digit,=2.6"')` + marker_key = Variable('python_version') + for spec in specifierset: + operator, val = spec._spec + cleaned_val = Value(val).serialize().replace('"', "") + spec_dict[Op(operator).serialize()].add(cleaned_val) + marker_str = ' and '.join([ + "{0}{1}'{2}'".format(marker_key.serialize(), op, ','.join(vals)) + for op, vals in spec_dict.items() + ]) + marker_to_add = 
PackagingRequirement('fakepkg; {0}'.format(marker_str)).marker + return marker_to_add diff --git a/pipenv/vendor/requirementslib/models/vcs.py b/pipenv/vendor/requirementslib/models/vcs.py new file mode 100644 index 0000000000..a588f62985 --- /dev/null +++ b/pipenv/vendor/requirementslib/models/vcs.py @@ -0,0 +1,48 @@ +# -*- coding=utf-8 -*- +import attr +from pip_shims import VcsSupport +import os + + +VCS_SUPPORT = VcsSupport() + + +@attr.s +class VCSRepository(object): + url = attr.ib() + name = attr.ib() + checkout_directory = attr.ib() + vcs_type = attr.ib() + commit_sha = attr.ib(default=None) + ref = attr.ib(default=None) + repo_instance = attr.ib() + + @repo_instance.default + def get_repo_instance(self): + backend = VCS_SUPPORT._registry.get(self.vcs_type) + return backend(url=self.url) + + def obtain(self): + if not os.path.exists(self.checkout_directory): + self.repo_instance.obtain(self.checkout_directory) + if self.ref: + self.checkout_ref(self.ref) + self.commit_sha = self.get_commit_hash(self.ref) + else: + self.ref = self.repo_instance.default_arg_rev + if not self.commit_sha: + self.commit_sha = self.get_commit_hash() + + def checkout_ref(self, ref): + target_rev = self.repo_instance.make_rev_options(ref) + if not self.repo_instance.is_commit_id_equal( + self.checkout_directory, self.get_commit_hash(ref) + ) and not self.repo_instance.is_commit_id_equal(self.checkout_directory, ref): + self.repo_instance.switch(self.checkout_directory, self.url, target_rev) + + def update(self, ref): + target_rev = self.repo_instance.make_rev_options(ref) + self.repo_instance.update(self.checkout_directory, target_rev) + + def get_commit_hash(self, ref=None): + return self.repo_instance.get_revision(self.checkout_directory) diff --git a/pipenv/vendor/requirementslib/utils.py b/pipenv/vendor/requirementslib/utils.py index 685a90b316..c41f967578 100644 --- a/pipenv/vendor/requirementslib/utils.py +++ b/pipenv/vendor/requirementslib/utils.py @@ -1,25 +1,28 @@ # -*- 
coding=utf-8 -*- from __future__ import absolute_import + import logging import os -import posixpath + import six -from itertools import product +from six.moves.urllib.parse import urlparse, urlsplit -try: - from urllib.parse import urlparse -except ImportError: - from urlparse import urlparse +from pip_shims import ( + Command, VcsSupport, cmdoptions, is_archive_file, is_installable_dir +) +from vistir.compat import Path +from vistir.path import is_valid_url, ensure_mkdir_p -try: - from pathlib import Path -except ImportError: - from pathlib2 import Path +VCS_ACCESS = VcsSupport() VCS_LIST = ("git", "svn", "hg", "bzr") +VCS_SCHEMES = [] SCHEME_LIST = ("http://", "https://", "ftp://", "ftps://", "file://") +if not VCS_SCHEMES: + VCS_SCHEMES = VcsSupport().all_schemes + def setup_logger(): logger = logging.getLogger("requirementslib") @@ -40,74 +43,14 @@ def is_vcs(pipfile_entry): return any(key for key in pipfile_entry.keys() if key in VCS_LIST) elif isinstance(pipfile_entry, six.string_types): - vcs_starts = product( - ("git+", "hg+", "svn+", "bzr+"), - ("file", "ssh", "https", "http", "svn", "sftp", ""), - ) - - return next( - ( - v - for v in ( - pipfile_entry.startswith("{0}{1}".format(vcs, scheme)) - for vcs, scheme in vcs_starts - ) - if v - ), - False, - ) - + if not is_valid_url(pipfile_entry) and pipfile_entry.startswith("git+"): + from .models.utils import add_ssh_scheme_to_git_uri + pipfile_entry = add_ssh_scheme_to_git_uri(pipfile_entry) + parsed_entry = urlsplit(pipfile_entry) + return parsed_entry.scheme in VCS_SCHEMES return False -def check_for_unc_path(path): - """ Checks to see if a pathlib `Path` object is a unc path or not""" - if ( - os.name == "nt" - and len(path.drive) > 2 - and not path.drive[0].isalpha() - and path.drive[1] != ":" - ): - return True - else: - return False - - -def get_converted_relative_path(path, relative_to=os.curdir): - """Convert `path` to be relative. 
- - Given a vague relative path, return the path relative to the given - location. - - This performs additional conversion to ensure the result is of POSIX form, - and starts with `./`, or is precisely `.`. - """ - - start_path = Path(relative_to) - try: - start = start_path.resolve() - except OSError: - start = start_path.absolute() - - # check if there is a drive letter or mount point - # if it is a mountpoint use the original absolute path - # instead of the unc path - if check_for_unc_path(start): - start = start_path.absolute() - - path = start.joinpath(path).relative_to(start) - - # check and see if the path that was passed into the function is a UNC path - # and raise value error if it is not. - if check_for_unc_path(path): - raise ValueError("The path argument does not currently accept UNC paths") - - relpath_s = posixpath.normpath(path.as_posix()) - if not (relpath_s == "." or relpath_s.startswith("./")): - relpath_s = posixpath.join(".", relpath_s) - return relpath_s - - def multi_split(s, split): """Splits on multiple given separators.""" for r in split: @@ -121,7 +64,6 @@ def is_star(val): def is_installable_file(path): """Determine if a path can potentially be installed""" - from ._compat import is_installable_dir, is_archive_file from packaging import specifiers if hasattr(path, "keys") and any( @@ -160,22 +102,6 @@ def is_installable_file(path): return False -def is_valid_url(url): - """Checks if a given string is an url""" - pieces = urlparse(url) - return all([pieces.scheme, any([pieces.netloc, pieces.path])]) - - -def pep423_name(name): - """Normalize package name to PEP 423 style standard.""" - name = name.lower() - if any(i not in name for i in (VCS_LIST + SCHEME_LIST)): - return name.replace("_", "-") - - else: - return name - - def prepare_pip_source_args(sources, pip_args=None): if pip_args is None: pip_args = [] @@ -197,3 +123,30 @@ def prepare_pip_source_args(sources, pip_args=None): ["--trusted-host", urlparse(source["url"]).hostname] ) 
return pip_args + + +class PipCommand(Command): + name = 'PipCommand' + + +def get_pip_command(): + # Use pip's parser for pip.conf management and defaults. + # General options (find_links, index_url, extra_index_url, trusted_host, + # and pre) are defered to pip. + import optparse + pip_command = PipCommand() + pip_command.parser.add_option(cmdoptions.no_binary()) + pip_command.parser.add_option(cmdoptions.only_binary()) + index_opts = cmdoptions.make_option_group( + cmdoptions.index_group, + pip_command.parser, + ) + pip_command.parser.insert_option_group(0, index_opts) + pip_command.parser.add_option(optparse.Option('--pre', action='store_true', default=False)) + + return pip_command + + +@ensure_mkdir_p(mode=0o777) +def _ensure_dir(path): + return path From 21f48ac3e0e03720beb8ed76a7eb67d09d9c2af3 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 25 Aug 2018 13:17:33 -0400 Subject: [PATCH 11/26] Update pythonfinder to 1.0.2 Signed-off-by: Dan Ryan --- pipenv/vendor/pythonfinder/__init__.py | 2 +- pipenv/vendor/pythonfinder/_vendor/Makefile | 14 ++ pipenv/vendor/pythonfinder/_vendor/vendor.txt | 1 + pipenv/vendor/pythonfinder/cli.py | 2 +- pipenv/vendor/pythonfinder/models/__init__.py | 118 ++-------------- pipenv/vendor/pythonfinder/models/mixins.py | 129 ++++++++++++++++++ pipenv/vendor/pythonfinder/models/path.py | 33 ++--- pipenv/vendor/pythonfinder/models/pyenv.py | 19 ++- pipenv/vendor/pythonfinder/models/python.py | 26 ++-- pipenv/vendor/pythonfinder/models/windows.py | 14 +- pipenv/vendor/pythonfinder/utils.py | 49 ++----- 11 files changed, 215 insertions(+), 192 deletions(-) create mode 100644 pipenv/vendor/pythonfinder/_vendor/Makefile create mode 100644 pipenv/vendor/pythonfinder/_vendor/vendor.txt create mode 100644 pipenv/vendor/pythonfinder/models/mixins.py diff --git a/pipenv/vendor/pythonfinder/__init__.py b/pipenv/vendor/pythonfinder/__init__.py index 9f1628be84..f6ef476b80 100644 --- a/pipenv/vendor/pythonfinder/__init__.py +++ 
b/pipenv/vendor/pythonfinder/__init__.py @@ -1,6 +1,6 @@ from __future__ import print_function, absolute_import -__version__ = "1.0.0" +__version__ = '1.0.2' __all__ = ["Finder", "WindowsFinder", "SystemPath", "InvalidPythonVersion"] from .pythonfinder import Finder diff --git a/pipenv/vendor/pythonfinder/_vendor/Makefile b/pipenv/vendor/pythonfinder/_vendor/Makefile new file mode 100644 index 0000000000..5c44fea4e9 --- /dev/null +++ b/pipenv/vendor/pythonfinder/_vendor/Makefile @@ -0,0 +1,14 @@ +# Taken from pip: https://github.com/pypa/pip/blob/95bcf8c5f6394298035a7332c441868f3b0169f4/src/pip/_vendor/Makefile +all: clean vendor + +clean: + @# Delete vendored items + find . -maxdepth 1 -mindepth 1 -type d -exec rm -rf {} \; + +vendor: + @# Install vendored libraries + pip install -t . -r vendor.txt + + @# Cleanup .egg-info directories + rm -rf *.egg-info + rm -rf *.dist-info diff --git a/pipenv/vendor/pythonfinder/_vendor/vendor.txt b/pipenv/vendor/pythonfinder/_vendor/vendor.txt new file mode 100644 index 0000000000..8875249845 --- /dev/null +++ b/pipenv/vendor/pythonfinder/_vendor/vendor.txt @@ -0,0 +1 @@ +-e git+https://github.com/zooba/pep514tools.git@320e48745660b696e2dcaee888fc2e516b435e48#egg=pep514tools diff --git a/pipenv/vendor/pythonfinder/cli.py b/pipenv/vendor/pythonfinder/cli.py index 4e620b0875..d285fb29d4 100644 --- a/pipenv/vendor/pythonfinder/cli.py +++ b/pipenv/vendor/pythonfinder/cli.py @@ -11,7 +11,7 @@ # @click.group(invoke_without_command=True, context_settings=CONTEXT_SETTINGS) @click.command() @click.option("--find", default=False, nargs=1, help="Find a specific python version.") -@click.option("--which", default=False, nargs=1, help="Run the which commend.") +@click.option("--which", default=False, nargs=1, help="Run the which command.") @click.option( "--findall", is_flag=True, default=False, help="Find all python versions." 
) diff --git a/pipenv/vendor/pythonfinder/models/__init__.py b/pipenv/vendor/pythonfinder/models/__init__.py index a38494edb7..ef0ed36830 100644 --- a/pipenv/vendor/pythonfinder/models/__init__.py +++ b/pipenv/vendor/pythonfinder/models/__init__.py @@ -1,10 +1,17 @@ # -*- coding=utf-8 -*- -from __future__ import print_function, absolute_import +from __future__ import absolute_import, print_function + import abc import operator -import six + from itertools import chain + +import six + from ..utils import KNOWN_EXTS, unnest +from .path import SystemPath +from .python import PythonVersion +from .windows import WindowsFinder @six.add_metaclass(abc.ABCMeta) @@ -24,110 +31,3 @@ def version_paths(self): @property def expanded_paths(self): return (p.paths.values() for p in self.version_paths) - - -class BasePath(object): - def which(self, name): - """Search in this path for an executable. - - :param executable: The name of an executable to search for. - :type executable: str - :returns: :class:`~pythonfinder.models.PathEntry` instance. - """ - - valid_names = [name] + [ - "{0}.{1}".format(name, ext).lower() if ext else "{0}".format(name).lower() - for ext in KNOWN_EXTS - ] - children = self.children - found = next( - ( - children[(self.path / child).as_posix()] - for child in valid_names - if (self.path / child).as_posix() in children - ), - None, - ) - return found - - def find_all_python_versions( - self, major=None, minor=None, patch=None, pre=None, dev=None, arch=None - ): - """Search for a specific python version on the path. Return all copies - - :param major: Major python version to search for. 
- :type major: int - :param int minor: Minor python version to search for, defaults to None - :param int patch: Patch python version to search for, defaults to None - :param bool pre: Search for prereleases (default None) - prioritize releases if None - :param bool dev: Search for devreleases (default None) - prioritize releases if None - :param str arch: Architecture to include, e.g. '64bit', defaults to None - :return: A list of :class:`~pythonfinder.models.PathEntry` instances matching the version requested. - :rtype: List[:class:`~pythonfinder.models.PathEntry`] - """ - - call_method = ( - "find_all_python_versions" if self.is_dir else "find_python_version" - ) - sub_finder = operator.methodcaller( - call_method, major, minor=minor, patch=patch, pre=pre, dev=dev, arch=arch - ) - if not self.is_dir: - return sub_finder(self) - path_filter = filter(None, (sub_finder(p) for p in self.children.values())) - version_sort = operator.attrgetter("as_python.version_sort") - return [c for c in sorted(path_filter, key=version_sort, reverse=True)] - - def find_python_version( - self, major=None, minor=None, patch=None, pre=None, dev=None, arch=None - ): - """Search or self for the specified Python version and return the first match. - - :param major: Major version number. - :type major: int - :param int minor: Minor python version to search for, defaults to None - :param int patch: Patch python version to search for, defaults to None - :param bool pre: Search for prereleases (default None) - prioritize releases if None - :param bool dev: Search for devreleases (default None) - prioritize releases if None - :param str arch: Architecture to include, e.g. '64bit', defaults to None - :returns: A :class:`~pythonfinder.models.PathEntry` instance matching the version requested. 
- """ - - version_matcher = operator.methodcaller( - "matches", - major=major, - minor=minor, - patch=patch, - pre=pre, - dev=dev, - arch=arch, - ) - is_py = operator.attrgetter("is_python") - py_version = operator.attrgetter("as_python") - if not self.is_dir: - if self.is_python and self.as_python and version_matcher(self.as_python): - return self - return - finder = ( - (child, child.as_python) - for child in unnest(self.pythons.values()) - if child.as_python - ) - py_filter = filter( - None, filter(lambda child: version_matcher(child[1]), finder) - ) - version_sort = operator.attrgetter("version_sort") - return next( - ( - c[0] - for c in sorted( - py_filter, key=lambda child: child[1].version_sort, reverse=True - ) - ), - None, - ) - - -from .path import SystemPath -from .windows import WindowsFinder -from .python import PythonVersion diff --git a/pipenv/vendor/pythonfinder/models/mixins.py b/pipenv/vendor/pythonfinder/models/mixins.py new file mode 100644 index 0000000000..8cbd45dfeb --- /dev/null +++ b/pipenv/vendor/pythonfinder/models/mixins.py @@ -0,0 +1,129 @@ +# -*- coding=utf-8 -*- +from __future__ import absolute_import, unicode_literals + +import abc +import operator +import six + +from ..utils import KNOWN_EXTS, unnest + + +class BasePath(object): + def which(self, name): + """Search in this path for an executable. + + :param executable: The name of an executable to search for. + :type executable: str + :returns: :class:`~pythonfinder.models.PathEntry` instance. 
+ """ + + valid_names = [name] + [ + "{0}.{1}".format(name, ext).lower() if ext else "{0}".format(name).lower() + for ext in KNOWN_EXTS + ] + children = self.children + found = next( + ( + children[(self.path / child).as_posix()] + for child in valid_names + if (self.path / child).as_posix() in children + ), + None, + ) + return found + + def find_all_python_versions( + self, major=None, minor=None, patch=None, pre=None, dev=None, arch=None + ): + """Search for a specific python version on the path. Return all copies + + :param major: Major python version to search for. + :type major: int + :param int minor: Minor python version to search for, defaults to None + :param int patch: Patch python version to search for, defaults to None + :param bool pre: Search for prereleases (default None) - prioritize releases if None + :param bool dev: Search for devreleases (default None) - prioritize releases if None + :param str arch: Architecture to include, e.g. '64bit', defaults to None + :return: A list of :class:`~pythonfinder.models.PathEntry` instances matching the version requested. + :rtype: List[:class:`~pythonfinder.models.PathEntry`] + """ + + call_method = ( + "find_all_python_versions" if self.is_dir else "find_python_version" + ) + sub_finder = operator.methodcaller( + call_method, major, minor=minor, patch=patch, pre=pre, dev=dev, arch=arch + ) + if not self.is_dir: + return sub_finder(self) + path_filter = filter(None, (sub_finder(p) for p in self.children.values())) + version_sort = operator.attrgetter("as_python.version_sort") + return [c for c in sorted(path_filter, key=version_sort, reverse=True)] + + def find_python_version( + self, major=None, minor=None, patch=None, pre=None, dev=None, arch=None + ): + """Search or self for the specified Python version and return the first match. + + :param major: Major version number. 
+ :type major: int + :param int minor: Minor python version to search for, defaults to None + :param int patch: Patch python version to search for, defaults to None + :param bool pre: Search for prereleases (default None) - prioritize releases if None + :param bool dev: Search for devreleases (default None) - prioritize releases if None + :param str arch: Architecture to include, e.g. '64bit', defaults to None + :returns: A :class:`~pythonfinder.models.PathEntry` instance matching the version requested. + """ + + version_matcher = operator.methodcaller( + "matches", + major=major, + minor=minor, + patch=patch, + pre=pre, + dev=dev, + arch=arch, + ) + is_py = operator.attrgetter("is_python") + py_version = operator.attrgetter("as_python") + if not self.is_dir: + if self.is_python and self.as_python and version_matcher(self.as_python): + return self + return + finder = ( + (child, child.as_python) + for child in unnest(self.pythons.values()) + if child.as_python + ) + py_filter = filter( + None, filter(lambda child: version_matcher(child[1]), finder) + ) + version_sort = operator.attrgetter("version_sort") + return next( + ( + c[0] + for c in sorted( + py_filter, key=lambda child: child[1].version_sort, reverse=True + ) + ), + None, + ) + + +@six.add_metaclass(abc.ABCMeta) +class BaseFinder(object): + def get_versions(self): + """Return the available versions from the finder""" + raise NotImplementedError + + @classmethod + def create(cls): + raise NotImplementedError + + @property + def version_paths(self): + return self.versions.values() + + @property + def expanded_paths(self): + return (p.paths.values() for p in self.version_paths) diff --git a/pipenv/vendor/pythonfinder/models/path.py b/pipenv/vendor/pythonfinder/models/path.py index af1039157c..8e38aef323 100644 --- a/pipenv/vendor/pythonfinder/models/path.py +++ b/pipenv/vendor/pythonfinder/models/path.py @@ -1,31 +1,28 @@ # -*- coding=utf-8 -*- -from __future__ import print_function, absolute_import -import 
attr +from __future__ import absolute_import, print_function + import copy import operator import os import sys + from collections import defaultdict -from cached_property import cached_property from itertools import chain -from . import BasePath -from .python import PythonVersion + +import attr + +from cached_property import cached_property + +from vistir.compat import Path, fs_str + +from .mixins import BasePath from ..environment import PYENV_INSTALLED, PYENV_ROOT from ..exceptions import InvalidPythonVersion from ..utils import ( - optional_instance_of, - filter_pythons, - path_is_known_executable, - looks_like_python, - ensure_path, - fs_str, - unnest, + ensure_path, filter_pythons, looks_like_python, optional_instance_of, + path_is_known_executable, unnest ) - -try: - from pathlib import Path -except ImportError: - from pathlib2 import Path +from .python import PythonVersion @attr.s @@ -251,7 +248,7 @@ def find_python_version( if major and minor and patch: _tuple_pre = pre if pre is not None else False _tuple_dev = dev if dev is not None else False - version_tuple = (major, minor_, patch, _tuple_pre, _tuple_dev) + version_tuple = (major, minor, patch, _tuple_pre, _tuple_dev) version_tuple_pre = (major, minor, patch, True, False) if os.name == "nt" and self.windows_finder: windows_finder_version = sub_finder(self.windows_finder) diff --git a/pipenv/vendor/pythonfinder/models/pyenv.py b/pipenv/vendor/pythonfinder/models/pyenv.py index 6c8909369d..6df4717971 100644 --- a/pipenv/vendor/pythonfinder/models/pyenv.py +++ b/pipenv/vendor/pythonfinder/models/pyenv.py @@ -1,17 +1,16 @@ # -*- coding=utf-8 -*- -from __future__ import print_function, absolute_import -import attr +from __future__ import absolute_import, print_function + from collections import defaultdict -from . 
import BaseFinder -from .path import VersionPath -from .python import PythonVersion -from ..utils import optional_instance_of, ensure_path +import attr + +from vistir.compat import Path -try: - from pathlib import Path -except ImportError: - from pathlib2 import Path +from ..utils import ensure_path, optional_instance_of +from .mixins import BaseFinder +from .path import VersionPath +from .python import PythonVersion @attr.s diff --git a/pipenv/vendor/pythonfinder/models/python.py b/pipenv/vendor/pythonfinder/models/python.py index f10ddb4e42..8a40e2482d 100644 --- a/pipenv/vendor/pythonfinder/models/python.py +++ b/pipenv/vendor/pythonfinder/models/python.py @@ -1,18 +1,22 @@ # -*- coding=utf-8 -*- -from __future__ import print_function, absolute_import -import attr +from __future__ import absolute_import, print_function + import copy -from collections import defaultdict import platform -from packaging.version import parse as parse_version, Version -from ..environment import SYSTEM_ARCH -from ..utils import _filter_none, optional_instance_of, get_python_version, ensure_path +from collections import defaultdict -try: - from pathlib import Path -except ImportError: - from pathlib2 import Path +import attr + +from packaging.version import Version +from packaging.version import parse as parse_version + +from vistir.compat import Path + +from ..environment import SYSTEM_ARCH +from ..utils import ( + _filter_none, ensure_path, get_python_version, optional_instance_of +) @attr.s @@ -31,7 +35,7 @@ class PythonVersion(object): @property def version_sort(self): """version_sort tuple for sorting against other instances of the same class. - + Returns a tuple of the python version but includes a point for non-dev, and a point for non-prerelease versions. So released versions will have 2 points for this value. E.g. 
`(3, 6, 6, 2)` is a release, `(3, 6, 6, 1)` is a prerelease, diff --git a/pipenv/vendor/pythonfinder/models/windows.py b/pipenv/vendor/pythonfinder/models/windows.py index f731432c91..4f0b64d129 100644 --- a/pipenv/vendor/pythonfinder/models/windows.py +++ b/pipenv/vendor/pythonfinder/models/windows.py @@ -1,13 +1,17 @@ # -*- coding=utf-8 -*- -from __future__ import print_function, absolute_import -import attr +from __future__ import absolute_import, print_function + import operator + from collections import defaultdict -from . import BaseFinder -from .path import PathEntry -from .python import PythonVersion, VersionMap + +import attr + from ..exceptions import InvalidPythonVersion from ..utils import ensure_path +from .mixins import BaseFinder +from .path import PathEntry +from .python import PythonVersion, VersionMap @attr.s diff --git a/pipenv/vendor/pythonfinder/utils.py b/pipenv/vendor/pythonfinder/utils.py index 1a9bfa2c1a..285e06be58 100644 --- a/pipenv/vendor/pythonfinder/utils.py +++ b/pipenv/vendor/pythonfinder/utils.py @@ -1,19 +1,22 @@ # -*- coding=utf-8 -*- -from __future__ import print_function, absolute_import -import attr +from __future__ import absolute_import, print_function + import locale import os -import six import subprocess import sys + from fnmatch import fnmatch -from .exceptions import InvalidPythonVersion from itertools import chain -try: - from pathlib import Path -except ImportError: - from pathlib2 import Path +import attr +import six + +import vistir + +from vistir.compat import Path + +from .exceptions import InvalidPythonVersion PYTHON_IMPLEMENTATIONS = ("python", "ironpython", "jython", "pypy") @@ -24,25 +27,11 @@ ) -def _run(cmd): - """Use `subprocess.check_output` to get the output of a command and decode it. - - :param list cmd: A list representing the command you want to run. 
- :returns: A 2-tuple of (output, error) - """ - encoding = locale.getdefaultlocale()[1] or "utf-8" - c = subprocess.Popen( - cmd, env=os.environ.copy(), stdout=subprocess.PIPE, stderr=subprocess.PIPE - ) - out, err = c.communicate() - return out.decode(encoding).strip(), err.decode(encoding).strip() - - def get_python_version(path): """Get python version string using subprocess from a given path.""" version_cmd = [path, "-c", "import sys; print(sys.version.split()[0])"] try: - out, _ = _run(version_cmd) + out, _ = vistir.misc.run(version_cmd) except OSError: raise InvalidPythonVersion("%s is not a valid python path" % path) if not out: @@ -123,20 +112,6 @@ def filter_pythons(path): return filter(lambda x: path_is_python(x), path.iterdir()) -def fs_str(string): - """Encodes a string into the proper filesystem encoding - - Borrowed from pip-tools - """ - if isinstance(string, str): - return string - assert not isinstance(string, bytes) - return string.encode(_fs_encoding) - - -_fs_encoding = sys.getfilesystemencoding() or sys.getdefaultencoding() - - def unnest(item): if isinstance(next((i for i in item), None), (list, tuple)): return chain(*filter(None, item)) From 4fd5c797274b9c0c35b221dd62f264c4781edcbd Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 25 Aug 2018 13:18:34 -0400 Subject: [PATCH 12/26] Replace appdirs with correct copy Signed-off-by: Dan Ryan --- pipenv/vendor/appdirs.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pipenv/vendor/appdirs.py b/pipenv/vendor/appdirs.py index 2acd1debeb..ae67001af8 100644 --- a/pipenv/vendor/appdirs.py +++ b/pipenv/vendor/appdirs.py @@ -13,8 +13,8 @@ # - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html # - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html -__version__ = "1.4.4" -__version_info__ = tuple(int(segment) for segment in __version__.split(".")) +__version_info__ = (1, 4, 3) +__version__ = 
'.'.join(map(str, __version_info__)) import sys From 3bfc25b61950198ecc1bb9b24a3a024d06527dec Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 25 Aug 2018 13:19:11 -0400 Subject: [PATCH 13/26] Update certifi Signed-off-by: Dan Ryan --- pipenv/vendor/certifi/__init__.py | 2 +- pipenv/vendor/certifi/cacert.pem | 203 ++++++------------------------ 2 files changed, 41 insertions(+), 164 deletions(-) diff --git a/pipenv/vendor/certifi/__init__.py b/pipenv/vendor/certifi/__init__.py index 0c4963ef60..7e1f370c96 100644 --- a/pipenv/vendor/certifi/__init__.py +++ b/pipenv/vendor/certifi/__init__.py @@ -1,3 +1,3 @@ from .core import where, old_where -__version__ = "2018.04.16" +__version__ = "2018.08.13" diff --git a/pipenv/vendor/certifi/cacert.pem b/pipenv/vendor/certifi/cacert.pem index 2713f541c4..24fe597fcb 100644 --- a/pipenv/vendor/certifi/cacert.pem +++ b/pipenv/vendor/certifi/cacert.pem @@ -3692,169 +3692,6 @@ lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR -----END CERTIFICATE----- -# Issuer: CN=Certplus Root CA G1 O=Certplus -# Subject: CN=Certplus Root CA G1 O=Certplus -# Label: "Certplus Root CA G1" -# Serial: 1491911565779898356709731176965615564637713 -# MD5 Fingerprint: 7f:09:9c:f7:d9:b9:5c:69:69:56:d5:37:3e:14:0d:42 -# SHA1 Fingerprint: 22:fd:d0:b7:fd:a2:4e:0d:ac:49:2c:a0:ac:a6:7b:6a:1f:e3:f7:66 -# SHA256 Fingerprint: 15:2a:40:2b:fc:df:2c:d5:48:05:4d:22:75:b3:9c:7f:ca:3e:c0:97:80:78:b0:f0:ea:76:e5:61:a6:c7:43:3e ------BEGIN CERTIFICATE----- -MIIFazCCA1OgAwIBAgISESBVg+QtPlRWhS2DN7cs3EYRMA0GCSqGSIb3DQEBDQUA -MD4xCzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2Vy -dHBsdXMgUm9vdCBDQSBHMTAeFw0xNDA1MjYwMDAwMDBaFw0zODAxMTUwMDAwMDBa -MD4xCzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2Vy -dHBsdXMgUm9vdCBDQSBHMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB -ANpQh7bauKk+nWT6VjOaVj0W5QOVsjQcmm1iBdTYj+eJZJ+622SLZOZ5KmHNr49a 
-iZFluVj8tANfkT8tEBXgfs+8/H9DZ6itXjYj2JizTfNDnjl8KvzsiNWI7nC9hRYt -6kuJPKNxQv4c/dMcLRC4hlTqQ7jbxofaqK6AJc96Jh2qkbBIb6613p7Y1/oA/caP -0FG7Yn2ksYyy/yARujVjBYZHYEMzkPZHogNPlk2dT8Hq6pyi/jQu3rfKG3akt62f -6ajUeD94/vI4CTYd0hYCyOwqaK/1jpTvLRN6HkJKHRUxrgwEV/xhc/MxVoYxgKDE -EW4wduOU8F8ExKyHcomYxZ3MVwia9Az8fXoFOvpHgDm2z4QTd28n6v+WZxcIbekN -1iNQMLAVdBM+5S//Ds3EC0pd8NgAM0lm66EYfFkuPSi5YXHLtaW6uOrc4nBvCGrc -h2c0798wct3zyT8j/zXhviEpIDCB5BmlIOklynMxdCm+4kLV87ImZsdo/Rmz5yCT -mehd4F6H50boJZwKKSTUzViGUkAksnsPmBIgJPaQbEfIDbsYIC7Z/fyL8inqh3SV -4EJQeIQEQWGw9CEjjy3LKCHyamz0GqbFFLQ3ZU+V/YDI+HLlJWvEYLF7bY5KinPO -WftwenMGE9nTdDckQQoRb5fc5+R+ob0V8rqHDz1oihYHAgMBAAGjYzBhMA4GA1Ud -DwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSowcCbkahDFXxd -Bie0KlHYlwuBsTAfBgNVHSMEGDAWgBSowcCbkahDFXxdBie0KlHYlwuBsTANBgkq -hkiG9w0BAQ0FAAOCAgEAnFZvAX7RvUz1isbwJh/k4DgYzDLDKTudQSk0YcbX8ACh -66Ryj5QXvBMsdbRX7gp8CXrc1cqh0DQT+Hern+X+2B50ioUHj3/MeXrKls3N/U/7 -/SMNkPX0XtPGYX2eEeAC7gkE2Qfdpoq3DIMku4NQkv5gdRE+2J2winq14J2by5BS -S7CTKtQ+FjPlnsZlFT5kOwQ/2wyPX1wdaR+v8+khjPPvl/aatxm2hHSco1S1cE5j -2FddUyGbQJJD+tZ3VTNPZNX70Cxqjm0lpu+F6ALEUz65noe8zDUa3qHpimOHZR4R -Kttjd5cUvpoUmRGywO6wT/gUITJDT5+rosuoD6o7BlXGEilXCNQ314cnrUlZp5Gr -RHpejXDbl85IULFzk/bwg2D5zfHhMf1bfHEhYxQUqq/F3pN+aLHsIqKqkHWetUNy -6mSjhEv9DKgma3GX7lZjZuhCVPnHHd/Qj1vfyDBviP4NxDMcU6ij/UgQ8uQKTuEV -V/xuZDDCVRHc6qnNSlSsKWNEz0pAoNZoWRsz+e86i9sgktxChL8Bq4fA1SCC28a5 -g4VCXA9DO2pJNdWY9BW/+mGBDAkgGNLQFwzLSABQ6XaCjGTXOqAHVcweMcDvOrRl -++O/QmueD6i9a5jc2NvLi6Td11n0bt3+qsOR0C5CB8AMTVPNJLFMWx5R9N/pkvo= ------END CERTIFICATE----- - -# Issuer: CN=Certplus Root CA G2 O=Certplus -# Subject: CN=Certplus Root CA G2 O=Certplus -# Label: "Certplus Root CA G2" -# Serial: 1492087096131536844209563509228951875861589 -# MD5 Fingerprint: a7:ee:c4:78:2d:1b:ee:2d:b9:29:ce:d6:a7:96:32:31 -# SHA1 Fingerprint: 4f:65:8e:1f:e9:06:d8:28:02:e9:54:47:41:c9:54:25:5d:69:cc:1a -# SHA256 Fingerprint: 6c:c0:50:41:e6:44:5e:74:69:6c:4c:fb:c9:f8:0f:54:3b:7e:ab:bb:44:b4:ce:6f:78:7c:6a:99:71:c4:2f:17 
------BEGIN CERTIFICATE----- -MIICHDCCAaKgAwIBAgISESDZkc6uo+jF5//pAq/Pc7xVMAoGCCqGSM49BAMDMD4x -CzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2VydHBs -dXMgUm9vdCBDQSBHMjAeFw0xNDA1MjYwMDAwMDBaFw0zODAxMTUwMDAwMDBaMD4x -CzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2VydHBs -dXMgUm9vdCBDQSBHMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABM0PW1aC3/BFGtat -93nwHcmsltaeTpwftEIRyoa/bfuFo8XlGVzX7qY/aWfYeOKmycTbLXku54uNAm8x -Ik0G42ByRZ0OQneezs/lf4WbGOT8zC5y0xaTTsqZY1yhBSpsBqNjMGEwDgYDVR0P -AQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNqDYwJ5jtpMxjwj -FNiPwyCrKGBZMB8GA1UdIwQYMBaAFNqDYwJ5jtpMxjwjFNiPwyCrKGBZMAoGCCqG -SM49BAMDA2gAMGUCMHD+sAvZ94OX7PNVHdTcswYO/jOYnYs5kGuUIe22113WTNch -p+e/IQ8rzfcq3IUHnQIxAIYUFuXcsGXCwI4Un78kFmjlvPl5adytRSv3tjFzzAal -U5ORGpOucGpnutee5WEaXw== ------END CERTIFICATE----- - -# Issuer: CN=OpenTrust Root CA G1 O=OpenTrust -# Subject: CN=OpenTrust Root CA G1 O=OpenTrust -# Label: "OpenTrust Root CA G1" -# Serial: 1492036577811947013770400127034825178844775 -# MD5 Fingerprint: 76:00:cc:81:29:cd:55:5e:88:6a:7a:2e:f7:4d:39:da -# SHA1 Fingerprint: 79:91:e8:34:f7:e2:ee:dd:08:95:01:52:e9:55:2d:14:e9:58:d5:7e -# SHA256 Fingerprint: 56:c7:71:28:d9:8c:18:d9:1b:4c:fd:ff:bc:25:ee:91:03:d4:75:8e:a2:ab:ad:82:6a:90:f3:45:7d:46:0e:b4 ------BEGIN CERTIFICATE----- -MIIFbzCCA1egAwIBAgISESCzkFU5fX82bWTCp59rY45nMA0GCSqGSIb3DQEBCwUA -MEAxCzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9w -ZW5UcnVzdCBSb290IENBIEcxMB4XDTE0MDUyNjA4NDU1MFoXDTM4MDExNTAwMDAw -MFowQDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwU -T3BlblRydXN0IFJvb3QgQ0EgRzEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK -AoICAQD4eUbalsUwXopxAy1wpLuwxQjczeY1wICkES3d5oeuXT2R0odsN7faYp6b -wiTXj/HbpqbfRm9RpnHLPhsxZ2L3EVs0J9V5ToybWL0iEA1cJwzdMOWo010hOHQX -/uMftk87ay3bfWAfjH1MBcLrARYVmBSO0ZB3Ij/swjm4eTrwSSTilZHcYTSSjFR0 -77F9jAHiOH3BX2pfJLKOYheteSCtqx234LSWSE9mQxAGFiQD4eCcjsZGT44ameGP -uY4zbGneWK2gDqdkVBFpRGZPTBKnjix9xNRbxQA0MMHZmf4yzgeEtE7NCv82TWLx 
-p2NX5Ntqp66/K7nJ5rInieV+mhxNaMbBGN4zK1FGSxyO9z0M+Yo0FMT7MzUj8czx -Kselu7Cizv5Ta01BG2Yospb6p64KTrk5M0ScdMGTHPjgniQlQ/GbI4Kq3ywgsNw2 -TgOzfALU5nsaqocTvz6hdLubDuHAk5/XpGbKuxs74zD0M1mKB3IDVedzagMxbm+W -G+Oin6+Sx+31QrclTDsTBM8clq8cIqPQqwWyTBIjUtz9GVsnnB47ev1CI9sjgBPw -vFEVVJSmdz7QdFG9URQIOTfLHzSpMJ1ShC5VkLG631UAC9hWLbFJSXKAqWLXwPYY -EQRVzXR7z2FwefR7LFxckvzluFqrTJOVoSfupb7PcSNCupt2LQIDAQABo2MwYTAO -BgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUl0YhVyE1 -2jZVx/PxN3DlCPaTKbYwHwYDVR0jBBgwFoAUl0YhVyE12jZVx/PxN3DlCPaTKbYw -DQYJKoZIhvcNAQELBQADggIBAB3dAmB84DWn5ph76kTOZ0BP8pNuZtQ5iSas000E -PLuHIT839HEl2ku6q5aCgZG27dmxpGWX4m9kWaSW7mDKHyP7Rbr/jyTwyqkxf3kf -gLMtMrpkZ2CvuVnN35pJ06iCsfmYlIrM4LvgBBuZYLFGZdwIorJGnkSI6pN+VxbS -FXJfLkur1J1juONI5f6ELlgKn0Md/rcYkoZDSw6cMoYsYPXpSOqV7XAp8dUv/TW0 -V8/bhUiZucJvbI/NeJWsZCj9VrDDb8O+WVLhX4SPgPL0DTatdrOjteFkdjpY3H1P -XlZs5VVZV6Xf8YpmMIzUUmI4d7S+KNfKNsSbBfD4Fdvb8e80nR14SohWZ25g/4/I -i+GOvUKpMwpZQhISKvqxnUOOBZuZ2mKtVzazHbYNeS2WuOvyDEsMpZTGMKcmGS3t -TAZQMPH9WD25SxdfGbRqhFS0OE85og2WaMMolP3tLR9Ka0OWLpABEPs4poEL0L91 -09S5zvE/bw4cHjdx5RiHdRk/ULlepEU0rbDK5uUTdg8xFKmOLZTW1YVNcxVPS/Ky -Pu1svf0OnWZzsD2097+o4BGkxK51CUpjAEggpsadCwmKtODmzj7HPiY46SvepghJ -AwSQiumPv+i2tCqjI40cHLI5kqiPAlxAOXXUc0ECd97N4EOH1uS6SsNsEn/+KuYj -1oxx ------END CERTIFICATE----- - -# Issuer: CN=OpenTrust Root CA G2 O=OpenTrust -# Subject: CN=OpenTrust Root CA G2 O=OpenTrust -# Label: "OpenTrust Root CA G2" -# Serial: 1492012448042702096986875987676935573415441 -# MD5 Fingerprint: 57:24:b6:59:24:6b:ae:c8:fe:1c:0c:20:f2:c0:4e:eb -# SHA1 Fingerprint: 79:5f:88:60:c5:ab:7c:3d:92:e6:cb:f4:8d:e1:45:cd:11:ef:60:0b -# SHA256 Fingerprint: 27:99:58:29:fe:6a:75:15:c1:bf:e8:48:f9:c4:76:1d:b1:6c:22:59:29:25:7b:f4:0d:08:94:f2:9e:a8:ba:f2 ------BEGIN CERTIFICATE----- -MIIFbzCCA1egAwIBAgISESChaRu/vbm9UpaPI+hIvyYRMA0GCSqGSIb3DQEBDQUA -MEAxCzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9w -ZW5UcnVzdCBSb290IENBIEcyMB4XDTE0MDUyNjAwMDAwMFoXDTM4MDExNTAwMDAw 
-MFowQDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwU -T3BlblRydXN0IFJvb3QgQ0EgRzIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK -AoICAQDMtlelM5QQgTJT32F+D3Y5z1zCU3UdSXqWON2ic2rxb95eolq5cSG+Ntmh -/LzubKh8NBpxGuga2F8ORAbtp+Dz0mEL4DKiltE48MLaARf85KxP6O6JHnSrT78e -CbY2albz4e6WiWYkBuTNQjpK3eCasMSCRbP+yatcfD7J6xcvDH1urqWPyKwlCm/6 -1UWY0jUJ9gNDlP7ZvyCVeYCYitmJNbtRG6Q3ffyZO6v/v6wNj0OxmXsWEH4db0fE -FY8ElggGQgT4hNYdvJGmQr5J1WqIP7wtUdGejeBSzFfdNTVY27SPJIjki9/ca1TS -gSuyzpJLHB9G+h3Ykst2Z7UJmQnlrBcUVXDGPKBWCgOz3GIZ38i1MH/1PCZ1Eb3X -G7OHngevZXHloM8apwkQHZOJZlvoPGIytbU6bumFAYueQ4xncyhZW+vj3CzMpSZy -YhK05pyDRPZRpOLAeiRXyg6lPzq1O4vldu5w5pLeFlwoW5cZJ5L+epJUzpM5ChaH -vGOz9bGTXOBut9Dq+WIyiET7vycotjCVXRIouZW+j1MY5aIYFuJWpLIsEPUdN6b4 -t/bQWVyJ98LVtZR00dX+G7bw5tYee9I8y6jj9RjzIR9u701oBnstXW5DiabA+aC/ -gh7PU3+06yzbXfZqfUAkBXKJOAGTy3HCOV0GEfZvePg3DTmEJwIDAQABo2MwYTAO -BgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUajn6QiL3 -5okATV59M4PLuG53hq8wHwYDVR0jBBgwFoAUajn6QiL35okATV59M4PLuG53hq8w -DQYJKoZIhvcNAQENBQADggIBAJjLq0A85TMCl38th6aP1F5Kr7ge57tx+4BkJamz -Gj5oXScmp7oq4fBXgwpkTx4idBvpkF/wrM//T2h6OKQQbA2xx6R3gBi2oihEdqc0 -nXGEL8pZ0keImUEiyTCYYW49qKgFbdEfwFFEVn8nNQLdXpgKQuswv42hm1GqO+qT -RmTFAHneIWv2V6CG1wZy7HBGS4tz3aAhdT7cHcCP009zHIXZ/n9iyJVvttN7jLpT -wm+bREx50B1ws9efAvSyB7DH5fitIw6mVskpEndI2S9G/Tvw/HRwkqWOOAgfZDC2 -t0v7NqwQjqBSM2OdAzVWxWm9xiNaJ5T2pBL4LTM8oValX9YZ6e18CL13zSdkzJTa -TkZQh+D5wVOAHrut+0dSixv9ovneDiK3PTNZbNTe9ZUGMg1RGUFcPk8G97krgCf2 -o6p6fAbhQ8MTOWIaNr3gKC6UAuQpLmBVrkA9sHSSXvAgZJY/X0VdiLWK2gKgW0VU -3jg9CcCoSmVGFvyqv1ROTVu+OEO3KMqLM6oaJbolXCkvW0pujOotnCr2BXbgd5eA -iN1nE28daCSLT7d0geX0YJ96Vdc+N9oWaz53rK4YcJUIeSkDiv7BO7M/Gg+kO14f -WKGVyasvc0rQLW6aWQ9VGHgtPFGml4vmu7JwqkwR3v98KzfUetF3NI/n+UL3PIEM -S1IK ------END CERTIFICATE----- - -# Issuer: CN=OpenTrust Root CA G3 O=OpenTrust -# Subject: CN=OpenTrust Root CA G3 O=OpenTrust -# Label: "OpenTrust Root CA G3" -# Serial: 1492104908271485653071219941864171170455615 -# MD5 Fingerprint: 
21:37:b4:17:16:92:7b:67:46:70:a9:96:d7:a8:13:24 -# SHA1 Fingerprint: 6e:26:64:f3:56:bf:34:55:bf:d1:93:3f:7c:01:de:d8:13:da:8a:a6 -# SHA256 Fingerprint: b7:c3:62:31:70:6e:81:07:8c:36:7c:b8:96:19:8f:1e:32:08:dd:92:69:49:dd:8f:57:09:a4:10:f7:5b:62:92 ------BEGIN CERTIFICATE----- -MIICITCCAaagAwIBAgISESDm+Ez8JLC+BUCs2oMbNGA/MAoGCCqGSM49BAMDMEAx -CzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9wZW5U -cnVzdCBSb290IENBIEczMB4XDTE0MDUyNjAwMDAwMFoXDTM4MDExNTAwMDAwMFow -QDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwUT3Bl -blRydXN0IFJvb3QgQ0EgRzMwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARK7liuTcpm -3gY6oxH84Bjwbhy6LTAMidnW7ptzg6kjFYwvWYpa3RTqnVkrQ7cG7DK2uu5Bta1d -oYXM6h0UZqNnfkbilPPntlahFVmhTzeXuSIevRHr9LIfXsMUmuXZl5mjYzBhMA4G -A1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRHd8MUi2I5 -DMlv4VBN0BBY3JWIbTAfBgNVHSMEGDAWgBRHd8MUi2I5DMlv4VBN0BBY3JWIbTAK -BggqhkjOPQQDAwNpADBmAjEAj6jcnboMBBf6Fek9LykBl7+BFjNAk2z8+e2AcG+q -j9uEwov1NcoG3GRvaBbhj5G5AjEA2Euly8LQCGzpGPta3U1fJAuwACEl74+nBCZx -4nxp5V2a+EEfOzmTk51V6s2N8fvB ------END CERTIFICATE----- - # Issuer: CN=ISRG Root X1 O=Internet Security Research Group # Subject: CN=ISRG Root X1 O=Internet Security Research Group # Label: "ISRG Root X1" @@ -4398,3 +4235,43 @@ MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== -----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Label: "GlobalSign Root CA - R6" +# Serial: 1417766617973444989252670301619537 +# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae +# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1 +# SHA256 Fingerprint: 2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69 +-----BEGIN CERTIFICATE----- 
+MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg +MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx +MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET +MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI +xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k +ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD +aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw +LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw +1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX +k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2 +SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h +bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n +WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY +rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce +MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu +bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN +nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt +Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61 +55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj +vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf +cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz +oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp +nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs +pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v +JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R +8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4 +5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA= +-----END CERTIFICATE----- From 
c29ba07c57bb0dae0c48eb81fb3be255a21da1b3 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 25 Aug 2018 13:21:01 -0400 Subject: [PATCH 14/26] Swap to pypi version of pipdeptree Signed-off-by: Dan Ryan --- pipenv/vendor/pipdeptree.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pipenv/vendor/pipdeptree.py b/pipenv/vendor/pipdeptree.py index a2ea83fddc..7820aa5db4 100644 --- a/pipenv/vendor/pipdeptree.py +++ b/pipenv/vendor/pipdeptree.py @@ -24,7 +24,7 @@ # from graphviz import backend, Digraph -__version__ = '0.12.1' +__version__ = '0.13.0' flatten = chain.from_iterable From d5fe6a94556803d188389e48c1fef1d2a6640d05 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 25 Aug 2018 13:23:09 -0400 Subject: [PATCH 15/26] Update ptyprocess (better error handling) Signed-off-by: Dan Ryan --- pipenv/vendor/ptyprocess/__init__.py | 2 +- pipenv/vendor/ptyprocess/_fork_pty.py | 5 +++-- pipenv/vendor/ptyprocess/ptyprocess.py | 29 ++++++++++++++++---------- pipenv/vendor/ptyprocess/util.py | 6 +++++- 4 files changed, 27 insertions(+), 15 deletions(-) diff --git a/pipenv/vendor/ptyprocess/__init__.py b/pipenv/vendor/ptyprocess/__init__.py index fff62f3979..e633d0cdda 100644 --- a/pipenv/vendor/ptyprocess/__init__.py +++ b/pipenv/vendor/ptyprocess/__init__.py @@ -1,4 +1,4 @@ """Run a subprocess in a pseudo terminal""" from .ptyprocess import PtyProcess, PtyProcessUnicode, PtyProcessError -__version__ = '0.5.2' +__version__ = '0.6.0' diff --git a/pipenv/vendor/ptyprocess/_fork_pty.py b/pipenv/vendor/ptyprocess/_fork_pty.py index d00eb65e2f..a8d05fe5a3 100644 --- a/pipenv/vendor/ptyprocess/_fork_pty.py +++ b/pipenv/vendor/ptyprocess/_fork_pty.py @@ -4,6 +4,7 @@ import errno from pty import (STDIN_FILENO, STDOUT_FILENO, STDERR_FILENO, CHILD) +from .util import PtyProcessError def fork_pty(): '''This implements a substitute for the forkpty system call. 
This @@ -63,7 +64,7 @@ def pty_make_controlling_tty(tty_fd): try: fd = os.open("/dev/tty", os.O_RDWR | os.O_NOCTTY) os.close(fd) - raise ExceptionPexpect("OSError of errno.ENXIO should be raised.") + raise PtyProcessError("OSError of errno.ENXIO should be raised.") except OSError as err: if err.errno != errno.ENXIO: raise @@ -74,4 +75,4 @@ def pty_make_controlling_tty(tty_fd): # Verify we now have a controlling tty. fd = os.open("/dev/tty", os.O_WRONLY) - os.close(fd) \ No newline at end of file + os.close(fd) diff --git a/pipenv/vendor/ptyprocess/ptyprocess.py b/pipenv/vendor/ptyprocess/ptyprocess.py index cb3efae741..29b4e43b56 100644 --- a/pipenv/vendor/ptyprocess/ptyprocess.py +++ b/pipenv/vendor/ptyprocess/ptyprocess.py @@ -19,7 +19,7 @@ # Constants from pty import (STDIN_FILENO, CHILD) -from .util import which +from .util import which, PtyProcessError _platform = sys.platform.lower() @@ -60,11 +60,18 @@ def _make_eof_intr(): # inherit EOF and INTR definitions from controlling process. 
try: from termios import VEOF, VINTR - try: - fd = sys.__stdin__.fileno() - except ValueError: - # ValueError: I/O operation on closed file - fd = sys.__stdout__.fileno() + fd = None + for name in 'stdin', 'stdout': + stream = getattr(sys, '__%s__' % name, None) + if stream is None or not hasattr(stream, 'fileno'): + continue + try: + fd = stream.fileno() + except ValueError: + continue + if fd is None: + # no fd, raise ValueError to fallback on CEOF, CINTR + raise ValueError("No stream has a fileno") intr = ord(termios.tcgetattr(fd)[6][VINTR]) eof = ord(termios.tcgetattr(fd)[6][VEOF]) except (ImportError, OSError, IOError, ValueError, termios.error): @@ -81,14 +88,11 @@ def _make_eof_intr(): _INTR = _byte(intr) _EOF = _byte(eof) -class PtyProcessError(Exception): - """Generic error class for this package.""" - # setecho and setwinsize are pulled out here because on some platforms, we need # to do this from the child before we exec() def _setecho(fd, state): - errmsg = 'setecho() may not be called on this platform' + errmsg = 'setecho() may not be called on this platform (it may still be possible to enable/disable echo when spawning the child process)' try: attr = termios.tcgetattr(fd) @@ -251,7 +255,10 @@ def spawn( # Do not allow child to inherit open file descriptors from parent, # with the exception of the exec_err_pipe_write of the pipe - max_fd = resource.getrlimit(resource.RLIMIT_NOFILE)[0] + # Impose ceiling on max_fd: AIX bugfix for users with unlimited + # nofiles where resource.RLIMIT_NOFILE is 2^63-1 and os.closerange() + # occasionally raises out of range error + max_fd = min(1048576, resource.getrlimit(resource.RLIMIT_NOFILE)[0]) os.closerange(3, exec_err_pipe_write) os.closerange(exec_err_pipe_write+1, max_fd) diff --git a/pipenv/vendor/ptyprocess/util.py b/pipenv/vendor/ptyprocess/util.py index d7fb7b27ab..aadbd62c80 100644 --- a/pipenv/vendor/ptyprocess/util.py +++ b/pipenv/vendor/ptyprocess/util.py @@ -64,4 +64,8 @@ def _access_check(fn, mode): 
name = os.path.join(dir, thefile) if _access_check(name, mode): return name - return None \ No newline at end of file + return None + + +class PtyProcessError(Exception): + """Generic error class for this package.""" From a51e479a455c5727c30eb19efd65a6ac0582b5ac Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 25 Aug 2018 13:23:38 -0400 Subject: [PATCH 16/26] Update vendored requirements.txt Signed-off-by: Dan Ryan --- pipenv/vendor/vendor.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index 582ec3aa0d..170ccec86a 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -42,3 +42,4 @@ toml==0.9.4 cached-property==1.4.3 vistir==0.1.4 pip-shims==0.1.2 +ptyprocess==0.6.0 \ No newline at end of file From 124a463d9684618e49a7d4c4606defc9c3ea42ba Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 25 Aug 2018 13:24:19 -0400 Subject: [PATCH 17/26] Update safety Signed-off-by: Dan Ryan --- pipenv/patched/safety/__init__.py | 2 +- pipenv/patched/safety/formatter.py | 5 ++++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/pipenv/patched/safety/__init__.py b/pipenv/patched/safety/__init__.py index 5a7f697dae..56b497d286 100644 --- a/pipenv/patched/safety/__init__.py +++ b/pipenv/patched/safety/__init__.py @@ -2,4 +2,4 @@ __author__ = """pyup.io""" __email__ = 'support@pyup.io' -__version__ = '1.8.1' +__version__ = '1.8.4' diff --git a/pipenv/patched/safety/formatter.py b/pipenv/patched/safety/formatter.py index 950e910161..8bc57ec1be 100644 --- a/pipenv/patched/safety/formatter.py +++ b/pipenv/patched/safety/formatter.py @@ -113,7 +113,10 @@ def render(vulns, full, checked_packages, used_db): for chunk in [descr[i:i + 76] for i in range(0, len(descr), 76)]: for line in chunk.splitlines(): - table.append("│ {:76} │".format(line)) + try: + table.append("│ {:76} │".format(line.encode('utf-8'))) + except TypeError: + table.append("│ {:76} │".format(line)) # append the REPORT_SECTION only if 
this isn't the last entry if n + 1 < len(vulns): table.append(SheetReport.REPORT_SECTION) From cec086ee928783c819b72ea1749b03e69777cddd Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 25 Aug 2018 13:27:49 -0400 Subject: [PATCH 18/26] Update news and vendoring scripts Signed-off-by: Dan Ryan --- news/2639.vendor | 15 ++++++++++++++- pipenv/patched/piptools/utils.py | 2 +- tasks/vendoring/__init__.py | 13 ++++++++++--- 3 files changed, 25 insertions(+), 5 deletions(-) diff --git a/news/2639.vendor b/news/2639.vendor index 777350aacb..e0d5516d93 100644 --- a/news/2639.vendor +++ b/news/2639.vendor @@ -1 +1,14 @@ -Vendored new libraries ``vistir`` and ``pip-shims``. +- Vendored new libraries ``vistir`` and ``pip-shims``. + +- Update vendored libraries: + - ``scandir`` to ``1.9.0`` + - ``click-completion`` to ``0.4.1`` + - ``semver`` to ``2.8.1`` + - ``shellingham`` to ``1.2.4`` + - ``pytoml`` to ``0.1.18`` + - ``certifi`` to ``2018.8.13`` + - ``ptyprocess`` to ``0.6.0`` + - ``requirementslib`` to ``1.1.2`` + - ``pythonfinder`` to ``1.0.2`` + - ``pipdeptree`` to ``0.13.0`` + - ``python-dotenv`` to ``0.9.1`` diff --git a/pipenv/patched/piptools/utils.py b/pipenv/patched/piptools/utils.py index c77fd8ad76..2f389eecaf 100644 --- a/pipenv/patched/piptools/utils.py +++ b/pipenv/patched/piptools/utils.py @@ -341,7 +341,7 @@ def fs_str(string): _fs_encoding = sys.getfilesystemencoding() or sys.getdefaultencoding() -# Borrowed from Pew. 
+# Borrowed from pew to avoid importing pew which imports psutil # See https://github.com/berdario/pew/blob/master/pew/_utils.py#L82 @contextmanager def temp_environ(): diff --git a/tasks/vendoring/__init__.py b/tasks/vendoring/__init__.py index 4deb58b86b..c09fae7525 100644 --- a/tasks/vendoring/__init__.py +++ b/tasks/vendoring/__init__.py @@ -407,17 +407,24 @@ def rewrite_all_imports(ctx): @invoke.task -def download_licenses(ctx, vendor_dir=None, requirements_file='vendor.txt', package=None): +def download_licenses(ctx, vendor_dir=None, requirements_file='vendor.txt', package=None, only=False): log('Downloading licenses') if not vendor_dir: vendor_dir = _get_vendor_dir(ctx) requirements_file = vendor_dir / requirements_file requirement = "-r {0}".format(requirements_file.as_posix()) if package: - requirement = _ensure_package_in_requirements(ctx, requirements_file, package) + if not only: + # for packages we want to add to the requirements file + requirement = _ensure_package_in_requirements(ctx, requirements_file, package) + else: + # for packages we want to get the license for by themselves + requirement = package tmp_dir = vendor_dir / '__tmp__' + # TODO: Fix this whenever it gets sorted out (see https://github.com/pypa/pip/issues/5739) + ctx.run('pip install flit') ctx.run( - 'pip download --no-binary :all: --no-deps -d {0} {1}'.format( + 'pip download --no-binary :all: --no-build-isolation --no-deps -d {0} {1}'.format( tmp_dir.as_posix(), requirement, ) From 4589727d4621d335c256183e5a14eb3dce38b7ef Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 25 Aug 2018 13:58:03 -0400 Subject: [PATCH 19/26] Vendor modutil Signed-off-by: Dan Ryan --- pipenv/vendor/modutil.LICENSE | 29 +++++++ pipenv/vendor/modutil.py | 145 ++++++++++++++++++++++++++++++++++ pipenv/vendor/vendor.txt | 3 +- setup.py | 1 + 4 files changed, 177 insertions(+), 1 deletion(-) create mode 100644 pipenv/vendor/modutil.LICENSE create mode 100644 pipenv/vendor/modutil.py diff --git 
a/pipenv/vendor/modutil.LICENSE b/pipenv/vendor/modutil.LICENSE new file mode 100644 index 0000000000..f680f0717a --- /dev/null +++ b/pipenv/vendor/modutil.LICENSE @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2018, Brett Cannon +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/pipenv/vendor/modutil.py b/pipenv/vendor/modutil.py new file mode 100644 index 0000000000..d68f4851f5 --- /dev/null +++ b/pipenv/vendor/modutil.py @@ -0,0 +1,145 @@ +"""Help for working with modules.""" +__version__ = "2.0.0" + +import importlib +import importlib.machinery +import importlib.util +import types + + +STANDARD_MODULE_ATTRS = frozenset(['__all__', '__builtins__', '__cached__', + '__doc__', '__file__', '__loader__', + '__name__', '__package__', '__spec__', + '__getattr__']) + + +class ModuleAttributeError(AttributeError): + """An AttributeError specifically for modules. + + The module_name and 'attribute' attributes are set to strings representing + the module the attribute was searched on and the missing attribute, + respectively. + + """ + + def __init__(self, module_name, attribute): + self.module_name = module_name + self.attribute = attribute + super().__init__(f"module {module_name!r} has no attribute {attribute!r}") + + + +def lazy_import(module_name, to_import): + """Return the importing module and a callable for lazy importing. + + The module named by module_name represents the module performing the + import to help facilitate resolving relative imports. + + to_import is an iterable of the modules to be potentially imported (absolute + or relative). The `as` form of importing is also supported, + e.g. `pkg.mod as spam`. + + This function returns a tuple of two items. The first is the importer + module for easy reference within itself. The second item is a callable to be + set to `__getattr__`. 
+ """ + module = importlib.import_module(module_name) + import_mapping = {} + for name in to_import: + importing, _, binding = name.partition(' as ') + if not binding: + _, _, binding = importing.rpartition('.') + import_mapping[binding] = importing + + def __getattr__(name): + if name not in import_mapping: + raise ModuleAttributeError(module_name, name) + importing = import_mapping[name] + # imortlib.import_module() implicitly sets submodules on this module as + # appropriate for direct imports. + imported = importlib.import_module(importing, + module.__spec__.parent) + setattr(module, name, imported) + return imported + + return module, __getattr__ + + +def filtered_attrs(module, *, modules=False, private=False, dunder=False, + common=False): + """Return a collection of attributes on 'module'. + + If 'modules' is false then module instances are excluded. If 'private' is + false then attributes starting with, but not ending in, '_' will be + excluded. With 'dunder' set to false then attributes starting and ending + with '_' are left out. The 'common' argument controls whether attributes + found in STANDARD_MODULE_ATTRS are returned. + + """ + attr_names = set() + for name, value in module.__dict__.items(): + if not common and name in STANDARD_MODULE_ATTRS: + continue + if name.startswith('_'): + if name.endswith('_'): + if not dunder: + continue + elif not private: + continue + if not modules and isinstance(value, types.ModuleType): + continue + attr_names.add(name) + return frozenset(attr_names) + + +def calc___all__(module_name, **kwargs): + """Return a sorted list of defined attributes on 'module_name'. + + All values specified in **kwargs are directly passed to filtered_attrs(). + + """ + module = importlib.import_module(module_name) + return sorted(filtered_attrs(module, **kwargs)) + + +def filtered_dir(module_name, *, additions={}, **kwargs): + """Return a callable appropriate for __dir__(). 
+ + All values specified in **kwargs get passed directly to filtered_attrs(). + The 'additions' argument should be an iterable which is added to the final + results. + + """ + module = importlib.import_module(module_name) + + def __dir__(): + attr_names = set(filtered_attrs(module, **kwargs)) + attr_names.update(additions) + return sorted(attr_names) + + return __dir__ + + +def chained___getattr__(module_name, *getattrs): + """Create a callable which calls each __getattr__ in sequence. + + Any raised ModuleAttributeError which matches module_name and the + attribute being searched for will be caught and the search will continue. + All other exceptions will be allowed to propagate. If no callable + successfully returns a value, ModuleAttributeError will be raised. + + """ + def __getattr__(name): + """Call each __getattr__ function in sequence.""" + for getattr_ in getattrs: + try: + return getattr_(name) + except ModuleAttributeError as exc: + if exc.module_name == module_name and exc.attribute == name: + continue + else: + raise + else: + raise ModuleAttributeError(module_name, name) + + return __getattr__ diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index 170ccec86a..1ff20bd4f8 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -42,4 +42,5 @@ toml==0.9.4 cached-property==1.4.3 vistir==0.1.4 pip-shims==0.1.2 -ptyprocess==0.6.0 \ No newline at end of file +ptyprocess==0.6.0 +enum34==1.1.6 diff --git a/setup.py b/setup.py index 7c87a2d6bd..7e121fb7a5 100644 --- a/setup.py +++ b/setup.py @@ -29,6 +29,7 @@ "virtualenv", 'requests[security];python_version<"2.7"', 'ordereddict;python_version<"2.7"', + 'enum34; python_version<"3"' ] From abcdae3f347e3937a5101911abda9710699cf8f2 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 25 Aug 2018 14:11:23 -0400 Subject: [PATCH 20/26] Update vendor.txt and update certifi Signed-off-by: Dan Ryan --- pipenv/vendor/certifi/__init__.py | 2 +- pipenv/vendor/certifi/cacert.pem | 23 
+++++++++++++++++++++++ pipenv/vendor/vendor.txt | 7 ++++--- 3 files changed, 28 insertions(+), 4 deletions(-) diff --git a/pipenv/vendor/certifi/__init__.py b/pipenv/vendor/certifi/__init__.py index 7e1f370c96..aa329fbb4b 100644 --- a/pipenv/vendor/certifi/__init__.py +++ b/pipenv/vendor/certifi/__init__.py @@ -1,3 +1,3 @@ from .core import where, old_where -__version__ = "2018.08.13" +__version__ = "2018.08.24" diff --git a/pipenv/vendor/certifi/cacert.pem b/pipenv/vendor/certifi/cacert.pem index 24fe597fcb..85de024e71 100644 --- a/pipenv/vendor/certifi/cacert.pem +++ b/pipenv/vendor/certifi/cacert.pem @@ -4275,3 +4275,26 @@ JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R 8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4 5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA= -----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GC CA" +# Serial: 44084345621038548146064804565436152554 +# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23 +# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31 +# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d +-----BEGIN CERTIFICATE----- +MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw +CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91 +bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg +Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ +BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu +ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS +b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni +eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W +p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E 
+BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T +rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV +57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg +Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 +-----END CERTIFICATE----- diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index 1ff20bd4f8..2ae450272f 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -26,14 +26,15 @@ requests==2.19.1 chardet==3.0.4 idna==2.7 urllib3==1.23 - certifi==2018.8.13 + certifi==2018.8.24 requirementslib==1.1.2 attrs==18.1.0 distlib==0.2.7 packaging==17.1 pyparsing==2.2.0 pytoml==0.1.18 - requirements-parser==0.2.0 + plette==0.1.1 + tomlkit==0.4.2 shellingham==1.2.4 six==1.11.0 semver==2.8.1 @@ -42,5 +43,5 @@ toml==0.9.4 cached-property==1.4.3 vistir==0.1.4 pip-shims==0.1.2 + modutil==2.0.0 ptyprocess==0.6.0 -enum34==1.1.6 From 93903866f686c3c918695984e25f3b8e370a6383 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 25 Aug 2018 14:11:43 -0400 Subject: [PATCH 21/26] Add tomlkit for managing toml Signed-off-by: Dan Ryan --- pipenv/vendor/tomlkit/LICENSE | 20 + pipenv/vendor/tomlkit/__init__.py | 25 + pipenv/vendor/tomlkit/_compat.py | 177 ++++ pipenv/vendor/tomlkit/_utils.py | 87 ++ pipenv/vendor/tomlkit/api.py | 142 ++++ pipenv/vendor/tomlkit/container.py | 530 ++++++++++++ pipenv/vendor/tomlkit/exceptions.py | 139 +++ pipenv/vendor/tomlkit/items.py | 1002 ++++++++++++++++++++++ pipenv/vendor/tomlkit/parser.py | 1084 ++++++++++++++++++++++++ pipenv/vendor/tomlkit/toml_char.py | 47 + pipenv/vendor/tomlkit/toml_document.py | 7 + pipenv/vendor/tomlkit/toml_file.py | 24 + 12 files changed, 3284 insertions(+) create mode 100644 pipenv/vendor/tomlkit/LICENSE create mode 100644 pipenv/vendor/tomlkit/__init__.py create mode 100644 pipenv/vendor/tomlkit/_compat.py create mode 100644 pipenv/vendor/tomlkit/_utils.py create mode 100644 pipenv/vendor/tomlkit/api.py create mode 100644 
pipenv/vendor/tomlkit/container.py create mode 100644 pipenv/vendor/tomlkit/exceptions.py create mode 100644 pipenv/vendor/tomlkit/items.py create mode 100644 pipenv/vendor/tomlkit/parser.py create mode 100644 pipenv/vendor/tomlkit/toml_char.py create mode 100644 pipenv/vendor/tomlkit/toml_document.py create mode 100644 pipenv/vendor/tomlkit/toml_file.py diff --git a/pipenv/vendor/tomlkit/LICENSE b/pipenv/vendor/tomlkit/LICENSE new file mode 100644 index 0000000000..44cf2b30e6 --- /dev/null +++ b/pipenv/vendor/tomlkit/LICENSE @@ -0,0 +1,20 @@ +Copyright (c) 2018 Sébastien Eustace + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/pipenv/vendor/tomlkit/__init__.py b/pipenv/vendor/tomlkit/__init__.py new file mode 100644 index 0000000000..45de35fdb6 --- /dev/null +++ b/pipenv/vendor/tomlkit/__init__.py @@ -0,0 +1,25 @@ +from .api import aot +from .api import array +from .api import boolean +from .api import comment +from .api import date +from .api import datetime +from .api import document +from .api import dumps +from .api import float_ +from .api import inline_table +from .api import integer +from .api import item +from .api import key +from .api import key_value +from .api import loads +from .api import nl +from .api import parse +from .api import string +from .api import table +from .api import time +from .api import value +from .api import ws + + +__version__ = "0.4.2" diff --git a/pipenv/vendor/tomlkit/_compat.py b/pipenv/vendor/tomlkit/_compat.py new file mode 100644 index 0000000000..26296ff52f --- /dev/null +++ b/pipenv/vendor/tomlkit/_compat.py @@ -0,0 +1,177 @@ +import re +import sys + +try: + from datetime import timezone +except ImportError: + from datetime import datetime + from datetime import timedelta + from datetime import tzinfo + + class timezone(tzinfo): + __slots__ = "_offset", "_name" + + # Sentinel value to disallow None + _Omitted = object() + + def __new__(cls, offset, name=_Omitted): + if not isinstance(offset, timedelta): + raise TypeError("offset must be a timedelta") + if name is cls._Omitted: + if not offset: + return cls.utc + name = None + elif not isinstance(name, str): + raise TypeError("name must be a string") + if not cls._minoffset <= offset <= cls._maxoffset: + raise ValueError( + "offset must be a timedelta " + "strictly between -timedelta(hours=24) and " + "timedelta(hours=24)." 
+ ) + return cls._create(offset, name) + + @classmethod + def _create(cls, offset, name=None): + self = tzinfo.__new__(cls) + self._offset = offset + self._name = name + return self + + def __getinitargs__(self): + """pickle support""" + if self._name is None: + return (self._offset,) + return (self._offset, self._name) + + def __eq__(self, other): + if type(other) != timezone: + return False + return self._offset == other._offset + + def __hash__(self): + return hash(self._offset) + + def __repr__(self): + """Convert to formal string, for repr(). + + >>> tz = timezone.utc + >>> repr(tz) + 'datetime.timezone.utc' + >>> tz = timezone(timedelta(hours=-5), 'EST') + >>> repr(tz) + "datetime.timezone(datetime.timedelta(-1, 68400), 'EST')" + """ + if self is self.utc: + return "datetime.timezone.utc" + if self._name is None: + return "%s.%s(%r)" % ( + self.__class__.__module__, + self.__class__.__qualname__, + self._offset, + ) + return "%s.%s(%r, %r)" % ( + self.__class__.__module__, + self.__class__.__qualname__, + self._offset, + self._name, + ) + + def __str__(self): + return self.tzname(None) + + def utcoffset(self, dt): + if isinstance(dt, datetime) or dt is None: + return self._offset + raise TypeError( + "utcoffset() argument must be a datetime instance" " or None" + ) + + def tzname(self, dt): + if isinstance(dt, datetime) or dt is None: + if self._name is None: + return self._name_from_offset(self._offset) + return self._name + raise TypeError("tzname() argument must be a datetime instance" " or None") + + def dst(self, dt): + if isinstance(dt, datetime) or dt is None: + return None + raise TypeError("dst() argument must be a datetime instance" " or None") + + def fromutc(self, dt): + if isinstance(dt, datetime): + if dt.tzinfo is not self: + raise ValueError("fromutc: dt.tzinfo " "is not self") + return dt + self._offset + raise TypeError("fromutc() argument must be a datetime instance" " or None") + + _maxoffset = timedelta(hours=23, minutes=59) + _minoffset 
= -_maxoffset + + @staticmethod + def _name_from_offset(delta): + if not delta: + return "UTC" + if delta < timedelta(0): + sign = "-" + delta = -delta + else: + sign = "+" + hours, rest = divmod(delta, timedelta(hours=1)) + minutes, rest = divmod(rest, timedelta(minutes=1)) + seconds = rest.seconds + microseconds = rest.microseconds + if microseconds: + return ("UTC{}{:02d}:{:02d}:{:02d}.{:06d}").format( + sign, hours, minutes, seconds, microseconds + ) + if seconds: + return "UTC{}{:02d}:{:02d}:{:02d}".format(sign, hours, minutes, seconds) + return "UTC{}{:02d}:{:02d}".format(sign, hours, minutes) + + timezone.utc = timezone._create(timedelta(0)) + timezone.min = timezone._create(timezone._minoffset) + timezone.max = timezone._create(timezone._maxoffset) + + +PY2 = sys.version_info[0] == 2 +PY36 = sys.version_info >= (3, 6) + +if PY2: + unicode = unicode + chr = unichr +else: + unicode = str + chr = chr + + +def decode(string, encodings=None): + if not PY2 and not isinstance(string, bytes): + return string + + if PY2 and isinstance(string, unicode): + return string + + encodings = encodings or ["utf-8", "latin1", "ascii"] + + for encoding in encodings: + try: + return string.decode(encoding) + except (UnicodeEncodeError, UnicodeDecodeError): + pass + + return string.decode(encodings[0], errors="ignore") + + +_escaped = {"b": "\b", "t": "\t", "n": "\n", "f": "\f", "r": "\r", '"': '"', "\\": "\\"} +_escapable = re.compile(r"(? 
Union[datetime, date, time] + m = RFC_3339_DATETIME.match(string) + if m: + year = int(m.group(1)) + month = int(m.group(2)) + day = int(m.group(3)) + hour = int(m.group(4)) + minute = int(m.group(5)) + second = int(m.group(6)) + microsecond = 0 + + if m.group(7): + microsecond = int(("{:<06s}".format(m.group(8)))[:6]) + + dt = datetime(year, month, day, hour, minute, second, microsecond) + + if m.group(9): + # Timezone + tz = m.group(9) + if tz == "Z": + tzinfo = _utc + else: + sign = m.group(11)[0] + hour_offset, minute_offset = int(m.group(12)), int(m.group(13)) + offset = timedelta(seconds=hour_offset * 3600 + minute_offset * 60) + if sign == "-": + offset = -offset + + tzinfo = timezone( + offset, "{}{}:{}".format(sign, m.group(12), m.group(13)) + ) + + return datetime( + year, month, day, hour, minute, second, microsecond, tzinfo=tzinfo + ) + else: + return datetime(year, month, day, hour, minute, second, microsecond) + + m = RFC_3339_DATE.match(string) + if m: + year = int(m.group(1)) + month = int(m.group(2)) + day = int(m.group(3)) + + return date(year, month, day) + + m = RFC_3339_TIME.match(string) + if m: + hour = int(m.group(1)) + minute = int(m.group(2)) + second = int(m.group(3)) + microsecond = 0 + + if m.group(4): + microsecond = int(("{:<06s}".format(m.group(5)))[:6]) + + return time(hour, minute, second, microsecond) + + raise ValueError("Invalid RFC 339 string") diff --git a/pipenv/vendor/tomlkit/api.py b/pipenv/vendor/tomlkit/api.py new file mode 100644 index 0000000000..e541c20c17 --- /dev/null +++ b/pipenv/vendor/tomlkit/api.py @@ -0,0 +1,142 @@ +import datetime as _datetime + +from typing import Tuple + +from ._utils import parse_rfc3339 +from .container import Container +from .items import AoT +from .items import Comment +from .items import InlineTable +from .items import Item as _Item +from .items import Array +from .items import Bool +from .items import Key +from .items import Date +from .items import DateTime +from .items import Float 
+from .items import Table +from .items import Integer +from .items import Trivia +from .items import Whitespace +from .items import String +from .items import item +from .parser import Parser +from .toml_document import TOMLDocument as _TOMLDocument +from .items import Time + + +def loads(string): # type: (str) -> _TOMLDocument + """ + Parses a string into a TOMLDocument. + + Alias for parse(). + """ + return parse(string) + + +def dumps(data): # type: (_TOMLDocument) -> str + """ + Dumps a TOMLDocument into a string. + """ + if not isinstance(data, _TOMLDocument) and isinstance(data, dict): + data = item(data) + + return data.as_string() + + +def parse(string): # type: (str) -> _TOMLDocument + """ + Parses a string into a TOMLDocument. + """ + return Parser(string).parse() + + +def document(): # type: () -> _TOMLDocument + """ + Returns a new TOMLDocument instance. + """ + return _TOMLDocument() + + +# Items +def integer(raw): # type: (str) -> Integer + return item(int(raw)) + + +def float_(raw): # type: (str) -> Float + return item(float(raw)) + + +def boolean(raw): # type: (str) -> Bool + return item(raw == "true") + + +def string(raw): # type: (str) -> String + return item(raw) + + +def date(raw): # type: (str) -> Date + value = parse_rfc3339(raw) + if not isinstance(value, _datetime.date): + raise ValueError("date() only accepts date strings.") + + return item(value) + + +def time(raw): # type: (str) -> Time + value = parse_rfc3339(raw) + if not isinstance(value, _datetime.time): + raise ValueError("time() only accepts time strings.") + + return item(value) + + +def datetime(raw): # type: (str) -> DateTime + value = parse_rfc3339(raw) + if not isinstance(value, _datetime.datetime): + raise ValueError("datetime() only accepts datetime strings.") + + return item(value) + + +def array(raw=None): # type: (str) -> Array + if raw is None: + raw = "[]" + + return value(raw) + + +def table(): # type: () -> Table + return Table(Container(), Trivia(), False) + + +def 
inline_table(): # type: () -> InlineTable + return InlineTable(Container(), Trivia()) + + +def aot(): # type: () -> AoT + return AoT([]) + + +def key(k): # type: (str) -> Key + return Key(k) + + +def value(raw): # type: (str) -> _Item + return Parser(raw)._parse_value() + + +def key_value(src): # type: (str) -> Tuple[Key, _Item] + return Parser(src)._parse_key_value() + + +def ws(src): # type: (str) -> Whitespace + return Whitespace(src, fixed=True) + + +def nl(): # type: () -> Whitespace + return ws("\n") + + +def comment(string): # type: (str) -> Comment + return Comment(Trivia(comment_ws=" ", comment="# " + string)) diff --git a/pipenv/vendor/tomlkit/container.py b/pipenv/vendor/tomlkit/container.py new file mode 100644 index 0000000000..c1d2d7c67e --- /dev/null +++ b/pipenv/vendor/tomlkit/container.py @@ -0,0 +1,530 @@ +from __future__ import unicode_literals + +from typing import Any +from typing import Dict +from typing import Generator +from typing import List +from typing import Optional +from typing import Tuple +from typing import Union + +from ._compat import decode +from .exceptions import KeyAlreadyPresent +from .exceptions import NonExistentKey +from .items import AoT +from .items import Bool +from .items import Comment +from .items import Item +from .items import Key +from .items import Null +from .items import Table +from .items import Whitespace +from .items import item as _item + + +class Container(dict): + """ + A container for items within a TOMLDocument. 
+ """ + + def __init__(self, parsed=False): # type: (bool) -> None + self._map = {} # type: Dict[Key, int] + self._body = [] # type: List[Tuple[Optional[Key], Item]] + self._parsed = parsed + + @property + def body(self): # type: () -> List[Tuple[Optional[Key], Item]] + return self._body + + @property + def value(self): # type: () -> Dict[Any, Any] + d = {} + for k, v in self._body: + if k is None: + continue + + k = k.key + v = v.value + + if isinstance(v, Container): + v = v.value + + if k in d: + d[k].update(v) + else: + d[k] = v + + return d + + def parsing(self, parsing): # type: (bool) -> None + self._parsed = parsing + + for k, v in self._body: + if isinstance(v, Table): + v.value.parsing(parsing) + elif isinstance(v, AoT): + for t in v.body: + t.value.parsing(parsing) + + def add( + self, key, item=None + ): # type: (Union[Key, Item, str], Optional[Item]) -> Container + """ + Adds an item to the current Container. + """ + if item is None: + if not isinstance(key, (Comment, Whitespace)): + raise ValueError( + "Non comment/whitespace items must have an associated key" + ) + + key, item = None, key + + return self.append(key, item) + + def append(self, key, item): # type: (Union[Key, str], Item) -> Container + if not isinstance(key, Key) and key is not None: + key = Key(key) + + if not isinstance(item, Item): + item = _item(item) + + if isinstance(item, (AoT, Table)) and item.name is None: + item.name = key.key + + if ( + isinstance(item, Table) + and self._body + and not self._parsed + and not item.trivia.indent + ): + item.trivia.indent = "\n" + + if isinstance(item, AoT) and self._body and not self._parsed: + if item and "\n" not in item[0].trivia.indent: + item[0].trivia.indent = "\n" + item[0].trivia.indent + else: + self.append(None, Whitespace("\n")) + + if key is not None and key in self: + current = self._body[self._map[key]][1] + if isinstance(item, Table): + if not isinstance(current, (Table, AoT)): + raise KeyAlreadyPresent(key) + + if 
item.is_aot_element(): + # New AoT element found later on + # Adding it to the current AoT + if not isinstance(current, AoT): + current = AoT([current, item], parsed=self._parsed) + + self._replace(key, key, current) + else: + current.append(item) + + return self + elif current.is_super_table(): + if item.is_super_table(): + for k, v in item.value.body: + current.append(k, v) + + return self + else: + raise KeyAlreadyPresent(key) + elif isinstance(item, AoT): + if not isinstance(current, AoT): + raise KeyAlreadyPresent(key) + + for table in item.body: + current.append(table) + + return self + else: + raise KeyAlreadyPresent(key) + + is_table = isinstance(item, (Table, AoT)) + if key is not None and self._body and not self._parsed: + # If there is already at least one table in the current container + # and the given item is not a table, we need to find the last + # item that is not a table and insert after it + # If no such item exists, insert at the top of the table + key_after = None + idx = 0 + for k, v in self._body: + if isinstance(v, Null): + # This happens only after deletion + continue + + if isinstance(v, Whitespace) and not v.is_fixed(): + continue + + if not is_table and isinstance(v, (Table, AoT)): + break + + key_after = k or idx + idx += 1 + + if key_after is not None: + if isinstance(key_after, int): + if key_after + 1 < len(self._body) - 1: + return self._insert_at(key_after + 1, key, item) + else: + previous_item = self._body[-1][1] + if ( + not isinstance(previous_item, Whitespace) + and not is_table + and "\n" not in previous_item.trivia.trail + ): + previous_item.trivia.trail += "\n" + else: + return self._insert_after(key_after, key, item) + else: + return self._insert_at(0, key, item) + + self._map[key] = len(self._body) + + self._body.append((key, item)) + + if key is not None: + super(Container, self).__setitem__(key.key, item.value) + + return self + + def remove(self, key): # type: (Union[Key, str]) -> Container + if not isinstance(key,
Key): + key = Key(key) + + idx = self._map.pop(key, None) + if idx is None: + raise NonExistentKey(key) + + self._body[idx] = (None, Null()) + + super(Container, self).__delitem__(key.key) + + return self + + def _insert_after( + self, key, other_key, item + ): # type: (Union[str, Key], Union[str, Key], Union[Item, Any]) -> Container + if key is None: + raise ValueError("Key cannot be null in insert_after()") + + if key not in self: + raise NonExistentKey(key) + + if not isinstance(key, Key): + key = Key(key) + + if not isinstance(other_key, Key): + other_key = Key(other_key) + + item = _item(item) + + idx = self._map[key] + current_item = self._body[idx][1] + if "\n" not in current_item.trivia.trail: + current_item.trivia.trail += "\n" + + # Increment indices after the current index + for k, v in self._map.items(): + if v > idx: + self._map[k] = v + 1 + + self._map[other_key] = idx + 1 + self._body.insert(idx + 1, (other_key, item)) + + if key is not None: + super(Container, self).__setitem__(other_key.key, item.value) + + return self + + def _insert_at( + self, idx, key, item + ): # type: (int, Union[str, Key], Union[Item, Any]) -> Container + if idx > len(self._body) - 1: + raise ValueError("Unable to insert at position {}".format(idx)) + + if not isinstance(key, Key): + key = Key(key) + + item = _item(item) + + if idx > 0: + previous_item = self._body[idx - 1][1] + if ( + not isinstance(previous_item, Whitespace) + and not isinstance(item, (AoT, Table)) + and "\n" not in previous_item.trivia.trail + ): + previous_item.trivia.trail += "\n" + + # Increment indices after the current index + for k, v in self._map.items(): + if v >= idx: + self._map[k] = v + 1 + + self._map[key] = idx + self._body.insert(idx, (key, item)) + + if key is not None: + super(Container, self).__setitem__(key.key, item.value) + + return self + + def item(self, key): # type: (Union[Key, str]) -> Item + if not isinstance(key, Key): + key = Key(key) + + idx = self._map.get(key, None) + if idx 
is None: + raise NonExistentKey(key) + + return self._body[idx][1] + + def last_item(self): # type: () -> Optional[Item] + if self._body: + return self._body[-1][1] + + def as_string(self, prefix=None): # type: () -> str + s = "" + for k, v in self._body: + if k is not None: + if False: + key = k.as_string() + + for _k, _v in v.value.body: + if _k is None: + s += v.as_string() + elif isinstance(_v, Table): + s += v.as_string(prefix=key) + else: + _key = key + if prefix is not None: + _key = prefix + "." + _key + + s += "{}{}{}{}{}{}{}".format( + _v.trivia.indent, + _key + "." + decode(_k.as_string()), + _k.sep, + decode(_v.as_string()), + _v.trivia.comment_ws, + decode(_v.trivia.comment), + _v.trivia.trail, + ) + elif isinstance(v, Table): + s += self._render_table(k, v) + elif isinstance(v, AoT): + s += self._render_aot(k, v) + else: + s += self._render_simple_item(k, v) + else: + s += self._render_simple_item(k, v) + + return s + + def _render_table( + self, key, table, prefix=None + ): # (Key, Table, Optional[str]) -> str + cur = "" + + if table.display_name is not None: + _key = table.display_name + else: + _key = key.as_string() + + if prefix is not None: + _key = prefix + "." 
+ _key + + if not table.is_super_table(): + open_, close = "[", "]" + if table.is_aot_element(): + open_, close = "[[", "]]" + + cur += "{}{}{}{}{}{}{}".format( + table.trivia.indent, + open_, + decode(_key), + close, + table.trivia.comment_ws, + decode(table.trivia.comment), + table.trivia.trail, + ) + + for k, v in table.value.body: + if isinstance(v, Table): + if v.is_super_table(): + if k.is_dotted() and not key.is_dotted(): + # Dotted key inside table + cur += self._render_table(k, v) + else: + cur += self._render_table(k, v, prefix=_key) + else: + cur += self._render_table(k, v, prefix=_key) + elif isinstance(v, AoT): + cur += self._render_aot(k, v, prefix=_key) + else: + cur += self._render_simple_item( + k, v, prefix=_key if key.is_dotted() else None + ) + + return cur + + def _render_aot(self, key, aot, prefix=None): + _key = key.as_string() + if prefix is not None: + _key = prefix + "." + _key + + cur = "" + _key = decode(_key) + for table in aot.body: + cur += self._render_aot_table(table, prefix=_key) + + return cur + + def _render_aot_table(self, table, prefix=None): # (Table, Optional[str]) -> str + cur = "" + + _key = prefix or "" + + if not table.is_super_table(): + open_, close = "[[", "]]" + + cur += "{}{}{}{}{}{}{}".format( + table.trivia.indent, + open_, + decode(_key), + close, + table.trivia.comment_ws, + decode(table.trivia.comment), + table.trivia.trail, + ) + + for k, v in table.value.body: + if isinstance(v, Table): + if v.is_super_table(): + if k.is_dotted(): + # Dotted key inside table + cur += self._render_table(k, v) + else: + cur += self._render_table(k, v, prefix=_key) + else: + cur += self._render_table(k, v, prefix=_key) + elif isinstance(v, AoT): + cur += self._render_aot(k, v, prefix=_key) + else: + cur += self._render_simple_item(k, v) + + return cur + + def _render_simple_item(self, key, item, prefix=None): + if key is None: + return item.as_string() + + _key = key.as_string() + if prefix is not None: + _key = prefix + "." 
+ _key + + return "{}{}{}{}{}{}{}".format( + item.trivia.indent, + decode(_key), + key.sep, + decode(item.as_string()), + item.trivia.comment_ws, + decode(item.trivia.comment), + item.trivia.trail, + ) + + # Dictionary methods + + def keys(self): # type: () -> Generator[str] + for k, _ in self._body: + if k is None: + continue + + yield k.key + + def values(self): # type: () -> Generator[Item] + for k, v in self._body: + if k is None: + continue + + yield v.value + + def items(self): # type: () -> Generator[Item] + for k, v in self.value.items(): + if k is None: + continue + + yield k, v + + def __contains__(self, key): # type: (Union[Key, str]) -> bool + if not isinstance(key, Key): + key = Key(key) + + return key in self._map + + def __getitem__(self, key): # type: (Union[Key, str]) -> Item + if not isinstance(key, Key): + key = Key(key) + + idx = self._map.get(key, None) + if idx is None: + raise NonExistentKey(key) + + item = self._body[idx][1] + + return item.value + + def __setitem__(self, key, value): # type: (Union[Key, str], Any) -> None + if key is not None and key in self: + self._replace(key, key, value) + else: + self.append(key, value) + + def __delitem__(self, key): # type: (Union[Key, str]) -> None + self.remove(key) + + def _replace( + self, key, new_key, value + ): # type: (Union[Key, str], Union[Key, str], Item) -> None + if not isinstance(key, Key): + key = Key(key) + + if not isinstance(new_key, Key): + new_key = Key(new_key) + + idx = self._map.get(key, None) + if idx is None: + raise NonExistentKey(key) + + self._replace_at(idx, new_key, value) + + def _replace_at( + self, idx, new_key, value + ): # type: (int, Union[Key, str], Item) -> None + k, v = self._body[idx] + + self._map[new_key] = self._map.pop(k) + + value = _item(value) + + # Copying trivia + if not isinstance(value, (Whitespace, AoT)): + value.trivia.indent = v.trivia.indent + value.trivia.comment_ws = v.trivia.comment_ws + value.trivia.comment = v.trivia.comment + 
value.trivia.trail = v.trivia.trail + + self._body[idx] = (new_key, value) + + super(Container, self).__setitem__(new_key.key, value.value) + + def __str__(self): # type: () -> str + return str(self.value) + + def __eq__(self, other): # type: (Dict) -> bool + if not isinstance(other, dict): + return NotImplemented + + return self.value == other diff --git a/pipenv/vendor/tomlkit/exceptions.py b/pipenv/vendor/tomlkit/exceptions.py new file mode 100644 index 0000000000..8d48bf198f --- /dev/null +++ b/pipenv/vendor/tomlkit/exceptions.py @@ -0,0 +1,139 @@ +from typing import Optional + + +class TOMLKitError(Exception): + + pass + + +class ParseError(ValueError, TOMLKitError): + """ + This error occurs when the parser encounters a syntax error + in the TOML being parsed. The error references the line and + location within the line where the error was encountered. + """ + + def __init__( + self, line, col, message=None + ): # type: (int, int, Optional[str]) -> None + self._line = line + self._col = col + + if message is None: + message = "TOML parse error" + + super(ParseError, self).__init__( + "{} at line {} col {}".format(message, self._line, self._col) + ) + + +class MixedArrayTypesError(ParseError): + """ + An array was found that had two or more element types. + """ + + def __init__(self, line, col): # type: (int, int) -> None + message = "Mixed types found in array" + + super(MixedArrayTypesError, self).__init__(line, col, message=message) + + +class InvalidNumberOrDateError(ParseError): + """ + A numeric or date field was improperly specified. + """ + + def __init__(self, line, col): # type: (int, int) -> None + message = "Invalid number or date format" + + super(InvalidNumberOrDateError, self).__init__(line, col, message=message) + + +class UnexpectedCharError(ParseError): + """ + An unexpected character was found during parsing. 
+ """ + + def __init__(self, line, col, char): # type: (int, int, str) -> None + message = "Unexpected character: {}".format(repr(char)) + + super(UnexpectedCharError, self).__init__(line, col, message=message) + + +class EmptyKeyError(ParseError): + """ + An empty key was found during parsing. + """ + + def __init__(self, line, col): # type: (int, int) -> None + message = "Empty key" + + super(EmptyKeyError, self).__init__(line, col, message=message) + + +class EmptyTableNameError(ParseError): + """ + An empty table name was found during parsing. + """ + + def __init__(self, line, col): # type: (int, int) -> None + message = "Empty table name" + + super(EmptyTableNameError, self).__init__(line, col, message=message) + + +class InvalidCharInStringError(ParseError): + """ + The string being parsed contains an invalid character. + """ + + def __init__(self, line, col, char): # type: (int, int, str) -> None + message = "Invalid character '{}' in string".format(char) + + super(InvalidCharInStringError, self).__init__(line, col, message=message) + + +class UnexpectedEofError(ParseError): + """ + The TOML being parsed ended before the end of a statement. + """ + + def __init__(self, line, col): # type: (int, int) -> None + message = "Unexpected end of file" + + super(UnexpectedEofError, self).__init__(line, col, message=message) + + +class InternalParserError(ParseError): + """ + An error that indicates a bug in the parser. + """ + + def __init__(self, line, col, message=None): # type: (int, int) -> None + msg = "Internal parser error" + if message: + msg += " ({})".format(message) + + super(InternalParserError, self).__init__(line, col, message=msg) + + +class NonExistentKey(KeyError, TOMLKitError): + """ + A non-existent key was used. + """ + + def __init__(self, key): + message = 'Key "{}" does not exist.'.format(key) + + super(NonExistentKey, self).__init__(message) + + +class KeyAlreadyPresent(TOMLKitError): + """ + An already present key was used. 
+ """ + + def __init__(self, key): + message = 'Key "{}" already exists.'.format(key) + + super(KeyAlreadyPresent, self).__init__(message) diff --git a/pipenv/vendor/tomlkit/items.py b/pipenv/vendor/tomlkit/items.py new file mode 100644 index 0000000000..747dbd5090 --- /dev/null +++ b/pipenv/vendor/tomlkit/items.py @@ -0,0 +1,1002 @@ +from __future__ import unicode_literals + +import re +import string + +from datetime import date +from datetime import datetime +from datetime import time +from enum import Enum +from typing import Any +from typing import Dict +from typing import Generator +from typing import List +from typing import Optional +from typing import Union + + +from ._compat import PY2 +from ._compat import decode +from ._compat import unicode + + +def item(value, _parent=None): + from .container import Container + + if isinstance(value, Item): + return value + + if isinstance(value, bool): + return Bool(value, Trivia()) + elif isinstance(value, int): + return Integer(value, Trivia(), str(value)) + elif isinstance(value, float): + return Float(value, Trivia(), str(value)) + elif isinstance(value, dict): + val = Table(Container(), Trivia(), False) + for k, v in sorted(value.items(), key=lambda i: (isinstance(i[1], dict), i[0])): + val[k] = item(v, _parent=val) + + return val + elif isinstance(value, list): + if value and isinstance(value[0], dict): + a = AoT([]) + else: + a = Array([], Trivia()) + + for v in value: + if isinstance(v, dict): + table = Table(Container(), Trivia(), True) + + for k, _v in sorted( + v.items(), key=lambda i: (isinstance(i[1], dict), i[0]) + ): + i = item(_v) + if isinstance(table, InlineTable): + i.trivia.trail = "" + + table[k] = item(i) + + v = table + + a.append(v) + + return a + elif isinstance(value, (str, unicode)): + escaped = decode(value).replace('"', '\\"').replace("\\\\", "\\") + + return String(StringType.SLB, value, escaped, Trivia()) + elif isinstance(value, datetime): + return DateTime(value, Trivia(), 
value.isoformat().replace("+00:00", "Z")) + elif isinstance(value, date): + return Date(value, Trivia(), value.isoformat()) + elif isinstance(value, time): + return Time(value, Trivia(), value.isoformat()) + + raise ValueError("Invalid type {}".format(type(value))) + + +class StringType(Enum): + + SLB = '"' + MLB = '"""' + SLL = "'" + MLL = "'''" + + def is_literal(self): # type: () -> bool + return self in {StringType.SLL, StringType.MLL} + + def is_multiline(self): # type: () -> bool + return self in {StringType.MLB, StringType.MLL} + + +class Trivia: + """ + Trivia information (aka metadata). + """ + + def __init__( + self, indent=None, comment_ws=None, comment=None, trail=None + ): # type: (str, str, str, str) -> None + # Whitespace before a value. + self.indent = indent or "" + # Whitespace after a value, but before a comment. + self.comment_ws = comment_ws or "" + # Comment, starting with # character, or empty string if no comment. + self.comment = comment or "" + # Trailing newline. + if trail is None: + trail = "\n" + + self.trail = trail + + +class KeyType(Enum): + """ + The type of a Key. + + Keys can be bare (unquoted), or quoted using basic ("), or literal (') + quotes following the same escaping rules as single-line StringType. + """ + + Bare = "" + Basic = '"' + Literal = "'" + + +class Key: + """ + A key value. 
+ """ + + def __init__(self, k, t=None, sep=None, dotted=False): # type: (str) -> None + if t is None: + if any( + [c not in string.ascii_letters + string.digits + "-" + "_" for c in k] + ): + t = KeyType.Basic + else: + t = KeyType.Bare + + self.t = t + if sep is None: + sep = " = " + + self.sep = sep + self.key = k + self._dotted = dotted + + @property + def delimiter(self): # type: () -> str + return self.t.value + + def is_dotted(self): # type: () -> bool + return self._dotted + + def as_string(self): # type: () -> str + return "{}{}{}".format(self.delimiter, self.key, self.delimiter) + + def __hash__(self): # type: () -> int + return hash(self.key) + + def __eq__(self, other): # type: (Key) -> bool + return self.key == other.key + + def __str__(self): # type: () -> str + return self.as_string() + + def __repr__(self): # type: () -> str + return "".format(self.as_string()) + + +class Item(object): + """ + An item within a TOML document. + """ + + def __init__(self, trivia): # type: (Trivia) -> None + self._trivia = trivia + + @property + def trivia(self): # type: () -> Trivia + return self._trivia + + @property + def discriminant(self): # type: () -> int + raise NotImplementedError() + + def as_string(self): # type: () -> str + raise NotImplementedError() + + # Helpers + + def comment(self, comment): # type: (str) -> Item + if not comment.strip().startswith("#"): + comment = "# " + comment + + self._trivia.comment_ws = " " + self._trivia.comment = comment + + return self + + def indent(self, indent): # type: (int) -> Item + if self._trivia.indent.startswith("\n"): + self._trivia.indent = "\n" + " " * indent + else: + self._trivia.indent = " " * indent + + return self + + +class Whitespace(Item): + """ + A whitespace literal. 
+ """ + + def __init__(self, s, fixed=False): # type: (str, bool) -> None + self._s = s + self._fixed = fixed + + @property + def s(self): # type: () -> str + return self._s + + @property + def value(self): # type: () -> str + return self._s + + @property + def trivia(self): # type: () -> Trivia + raise RuntimeError("Called trivia on a Whitespace variant.") + + @property + def discriminant(self): # type: () -> int + return 0 + + def is_fixed(self): # type: () -> bool + return self._fixed + + def as_string(self): # type: () -> str + return self._s + + def __repr__(self): # type: () -> str + return "<{} {}>".format(self.__class__.__name__, repr(self._s)) + + +class Comment(Item): + """ + A comment literal. + """ + + @property + def discriminant(self): # type: () -> int + return 1 + + def as_string(self): # type: () -> str + return "{}{}{}".format( + self._trivia.indent, decode(self._trivia.comment), self._trivia.trail + ) + + def __str__(self): # type: () -> str + return "{}{}".format(self._trivia.indent, decode(self._trivia.comment)) + + +class Integer(int, Item): + """ + An integer literal. 
+ """ + + def __new__(cls, value, trivia, raw): # type: (int, Trivia, str) -> Integer + return super(Integer, cls).__new__(cls, value) + + def __init__(self, _, trivia, raw): # type: (int, Trivia, str) -> None + super(Integer, self).__init__(trivia) + + self._raw = raw + self._sign = False + + if re.match("^[+\-]\d+$", raw): + self._sign = True + + @property + def discriminant(self): # type: () -> int + return 2 + + @property + def value(self): # type: () -> int + return self + + def as_string(self): # type: () -> str + return self._raw + + def __add__(self, other): + result = super(Integer, self).__add__(other) + + return self._new(result) + + def __radd__(self, other): + result = super(Integer, self).__radd__(other) + + if isinstance(other, Integer): + return self._new(result) + + return result + + def __sub__(self, other): + result = super(Integer, self).__sub__(other) + + return self._new(result) + + def __rsub__(self, other): + result = super(Integer, self).__rsub__(other) + + if isinstance(other, Integer): + return self._new(result) + + return result + + def _new(self, result): + raw = str(result) + + if self._sign: + sign = "+" if result >= 0 else "-" + raw = sign + raw + + return Integer(result, self._trivia, raw) + + +class Float(float, Item): + """ + A float literal. 
+ """ + + def __new__(cls, value, trivia, raw): # type: (float, Trivia, str) -> Integer + return super(Float, cls).__new__(cls, value) + + def __init__(self, _, trivia, raw): # type: (float, Trivia, str) -> None + super(Float, self).__init__(trivia) + + self._raw = raw + self._sign = False + + if re.match("^[+\-].+$", raw): + self._sign = True + + @property + def discriminant(self): # type: () -> int + return 3 + + @property + def value(self): # type: () -> float + return self + + def as_string(self): # type: () -> str + return self._raw + + def __add__(self, other): + result = super(Float, self).__add__(other) + + return self._new(result) + + def __radd__(self, other): + result = super(Float, self).__radd__(other) + + if isinstance(other, Float): + return self._new(result) + + return result + + def __sub__(self, other): + result = super(Float, self).__sub__(other) + + return self._new(result) + + def __rsub__(self, other): + result = super(Float, self).__rsub__(other) + + if isinstance(other, Float): + return self._new(result) + + return result + + def _new(self, result): + raw = str(result) + + if self._sign: + sign = "+" if result >= 0 else "-" + raw = sign + raw + + return Float(result, self._trivia, raw) + + +class Bool(Item): + """ + A boolean literal. + """ + + def __init__(self, value, trivia): # type: (float, Trivia) -> None + super(Bool, self).__init__(trivia) + + self._value = value + + @property + def discriminant(self): # type: () -> int + return 4 + + @property + def value(self): # type: () -> bool + return self._value + + def as_string(self): # type: () -> str + return str(self._value).lower() + + +class DateTime(datetime, Item): + """ + A datetime literal. + """ + + def __new__(cls, value, *_): # type: (datetime, ...) 
-> datetime + return datetime.__new__( + cls, + value.year, + value.month, + value.day, + value.hour, + value.minute, + value.second, + value.microsecond, + tzinfo=value.tzinfo, + ) + + def __init__(self, _, trivia, raw): # type: (datetime, Trivia, str) -> None + super(DateTime, self).__init__(trivia) + + self._raw = raw + + @property + def discriminant(self): # type: () -> int + return 5 + + @property + def value(self): # type: () -> datetime + return self + + def as_string(self): # type: () -> str + return self._raw + + def __add__(self, other): + result = super(DateTime, self).__add__(other) + + return self._new(result) + + def __sub__(self, other): + result = super(DateTime, self).__sub__(other) + + return self._new(result) + + def _new(self, result): + raw = result.isoformat() + + return DateTime(result, self._trivia, raw) + + +class Date(date, Item): + """ + A date literal. + """ + + def __new__(cls, value, *_): # type: (date, ...) -> date + return date.__new__(cls, value.year, value.month, value.day) + + def __init__(self, _, trivia, raw): # type: (date, Trivia, str) -> None + super(Date, self).__init__(trivia) + + self._raw = raw + + @property + def discriminant(self): # type: () -> int + return 6 + + @property + def value(self): # type: () -> date + return self + + def as_string(self): # type: () -> str + return self._raw + + def __add__(self, other): + result = super(Date, self).__add__(other) + + return self._new(result) + + def __sub__(self, other): + result = super(Date, self).__sub__(other) + + return self._new(result) + + def _new(self, result): + raw = result.isoformat() + + return Date(result, self._trivia, raw) + + +class Time(time, Item): + """ + A time literal. + """ + + def __new__(cls, value, *_): # type: (time, ...) 
-> time + return time.__new__( + cls, value.hour, value.minute, value.second, value.microsecond + ) + + def __init__(self, _, trivia, raw): # type: (time, Trivia, str) -> None + super(Time, self).__init__(trivia) + + self._raw = raw + + @property + def discriminant(self): # type: () -> int + return 7 + + @property + def value(self): # type: () -> time + return self + + def as_string(self): # type: () -> str + return self._raw + + +class Array(Item, list): + """ + An array literal + """ + + def __init__(self, value, trivia): # type: (list, Trivia) -> None + super(Array, self).__init__(trivia) + + list.__init__( + self, [v.value for v in value if not isinstance(v, (Whitespace, Comment))] + ) + + self._value = value + + @property + def discriminant(self): # type: () -> int + return 8 + + @property + def value(self): # type: () -> list + return self + + def is_homogeneous(self): # type: () -> bool + if not self: + return True + + discriminants = [ + i.discriminant + for i in self._value + if not isinstance(i, (Whitespace, Comment)) + ] + + return len(set(discriminants)) == 1 + + def as_string(self): # type: () -> str + return "[{}]".format("".join(v.as_string() for v in self._value)) + + def append(self, _item): # type: () -> None + if self._value: + self._value.append(Whitespace(", ")) + + it = item(_item) + super(Array, self).append(it.value) + + self._value.append(it) + + if not self.is_homogeneous(): + raise ValueError("Array has mixed types elements") + + if not PY2: + + def clear(self): + super(Array, self).clear() + + self._value.clear() + + def __iadd__(self, other): # type: (list) -> Array + if not isinstance(other, list): + return NotImplemented + + for v in other: + self.append(v) + + return self + + def __delitem__(self, key): + super(Array, self).__delitem__(key) + + j = 0 if key >= 0 else -1 + for i, v in enumerate(self._value if key >= 0 else reversed(self._value)): + if key < 0: + i = -i - 1 + + if isinstance(v, (Comment, Whitespace)): + continue + + if 
j == key: + del self._value[i] + + if i < 0 and abs(i) > len(self._value): + i += 1 + + if i < len(self._value) - 1 and isinstance(self._value[i], Whitespace): + del self._value[i] + + break + + j += 1 if key >= 0 else -1 + + def __str__(self): + return str( + [v.value for v in self._value if not isinstance(v, (Whitespace, Comment))] + ) + + def __repr__(self): + return str(self) + + +class Table(Item, dict): + """ + A table literal. + """ + + def __init__( + self, + value, + trivia, + is_aot_element, + is_super_table=False, + name=None, + display_name=None, + ): # type: (tomlkit.container.Container, Trivia, bool) -> None + super(Table, self).__init__(trivia) + + self.name = name + self.display_name = display_name + self._value = value + self._is_aot_element = is_aot_element + self._is_super_table = is_super_table + + for k, v in self._value.body: + if k is not None: + super(Table, self).__setitem__(k.key, v) + + @property + def value(self): # type: () -> tomlkit.container.Container + return self._value + + @property + def discriminant(self): # type: () -> int + return 9 + + @property + def value(self): # type: () -> tomlkit.container.Container + return self._value + + def add(self, key, item=None): # type: (Union[Key, Item, str], Any) -> Item + if item is None: + if not isinstance(key, (Comment, Whitespace)): + raise ValueError( + "Non comment/whitespace items must have an associated key" + ) + + key, item = None, key + + return self.append(key, item) + + def append(self, key, _item): # type: (Union[Key, str], Any) -> Table + """ + Appends a (key, item) to the table. 
+ """ + if not isinstance(_item, Item): + _item = item(_item) + + self._value.append(key, _item) + + if isinstance(key, Key): + key = key.key + + if key is not None: + super(Table, self).__setitem__(key, _item) + + m = re.match("(?s)^[^ ]*([ ]+).*$", self._trivia.indent) + if not m: + return self + + indent = m.group(1) + + if not isinstance(_item, Whitespace): + m = re.match("(?s)^([^ ]*)(.*)$", _item.trivia.indent) + if not m: + _item.trivia.indent = indent + else: + _item.trivia.indent = m.group(1) + indent + m.group(2) + + return self + + def remove(self, key): # type: (Union[Key, str]) -> Table + self._value.remove(key) + + if isinstance(key, Key): + key = key.key + + if key is not None: + super(Table, self).__delitem__(key) + + return self + + def is_aot_element(self): # type: () -> bool + return self._is_aot_element + + def is_super_table(self): # type: () -> bool + return self._is_super_table + + def as_string(self, prefix=None): # type: () -> str + return self._value.as_string(prefix=prefix) + + # Helpers + + def indent(self, indent): # type: (int) -> Table + super(Table, self).indent(indent) + + m = re.match("(?s)^[^ ]*([ ]+).*$", self._trivia.indent) + if not m: + indent = "" + else: + indent = m.group(1) + + for k, item in self._value.body: + if not isinstance(item, Whitespace): + item.trivia.indent = indent + item.trivia.indent + + return self + + def keys(self): # type: () -> Generator[str] + for k in self._value.keys(): + yield k + + def values(self): # type: () -> Generator[Item] + for v in self._value.values(): + yield v + + def items(self): # type: () -> Generator[Item] + for k, v in self._value.items(): + yield k, v + + def __contains__(self, key): # type: (Union[Key, str]) -> bool + return key in self._value + + def __getitem__(self, key): # type: (Union[Key, str]) -> Item + return self._value[key] + + def __setitem__(self, key, value): # type: (Union[Key, str], Any) -> None + self.append(key, value) + + def __delitem__(self, key): # type: 
(Union[Key, str]) -> None + self.remove(key) + + def __repr__(self): + return super(Table, self).__repr__() + + +class InlineTable(Item, dict): + """ + An inline table literal. + """ + + def __init__( + self, value, trivia + ): # type: (tomlkit.container.Container, Trivia) -> None + super(InlineTable, self).__init__(trivia) + + self._value = value + + for k, v in self._value.body: + if k is not None: + super(InlineTable, self).__setitem__(k.key, v) + + @property + def discriminant(self): # type: () -> int + return 10 + + @property + def value(self): # type: () -> Dict + return self._value + + def append(self, key, _item): # type: (Union[Key, str], Any) -> InlineTable + """ + Appends a (key, item) to the table. + """ + if not isinstance(_item, Item): + _item = item(_item) + + if not isinstance(_item, (Whitespace, Comment)): + if not _item.trivia.indent and len(self._value) > 0: + _item.trivia.indent = " " + + self._value.append(key, _item) + + if isinstance(key, Key): + key = key.key + + if key is not None: + super(InlineTable, self).__setitem__(key, _item) + + return self + + def remove(self, key): # type: (Union[Key, str]) -> InlineTable + self._value.remove(key) + + if isinstance(key, Key): + key = key.key + + if key is not None: + super(InlineTable, self).__delitem__(key) + + return self + + def as_string(self): # type: () -> str + buf = "{" + for i, (k, v) in enumerate(self._value.body): + if k is None: + if i == len(self._value.body) - 1: + buf = buf.rstrip(",") + + buf += v.as_string() + + continue + + buf += "{}{}{}{}{}{}".format( + v.trivia.indent, + k.as_string(), + k.sep, + v.as_string(), + v.trivia.comment, + v.trivia.trail.replace("\n", ""), + ) + + if i != len(self._value.body) - 1: + buf += "," + + buf += "}" + + return buf + + def keys(self): # type: () -> Generator[str] + for k in self._value.keys(): + yield k + + def values(self): # type: () -> Generator[Item] + for v in self._value.values(): + yield v + + def items(self): # type: () -> 
Generator[Item] + for k, v in self._value.items(): + yield k, v + + def __contains__(self, key): # type: (Union[Key, str]) -> bool + return key in self._value + + def __getitem__(self, key): # type: (Union[Key, str]) -> Item + return self._value[key] + + def __setitem__(self, key, value): # type: (Union[Key, str], Any) -> None + self.append(key, value) + + def __delitem__(self, key): # type: (Union[Key, str]) -> None + self.remove(key) + + def __repr__(self): + return super(InlineTable, self).__repr__() + + +class String(unicode, Item): + """ + A string literal. + """ + + def __new__(cls, t, value, original, trivia): + return super(String, cls).__new__(cls, value) + + def __init__( + self, t, _, original, trivia + ): # type: (StringType, str, original, Trivia) -> None + super(String, self).__init__(trivia) + + self._t = t + self._original = original + + @property + def discriminant(self): # type: () -> int + return 11 + + @property + def value(self): # type: () -> str + return self + + def as_string(self): # type: () -> str + return "{}{}{}".format(self._t.value, decode(self._original), self._t.value) + + def __add__(self, other): + result = super(String, self).__add__(other) + + return self._new(result) + + def __sub__(self, other): + result = super(String, self).__sub__(other) + + return self._new(result) + + def _new(self, result): + return String(self._t, result, result, self._trivia) + + +class AoT(Item, list): + """ + An array of table literal + """ + + def __init__( + self, body, name=None, parsed=False + ): # type: (List[Table], Optional[str]) -> None + self.name = None + self._body = [] + self._parsed = parsed + + super(AoT, self).__init__(Trivia(trail="")) + + for table in body: + self.append(table) + + @property + def body(self): # type: () -> List[Table] + return self._body + + @property + def discriminant(self): # type: () -> int + return 12 + + @property + def value(self): # type: () -> List[Dict[Any, Any]] + return [v.value for v in self._body] + + 
def append(self, table): # type: (Table) -> Table + m = re.match("(?s)^[^ ]*([ ]+).*$", self._trivia.indent) + if m: + indent = m.group(1) + + m = re.match("(?s)^([^ ]*)(.*)$", table.trivia.indent) + if not m: + table.trivia.indent = indent + else: + table.trivia.indent = m.group(1) + indent + m.group(2) + + if not self._parsed and "\n" not in table.trivia.indent and self._body: + table.trivia.indent = "\n" + table.trivia.indent + + self._body.append(table) + + super(AoT, self).append(table) + + return table + + def as_string(self): # type: () -> str + b = "" + for table in self._body: + b += table.as_string(prefix=self.name) + + return b + + def __repr__(self): # type: () -> str + return "".format(self.value) + + +class Null(Item): + """ + A null item. + """ + + def __init__(self): # type: () -> None + pass + + @property + def discriminant(self): # type: () -> int + return -1 + + @property + def value(self): # type: () -> None + return None + + def as_string(self): # type: () -> str + return "" diff --git a/pipenv/vendor/tomlkit/parser.py b/pipenv/vendor/tomlkit/parser.py new file mode 100644 index 0000000000..b55a3fe442 --- /dev/null +++ b/pipenv/vendor/tomlkit/parser.py @@ -0,0 +1,1084 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +import datetime +import itertools +import re +import string + +from copy import copy +from typing import Iterator +from typing import Optional +from typing import Tuple +from typing import Union + +from ._compat import PY2 +from ._compat import chr +from ._compat import decode +from ._utils import parse_rfc3339 +from .container import Container +from .exceptions import EmptyKeyError +from .exceptions import EmptyTableNameError +from .exceptions import InternalParserError +from .exceptions import InvalidCharInStringError +from .exceptions import InvalidNumberOrDateError +from .exceptions import MixedArrayTypesError +from .exceptions import ParseError +from .exceptions import UnexpectedCharError +from 
.exceptions import UnexpectedEofError +from .items import AoT +from .items import Array +from .items import Bool +from .items import Comment +from .items import Date +from .items import DateTime +from .items import Float +from .items import InlineTable +from .items import Integer +from .items import Key +from .items import KeyType +from .items import Null +from .items import String +from .items import StringType +from .items import Table +from .items import Time +from .items import Trivia +from .items import Whitespace +from .toml_char import TOMLChar +from .toml_document import TOMLDocument + + +class Parser: + """ + Parser for TOML documents. + """ + + def __init__(self, string): # type: (str) -> None + # Input to parse + self._src = decode(string) # type: str + # Iterator used for getting characters from src. + self._chars = iter([(i, TOMLChar(c)) for i, c in enumerate(self._src)]) + # Current byte offset into src. + self._idx = 0 + # Current character + self._current = TOMLChar("") # type: TOMLChar + # Index into src between which and idx slices will be extracted + self._marker = 0 + + self._aot_stack = [] + + self.inc() + + def extract(self): # type: () -> str + """ + Extracts the value between marker and index + """ + if self.end(): + return self._src[self._marker :] + else: + return self._src[self._marker : self._idx] + + def inc(self): # type: () -> bool + """ + Increments the parser if the end of the input has not been reached. + Returns whether or not it was able to advance. + """ + try: + self._idx, self._current = next(self._chars) + + return True + except StopIteration: + self._idx = len(self._src) + self._current = TOMLChar("\0") + + return False + + def inc_n(self, n): # type: (int) -> bool + """ + Increments the parser by n characters + if the end of the input has not been reached. 
+ """ + for _ in range(n): + if not self.inc(): + return False + + return True + + def end(self): # type: () -> bool + """ + Returns True if the parser has reached the end of the input. + """ + return self._idx >= len(self._src) or self._current == "\0" + + def mark(self): # type: () -> None + """ + Sets the marker to the index's current position + """ + self._marker = self._idx + + def parse(self): # type: () -> TOMLDocument + body = TOMLDocument(True) + + # Take all keyvals outside of tables/AoT's. + while not self.end(): + # Break out if a table is found + if self._current == "[": + break + + # Otherwise, take and append one KV + item = self._parse_item() + if not item: + break + + key, value = item + if key is not None and key.is_dotted(): + # We actually have a table + self._handle_dotted_key(body, key, value) + elif not self._merge_ws(value, body): + body.append(key, value) + + self.mark() + + while not self.end(): + key, value = self._parse_table() + if isinstance(value, Table) and value.is_aot_element(): + # This is just the first table in an AoT. Parse the rest of the array + # along with it. + value = self._parse_aot(value, key.key) + + body.append(key, value) + + body.parsing(False) + + return body + + def _merge_ws(self, item, container): # type: (Item, Container) -> bool + """ + Merges the given Item with the last one currently in the given Container if + both are whitespace items. + + Returns True if the items were merged. + """ + last = container.last_item() + if not last: + return False + + if not isinstance(item, Whitespace) or not isinstance(last, Whitespace): + return False + + start = self._idx - (len(last.s) + len(item.s)) + container.body[-1] = ( + container.body[-1][0], + Whitespace(self._src[start : self._idx]), + ) + + return True + + def parse_error(self, kind=ParseError, args=None): # type: () -> None + """ + Creates a generic "parse error" at the current position. 
+ """ + line, col = self._to_linecol(self._idx) + + if args: + return kind(line, col, *args) + else: + return kind(line, col) + + def _to_linecol(self, offset): # type: (int) -> Tuple[int, int] + cur = 0 + for i, line in enumerate(self._src.splitlines()): + if cur + len(line) + 1 > offset: + return (i + 1, offset - cur) + + cur += len(line) + 1 + + return len(self._src.splitlines()), 0 + + def _is_child(self, parent, child): # type: (str, str) -> bool + """ + Returns whether a key is strictly a child of another key. + AoT siblings are not considered children of one another. + """ + parent_parts = tuple(self._split_table_name(parent)) + child_parts = tuple(self._split_table_name(child)) + + if parent_parts == child_parts: + return False + + return parent_parts == child_parts[: len(parent_parts)] + + def _split_table_name(self, name): # type: (str) -> Generator[Key] + in_name = False + current = "" + t = KeyType.Bare + for c in name: + c = TOMLChar(c) + + if c == ".": + if in_name: + current += c + continue + + if not current: + raise self.parse_error() + + yield Key(current, t=t, sep="") + + current = "" + t = KeyType.Bare + continue + elif c in {"'", '"'}: + if in_name: + if t == KeyType.Literal and c == '"': + current += c + continue + + if c != t.value: + raise self.parse_error() + + in_name = False + else: + in_name = True + t = KeyType.Literal if c == "'" else KeyType.Basic + + continue + elif in_name or c.is_bare_key_char(): + current += c + else: + raise self.parse_error() + + if current: + yield Key(current, t=t, sep="") + + def _parse_item(self): # type: () -> Optional[Tuple[Optional[Key], Item]] + """ + Attempts to parse the next item and returns it, along with its key + if the item is value-like. + """ + self.mark() + saved_idx = self._save_idx() + + while True: + c = self._current + if c == "\n": + # Found a newline; Return all whitespace found up to this point. 
+ self.inc() + + return (None, Whitespace(self.extract())) + elif c in " \t\r": + # Skip whitespace. + if not self.inc(): + return (None, Whitespace(self.extract())) + elif c == "#": + # Found a comment, parse it + indent = self.extract() + cws, comment, trail = self._parse_comment_trail() + + return (None, Comment(Trivia(indent, cws, comment, trail))) + elif c == "[": + # Found a table, delegate to the calling function. + return + else: + # Begining of a KV pair. + # Return to beginning of whitespace so it gets included + # as indentation for the KV about to be parsed. + self._restore_idx(*saved_idx) + key, value = self._parse_key_value(True) + + return key, value + + def _save_idx(self): # type: () -> Tuple[Iterator, int, str] + if PY2: + return itertools.tee(self._chars)[1], self._idx, self._current + + return copy(self._chars), self._idx, self._current + + def _restore_idx(self, chars, idx, current): # type: (Iterator, int, str) -> None + if PY2: + self._chars = iter( + [(i + idx, TOMLChar(c)) for i, c in enumerate(self._src[idx:])] + ) + next(self._chars) + else: + self._chars = chars + + self._idx = idx + self._current = current + + def _parse_comment_trail(self): # type: () -> Tuple[str, str, str] + """ + Returns (comment_ws, comment, trail) + If there is no comment, comment_ws and comment will + simply be empty. 
+ """ + if self.end(): + return "", "", "" + + comment = "" + comment_ws = "" + self.mark() + + while True: + c = self._current + + if c == "\n": + break + elif c == "#": + comment_ws = self.extract() + + self.mark() + self.inc() # Skip # + + # The comment itself + while not self.end() and not self._current.is_nl() and self.inc(): + pass + + comment = self.extract() + self.mark() + + break + elif c in " \t\r,": + self.inc() + else: + raise self.parse_error(UnexpectedCharError, (c)) + + if self.end(): + break + + while self._current.is_spaces() and self.inc(): + pass + + if self._current == "\r": + self.inc() + + if self._current == "\n": + self.inc() + + trail = "" + if self._idx != self._marker or self._current.is_ws(): + trail = self.extract() + + return comment_ws, comment, trail + + def _parse_key_value( + self, parse_comment=False, inline=True + ): # type: (bool, bool) -> (Key, Item) + # Leading indent + self.mark() + + while self._current.is_spaces() and self.inc(): + pass + + indent = self.extract() + + # Key + key = self._parse_key() + if not key.key.strip(): + raise self.parse_error(EmptyKeyError) + + self.mark() + + found_equals = self._current == "=" + while self._current.is_kv_sep() and self.inc(): + if self._current == "=": + if found_equals: + raise self.parse_error(UnexpectedCharError, ("=",)) + else: + found_equals = True + pass + + key.sep = self.extract() + + # Value + val = self._parse_value() + + # Comment + if parse_comment: + cws, comment, trail = self._parse_comment_trail() + meta = val.trivia + meta.comment_ws = cws + meta.comment = comment + meta.trail = trail + else: + val.trivia.trail = "" + + val.trivia.indent = indent + + return key, val + + def _parse_key(self): # type: () -> Key + """ + Parses a Key at the current position; + WS before the key must be exhausted first at the callsite. 
+ """ + if self._current in "\"'": + return self._parse_quoted_key() + else: + return self._parse_bare_key() + + def _parse_quoted_key(self): # type: () -> Key + """ + Parses a key enclosed in either single or double quotes. + """ + quote_style = self._current + key_type = None + dotted = False + for t in KeyType: + if t.value == quote_style: + key_type = t + break + + if key_type is None: + raise RuntimeError("Should not have entered _parse_quoted_key()") + + self.inc() + self.mark() + + while self._current != quote_style and self.inc(): + pass + + key = self.extract() + + if self._current == ".": + self.inc() + dotted = True + key += "." + self._parse_key().as_string() + key_type = KeyType.Bare + else: + self.inc() + + return Key(key, key_type, "", dotted) + + def _parse_bare_key(self): # type: () -> Key + """ + Parses a bare key. + """ + key_type = None + dotted = False + + self.mark() + while self._current.is_bare_key_char() and self.inc(): + pass + + key = self.extract() + + if self._current == ".": + self.inc() + dotted = True + key += "." + self._parse_key().as_string() + key_type = KeyType.Bare + + return Key(key, key_type, "", dotted) + + def _handle_dotted_key( + self, container, key, value + ): # type: (Container, Key) -> None + names = tuple(self._split_table_name(key.key)) + name = names[0] + name._dotted = True + if name in container: + table = container.item(name) + else: + table = Table(Container(True), Trivia(), False, is_super_table=True) + container.append(name, table) + + for i, _name in enumerate(names[1:]): + if i == len(names) - 2: + _name.sep = key.sep + + table.append(_name, value) + else: + _name._dotted = True + if _name in table.value: + table = table.value.item(_name) + else: + table.append( + _name, + Table( + Container(True), + Trivia(), + False, + is_super_table=i < len(names) - 2, + ), + ) + + table = table[_name] + + def _parse_value(self): # type: () -> Item + """ + Attempts to parse a value at the current position. 
+ """ + self.mark() + trivia = Trivia() + + c = self._current + if c == '"': + return self._parse_basic_string() + elif c == "'": + return self._parse_literal_string() + elif c == "t" and self._src[self._idx :].startswith("true"): + # Boolean: true + self.inc_n(4) + + return Bool(True, trivia) + elif c == "f" and self._src[self._idx :].startswith("false"): + # Boolean: true + self.inc_n(5) + + return Bool(False, trivia) + elif c == "[": + # Array + elems = [] # type: List[Item] + self.inc() + + while self._current != "]": + self.mark() + while self._current.is_ws() or self._current == ",": + self.inc() + + if self._idx != self._marker: + elems.append(Whitespace(self.extract())) + + if self._current == "]": + break + + if self._current == "#": + cws, comment, trail = self._parse_comment_trail() + + next_ = Comment(Trivia("", cws, comment, trail)) + else: + next_ = self._parse_value() + + elems.append(next_) + + self.inc() + + try: + res = Array(elems, trivia) + except ValueError: + raise self.parse_error(MixedArrayTypesError) + + if res.is_homogeneous(): + return res + + raise self.parse_error(MixedArrayTypesError) + elif c == "{": + # Inline table + elems = Container(True) + self.inc() + + while self._current != "}": + self.mark() + while self._current.is_spaces() or self._current == ",": + self.inc() + + if self._idx != self._marker: + ws = self.extract().lstrip(",") + if ws: + elems.append(None, Whitespace(ws)) + + if self._current == "}": + break + + key, val = self._parse_key_value(False, inline=True) + elems.append(key, val) + + self.inc() + + return InlineTable(elems, trivia) + elif c in string.digits + "+-" or self._peek(4) in { + "+inf", + "-inf", + "inf", + "+nan", + "-nan", + "nan", + }: + # Integer, Float, Date, Time or DateTime + while self._current not in " \t\n\r#,]}" and self.inc(): + pass + + raw = self.extract() + + item = self._parse_number(raw, trivia) + if item is not None: + return item + + try: + res = parse_rfc3339(raw) + except ValueError: + 
res = None + + if res is None: + raise self.parse_error(InvalidNumberOrDateError) + + if isinstance(res, datetime.datetime): + return DateTime(res, trivia, raw) + elif isinstance(res, datetime.time): + return Time(res, trivia, raw) + elif isinstance(res, datetime.date): + return Date(res, trivia, raw) + else: + raise self.parse_error(InvalidNumberOrDateError) + else: + raise self.parse_error(UnexpectedCharError, (c)) + + def _parse_number(self, raw, trivia): # type: (str, Trivia) -> Optional[Item] + # Leading zeros are not allowed + sign = "" + if raw.startswith(("+", "-")): + sign = raw[0] + raw = raw[1:] + + if ( + len(raw) > 1 + and raw.startswith("0") + and not raw.startswith(("0.", "0o", "0x", "0b")) + ): + return + + if raw.startswith(("0o", "0x", "0b")) and sign: + return + + digits = "[0-9]" + base = 10 + if raw.startswith("0b"): + digits = "[01]" + base = 2 + elif raw.startswith("0o"): + digits = "[0-7]" + base = 8 + elif raw.startswith("0x"): + digits = "[0-9a-f]" + base = 16 + + # Underscores should be surrounded by digits + clean = re.sub("(?i)(?<={})_(?={})".format(digits, digits), "", raw) + + if "_" in clean: + return + + if clean.endswith("."): + return + + try: + return Integer(int(sign + clean, base), trivia, sign + raw) + except ValueError: + try: + return Float(float(sign + clean), trivia, sign + raw) + except ValueError: + return + + def _parse_literal_string(self): # type: () -> Item + return self._parse_string("'") + + def _parse_basic_string(self): # type: () -> Item + return self._parse_string('"') + + def _parse_string(self, delim): # type: (str) -> Item + multiline = False + value = "" + + if delim == "'": + str_type = StringType.SLL + else: + str_type = StringType.SLB + + # Skip opening delim + if not self.inc(): + return self.parse_error(UnexpectedEofError) + + if self._current == delim: + self.inc() + + if self._current == delim: + multiline = True + if delim == "'": + str_type = StringType.MLL + else: + str_type = StringType.MLB + + 
if not self.inc(): + return self.parse_error(UnexpectedEofError) + else: + # Empty string + return String(str_type, "", "", Trivia()) + + self.mark() + if self._current == "\n": + # The first new line should be discarded + self.inc() + + previous = None + escaped = False + while True: + if ( + previous != "\\" + or previous == "\\" + and (escaped or str_type.is_literal()) + ) and self._current == delim: + val = self.extract() + + if multiline: + stop = True + for _ in range(3): + if self._current != delim: + # Not a triple quote, leave in result as-is. + stop = False + + # Adding back the quote character + value += delim + break + + self.inc() # TODO: Handle EOF + + if not stop: + continue + else: + self.inc() + + return String(str_type, value, val, Trivia()) + else: + escape_vals = { + "b": "\b", + "t": "\t", + "n": "\n", + "f": "\f", + "r": "\r", + "\\": "\\", + '"': '"', + } + if previous == "\\" and self._current.is_ws() and multiline: + while self._current.is_ws(): + previous = self._current + + self.inc() + continue + + if self._current == delim: + continue + + if previous == "\\": + if self._current == "\\" and not escaped: + if not str_type.is_literal(): + escaped = True + else: + value += self._current + + previous = self._current + + if not self.inc(): + raise self.parse_error(UnexpectedEofError) + + continue + elif self._current in escape_vals and not escaped: + if not str_type.is_literal(): + value = value[:-1] + value += escape_vals[self._current] + else: + value += self._current + elif self._current in {"u", "U"} and not escaped: + # Maybe unicode + u, ue = self._peek_unicode(self._current == "U") + if u is not None: + value = value[:-1] + value += u + self.inc_n(len(ue)) + else: + if not escaped and not str_type.is_literal(): + raise self.parse_error( + InvalidCharInStringError, (self._current,) + ) + + value += self._current + else: + if not escaped and not str_type.is_literal(): + raise self.parse_error( + InvalidCharInStringError, (self._current,) 
+ ) + + value += self._current + + if self._current.is_ws() and multiline and not escaped: + continue + else: + value += self._current + + if escaped: + escaped = False + + previous = self._current + if not self.inc(): + raise self.parse_error(UnexpectedEofError) + + if previous == "\\" and self._current.is_ws() and multiline: + value = value[:-1] + + def _parse_table( + self, parent_name=None + ): # type: (Optional[str]) -> Tuple[Key, Union[Table, AoT]] + """ + Parses a table element. + """ + if self._current != "[": + raise self.parse_error( + InternalParserError, + ("_parse_table() called on non-bracket character.",), + ) + + indent = self.extract() + self.inc() # Skip opening bracket + + if self.end(): + raise self.parse_error(UnexpectedEofError) + + is_aot = False + if self._current == "[": + if not self.inc(): + raise self.parse_error(UnexpectedEofError) + + is_aot = True + + # Key + self.mark() + while self._current != "]" and self.inc(): + if self.end(): + raise self.parse_error(UnexpectedEofError) + + pass + + name = self.extract() + if not name.strip(): + raise self.parse_error(EmptyTableNameError) + + key = Key(name, sep="") + name_parts = tuple(self._split_table_name(name)) + missing_table = False + if parent_name: + parent_name_parts = tuple(self._split_table_name(parent_name)) + else: + parent_name_parts = tuple() + + if len(name_parts) > len(parent_name_parts) + 1: + missing_table = True + + name_parts = name_parts[len(parent_name_parts) :] + + values = Container(True) + + self.inc() # Skip closing bracket + if is_aot: + # TODO: Verify close bracket + self.inc() + + cws, comment, trail = self._parse_comment_trail() + + result = Null() + + if len(name_parts) > 1: + if missing_table: + # Missing super table + # i.e. 
a table initialized like this: [foo.bar] + # without initializing [foo] + # + # So we have to create the parent tables + table = Table( + Container(True), + Trivia(indent, cws, comment, trail), + is_aot and name_parts[0].key in self._aot_stack, + is_super_table=True, + name=name_parts[0].key, + ) + + result = table + key = name_parts[0] + + for i, _name in enumerate(name_parts[1:]): + if _name in table: + child = table[_name] + else: + child = Table( + Container(True), + Trivia(indent, cws, comment, trail), + is_aot and i == len(name_parts[1:]) - 1, + is_super_table=i < len(name_parts[1:]) - 1, + name=_name.key, + display_name=name if i == len(name_parts[1:]) - 1 else None, + ) + + if is_aot and i == len(name_parts[1:]) - 1: + table.append(_name, AoT([child], name=table.name, parsed=True)) + else: + table.append(_name, child) + + table = child + values = table.value + else: + if name_parts: + key = name_parts[0] + + while not self.end(): + item = self._parse_item() + if item: + _key, item = item + if not self._merge_ws(item, values): + if _key is not None and _key.is_dotted(): + self._handle_dotted_key(values, _key, item) + else: + values.append(_key, item) + else: + if self._current == "[": + is_aot_next, name_next = self._peek_table() + + if self._is_child(name, name_next): + key_next, table_next = self._parse_table(name) + + values.append(key_next, table_next) + + # Picking up any sibling + while not self.end(): + _, name_next = self._peek_table() + + if not self._is_child(name, name_next): + break + + key_next, table_next = self._parse_table(name) + + values.append(key_next, table_next) + + break + else: + raise self.parse_error( + InternalParserError, + ("_parse_item() returned None on a non-bracket character.",), + ) + + if isinstance(result, Null): + result = Table( + values, + Trivia(indent, cws, comment, trail), + is_aot, + name=name, + display_name=name, + ) + + if is_aot and (not self._aot_stack or name != self._aot_stack[-1]): + result = 
self._parse_aot(result, name) + + return key, result + + def _peek_table(self): # type: () -> Tuple[bool, str] + """ + Peeks ahead non-intrusively by cloning then restoring the + initial state of the parser. + + Returns the name of the table about to be parsed, + as well as whether it is part of an AoT. + """ + # Save initial state + idx = self._save_idx() + marker = self._marker + + if self._current != "[": + raise self.parse_error( + InternalParserError, ("_peek_table() entered on non-bracket character",) + ) + + # AoT + self.inc() + is_aot = False + if self._current == "[": + self.inc() + is_aot = True + + self.mark() + + while self._current != "]" and self.inc(): + table_name = self.extract() + + # Restore initial state + self._restore_idx(*idx) + self._marker = marker + + return is_aot, table_name + + def _parse_aot(self, first, name_first): # type: (Table, str) -> AoT + """ + Parses all siblings of the provided table first and bundles them into + an AoT. + """ + payload = [first] + self._aot_stack.append(name_first) + while not self.end(): + is_aot_next, name_next = self._peek_table() + if is_aot_next and name_next == name_first: + _, table = self._parse_table(name_first) + payload.append(table) + else: + break + + self._aot_stack.pop() + + return AoT(payload, parsed=True) + + def _peek(self, n): # type: (int) -> str + """ + Peeks ahead n characters. + + n is the max number of characters that will be peeked. + """ + idx = self._save_idx() + buf = "" + for _ in range(n): + if self._current not in " \t\n\r#,]}": + buf += self._current + self.inc() + continue + + break + + self._restore_idx(*idx) + + return buf + + def _peek_unicode(self, is_long): # type: () -> Tuple[bool, str] + """ + Peeks ahead non-intrusively by cloning then restoring the + initial state of the parser. + + Returns the unicode value is it's a valid one else None. 
+ """ + # Save initial state + idx = self._save_idx() + marker = self._marker + + if self._current not in {"u", "U"}: + raise self.parse_error( + InternalParserError, ("_peek_unicode() entered on non-unicode value") + ) + + # AoT + self.inc() # Dropping prefix + self.mark() + + if is_long: + chars = 8 + else: + chars = 4 + + if not self.inc_n(chars): + value, extracted = None, None + else: + extracted = self.extract() + + try: + value = chr(int(extracted, 16)) + except ValueError: + value = None + + # Restore initial state + self._restore_idx(*idx) + self._marker = marker + + return value, extracted diff --git a/pipenv/vendor/tomlkit/toml_char.py b/pipenv/vendor/tomlkit/toml_char.py new file mode 100644 index 0000000000..8a3bf9e1ca --- /dev/null +++ b/pipenv/vendor/tomlkit/toml_char.py @@ -0,0 +1,47 @@ +import string + +from ._compat import unicode + + +class TOMLChar(unicode): + def __init__(self, c): + super(TOMLChar, self).__init__() + + if len(self) > 1: + raise ValueError("A TOML character must be of length 1") + + def is_bare_key_char(self): # type: () -> bool + """ + Whether the character is a valid bare key name or not. + """ + return self in string.ascii_letters + string.digits + "-" + "_" + + def is_kv_sep(self): # type: () -> bool + """ + Whether the character is a valid key/value separator ot not. + """ + return self in "= \t" + + def is_int_float_char(self): # type: () -> bool + """ + Whether the character if a valid integer or float value character or not. + """ + return self in string.digits + "+" + "-" + "_" + "." + "e" + + def is_ws(self): # type: () -> bool + """ + Whether the character is a whitespace character or not. + """ + return self in " \t\r\n" + + def is_nl(self): # type: () -> bool + """ + Whether the character is a new line character or not. 
+ """ + return self in "\n\r" + + def is_spaces(self): # type: () -> bool + """ + Whether the character is a space or not + """ + return self in " \t" diff --git a/pipenv/vendor/tomlkit/toml_document.py b/pipenv/vendor/tomlkit/toml_document.py new file mode 100644 index 0000000000..b485e3029e --- /dev/null +++ b/pipenv/vendor/tomlkit/toml_document.py @@ -0,0 +1,7 @@ +from .container import Container + + +class TOMLDocument(Container): + """ + A TOML document. + """ diff --git a/pipenv/vendor/tomlkit/toml_file.py b/pipenv/vendor/tomlkit/toml_file.py new file mode 100644 index 0000000000..3b416664dd --- /dev/null +++ b/pipenv/vendor/tomlkit/toml_file.py @@ -0,0 +1,24 @@ +import io + +from typing import Any +from typing import Dict + +from .api import loads +from .toml_document import TOMLDocument + + +class TOMLFile(object): + """ + Represents a TOML file. + """ + + def __init__(self, path): # type: (str) -> None + self._path = path + + def read(self): # type: () -> TOMLDocument + with io.open(self._path, encoding="utf-8") as f: + return loads(f.read()) + + def write(self, data): # type: (TOMLDocument) -> None + with io.open(self._path, "w", encoding="utf-8") as f: + f.write(data.as_string()) From 0b54d3c27952cac7f75b1fac108a9f273255d06c Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 25 Aug 2018 14:12:16 -0400 Subject: [PATCH 22/26] Vendor plette Signed-off-by: Dan Ryan --- pipenv/vendor/plette/LICENSE | 13 ++ pipenv/vendor/plette/__init__.py | 9 ++ pipenv/vendor/plette/lockfiles.py | 151 ++++++++++++++++++++++ pipenv/vendor/plette/models/__init__.py | 20 +++ pipenv/vendor/plette/models/base.py | 132 +++++++++++++++++++ pipenv/vendor/plette/models/hashes.py | 51 ++++++++ pipenv/vendor/plette/models/packages.py | 45 +++++++ pipenv/vendor/plette/models/scripts.py | 79 ++++++++++++ pipenv/vendor/plette/models/sections.py | 123 ++++++++++++++++++ pipenv/vendor/plette/models/sources.py | 45 +++++++ pipenv/vendor/plette/pipfiles.py | 161 ++++++++++++++++++++++++ 11 
files changed, 829 insertions(+) create mode 100644 pipenv/vendor/plette/LICENSE create mode 100644 pipenv/vendor/plette/__init__.py create mode 100644 pipenv/vendor/plette/lockfiles.py create mode 100644 pipenv/vendor/plette/models/__init__.py create mode 100644 pipenv/vendor/plette/models/base.py create mode 100644 pipenv/vendor/plette/models/hashes.py create mode 100644 pipenv/vendor/plette/models/packages.py create mode 100644 pipenv/vendor/plette/models/scripts.py create mode 100644 pipenv/vendor/plette/models/sections.py create mode 100644 pipenv/vendor/plette/models/sources.py create mode 100644 pipenv/vendor/plette/pipfiles.py diff --git a/pipenv/vendor/plette/LICENSE b/pipenv/vendor/plette/LICENSE new file mode 100644 index 0000000000..b9077766e9 --- /dev/null +++ b/pipenv/vendor/plette/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2018, Tzu-ping Chung + +Permission to use, copy, modify, and distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/pipenv/vendor/plette/__init__.py b/pipenv/vendor/plette/__init__.py new file mode 100644 index 0000000000..c99c1bc1c1 --- /dev/null +++ b/pipenv/vendor/plette/__init__.py @@ -0,0 +1,9 @@ +__all__ = [ + "__version__", + "Lockfile", "Pipfile", +] + +__version__ = '0.1.1' + +from .lockfiles import Lockfile +from .pipfiles import Pipfile diff --git a/pipenv/vendor/plette/lockfiles.py b/pipenv/vendor/plette/lockfiles.py new file mode 100644 index 0000000000..fe97a52149 --- /dev/null +++ b/pipenv/vendor/plette/lockfiles.py @@ -0,0 +1,151 @@ +from __future__ import unicode_literals + +import json + +import six + +from .models import DataView, Meta, PackageCollection + + +class _LockFileEncoder(json.JSONEncoder): + """A specilized JSON encoder to convert loaded data into a lock file. + + This adds a few characteristics to the encoder: + + * The JSON is always prettified with indents and spaces. + * The output is always UTF-8-encoded text, never binary, even on Python 2. + """ + def __init__(self): + super(_LockFileEncoder, self).__init__( + indent=4, separators=(",", ": "), sort_keys=True, + ) + + def encode(self, obj): + content = super(_LockFileEncoder, self).encode(obj) + if not isinstance(content, six.text_type): + content = content.decode("utf-8") + content += "\n" + return content + + def iterencode(self, obj): + for chunk in super(_LockFileEncoder, self).iterencode(obj): + if not isinstance(chunk, six.text_type): + chunk = chunk.decode("utf-8") + yield chunk + yield "\n" + + +LOCKFILE_SECTIONS = { + "_meta": Meta, + "default": PackageCollection, + "develop": PackageCollection, +} + +PIPFILE_SPEC_CURRENT = 6 + + +class Lockfile(DataView): + """Representation of a Pipfile.lock. 
+ """ + __SCHEMA__ = { + "_meta": {"type": "dict", "required": True}, + "default": {"type": "dict", "required": True}, + "develop": {"type": "dict", "required": True}, + } + + @classmethod + def validate(cls, data): + super(Lockfile, cls).validate(data) + for key, klass in LOCKFILE_SECTIONS.items(): + klass.validate(data[key]) + + @classmethod + def load(cls, f, encoding=None): + if encoding is None: + data = json.load(f) + else: + data = json.loads(f.read().decode(encoding)) + return cls(data) + + @classmethod + def with_meta_from(cls, pipfile): + data = { + "_meta": { + "hash": pipfile.get_hash()._data, + "pipfile-spec": PIPFILE_SPEC_CURRENT, + "requires": pipfile._data.get("requires", {}).copy(), + "sources": pipfile.sources._data.copy(), + }, + "default": {}, + "develop": {}, + } + return cls(data) + + def __getitem__(self, key): + value = self._data[key] + try: + return LOCKFILE_SECTIONS[key](value) + except KeyError: + return value + + def __setitem__(self, key, value): + if isinstance(value, DataView): + self._data[key] = value._data + else: + self._data[key] = value + + def is_up_to_date(self, pipfile): + return self.meta.hash == pipfile.get_hash() + + def dump(self, f, encoding=None): + encoder = _LockFileEncoder() + if encoding is None: + for chunk in encoder.iterencode(self._data): + f.write(chunk) + else: + content = encoder.encode(self._data) + f.write(content.encode(encoding)) + + @property + def meta(self): + try: + return self["_meta"] + except KeyError: + raise AttributeError("meta") + + @meta.setter + def meta(self, value): + self["_meta"] = value + + @property + def _meta(self): + try: + return self["_meta"] + except KeyError: + raise AttributeError("meta") + + @_meta.setter + def _meta(self, value): + self["_meta"] = value + + @property + def default(self): + try: + return self["default"] + except KeyError: + raise AttributeError("default") + + @default.setter + def default(self, value): + self["default"] = value + + @property + def 
develop(self): + try: + return self["develop"] + except KeyError: + raise AttributeError("develop") + + @develop.setter + def develop(self, value): + self["develop"] = value diff --git a/pipenv/vendor/plette/models/__init__.py b/pipenv/vendor/plette/models/__init__.py new file mode 100644 index 0000000000..42b8c49f49 --- /dev/null +++ b/pipenv/vendor/plette/models/__init__.py @@ -0,0 +1,20 @@ +__all__ = [ + "DataView", "DataViewCollection", "DataViewMapping", "DataViewSequence", + "validate", "ValidationError", + "Hash", "Package", "Requires", "Source", "Script", + "Meta", "PackageCollection", "ScriptCollection", "SourceCollection", +] + +from .base import ( + DataView, DataViewCollection, DataViewMapping, DataViewSequence, + validate, ValidationError, +) + +from .hashes import Hash +from .packages import Package +from .scripts import Script +from .sources import Source + +from .sections import ( + Meta, Requires, PackageCollection, ScriptCollection, SourceCollection, +) diff --git a/pipenv/vendor/plette/models/base.py b/pipenv/vendor/plette/models/base.py new file mode 100644 index 0000000000..e8bbd4faf7 --- /dev/null +++ b/pipenv/vendor/plette/models/base.py @@ -0,0 +1,132 @@ +try: + import cerberus +except ImportError: + cerberus = None + + +class ValidationError(ValueError): + def __init__(self, value, validator): + super(ValidationError, self).__init__(value) + self.validator = validator + + +VALIDATORS = {} + + +def validate(cls, data): + if not cerberus: # Skip validation if Cerberus is not available. + return + schema = cls.__SCHEMA__ + key = id(schema) + try: + v = VALIDATORS[key] + except KeyError: + v = VALIDATORS[key] = cerberus.Validator(schema, allow_unknown=True) + if v.validate(data, normalize=False): + return + raise ValidationError(data, v) + + +class DataView(object): + """A "view" to a data. + + Validates the input mapping on creation. 
A subclass is expected to + provide a `__SCHEMA__` class attribute specifying a validator schema, + or a concrete Cerberus validator object. + """ + def __init__(self, data): + self.validate(data) + self._data = data + + def __repr__(self): + return "{0}({1!r})".format(type(self).__name__, self._data) + + def __eq__(self, other): + if not isinstance(other, type(self)): + raise TypeError("cannot compare {0!r} with {1!r}".format( + type(self).__name__, type(other).__name__, + )) + return self._data == other._data + + def __getitem__(self, key): + return self._data[key] + + def __setitem__(self, key, value): + self._data[key] = value + + def get(self, key, default=None): + try: + return self[key] + except KeyError: + return default + + @classmethod + def validate(cls, data): + return validate(cls, data) + + +class DataViewCollection(DataView): + """A collection of dataview. + + Subclasses are expected to assign a class attribute `item_class` to specify + how items should be coerced when accessed. The item class should conform to + the `DataView` protocol. + + You should not instantiate an instance from this class, but from one of its + subclasses instead. + """ + item_class = None + + def __repr__(self): + return "{0}({1!r})".format(type(self).__name__, self._data) + + def __len__(self): + return len(self._data) + + def __getitem__(self, key): + return self.item_class(self._data[key]) + + def __setitem__(self, key, value): + if isinstance(value, self.item_class): + value = value._data + self._data[key] = value + + def __delitem__(self, key): + del self._data[key] + + +class DataViewMapping(DataViewCollection): + """A mapping of dataview. + + The keys are primitive values, while values are instances of `item_class`. 
+ """ + @classmethod + def validate(cls, data): + for d in data.values(): + cls.item_class.validate(d) + + def __iter__(self): + return iter(self._data) + + def keys(self): + return self._data.keys() + + def values(self): + return [self[k] for k in self._data] + + def items(self): + return [(k, self[k]) for k in self._data] + + +class DataViewSequence(DataViewCollection): + """A sequence of dataview. + + Each entry is an instance of `item_class`. + """ + @classmethod + def validate(cls, data): + for d in data: + cls.item_class.validate(d) + + def __iter__(self): + return (self.item_class(d) for d in self._data) diff --git a/pipenv/vendor/plette/models/hashes.py b/pipenv/vendor/plette/models/hashes.py new file mode 100644 index 0000000000..d35d312e33 --- /dev/null +++ b/pipenv/vendor/plette/models/hashes.py @@ -0,0 +1,51 @@ +from .base import DataView + + +class Hash(DataView): + """A hash. + """ + __SCHEMA__ = { + "__hash__": { + "type": "list", "minlength": 1, "maxlength": 1, + "schema": { + "type": "list", "minlength": 2, "maxlength": 2, + "schema": {"type": "string"}, + }, + }, + } + + @classmethod + def validate(cls, data): + super(Hash, cls).validate({"__hash__": list(data.items())}) + + @classmethod + def from_hash(cls, ins): + """Interpolation to the hash result of `hashlib`. 
+ """ + return cls({ins.name: ins.hexdigest()}) + + @classmethod + def from_line(cls, value): + try: + name, value = value.split(":", 1) + except ValueError: + name = "sha256" + return cls({name: value}) + + def __eq__(self, other): + if not isinstance(other, Hash): + raise TypeError("cannot compare Hash with {0!r}".format( + type(other).__name__, + )) + return self._data == other._data + + @property + def name(self): + return next(iter(self._data.keys())) + + @property + def value(self): + return next(iter(self._data.values())) + + def as_line(self): + return "{0[0]}:{0[1]}".format(next(iter(self._data.items()))) diff --git a/pipenv/vendor/plette/models/packages.py b/pipenv/vendor/plette/models/packages.py new file mode 100644 index 0000000000..5bb36215eb --- /dev/null +++ b/pipenv/vendor/plette/models/packages.py @@ -0,0 +1,45 @@ +import six + +from .base import DataView + + +class Package(DataView): + """A package requirement specified in a Pipfile. + + This is the base class of variants appearing in either `[packages]` or + `[dev-packages]` sections of a Pipfile. + """ + # The extra layer is intentional. Cerberus does not allow top-level keys + # to have oneof_schema (at least I can't do it), so we wrap this in a + # top-level key. The Requirement model class implements extra hacks to + # make this work. + __SCHEMA__ = { + "__package__": { + "oneof_type": ["string", "dict"], + }, + } + + @classmethod + def validate(cls, data): + # HACK: Make this validatable for Cerberus. See comments in validation + # side for more information. 
+ return super(Package, cls).validate({"__package__": data}) + + def __getattr__(self, key): + if isinstance(self._data, six.string_types): + if key == "version": + return self._data + raise AttributeError(key) + try: + return self._data[key] + except KeyError: + pass + raise AttributeError(key) + + def __setattr__(self, key, value): + if key == "_data": + super(Package, self).__setattr__(key, value) + elif key == "version" and isinstance(self._data, six.string_types): + self._data = value + else: + self._data[key] = value diff --git a/pipenv/vendor/plette/models/scripts.py b/pipenv/vendor/plette/models/scripts.py new file mode 100644 index 0000000000..7f34d816e2 --- /dev/null +++ b/pipenv/vendor/plette/models/scripts.py @@ -0,0 +1,79 @@ +import re +import shlex + +import six + +from .base import DataView + + +class Script(DataView): + """Parse a script line (in Pipfile's [scripts] section). + + This always works in POSIX mode, even on Windows. + """ + # This extra layer is intentional. Cerberus does not allow validation of + # non-mapping inputs, so we wrap this in a top-level key. The Script model + # class implements extra hacks to make this work. + __SCHEMA__ = { + "__script__": { + "oneof_type": ["string", "list"], "required": True, "empty": False, + "schema": {"type": "string"}, + }, + } + + def __init__(self, data): + super(Script, self).__init__(data) + if isinstance(data, six.string_types): + data = shlex.split(data) + self._parts = [data[0]] + self._parts.extend(data[1:]) + + @classmethod + def validate(cls, data): + # HACK: Make this validatable for Cerberus. See comments in validation + # side for more information. + return super(Script, cls).validate({"__script__": data}) + + def __repr__(self): + return "Script({0!r})".format(self._parts) + + @property + def command(self): + return self._parts[0] + + @property + def args(self): + return self._parts[1:] + + def cmdify(self, extra_args=None): + """Encode into a cmd-executable string. 
+ + This re-implements CreateProcess's quoting logic to turn a list of + arguments into one single string for the shell to interpret. + + * All double quotes are escaped with a backslash. + * Existing backslashes before a quote are doubled, so they are all + escaped properly. + * Backslashes elsewhere are left as-is; cmd will interpret them + literally. + + The result is then quoted into a pair of double quotes to be grouped. + + An argument is intentionally not quoted if it does not contain + whitespaces. This is done to be compatible with Windows built-in + commands that don't work well with quotes, e.g. everything with `echo`, + and DOS-style (forward slash) switches. + + The intended use of this function is to pre-process an argument list + before passing it into ``subprocess.Popen(..., shell=True)``. + + See also: https://docs.python.org/3/library/subprocess.html + """ + parts = list(self._parts) + if extra_args: + parts.extend(extra_args) + return " ".join( + arg if not next(re.finditer(r'\s', arg), None) + else '"{0}"'.format(re.sub(r'(\\*)"', r'\1\1\\"', arg)) + for arg in parts + ) diff --git a/pipenv/vendor/plette/models/sections.py b/pipenv/vendor/plette/models/sections.py new file mode 100644 index 0000000000..893ab549b6 --- /dev/null +++ b/pipenv/vendor/plette/models/sections.py @@ -0,0 +1,123 @@ +from .base import DataView, DataViewMapping, DataViewSequence +from .hashes import Hash +from .packages import Package +from .scripts import Script +from .sources import Source + + +class PackageCollection(DataViewMapping): + item_class = Package + + +class ScriptCollection(DataViewMapping): + item_class = Script + + +class SourceCollection(DataViewSequence): + item_class = Source + + +class Requires(DataView): + """Representation of the `[requires]` section in a Pipfile. 
+ """ + __SCHEMA__ = { + "python_version": { + "type": "string", + "excludes": ["python_full_version"], + }, + "python_full_version": { + "type": "string", + "excludes": ["python_version"], + }, + } + + @property + def python_version(self): + try: + return self._data["python_version"] + except KeyError: + raise AttributeError("python_version") + + @property + def python_full_version(self): + try: + return self._data["python_full_version"] + except KeyError: + raise AttributeError("python_full_version") + + +META_SECTIONS = { + "hash": Hash, + "requires": Requires, + "sources": SourceCollection, +} + + +class Meta(DataView): + """Representation of the `_meta` section in a Pipfile.lock. + """ + __SCHEMA__ = { + "hash": {"type": "dict", "required": True}, + "pipfile-spec": {"type": "integer", "required": True, "min": 0}, + "requires": {"type": "dict", "required": True}, + "sources": {"type": "list", "required": True}, + } + + @classmethod + def validate(cls, data): + super(Meta, cls).validate(data) + for key, klass in META_SECTIONS.items(): + klass.validate(data[key]) + + def __getitem__(self, key): + value = super(Meta, self).__getitem__(key) + try: + return META_SECTIONS[key](value) + except KeyError: + return value + + def __setitem__(self, key, value): + if isinstance(value, DataView): + self._data[key] = value._data + else: + self._data[key] = value + + @property + def hash_(self): + return self["hash"] + + @hash_.setter + def hash_(self, value): + self["hash"] = value + + @property + def hash(self): + return self["hash"] + + @hash.setter + def hash(self, value): + self["hash"] = value + + @property + def pipfile_spec(self): + return self["pipfile-spec"] + + @pipfile_spec.setter + def pipfile_spec(self, value): + self["pipfile-spec"] = value + + @property + def requires(self): + return self["requires"] + + @requires.setter + def requires(self, value): + self["requires"] = value + + @property + def sources(self): + return self["sources"] + + @sources.setter + def 
sources(self, value): + self["sources"] = value diff --git a/pipenv/vendor/plette/models/sources.py b/pipenv/vendor/plette/models/sources.py new file mode 100644 index 0000000000..dc2529a064 --- /dev/null +++ b/pipenv/vendor/plette/models/sources.py @@ -0,0 +1,45 @@ +import os + +from .base import DataView + + +class Source(DataView): + """Information on a "simple" Python package index. + + This could be PyPI, or a self-hosted index server, etc. The server + specified by the `url` attribute is expected to provide the "simple" + package API. + """ + __SCHEMA__ = { + "name": {"type": "string", "required": True}, + "url": {"type": "string", "required": True}, + "verify_ssl": {"type": "boolean", "required": True}, + } + + @property + def name(self): + return self._data["name"] + + @name.setter + def name(self, value): + self._data["name"] = value + + @property + def url(self): + return self._data["url"] + + @url.setter + def url(self, value): + self._data["url"] = value + + @property + def verify_ssl(self): + return self._data["verify_ssl"] + + @verify_ssl.setter + def verify_ssl(self, value): + self._data["verify_ssl"] = value + + @property + def url_expanded(self): + return os.path.expandvars(self._data["url"]) diff --git a/pipenv/vendor/plette/pipfiles.py b/pipenv/vendor/plette/pipfiles.py new file mode 100644 index 0000000000..95f413de9a --- /dev/null +++ b/pipenv/vendor/plette/pipfiles.py @@ -0,0 +1,161 @@ +from __future__ import unicode_literals + +import hashlib +import json + +import six +import tomlkit + +from .models import ( + DataView, Hash, Requires, + PackageCollection, ScriptCollection, SourceCollection, +) + + +PIPFILE_SECTIONS = { + "source": SourceCollection, + "packages": PackageCollection, + "dev-packages": PackageCollection, + "requires": Requires, + "scripts": ScriptCollection, +} + +DEFAULT_SOURCE_TOML = """\ +[[source]] +name = "pypi" +url = "https://pypi.org/simple" +verify_ssl = true +""" + + +class Pipfile(DataView): + """Representation of a 
Pipfile. + """ + __SCHEMA__ = {} + + @classmethod + def validate(cls, data): + # HACK: DO NOT CALL `super().validate()` here!! + # Cerberus seems to break TOML Kit's inline table preservation if it + # is not at the top-level. Fortunately the spec doesn't have nested + # non-inlined tables, so we're OK as long as validation is only + # performed at section-level. validation is performed. + for key, klass in PIPFILE_SECTIONS.items(): + if key not in data: + continue + klass.validate(data[key]) + + @classmethod + def load(cls, f, encoding=None): + content = f.read() + if encoding is not None: + content = content.decode(encoding) + data = tomlkit.loads(content) + if "source" not in data: + # HACK: There is no good way to prepend a section to an existing + # TOML document, but there's no good way to copy non-structural + # content from one TOML document to another either. Modify the + # TOML content directly, and load the new in-memory document. + sep = "" if content.startswith("\n") else "\n" + content = DEFAULT_SOURCE_TOML + sep + content + data = tomlkit.loads(content) + return cls(data) + + def __getitem__(self, key): + value = self._data[key] + try: + return PIPFILE_SECTIONS[key](value) + except KeyError: + return value + + def __setitem__(self, key, value): + if isinstance(value, DataView): + self._data[key] = value._data + else: + self._data[key] = value + + def get_hash(self): + data = { + "_meta": { + "sources": self._data["source"], + "requires": self._data.get("requires", {}), + }, + "default": self._data.get("packages", {}), + "develop": self._data.get("dev-packages", {}), + } + content = json.dumps(data, sort_keys=True, separators=(",", ":")) + if isinstance(content, six.text_type): + content = content.encode("utf-8") + return Hash.from_hash(hashlib.sha256(content)) + + def dump(self, f, encoding=None): + content = tomlkit.dumps(self._data) + if encoding is not None: + content = content.encode(encoding) + f.write(content) + + @property + def sources(self): 
+ try: + return self["source"] + except KeyError: + raise AttributeError("sources") + + @sources.setter + def sources(self, value): + self["source"] = value + + @property + def source(self): + try: + return self["source"] + except KeyError: + raise AttributeError("source") + + @source.setter + def source(self, value): + self["source"] = value + + @property + def packages(self): + try: + return self["packages"] + except KeyError: + raise AttributeError("packages") + + @packages.setter + def packages(self, value): + self["packages"] = value + + @property + def dev_packages(self): + try: + return self["dev-packages"] + except KeyError: + raise AttributeError("dev-packages") + + @dev_packages.setter + def dev_packages(self, value): + self["dev-packages"] = value + + @property + def requires(self): + try: + return self["requires"] + except KeyError: + raise AttributeError("requires") + + @requires.setter + def requires(self, value): + self["requires"] = value + + @property + def scripts(self): + try: + return self["scripts"] + except KeyError: + raise AttributeError("scripts") + + @scripts.setter + def scripts(self, value): + self["scripts"] = value From a657e9dcfabcd5bef9d926767044d1b4e07bfbd4 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 25 Aug 2018 14:13:23 -0400 Subject: [PATCH 23/26] Update news Signed-off-by: Dan Ryan --- news/2639.vendor | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/news/2639.vendor b/news/2639.vendor index e0d5516d93..9b454d9b91 100644 --- a/news/2639.vendor +++ b/news/2639.vendor @@ -1,4 +1,4 @@ -- Vendored new libraries ``vistir`` and ``pip-shims``. +- Vendored new libraries ``vistir`` and ``pip-shims``, ``tomlkit``, ``modutil``, and ``plette``. 
- Update vendored libraries: - ``scandir`` to ``1.9.0`` @@ -6,7 +6,7 @@ - ``semver`` to ``2.8.1`` - ``shellingham`` to ``1.2.4`` - ``pytoml`` to ``0.1.18`` - - ``certifi`` to ``2018.8.13`` + - ``certifi`` to ``2018.8.24`` - ``ptyprocess`` to ``0.6.0`` - ``requirementslib`` to ``1.1.2`` - ``pythonfinder`` to ``1.0.2`` From bd7b6004feb26359842fba396b0bff7e15d6ef93 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 25 Aug 2018 14:35:00 -0400 Subject: [PATCH 24/26] Update requirementslib and re-patch piptools - Fix attempted hashing of `ssh://` urls - Fix URL parsing and conversions of `git` and `git+ssh` style vcs urls - Fixes #2653 - Fixes #2446 - Update lockfile - Vendor typing - Add typing to setup.py - Add click completion patch and update vendoring task - Fix vendoring script - Vendor enum34 - Update requirementslib and cleanup vcs check functionality - Move enum and adjust patched click completion - Update click completion - Bump requirementslib again - Move typing to backports, update tests - Update tomlkit imports and backport typing module Signed-off-by: Dan Ryan --- Pipfile.lock | 144 +- pipenv/patched/piptools/repositories/pypi.py | 2 +- pipenv/project.py | 2 +- pipenv/utils.py | 16 +- pipenv/vendor/backports/__init__.py | 2 + pipenv/vendor/backports/enum/LICENSE | 32 + pipenv/vendor/backports/enum/README | 3 + pipenv/vendor/backports/enum/__init__.py | 837 +++++++ pipenv/vendor/backports/typing/__init__.py | 1 + pipenv/vendor/backports/typing/typing.py | 2205 +++++++++++++++++ pipenv/vendor/click_completion/__init__.py | 8 +- pipenv/vendor/click_completion/core.py | 9 +- pipenv/vendor/requirements/LICENSE.rst | 29 - pipenv/vendor/requirements/__init__.py | 22 - pipenv/vendor/requirements/fragment.py | 44 - pipenv/vendor/requirements/parser.py | 50 - pipenv/vendor/requirements/requirement.py | 220 -- pipenv/vendor/requirements/vcs.py | 30 - pipenv/vendor/requirementslib/__init__.py | 2 +- .../vendor/requirementslib/models/lockfile.py | 33 +- 
.../vendor/requirementslib/models/pipfile.py | 1 + .../requirementslib/models/requirements.py | 8 +- pipenv/vendor/requirementslib/models/utils.py | 2 +- pipenv/vendor/tomlkit/api.py | 5 +- pipenv/vendor/tomlkit/container.py | 23 +- pipenv/vendor/tomlkit/exceptions.py | 5 +- pipenv/vendor/tomlkit/items.py | 26 +- pipenv/vendor/tomlkit/parser.py | 14 +- pipenv/vendor/tomlkit/toml_file.py | 8 +- pipenv/vendor/typing.LICENSE | 254 ++ pipenv/vendor/vendor.txt | 4 +- setup.py | 3 +- tasks/vendoring/__init__.py | 70 +- .../vendoring/patches/patched/piptools.patch | 2 +- .../vendor/click-completion-enum-import.patch | 39 + .../vendor/tomlkit-typing-imports.patch | 141 ++ tests/integration/test_install_markers.py | 6 +- tests/unit/test_utils.py | 7 +- 38 files changed, 3788 insertions(+), 521 deletions(-) create mode 100644 pipenv/vendor/backports/enum/LICENSE create mode 100644 pipenv/vendor/backports/enum/README create mode 100644 pipenv/vendor/backports/enum/__init__.py create mode 100644 pipenv/vendor/backports/typing/__init__.py create mode 100644 pipenv/vendor/backports/typing/typing.py delete mode 100644 pipenv/vendor/requirements/LICENSE.rst delete mode 100644 pipenv/vendor/requirements/__init__.py delete mode 100644 pipenv/vendor/requirements/fragment.py delete mode 100644 pipenv/vendor/requirements/parser.py delete mode 100644 pipenv/vendor/requirements/requirement.py delete mode 100644 pipenv/vendor/requirements/vcs.py create mode 100644 pipenv/vendor/typing.LICENSE create mode 100644 tasks/vendoring/patches/vendor/click-completion-enum-import.patch create mode 100644 tasks/vendoring/patches/vendor/tomlkit-typing-imports.patch diff --git a/Pipfile.lock b/Pipfile.lock index db2d429f7d..1f0fd5bc6a 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -5,11 +5,13 @@ }, "pipfile-spec": 6, "requires": {}, - "sources": [{ - "name": "pypi", - "url": "https://pypi.org/simple", - "verify_ssl": true - }] + "sources": [ + { + "name": "pypi", + "url": "https://pypi.org/simple", + 
"verify_ssl": true + } + ] }, "default": {}, "develop": { @@ -22,10 +24,11 @@ }, "apipkg": { "hashes": [ - "sha256:2e38399dbe842891fe85392601aab8f40a8f4cc5a9053c326de35a1cc0297ac6", - "sha256:65d2aa68b28e7d31233bb2ba8eb31cda40e4671f8ac2d6b241e358c9652a74b9" + "sha256:37228cda29411948b422fae072f57e31d3396d2ee1c9783775980ee9c9990af6", + "sha256:58587dd4dc3daefad0487f6d9ae32b4542b185e1c36db6993290e7c41ca2b47c" ], - "version": "==1.4" + "markers": "python_version >= '2.7' and python_version != '3.0.*' and python_version != '3.1.*' and python_version != '3.2.*' and python_version != '3.3.*'", + "version": "==1.5" }, "appdirs": { "hashes": [ @@ -34,12 +37,20 @@ ], "version": "==1.4.3" }, + "argparse": { + "hashes": [ + "sha256:62b089a55be1d8949cd2bc7e0df0bddb9e028faefc8c32038cc84862aefdd6e4", + "sha256:c31647edb69fd3d465a847ea3157d37bed1f95f19760b11a47aa91c04b666314" + ], + "markers": "python_version == '2.6'", + "version": "==1.4.0" + }, "arpeggio": { "hashes": [ - "sha256:828ea85ca3c7a99125dc83000ca170c1ea1105c75cbf67a513c2e16e05e36f67", - "sha256:984a53471327bbb69ed528cac98fa6d42c1676300d047fc13fada69dd5f84ce4" + "sha256:a5258b84f76661d558492fa87e42db634df143685a0e51802d59cae7daad8732", + "sha256:dc5c0541e7cc2c6033dc0338133436abfac53655624784736e9bc8bd35e56583" ], - "version": "==1.8.0" + "version": "==1.9.0" }, "atomicwrites": { "hashes": [ @@ -73,10 +84,10 @@ }, "certifi": { "hashes": [ - "sha256:13e698f54293db9f89122b0581843a782ad0934a4fe0172d2a980ba77fc61bb7", - "sha256:9fa520c1bacfb634fa7af20a76bcbd3d5fb390481724c597da32c719a7dca4b0" + "sha256:376690d6f16d32f9d1fe8932551d80b23e9d393a8578c5633a2ed39a64861638", + "sha256:456048c7e371c089d0a77a5212fb37a2c2dce1e24146e3b7e0261736aaeaa22a" ], - "version": "==2018.4.16" + "version": "==2018.8.24" }, "chardet": { "hashes": [ @@ -131,6 +142,7 @@ "sha256:a7a84d5fa07a089186a329528f127c9d73b9de57f1a1131b82bb5320ee651f6a", "sha256:fc155a6b553c66c838d1a22dba1dc9f5f505c43285a878c6f74a79c024750b83" ], + "markers": 
"python_version >= '2.7' and python_version != '3.0.*' and python_version != '3.1.*' and python_version != '3.2.*' and python_version != '3.3.*'", "version": "==1.5.0" }, "flake8": { @@ -161,9 +173,17 @@ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" ], - "markers": "python_version < '3.0'", + "markers": "python_version < '3.3'", "version": "==1.0.2" }, + "futures": { + "hashes": [ + "sha256:9ec02aa7d674acb8618afb127e27fde7fc68994c0437ad759fa094a574adb265", + "sha256:ec0a6cb848cc212002b9828c3e34c675e0c9ff6741dc445cab6fdd4e1085d1f1" + ], + "markers": "python_version < '3' and python_version >= '2.6'", + "version": "==3.2.0" + }, "idna": { "hashes": [ "sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e", @@ -187,12 +207,12 @@ }, "invoke": { "hashes": [ - "sha256:21274204515dca62206470b088bbcf9d41ffda82b3715b90e01d71b7a4681921", - "sha256:4a4cc031db311cbfb3fdd8ec93a06c892533c27b931f4be14b24c97cd042b14e", - "sha256:621b6564f992c37166e16090d7e7cccb3b922e03a58e980dfa5e543a931b652f" + "sha256:1c2cf54c9b9af973ad9704d8ba81b225117cab612568cacbfb3fc42958cc20a9", + "sha256:334495ea16e73948894e9535019f87a88a44b73e7977492b12c2d1b5085f8197", + "sha256:54bdd3fd0245abd1185e05359fd2e4f26be0657cfe7d7bb1bed735e054fa53ab" ], "index": "pypi", - "version": "==1.0.0" + "version": "==1.1.1" }, "isort": { "hashes": [ @@ -247,11 +267,11 @@ }, "more-itertools": { "hashes": [ - "sha256:2b6b9893337bfd9166bee6a62c2b0c9fe7735dcf85948b387ec8cba30e85d8e8", - "sha256:6703844a52d3588f951883005efcf555e49566a48afd4db4e965d69b883980d3", - "sha256:a18d870ef2ffca2b8463c0070ad17b5978056f403fb64e3f15fe62a52db21cc0" + "sha256:c187a73da93e7a8acc0001572aebc7e3c69daf7bf6881a2cea10650bd4420092", + "sha256:c476b5d3a34e12d40130bc2f935028b5f636df8f372dc2c1c01dc19681b2039e", + "sha256:fcbfeaea0be121980e15bc97b3817b5202ca73d0eae185b4550cbfce2a3ebb3d" ], - "version": "==4.2.0" + "version": 
"==4.3.0" }, "packaging": { "hashes": [ @@ -262,10 +282,10 @@ }, "parso": { "hashes": [ - "sha256:8105449d86d858e53ce3e0044ede9dd3a395b1c9716c696af8aa3787158ab806", - "sha256:d250235e52e8f9fc5a80cc2a5f804c9fefd886b2e67a2b1099cf085f403f8e33" + "sha256:35704a43a3c113cce4de228ddb39aab374b8004f4f2407d070b6a2ca784ce8a2", + "sha256:895c63e93b94ac1e1690f5fdd40b65f07c8171e3e53cbd7793b5b96c0e0a7f24" ], - "version": "==0.3.0" + "version": "==0.3.1" }, "parver": { "hashes": [ @@ -275,12 +295,20 @@ "index": "pypi", "version": "==0.1.1" }, + "pathlib2": { + "hashes": [ + "sha256:8eb170f8d0d61825e09a95b38be068299ddeda82f35e96c3301a8a5e7604cb83", + "sha256:d1aa2a11ba7b8f7b21ab852b1fb5afb277e1bb99d5dfc663380b5015c0d80c5a" + ], + "markers": "python_version < '3.6'", + "version": "==2.3.2" + }, "pbr": { "hashes": [ - "sha256:3747c6f017f2dc099986c325239661948f9f5176f6880d9fdef164cb664cd665", - "sha256:a9c27eb8f0e24e786e544b2dbaedb729c9d8546342b5a6818d8eda098ad4340d" + "sha256:1b8be50d938c9bb75d0eaf7eda111eec1bf6dc88a62a6412e33bf077457e0f45", + "sha256:b486975c0cafb6beeb50ca0e17ba047647f229087bd74e37f4a7e2cac17d2caa" ], - "version": "==4.0.4" + "version": "==4.2.0" }, "pipenv": { "editable": true, @@ -295,19 +323,16 @@ }, "pluggy": { "hashes": [ - "sha256:7f8ae7f5bdf75671a718d2daf0a64b7885f74510bcd98b1a0bb420eb9a9d0cff", - "sha256:d345c8fe681115900d6da8d048ba67c25df42973bda370783cd58826442dcd7c", - "sha256:e160a7fcf25762bb60efc7e171d4497ff1d8d2d75a3d0df7a21b76821ecbf5c5" + "sha256:6e3836e39f4d36ae72840833db137f7b7d35105079aee6ec4a62d9f80d594dd1", + "sha256:95eb8364a4708392bae89035f45341871286a333f749c3141c20573d2b3876e1" ], - "markers": "python_version >= '2.7' and python_version != '3.1.*' and python_version != '3.2.*' and python_version != '3.0.*' and python_version != '3.3.*'", - "version": "==0.6.0" + "version": "==0.7.1" }, "py": { "hashes": [ "sha256:3fd59af7435864e1a243790d322d763925431213b6b8529c6ca71081ace3bbf7", 
"sha256:e31fb2767eb657cbde86c454f02e99cb846d3cd9d61b318525140214fdc0e98e" ], - "markers": "python_version >= '2.7' and python_version != '3.1.*' and python_version != '3.2.*' and python_version != '3.0.*' and python_version != '3.3.*'", "version": "==1.5.4" }, "pycodestyle": { @@ -340,11 +365,11 @@ }, "pytest": { "hashes": [ - "sha256:8ea01fc4fcc8e1b1e305252b4bc80a1528019ab99fd3b88666c9dc38d754406c", - "sha256:90898786b3d0b880b47645bae7b51aa9bbf1e9d1e4510c2cfd15dd65c70ea0cd" + "sha256:3459a123ad5532852d36f6f4501dfe1acf4af1dd9541834a164666aa40395b02", + "sha256:96bfd45dbe863b447a3054145cd78a9d7f31475d2bce6111b133c0cc4f305118" ], "index": "pypi", - "version": "==3.6.2" + "version": "==3.7.2" }, "pytest-forked": { "hashes": [ @@ -367,11 +392,11 @@ }, "pytest-xdist": { "hashes": [ - "sha256:be2662264b035920ba740ed6efb1c816a83c8a22253df7766d129f6a7bfdbd35", - "sha256:e8f5744acc270b3e7d915bdb4d5f471670f049b6fbd163d4cbd52203b075d30f" + "sha256:0875deac20f6d96597036bdf63970887a6f36d28289c2f6682faf652dfea687b", + "sha256:28e25e79698b2662b648319d3971c0f9ae0e6500f88258ccb9b153c31110ba9b" ], "index": "pypi", - "version": "==1.22.2" + "version": "==1.23.0" }, "pytz": { "hashes": [ @@ -397,10 +422,27 @@ }, "rope": { "hashes": [ - "sha256:a09edfd2034fd50099a67822f9bd851fbd0f4e98d3b87519f6267b60e50d80d1" + "sha256:a108c445e1cd897fe19272ab7877d172e7faf3d4148c80e7d20faba42ea8f7b2" ], "index": "pypi", - "version": "==0.10.7" + "version": "==0.11.0" + }, + "scandir": { + "hashes": [ + "sha256:04b8adb105f2ed313a7c2ef0f1cf7aff4871aa7a1883fa4d8c44b5551ab052d6", + "sha256:1444134990356c81d12f30e4b311379acfbbcd03e0bab591de2696a3b126d58e", + "sha256:1b5c314e39f596875e5a95dd81af03730b338c277c54a454226978d5ba95dbb6", + "sha256:346619f72eb0ddc4cf355ceffd225fa52506c92a2ff05318cfabd02a144e7c4e", + "sha256:44975e209c4827fc18a3486f257154d34ec6eaec0f90fef0cca1caa482db7064", + "sha256:61859fd7e40b8c71e609c202db5b0c1dbec0d5c7f1449dec2245575bdc866792", + 
"sha256:a5e232a0bf188362fa00123cc0bb842d363a292de7126126df5527b6a369586a", + "sha256:c14701409f311e7a9b7ec8e337f0815baf7ac95776cc78b419a1e6d49889a383", + "sha256:c7708f29d843fc2764310732e41f0ce27feadde453261859ec0fca7865dfc41b", + "sha256:c9009c527929f6e25604aec39b0a43c3f831d2947d89d6caaab22f057b7055c8", + "sha256:f5c71e29b4e2af7ccdc03a020c626ede51da471173b4a6ad1e904f2b2e04b4bd" + ], + "markers": "python_version < '3.5'", + "version": "==1.9.0" }, "six": { "hashes": [ @@ -437,7 +479,7 @@ "sha256:68ca7ff70785cbe1e7bccc71a48b5b6d965d79ca50629606c7861a21b206d9dd", "sha256:9de47f375baf1ea07cdb3436ff39d7a9c76042c10a769c52353ec46e4e8fc3b9" ], - "markers": "python_version >= '2.7' and python_version != '3.1.*' and python_version != '3.2.*' and python_version != '3.0.*' and python_version != '3.3.*'", + "markers": "python_version >= '2.7' and python_version != '3.0.*' and python_version != '3.1.*' and python_version != '3.2.*' and python_version != '3.3.*'", "version": "==1.1.0" }, "stdeb": { @@ -464,15 +506,15 @@ "towncrier": { "editable": true, "git": "https://github.com/hawkowl/towncrier.git", - "ref": "491eec5e07e971894aa41b7c6f7892023d81bb81" + "ref": "3d600a813b8bb4277b8bd77360e54531ce274b58" }, "tqdm": { "hashes": [ - "sha256:224291ee0d8c52d91b037fd90806f48c79bcd9994d3b0abc9e44b946a908fccd", - "sha256:77b8424d41b31e68f437c6dd9cd567aebc9a860507cb42fbd880a5f822d966fe" + "sha256:5ef526702c0d265d5a960a3b27f3971fac13c26cf0fb819294bfa71fc6026c88", + "sha256:a3364bd83ce4777320b862e3c8a93d7da91e20a95f06ef79bed7dd71c654cafa" ], - "markers": "python_version != '3.1.*' and python_version != '3.0.*' and python_version >= '2.6'", - "version": "==4.23.4" + "markers": "python_version != '3.0.*' and python_version != '3.1.*' and python_version >= '2.6'", + "version": "==4.25.0" }, "twine": { "hashes": [ @@ -496,7 +538,7 @@ "sha256:a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf", "sha256:b5725a0bd4ba422ab0e66e89e030c806576753ea3ee08554382c14e685d117b5" ], - 
"markers": "python_version != '3.1.*' and python_version < '4' and python_version != '3.2.*' and python_version >= '2.6' and python_version != '3.0.*' and python_version != '3.3.*'", + "markers": "python_version != '3.2.*' and python_version != '3.3.*' and python_version != '3.0.*' and python_version != '3.1.*' and python_version < '4' and python_version >= '2.6'", "version": "==1.23" }, "virtualenv": { @@ -522,4 +564,4 @@ "version": "==0.14.1" } } -} \ No newline at end of file +} diff --git a/pipenv/patched/piptools/repositories/pypi.py b/pipenv/patched/piptools/repositories/pypi.py index bf7ebd2da6..e3156f6a6b 100644 --- a/pipenv/patched/piptools/repositories/pypi.py +++ b/pipenv/patched/piptools/repositories/pypi.py @@ -72,7 +72,7 @@ def get_hash(self, location): # hash url WITH fragment hash_value = self.get(new_location.url) if not hash_value: - hash_value = self._get_file_hash(new_location) + hash_value = self._get_file_hash(new_location) if not new_location.url.startswith("ssh") else None hash_value = hash_value.encode('utf8') if can_hash: self.set(new_location.url, hash_value) diff --git a/pipenv/project.py b/pipenv/project.py index c6e9e4faf9..c15dc4cf31 100644 --- a/pipenv/project.py +++ b/pipenv/project.py @@ -24,7 +24,6 @@ proper_case, find_requirements, is_editable, - is_vcs, cleanup_toml, is_installable_file, is_valid_url, @@ -45,6 +44,7 @@ PIPENV_PYTHON, PIPENV_DEFAULT_PYTHON_VERSION, ) +from requirementslib.utils import is_vcs def _normalized(p): diff --git a/pipenv/utils.py b/pipenv/utils.py index fa7a809b1c..cdb375dd26 100644 --- a/pipenv/utils.py +++ b/pipenv/utils.py @@ -605,21 +605,6 @@ def is_editable(pipfile_entry): return False -def is_vcs(pipfile_entry): - from .vendor import requirements - - """Determine if dictionary entry from Pipfile is for a vcs dependency.""" - if hasattr(pipfile_entry, "keys"): - return any(key for key in pipfile_entry.keys() if key in VCS_LIST) - - elif isinstance(pipfile_entry, six.string_types): - return bool( - 
requirements.requirement.VCS_REGEX.match(clean_git_uri(pipfile_entry)) - ) - - return False - - def is_installable_file(path): """Determine if a path can potentially be installed""" from .patched.notpip._internal.utils.misc import is_installable_dir @@ -757,6 +742,7 @@ def split_section(input_file, section_suffix, test_function): def split_file(file_dict): """Split VCS and editable dependencies out from file.""" + from .vendor.requirementslib.utils import is_vcs sections = { "vcs": is_vcs, "editable": lambda x: hasattr(x, "keys") and x.get("editable"), diff --git a/pipenv/vendor/backports/__init__.py b/pipenv/vendor/backports/__init__.py index c263b4e373..791e7ec6aa 100644 --- a/pipenv/vendor/backports/__init__.py +++ b/pipenv/vendor/backports/__init__.py @@ -4,3 +4,5 @@ __path__ = extend_path(__path__, __name__) from . import weakref from . import shutil_get_terminal_size +from . import enum + diff --git a/pipenv/vendor/backports/enum/LICENSE b/pipenv/vendor/backports/enum/LICENSE new file mode 100644 index 0000000000..9003b8850e --- /dev/null +++ b/pipenv/vendor/backports/enum/LICENSE @@ -0,0 +1,32 @@ +Copyright (c) 2013, Ethan Furman. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + + Redistributions of source code must retain the above + copyright notice, this list of conditions and the + following disclaimer. + + Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials + provided with the distribution. + + Neither the name Ethan Furman nor the names of any + contributors may be used to endorse or promote products + derived from this software without specific prior written + permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. diff --git a/pipenv/vendor/backports/enum/README b/pipenv/vendor/backports/enum/README new file mode 100644 index 0000000000..aa2333d8df --- /dev/null +++ b/pipenv/vendor/backports/enum/README @@ -0,0 +1,3 @@ +enum34 is the new Python stdlib enum module available in Python 3.4 +backported for previous versions of Python from 2.4 to 3.3. 
+tested on 2.6, 2.7, and 3.3+ diff --git a/pipenv/vendor/backports/enum/__init__.py b/pipenv/vendor/backports/enum/__init__.py new file mode 100644 index 0000000000..d6ffb3a40f --- /dev/null +++ b/pipenv/vendor/backports/enum/__init__.py @@ -0,0 +1,837 @@ +"""Python Enumerations""" + +import sys as _sys + +__all__ = ['Enum', 'IntEnum', 'unique'] + +version = 1, 1, 6 + +pyver = float('%s.%s' % _sys.version_info[:2]) + +try: + any +except NameError: + def any(iterable): + for element in iterable: + if element: + return True + return False + +try: + from collections import OrderedDict +except ImportError: + OrderedDict = None + +try: + basestring +except NameError: + # In Python 2 basestring is the ancestor of both str and unicode + # in Python 3 it's just str, but was missing in 3.1 + basestring = str + +try: + unicode +except NameError: + # In Python 3 unicode no longer exists (it's just str) + unicode = str + +class _RouteClassAttributeToGetattr(object): + """Route attribute access on a class to __getattr__. + + This is a descriptor, used to define attributes that act differently when + accessed through an instance and through a class. Instance access remains + normal, but access to an attribute through a class will be routed to the + class's __getattr__ method; this is done by raising AttributeError. 
+ + """ + def __init__(self, fget=None): + self.fget = fget + + def __get__(self, instance, ownerclass=None): + if instance is None: + raise AttributeError() + return self.fget(instance) + + def __set__(self, instance, value): + raise AttributeError("can't set attribute") + + def __delete__(self, instance): + raise AttributeError("can't delete attribute") + + +def _is_descriptor(obj): + """Returns True if obj is a descriptor, False otherwise.""" + return ( + hasattr(obj, '__get__') or + hasattr(obj, '__set__') or + hasattr(obj, '__delete__')) + + +def _is_dunder(name): + """Returns True if a __dunder__ name, False otherwise.""" + return (name[:2] == name[-2:] == '__' and + name[2:3] != '_' and + name[-3:-2] != '_' and + len(name) > 4) + + +def _is_sunder(name): + """Returns True if a _sunder_ name, False otherwise.""" + return (name[0] == name[-1] == '_' and + name[1:2] != '_' and + name[-2:-1] != '_' and + len(name) > 2) + + +def _make_class_unpicklable(cls): + """Make the given class un-picklable.""" + def _break_on_call_reduce(self, protocol=None): + raise TypeError('%r cannot be pickled' % self) + cls.__reduce_ex__ = _break_on_call_reduce + cls.__module__ = '' + + +class _EnumDict(dict): + """Track enum member order and ensure member names are not reused. + + EnumMeta will use the names found in self._member_names as the + enumeration member names. + + """ + def __init__(self): + super(_EnumDict, self).__init__() + self._member_names = [] + + def __setitem__(self, key, value): + """Changes anything not dundered or not a descriptor. + + If a descriptor is added with the same name as an enum member, the name + is removed from _member_names (this may leave a hole in the numerical + sequence of values). + + If an enum member name is used twice, an error is raised; duplicate + values are not checked for. + + Single underscore (sunder) names are reserved. 
+ + Note: in 3.x __order__ is simply discarded as a not necessary piece + leftover from 2.x + + """ + if pyver >= 3.0 and key in ('_order_', '__order__'): + return + elif key == '__order__': + key = '_order_' + if _is_sunder(key): + if key != '_order_': + raise ValueError('_names_ are reserved for future Enum use') + elif _is_dunder(key): + pass + elif key in self._member_names: + # descriptor overwriting an enum? + raise TypeError('Attempted to reuse key: %r' % key) + elif not _is_descriptor(value): + if key in self: + # enum overwriting a descriptor? + raise TypeError('Key already defined as: %r' % self[key]) + self._member_names.append(key) + super(_EnumDict, self).__setitem__(key, value) + + +# Dummy value for Enum as EnumMeta explicity checks for it, but of course until +# EnumMeta finishes running the first time the Enum class doesn't exist. This +# is also why there are checks in EnumMeta like `if Enum is not None` +Enum = None + + +class EnumMeta(type): + """Metaclass for Enum""" + @classmethod + def __prepare__(metacls, cls, bases): + return _EnumDict() + + def __new__(metacls, cls, bases, classdict): + # an Enum class is final once enumeration items have been defined; it + # cannot be mixed with other types (int, float, etc.) if it has an + # inherited __new__ unless a new __new__ is defined (or the resulting + # class will fail). 
+ if type(classdict) is dict: + original_dict = classdict + classdict = _EnumDict() + for k, v in original_dict.items(): + classdict[k] = v + + member_type, first_enum = metacls._get_mixins_(bases) + __new__, save_new, use_args = metacls._find_new_(classdict, member_type, + first_enum) + # save enum items into separate mapping so they don't get baked into + # the new class + members = dict((k, classdict[k]) for k in classdict._member_names) + for name in classdict._member_names: + del classdict[name] + + # py2 support for definition order + _order_ = classdict.get('_order_') + if _order_ is None: + if pyver < 3.0: + try: + _order_ = [name for (name, value) in sorted(members.items(), key=lambda item: item[1])] + except TypeError: + _order_ = [name for name in sorted(members.keys())] + else: + _order_ = classdict._member_names + else: + del classdict['_order_'] + if pyver < 3.0: + _order_ = _order_.replace(',', ' ').split() + aliases = [name for name in members if name not in _order_] + _order_ += aliases + + # check for illegal enum names (any others?) + invalid_names = set(members) & set(['mro']) + if invalid_names: + raise ValueError('Invalid enum member name(s): %s' % ( + ', '.join(invalid_names), )) + + # save attributes from super classes so we know if we can take + # the shortcut of storing members in the class dict + base_attributes = set([a for b in bases for a in b.__dict__]) + # create our new Enum type + enum_class = super(EnumMeta, metacls).__new__(metacls, cls, bases, classdict) + enum_class._member_names_ = [] # names in random order + if OrderedDict is not None: + enum_class._member_map_ = OrderedDict() + else: + enum_class._member_map_ = {} # name->value map + enum_class._member_type_ = member_type + + # Reverse value->name map for hashable values. 
+ enum_class._value2member_map_ = {} + + # instantiate them, checking for duplicates as we go + # we instantiate first instead of checking for duplicates first in case + # a custom __new__ is doing something funky with the values -- such as + # auto-numbering ;) + if __new__ is None: + __new__ = enum_class.__new__ + for member_name in _order_: + value = members[member_name] + if not isinstance(value, tuple): + args = (value, ) + else: + args = value + if member_type is tuple: # special case for tuple enums + args = (args, ) # wrap it one more time + if not use_args or not args: + enum_member = __new__(enum_class) + if not hasattr(enum_member, '_value_'): + enum_member._value_ = value + else: + enum_member = __new__(enum_class, *args) + if not hasattr(enum_member, '_value_'): + enum_member._value_ = member_type(*args) + value = enum_member._value_ + enum_member._name_ = member_name + enum_member.__objclass__ = enum_class + enum_member.__init__(*args) + # If another member with the same value was already defined, the + # new member becomes an alias to the existing one. + for name, canonical_member in enum_class._member_map_.items(): + if canonical_member.value == enum_member._value_: + enum_member = canonical_member + break + else: + # Aliases don't appear in member names (only in __members__). + enum_class._member_names_.append(member_name) + # performance boost for any member that would not shadow + # a DynamicClassAttribute (aka _RouteClassAttributeToGetattr) + if member_name not in base_attributes: + setattr(enum_class, member_name, enum_member) + # now add to _member_map_ + enum_class._member_map_[member_name] = enum_member + try: + # This may fail if value is not hashable. We can't add the value + # to the map, and by-value lookups for this value will be + # linear. 
+ enum_class._value2member_map_[value] = enum_member + except TypeError: + pass + + + # If a custom type is mixed into the Enum, and it does not know how + # to pickle itself, pickle.dumps will succeed but pickle.loads will + # fail. Rather than have the error show up later and possibly far + # from the source, sabotage the pickle protocol for this class so + # that pickle.dumps also fails. + # + # However, if the new class implements its own __reduce_ex__, do not + # sabotage -- it's on them to make sure it works correctly. We use + # __reduce_ex__ instead of any of the others as it is preferred by + # pickle over __reduce__, and it handles all pickle protocols. + unpicklable = False + if '__reduce_ex__' not in classdict: + if member_type is not object: + methods = ('__getnewargs_ex__', '__getnewargs__', + '__reduce_ex__', '__reduce__') + if not any(m in member_type.__dict__ for m in methods): + _make_class_unpicklable(enum_class) + unpicklable = True + + + # double check that repr and friends are not the mixin's or various + # things break (such as pickle) + for name in ('__repr__', '__str__', '__format__', '__reduce_ex__'): + class_method = getattr(enum_class, name) + obj_method = getattr(member_type, name, None) + enum_method = getattr(first_enum, name, None) + if name not in classdict and class_method is not enum_method: + if name == '__reduce_ex__' and unpicklable: + continue + setattr(enum_class, name, enum_method) + + # method resolution and int's are not playing nice + # Python's less than 2.6 use __cmp__ + + if pyver < 2.6: + + if issubclass(enum_class, int): + setattr(enum_class, '__cmp__', getattr(int, '__cmp__')) + + elif pyver < 3.0: + + if issubclass(enum_class, int): + for method in ( + '__le__', + '__lt__', + '__gt__', + '__ge__', + '__eq__', + '__ne__', + '__hash__', + ): + setattr(enum_class, method, getattr(int, method)) + + # replace any other __new__ with our own (as long as Enum is not None, + # anyway) -- again, this is to support pickle + 
if Enum is not None: + # if the user defined their own __new__, save it before it gets + # clobbered in case they subclass later + if save_new: + setattr(enum_class, '__member_new__', enum_class.__dict__['__new__']) + setattr(enum_class, '__new__', Enum.__dict__['__new__']) + return enum_class + + def __bool__(cls): + """ + classes/types should always be True. + """ + return True + + def __call__(cls, value, names=None, module=None, type=None, start=1): + """Either returns an existing member, or creates a new enum class. + + This method is used both when an enum class is given a value to match + to an enumeration member (i.e. Color(3)) and for the functional API + (i.e. Color = Enum('Color', names='red green blue')). + + When used for the functional API: `module`, if set, will be stored in + the new class' __module__ attribute; `type`, if set, will be mixed in + as the first base class. + + Note: if `module` is not set this routine will attempt to discover the + calling module by walking the frame stack; if this is unsuccessful + the resulting class will not be pickleable. + + """ + if names is None: # simple value lookup + return cls.__new__(cls, value) + # otherwise, functional API: we're creating a new Enum type + return cls._create_(value, names, module=module, type=type, start=start) + + def __contains__(cls, member): + return isinstance(member, cls) and member.name in cls._member_map_ + + def __delattr__(cls, attr): + # nicer error message when someone tries to delete an attribute + # (see issue19025). + if attr in cls._member_map_: + raise AttributeError( + "%s: cannot delete Enum member." % cls.__name__) + super(EnumMeta, cls).__delattr__(attr) + + def __dir__(self): + return (['__class__', '__doc__', '__members__', '__module__'] + + self._member_names_) + + @property + def __members__(cls): + """Returns a mapping of member name->value. + + This mapping lists all enum members, including aliases. Note that this + is a copy of the internal mapping. 
+ + """ + return cls._member_map_.copy() + + def __getattr__(cls, name): + """Return the enum member matching `name` + + We use __getattr__ instead of descriptors or inserting into the enum + class' __dict__ in order to support `name` and `value` being both + properties for enum members (which live in the class' __dict__) and + enum members themselves. + + """ + if _is_dunder(name): + raise AttributeError(name) + try: + return cls._member_map_[name] + except KeyError: + raise AttributeError(name) + + def __getitem__(cls, name): + return cls._member_map_[name] + + def __iter__(cls): + return (cls._member_map_[name] for name in cls._member_names_) + + def __reversed__(cls): + return (cls._member_map_[name] for name in reversed(cls._member_names_)) + + def __len__(cls): + return len(cls._member_names_) + + __nonzero__ = __bool__ + + def __repr__(cls): + return "" % cls.__name__ + + def __setattr__(cls, name, value): + """Block attempts to reassign Enum members. + + A simple assignment to the class namespace only changes one of the + several possible ways to get an Enum member from the Enum class, + resulting in an inconsistent Enumeration. + + """ + member_map = cls.__dict__.get('_member_map_', {}) + if name in member_map: + raise AttributeError('Cannot reassign members.') + super(EnumMeta, cls).__setattr__(name, value) + + def _create_(cls, class_name, names=None, module=None, type=None, start=1): + """Convenience method to create a new Enum class. + + `names` can be: + + * A string containing member names, separated either with spaces or + commas. Values are auto-numbered from 1. + * An iterable of member names. Values are auto-numbered from 1. + * An iterable of (member name, value) pairs. + * A mapping of member name -> value. 
+ + """ + if pyver < 3.0: + # if class_name is unicode, attempt a conversion to ASCII + if isinstance(class_name, unicode): + try: + class_name = class_name.encode('ascii') + except UnicodeEncodeError: + raise TypeError('%r is not representable in ASCII' % class_name) + metacls = cls.__class__ + if type is None: + bases = (cls, ) + else: + bases = (type, cls) + classdict = metacls.__prepare__(class_name, bases) + _order_ = [] + + # special processing needed for names? + if isinstance(names, basestring): + names = names.replace(',', ' ').split() + if isinstance(names, (tuple, list)) and isinstance(names[0], basestring): + names = [(e, i+start) for (i, e) in enumerate(names)] + + # Here, names is either an iterable of (name, value) or a mapping. + item = None # in case names is empty + for item in names: + if isinstance(item, basestring): + member_name, member_value = item, names[item] + else: + member_name, member_value = item + classdict[member_name] = member_value + _order_.append(member_name) + # only set _order_ in classdict if name/value was not from a mapping + if not isinstance(item, basestring): + classdict['_order_'] = ' '.join(_order_) + enum_class = metacls.__new__(metacls, class_name, bases, classdict) + + # TODO: replace the frame hack if a blessed way to know the calling + # module is ever developed + if module is None: + try: + module = _sys._getframe(2).f_globals['__name__'] + except (AttributeError, ValueError): + pass + if module is None: + _make_class_unpicklable(enum_class) + else: + enum_class.__module__ = module + + return enum_class + + @staticmethod + def _get_mixins_(bases): + """Returns the type for creating enum members, and the first inherited + enum class. 
+ + bases: the tuple of bases that was given to __new__ + + """ + if not bases or Enum is None: + return object, Enum + + + # double check that we are not subclassing a class with existing + # enumeration members; while we're at it, see if any other data + # type has been mixed in so we can use the correct __new__ + member_type = first_enum = None + for base in bases: + if (base is not Enum and + issubclass(base, Enum) and + base._member_names_): + raise TypeError("Cannot extend enumerations") + # base is now the last base in bases + if not issubclass(base, Enum): + raise TypeError("new enumerations must be created as " + "`ClassName([mixin_type,] enum_type)`") + + # get correct mix-in type (either mix-in type of Enum subclass, or + # first base if last base is Enum) + if not issubclass(bases[0], Enum): + member_type = bases[0] # first data type + first_enum = bases[-1] # enum type + else: + for base in bases[0].__mro__: + # most common: (IntEnum, int, Enum, object) + # possible: (, , + # , , + # ) + if issubclass(base, Enum): + if first_enum is None: + first_enum = base + else: + if member_type is None: + member_type = base + + return member_type, first_enum + + if pyver < 3.0: + @staticmethod + def _find_new_(classdict, member_type, first_enum): + """Returns the __new__ to be used for creating the enum members. 
+ + classdict: the class dictionary given to __new__ + member_type: the data type whose __new__ will be used by default + first_enum: enumeration to check for an overriding __new__ + + """ + # now find the correct __new__, checking to see of one was defined + # by the user; also check earlier enum classes in case a __new__ was + # saved as __member_new__ + __new__ = classdict.get('__new__', None) + if __new__: + return None, True, True # __new__, save_new, use_args + + N__new__ = getattr(None, '__new__') + O__new__ = getattr(object, '__new__') + if Enum is None: + E__new__ = N__new__ + else: + E__new__ = Enum.__dict__['__new__'] + # check all possibles for __member_new__ before falling back to + # __new__ + for method in ('__member_new__', '__new__'): + for possible in (member_type, first_enum): + try: + target = possible.__dict__[method] + except (AttributeError, KeyError): + target = getattr(possible, method, None) + if target not in [ + None, + N__new__, + O__new__, + E__new__, + ]: + if method == '__member_new__': + classdict['__new__'] = target + return None, False, True + if isinstance(target, staticmethod): + target = target.__get__(member_type) + __new__ = target + break + if __new__ is not None: + break + else: + __new__ = object.__new__ + + # if a non-object.__new__ is used then whatever value/tuple was + # assigned to the enum member name will be passed to __new__ and to the + # new enum member's __init__ + if __new__ is object.__new__: + use_args = False + else: + use_args = True + + return __new__, False, use_args + else: + @staticmethod + def _find_new_(classdict, member_type, first_enum): + """Returns the __new__ to be used for creating the enum members. 
+ + classdict: the class dictionary given to __new__ + member_type: the data type whose __new__ will be used by default + first_enum: enumeration to check for an overriding __new__ + + """ + # now find the correct __new__, checking to see of one was defined + # by the user; also check earlier enum classes in case a __new__ was + # saved as __member_new__ + __new__ = classdict.get('__new__', None) + + # should __new__ be saved as __member_new__ later? + save_new = __new__ is not None + + if __new__ is None: + # check all possibles for __member_new__ before falling back to + # __new__ + for method in ('__member_new__', '__new__'): + for possible in (member_type, first_enum): + target = getattr(possible, method, None) + if target not in ( + None, + None.__new__, + object.__new__, + Enum.__new__, + ): + __new__ = target + break + if __new__ is not None: + break + else: + __new__ = object.__new__ + + # if a non-object.__new__ is used then whatever value/tuple was + # assigned to the enum member name will be passed to __new__ and to the + # new enum member's __init__ + if __new__ is object.__new__: + use_args = False + else: + use_args = True + + return __new__, save_new, use_args + + +######################################################## +# In order to support Python 2 and 3 with a single +# codebase we have to create the Enum methods separately +# and then use the `type(name, bases, dict)` method to +# create the class. +######################################################## +temp_enum_dict = {} +temp_enum_dict['__doc__'] = "Generic enumeration.\n\n Derive from this class to define new enumerations.\n\n" + +def __new__(cls, value): + # all enum instances are actually created during class construction + # without calling this method; this method is called by the metaclass' + # __call__ (i.e. 
Color(3) ), and by pickle + if type(value) is cls: + # For lookups like Color(Color.red) + value = value.value + #return value + # by-value search for a matching enum member + # see if it's in the reverse mapping (for hashable values) + try: + if value in cls._value2member_map_: + return cls._value2member_map_[value] + except TypeError: + # not there, now do long search -- O(n) behavior + for member in cls._member_map_.values(): + if member.value == value: + return member + raise ValueError("%s is not a valid %s" % (value, cls.__name__)) +temp_enum_dict['__new__'] = __new__ +del __new__ + +def __repr__(self): + return "<%s.%s: %r>" % ( + self.__class__.__name__, self._name_, self._value_) +temp_enum_dict['__repr__'] = __repr__ +del __repr__ + +def __str__(self): + return "%s.%s" % (self.__class__.__name__, self._name_) +temp_enum_dict['__str__'] = __str__ +del __str__ + +if pyver >= 3.0: + def __dir__(self): + added_behavior = [ + m + for cls in self.__class__.mro() + for m in cls.__dict__ + if m[0] != '_' and m not in self._member_map_ + ] + return (['__class__', '__doc__', '__module__', ] + added_behavior) + temp_enum_dict['__dir__'] = __dir__ + del __dir__ + +def __format__(self, format_spec): + # mixed-in Enums should use the mixed-in type's __format__, otherwise + # we can get strange results with the Enum name showing up instead of + # the value + + # pure Enum branch + if self._member_type_ is object: + cls = str + val = str(self) + # mix-in branch + else: + cls = self._member_type_ + val = self.value + return cls.__format__(val, format_spec) +temp_enum_dict['__format__'] = __format__ +del __format__ + + +#################################### +# Python's less than 2.6 use __cmp__ + +if pyver < 2.6: + + def __cmp__(self, other): + if type(other) is self.__class__: + if self is other: + return 0 + return -1 + return NotImplemented + raise TypeError("unorderable types: %s() and %s()" % (self.__class__.__name__, other.__class__.__name__)) + 
temp_enum_dict['__cmp__'] = __cmp__ + del __cmp__ + +else: + + def __le__(self, other): + raise TypeError("unorderable types: %s() <= %s()" % (self.__class__.__name__, other.__class__.__name__)) + temp_enum_dict['__le__'] = __le__ + del __le__ + + def __lt__(self, other): + raise TypeError("unorderable types: %s() < %s()" % (self.__class__.__name__, other.__class__.__name__)) + temp_enum_dict['__lt__'] = __lt__ + del __lt__ + + def __ge__(self, other): + raise TypeError("unorderable types: %s() >= %s()" % (self.__class__.__name__, other.__class__.__name__)) + temp_enum_dict['__ge__'] = __ge__ + del __ge__ + + def __gt__(self, other): + raise TypeError("unorderable types: %s() > %s()" % (self.__class__.__name__, other.__class__.__name__)) + temp_enum_dict['__gt__'] = __gt__ + del __gt__ + + +def __eq__(self, other): + if type(other) is self.__class__: + return self is other + return NotImplemented +temp_enum_dict['__eq__'] = __eq__ +del __eq__ + +def __ne__(self, other): + if type(other) is self.__class__: + return self is not other + return NotImplemented +temp_enum_dict['__ne__'] = __ne__ +del __ne__ + +def __hash__(self): + return hash(self._name_) +temp_enum_dict['__hash__'] = __hash__ +del __hash__ + +def __reduce_ex__(self, proto): + return self.__class__, (self._value_, ) +temp_enum_dict['__reduce_ex__'] = __reduce_ex__ +del __reduce_ex__ + +# _RouteClassAttributeToGetattr is used to provide access to the `name` +# and `value` properties of enum members while keeping some measure of +# protection from modification, while still allowing for an enumeration +# to have members named `name` and `value`. This works because enumeration +# members are not set directly on the enum class -- __getattr__ is +# used to look them up. 
+ +@_RouteClassAttributeToGetattr +def name(self): + return self._name_ +temp_enum_dict['name'] = name +del name + +@_RouteClassAttributeToGetattr +def value(self): + return self._value_ +temp_enum_dict['value'] = value +del value + +@classmethod +def _convert(cls, name, module, filter, source=None): + """ + Create a new Enum subclass that replaces a collection of global constants + """ + # convert all constants from source (or module) that pass filter() to + # a new Enum called name, and export the enum and its members back to + # module; + # also, replace the __reduce_ex__ method so unpickling works in + # previous Python versions + module_globals = vars(_sys.modules[module]) + if source: + source = vars(source) + else: + source = module_globals + members = dict((name, value) for name, value in source.items() if filter(name)) + cls = cls(name, members, module=module) + cls.__reduce_ex__ = _reduce_ex_by_name + module_globals.update(cls.__members__) + module_globals[name] = cls + return cls +temp_enum_dict['_convert'] = _convert +del _convert + +Enum = EnumMeta('Enum', (object, ), temp_enum_dict) +del temp_enum_dict + +# Enum has now been created +########################### + +class IntEnum(int, Enum): + """Enum where members are also (and must be) ints""" + +def _reduce_ex_by_name(self, proto): + return self.name + +def unique(enumeration): + """Class decorator that ensures only unique members exist in an enumeration.""" + duplicates = [] + for name, member in enumeration.__members__.items(): + if name != member.name: + duplicates.append((name, member.name)) + if duplicates: + duplicate_names = ', '.join( + ["%s -> %s" % (alias, name) for (alias, name) in duplicates] + ) + raise ValueError('duplicate names found in %r: %s' % + (enumeration, duplicate_names) + ) + return enumeration diff --git a/pipenv/vendor/backports/typing/__init__.py b/pipenv/vendor/backports/typing/__init__.py new file mode 100644 index 0000000000..ddef31b438 --- /dev/null +++ 
b/pipenv/vendor/backports/typing/__init__.py @@ -0,0 +1 @@ +from . import typing diff --git a/pipenv/vendor/backports/typing/typing.py b/pipenv/vendor/backports/typing/typing.py new file mode 100644 index 0000000000..7d8c7a343e --- /dev/null +++ b/pipenv/vendor/backports/typing/typing.py @@ -0,0 +1,2205 @@ +from __future__ import absolute_import, unicode_literals + +import abc +from abc import abstractmethod, abstractproperty +import collections +import functools +import re as stdlib_re # Avoid confusion with the re we export. +import sys +import types +import copy +try: + import collections.abc as collections_abc +except ImportError: + import collections as collections_abc # Fallback for PY3.2. + + +# Please keep __all__ alphabetized within each category. +__all__ = [ + # Super-special typing primitives. + 'Any', + 'Callable', + 'ClassVar', + 'Generic', + 'Optional', + 'Tuple', + 'Type', + 'TypeVar', + 'Union', + + # ABCs (from collections.abc). + 'AbstractSet', # collections.abc.Set. + 'GenericMeta', # subclass of abc.ABCMeta and a metaclass + # for 'Generic' and ABCs below. + 'ByteString', + 'Container', + 'ContextManager', + 'Hashable', + 'ItemsView', + 'Iterable', + 'Iterator', + 'KeysView', + 'Mapping', + 'MappingView', + 'MutableMapping', + 'MutableSequence', + 'MutableSet', + 'Sequence', + 'Sized', + 'ValuesView', + + # Structural checks, a.k.a. protocols. + 'Reversible', + 'SupportsAbs', + 'SupportsComplex', + 'SupportsFloat', + 'SupportsInt', + + # Concrete collection types. + 'Counter', + 'Deque', + 'Dict', + 'DefaultDict', + 'List', + 'Set', + 'FrozenSet', + 'NamedTuple', # Not really a type. + 'Generator', + + # One-off things. 
+ 'AnyStr', + 'cast', + 'get_type_hints', + 'NewType', + 'no_type_check', + 'no_type_check_decorator', + 'NoReturn', + 'overload', + 'Text', + 'TYPE_CHECKING', +] + +# The pseudo-submodules 're' and 'io' are part of the public +# namespace, but excluded from __all__ because they might stomp on +# legitimate imports of those modules. + + +def _qualname(x): + if sys.version_info[:2] >= (3, 3): + return x.__qualname__ + else: + # Fall back to just name. + return x.__name__ + + +def _trim_name(nm): + whitelist = ('_TypeAlias', '_ForwardRef', '_TypingBase', '_FinalTypingBase') + if nm.startswith('_') and nm not in whitelist: + nm = nm[1:] + return nm + + +class TypingMeta(type): + """Metaclass for most types defined in typing module + (not a part of public API). + + This also defines a dummy constructor (all the work for most typing + constructs is done in __new__) and a nicer repr(). + """ + + _is_protocol = False + + def __new__(cls, name, bases, namespace): + return super(TypingMeta, cls).__new__(cls, str(name), bases, namespace) + + @classmethod + def assert_no_subclassing(cls, bases): + for base in bases: + if isinstance(base, cls): + raise TypeError("Cannot subclass %s" % + (', '.join(map(_type_repr, bases)) or '()')) + + def __init__(self, *args, **kwds): + pass + + def _eval_type(self, globalns, localns): + """Override this in subclasses to interpret forward references. + + For example, List['C'] is internally stored as + List[_ForwardRef('C')], which should evaluate to List[C], + where C is an object found in globalns or localns (searching + localns first, of course). 
+ """ + return self + + def _get_type_vars(self, tvars): + pass + + def __repr__(self): + qname = _trim_name(_qualname(self)) + return '%s.%s' % (self.__module__, qname) + + +class _TypingBase(object): + """Internal indicator of special typing constructs.""" + __metaclass__ = TypingMeta + __slots__ = ('__weakref__',) + + def __init__(self, *args, **kwds): + pass + + def __new__(cls, *args, **kwds): + """Constructor. + + This only exists to give a better error message in case + someone tries to subclass a special typing object (not a good idea). + """ + if (len(args) == 3 and + isinstance(args[0], str) and + isinstance(args[1], tuple)): + # Close enough. + raise TypeError("Cannot subclass %r" % cls) + return super(_TypingBase, cls).__new__(cls) + + # Things that are not classes also need these. + def _eval_type(self, globalns, localns): + return self + + def _get_type_vars(self, tvars): + pass + + def __repr__(self): + cls = type(self) + qname = _trim_name(_qualname(cls)) + return '%s.%s' % (cls.__module__, qname) + + def __call__(self, *args, **kwds): + raise TypeError("Cannot instantiate %r" % type(self)) + + +class _FinalTypingBase(_TypingBase): + """Internal mix-in class to prevent instantiation. + + Prevents instantiation unless _root=True is given in class call. + It is used to create pseudo-singleton instances Any, Union, Optional, etc. 
+ """ + + __slots__ = () + + def __new__(cls, *args, **kwds): + self = super(_FinalTypingBase, cls).__new__(cls, *args, **kwds) + if '_root' in kwds and kwds['_root'] is True: + return self + raise TypeError("Cannot instantiate %r" % cls) + + def __reduce__(self): + return _trim_name(type(self).__name__) + + +class _ForwardRef(_TypingBase): + """Internal wrapper to hold a forward reference.""" + + __slots__ = ('__forward_arg__', '__forward_code__', + '__forward_evaluated__', '__forward_value__') + + def __init__(self, arg): + super(_ForwardRef, self).__init__(arg) + if not isinstance(arg, basestring): + raise TypeError('Forward reference must be a string -- got %r' % (arg,)) + try: + code = compile(arg, '', 'eval') + except SyntaxError: + raise SyntaxError('Forward reference must be an expression -- got %r' % + (arg,)) + self.__forward_arg__ = arg + self.__forward_code__ = code + self.__forward_evaluated__ = False + self.__forward_value__ = None + + def _eval_type(self, globalns, localns): + if not self.__forward_evaluated__ or localns is not globalns: + if globalns is None and localns is None: + globalns = localns = {} + elif globalns is None: + globalns = localns + elif localns is None: + localns = globalns + self.__forward_value__ = _type_check( + eval(self.__forward_code__, globalns, localns), + "Forward references must evaluate to types.") + self.__forward_evaluated__ = True + return self.__forward_value__ + + def __eq__(self, other): + if not isinstance(other, _ForwardRef): + return NotImplemented + return (self.__forward_arg__ == other.__forward_arg__ and + self.__forward_value__ == other.__forward_value__) + + def __hash__(self): + return hash((self.__forward_arg__, self.__forward_value__)) + + def __instancecheck__(self, obj): + raise TypeError("Forward references cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("Forward references cannot be used with issubclass().") + + def __repr__(self): + return 
'_ForwardRef(%r)' % (self.__forward_arg__,) + + +class _TypeAlias(_TypingBase): + """Internal helper class for defining generic variants of concrete types. + + Note that this is not a type; let's call it a pseudo-type. It cannot + be used in instance and subclass checks in parameterized form, i.e. + ``isinstance(42, Match[str])`` raises ``TypeError`` instead of returning + ``False``. + """ + + __slots__ = ('name', 'type_var', 'impl_type', 'type_checker') + + def __init__(self, name, type_var, impl_type, type_checker): + """Initializer. + + Args: + name: The name, e.g. 'Pattern'. + type_var: The type parameter, e.g. AnyStr, or the + specific type, e.g. str. + impl_type: The implementation type. + type_checker: Function that takes an impl_type instance. + and returns a value that should be a type_var instance. + """ + assert isinstance(name, basestring), repr(name) + assert isinstance(impl_type, type), repr(impl_type) + assert not isinstance(impl_type, TypingMeta), repr(impl_type) + assert isinstance(type_var, (type, _TypingBase)), repr(type_var) + self.name = name + self.type_var = type_var + self.impl_type = impl_type + self.type_checker = type_checker + + def __repr__(self): + return "%s[%s]" % (self.name, _type_repr(self.type_var)) + + def __getitem__(self, parameter): + if not isinstance(self.type_var, TypeVar): + raise TypeError("%s cannot be further parameterized." % self) + if self.type_var.__constraints__ and isinstance(parameter, type): + if not issubclass(parameter, self.type_var.__constraints__): + raise TypeError("%s is not a valid substitution for %s." % + (parameter, self.type_var)) + if isinstance(parameter, TypeVar) and parameter is not self.type_var: + raise TypeError("%s cannot be re-parameterized." 
% self) + return self.__class__(self.name, parameter, + self.impl_type, self.type_checker) + + def __eq__(self, other): + if not isinstance(other, _TypeAlias): + return NotImplemented + return self.name == other.name and self.type_var == other.type_var + + def __hash__(self): + return hash((self.name, self.type_var)) + + def __instancecheck__(self, obj): + if not isinstance(self.type_var, TypeVar): + raise TypeError("Parameterized type aliases cannot be used " + "with isinstance().") + return isinstance(obj, self.impl_type) + + def __subclasscheck__(self, cls): + if not isinstance(self.type_var, TypeVar): + raise TypeError("Parameterized type aliases cannot be used " + "with issubclass().") + return issubclass(cls, self.impl_type) + + +def _get_type_vars(types, tvars): + for t in types: + if isinstance(t, TypingMeta) or isinstance(t, _TypingBase): + t._get_type_vars(tvars) + + +def _type_vars(types): + tvars = [] + _get_type_vars(types, tvars) + return tuple(tvars) + + +def _eval_type(t, globalns, localns): + if isinstance(t, TypingMeta) or isinstance(t, _TypingBase): + return t._eval_type(globalns, localns) + return t + + +def _type_check(arg, msg): + """Check that the argument is a type, and return it (internal helper). + + As a special case, accept None and return type(None) instead. + Also, _TypeAlias instances (e.g. Match, Pattern) are acceptable. + + The msg argument is a human-readable error message, e.g. + + "Union[arg, ...]: arg should be a type." + + We append the repr() of the actual value (truncated to 100 chars). + """ + if arg is None: + return type(None) + if isinstance(arg, basestring): + arg = _ForwardRef(arg) + if ( + isinstance(arg, _TypingBase) and type(arg).__name__ == '_ClassVar' or + not isinstance(arg, (type, _TypingBase)) and not callable(arg) + ): + raise TypeError(msg + " Got %.100r." % (arg,)) + # Bare Union etc. 
are not valid as type arguments + if ( + type(arg).__name__ in ('_Union', '_Optional') and + not getattr(arg, '__origin__', None) or + isinstance(arg, TypingMeta) and arg._gorg in (Generic, _Protocol) + ): + raise TypeError("Plain %s is not valid as type argument" % arg) + return arg + + +def _type_repr(obj): + """Return the repr() of an object, special-casing types (internal helper). + + If obj is a type, we return a shorter version than the default + type.__repr__, based on the module and qualified name, which is + typically enough to uniquely identify a type. For everything + else, we fall back on repr(obj). + """ + if isinstance(obj, type) and not isinstance(obj, TypingMeta): + if obj.__module__ == '__builtin__': + return _qualname(obj) + return '%s.%s' % (obj.__module__, _qualname(obj)) + if obj is Ellipsis: + return('...') + if isinstance(obj, types.FunctionType): + return obj.__name__ + return repr(obj) + + +class ClassVarMeta(TypingMeta): + """Metaclass for _ClassVar""" + + def __new__(cls, name, bases, namespace): + cls.assert_no_subclassing(bases) + self = super(ClassVarMeta, cls).__new__(cls, name, bases, namespace) + return self + + +class _ClassVar(_FinalTypingBase): + """Special type construct to mark class variables. + + An annotation wrapped in ClassVar indicates that a given + attribute is intended to be used as a class variable and + should not be set on instances of that class. Usage:: + + class Starship: + stats = {} # type: ClassVar[Dict[str, int]] # class variable + damage = 10 # type: int # instance variable + + ClassVar accepts only types and cannot be further subscribed. + + Note that ClassVar is not a class itself, and should not + be used with isinstance() or issubclass(). 
+ """ + + __metaclass__ = ClassVarMeta + __slots__ = ('__type__',) + + def __init__(self, tp=None, _root=False): + self.__type__ = tp + + def __getitem__(self, item): + cls = type(self) + if self.__type__ is None: + return cls(_type_check(item, + '{} accepts only types.'.format(cls.__name__[1:])), + _root=True) + raise TypeError('{} cannot be further subscripted' + .format(cls.__name__[1:])) + + def _eval_type(self, globalns, localns): + return type(self)(_eval_type(self.__type__, globalns, localns), + _root=True) + + def __repr__(self): + r = super(_ClassVar, self).__repr__() + if self.__type__ is not None: + r += '[{}]'.format(_type_repr(self.__type__)) + return r + + def __hash__(self): + return hash((type(self).__name__, self.__type__)) + + def __eq__(self, other): + if not isinstance(other, _ClassVar): + return NotImplemented + if self.__type__ is not None: + return self.__type__ == other.__type__ + return self is other + + +ClassVar = _ClassVar(_root=True) + + +class AnyMeta(TypingMeta): + """Metaclass for Any.""" + + def __new__(cls, name, bases, namespace): + cls.assert_no_subclassing(bases) + self = super(AnyMeta, cls).__new__(cls, name, bases, namespace) + return self + + +class _Any(_FinalTypingBase): + """Special type indicating an unconstrained type. + + - Any is compatible with every type. + - Any assumed to have all methods. + - All values assumed to be instances of Any. + + Note that all the above statements are true from the point of view of + static type checkers. At runtime, Any should not be used with instance + or class checks. 
+ """ + __metaclass__ = AnyMeta + __slots__ = () + + def __instancecheck__(self, obj): + raise TypeError("Any cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("Any cannot be used with issubclass().") + + +Any = _Any(_root=True) + + +class NoReturnMeta(TypingMeta): + """Metaclass for NoReturn.""" + + def __new__(cls, name, bases, namespace): + cls.assert_no_subclassing(bases) + self = super(NoReturnMeta, cls).__new__(cls, name, bases, namespace) + return self + + +class _NoReturn(_FinalTypingBase): + """Special type indicating functions that never return. + Example:: + + from typing import NoReturn + + def stop() -> NoReturn: + raise Exception('no way') + + This type is invalid in other positions, e.g., ``List[NoReturn]`` + will fail in static type checkers. + """ + __metaclass__ = NoReturnMeta + __slots__ = () + + def __instancecheck__(self, obj): + raise TypeError("NoReturn cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("NoReturn cannot be used with issubclass().") + + +NoReturn = _NoReturn(_root=True) + + +class TypeVarMeta(TypingMeta): + def __new__(cls, name, bases, namespace): + cls.assert_no_subclassing(bases) + return super(TypeVarMeta, cls).__new__(cls, name, bases, namespace) + + +class TypeVar(_TypingBase): + """Type variable. + + Usage:: + + T = TypeVar('T') # Can be anything + A = TypeVar('A', str, bytes) # Must be str or bytes + + Type variables exist primarily for the benefit of static type + checkers. They serve as the parameters for generic types as well + as for generic function definitions. See class Generic for more + information on generic types. 
Generic functions work as follows: + + def repeat(x: T, n: int) -> List[T]: + '''Return a list containing n references to x.''' + return [x]*n + + def longest(x: A, y: A) -> A: + '''Return the longest of two strings.''' + return x if len(x) >= len(y) else y + + The latter example's signature is essentially the overloading + of (str, str) -> str and (bytes, bytes) -> bytes. Also note + that if the arguments are instances of some subclass of str, + the return type is still plain str. + + At runtime, isinstance(x, T) and issubclass(C, T) will raise TypeError. + + Type variables defined with covariant=True or contravariant=True + can be used do declare covariant or contravariant generic types. + See PEP 484 for more details. By default generic types are invariant + in all type variables. + + Type variables can be introspected. e.g.: + + T.__name__ == 'T' + T.__constraints__ == () + T.__covariant__ == False + T.__contravariant__ = False + A.__constraints__ == (str, bytes) + """ + + __metaclass__ = TypeVarMeta + __slots__ = ('__name__', '__bound__', '__constraints__', + '__covariant__', '__contravariant__') + + def __init__(self, name, *constraints, **kwargs): + super(TypeVar, self).__init__(name, *constraints, **kwargs) + bound = kwargs.get('bound', None) + covariant = kwargs.get('covariant', False) + contravariant = kwargs.get('contravariant', False) + self.__name__ = name + if covariant and contravariant: + raise ValueError("Bivariant types are not supported.") + self.__covariant__ = bool(covariant) + self.__contravariant__ = bool(contravariant) + if constraints and bound is not None: + raise TypeError("Constraints cannot be combined with bound=...") + if constraints and len(constraints) == 1: + raise TypeError("A single constraint is not allowed") + msg = "TypeVar(name, constraint, ...): constraints must be types." 
+ self.__constraints__ = tuple(_type_check(t, msg) for t in constraints) + if bound: + self.__bound__ = _type_check(bound, "Bound must be a type.") + else: + self.__bound__ = None + + def _get_type_vars(self, tvars): + if self not in tvars: + tvars.append(self) + + def __repr__(self): + if self.__covariant__: + prefix = '+' + elif self.__contravariant__: + prefix = '-' + else: + prefix = '~' + return prefix + self.__name__ + + def __instancecheck__(self, instance): + raise TypeError("Type variables cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("Type variables cannot be used with issubclass().") + + +# Some unconstrained type variables. These are used by the container types. +# (These are not for export.) +T = TypeVar('T') # Any type. +KT = TypeVar('KT') # Key type. +VT = TypeVar('VT') # Value type. +T_co = TypeVar('T_co', covariant=True) # Any type covariant containers. +V_co = TypeVar('V_co', covariant=True) # Any type covariant containers. +VT_co = TypeVar('VT_co', covariant=True) # Value type covariant containers. +T_contra = TypeVar('T_contra', contravariant=True) # Ditto contravariant. + +# A useful type variable with constraints. This represents string types. +# (This one *is* for export!) +AnyStr = TypeVar('AnyStr', bytes, unicode) + + +def _replace_arg(arg, tvars, args): + """An internal helper function: replace arg if it is a type variable + found in tvars with corresponding substitution from args or + with corresponding substitution sub-tree if arg is a generic type. 
+ """ + + if tvars is None: + tvars = [] + if hasattr(arg, '_subs_tree') and isinstance(arg, (GenericMeta, _TypingBase)): + return arg._subs_tree(tvars, args) + if isinstance(arg, TypeVar): + for i, tvar in enumerate(tvars): + if arg == tvar: + return args[i] + return arg + + +# Special typing constructs Union, Optional, Generic, Callable and Tuple +# use three special attributes for internal bookkeeping of generic types: +# * __parameters__ is a tuple of unique free type parameters of a generic +# type, for example, Dict[T, T].__parameters__ == (T,); +# * __origin__ keeps a reference to a type that was subscripted, +# e.g., Union[T, int].__origin__ == Union; +# * __args__ is a tuple of all arguments used in subscripting, +# e.g., Dict[T, int].__args__ == (T, int). + + +def _subs_tree(cls, tvars=None, args=None): + """An internal helper function: calculate substitution tree + for generic cls after replacing its type parameters with + substitutions in tvars -> args (if any). + Repeat the same following __origin__'s. + + Return a list of arguments with all possible substitutions + performed. Arguments that are generic classes themselves are represented + as tuples (so that no new classes are created by this function). + For example: _subs_tree(List[Tuple[int, T]][str]) == [(Tuple, int, str)] + """ + + if cls.__origin__ is None: + return cls + # Make of chain of origins (i.e. cls -> cls.__origin__) + current = cls.__origin__ + orig_chain = [] + while current.__origin__ is not None: + orig_chain.append(current) + current = current.__origin__ + # Replace type variables in __args__ if asked ... + tree_args = [] + for arg in cls.__args__: + tree_args.append(_replace_arg(arg, tvars, args)) + # ... then continue replacing down the origin chain. 
+ for ocls in orig_chain: + new_tree_args = [] + for arg in ocls.__args__: + new_tree_args.append(_replace_arg(arg, ocls.__parameters__, tree_args)) + tree_args = new_tree_args + return tree_args + + +def _remove_dups_flatten(parameters): + """An internal helper for Union creation and substitution: flatten Union's + among parameters, then remove duplicates and strict subclasses. + """ + + # Flatten out Union[Union[...], ...]. + params = [] + for p in parameters: + if isinstance(p, _Union) and p.__origin__ is Union: + params.extend(p.__args__) + elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union: + params.extend(p[1:]) + else: + params.append(p) + # Weed out strict duplicates, preserving the first of each occurrence. + all_params = set(params) + if len(all_params) < len(params): + new_params = [] + for t in params: + if t in all_params: + new_params.append(t) + all_params.remove(t) + params = new_params + assert not all_params, all_params + # Weed out subclasses. + # E.g. Union[int, Employee, Manager] == Union[int, Employee]. + # If object is present it will be sole survivor among proper classes. + # Never discard type variables. + # (In particular, Union[str, AnyStr] != AnyStr.) + all_params = set(params) + for t1 in params: + if not isinstance(t1, type): + continue + if any(isinstance(t2, type) and issubclass(t1, t2) + for t2 in all_params - {t1} + if not (isinstance(t2, GenericMeta) and + t2.__origin__ is not None)): + all_params.remove(t1) + return tuple(t for t in params if t in all_params) + + +def _check_generic(cls, parameters): + # Check correct count for parameters of a generic cls (internal helper). 
+ if not cls.__parameters__: + raise TypeError("%s is not a generic class" % repr(cls)) + alen = len(parameters) + elen = len(cls.__parameters__) + if alen != elen: + raise TypeError("Too %s parameters for %s; actual %s, expected %s" % + ("many" if alen > elen else "few", repr(cls), alen, elen)) + + +_cleanups = [] + + +def _tp_cache(func): + maxsize = 128 + cache = {} + _cleanups.append(cache.clear) + + @functools.wraps(func) + def inner(*args): + key = args + try: + return cache[key] + except TypeError: + # Assume it's an unhashable argument. + return func(*args) + except KeyError: + value = func(*args) + if len(cache) >= maxsize: + # If the cache grows too much, just start over. + cache.clear() + cache[key] = value + return value + + return inner + + +class UnionMeta(TypingMeta): + """Metaclass for Union.""" + + def __new__(cls, name, bases, namespace): + cls.assert_no_subclassing(bases) + return super(UnionMeta, cls).__new__(cls, name, bases, namespace) + + +class _Union(_FinalTypingBase): + """Union type; Union[X, Y] means either X or Y. + + To define a union, use e.g. Union[int, str]. Details: + + - The arguments must be types and there must be at least one. + + - None as an argument is a special case and is replaced by + type(None). 
+ + - Unions of unions are flattened, e.g.:: + + Union[Union[int, str], float] == Union[int, str, float] + + - Unions of a single argument vanish, e.g.:: + + Union[int] == int # The constructor actually returns int + + - Redundant arguments are skipped, e.g.:: + + Union[int, str, int] == Union[int, str] + + - When comparing unions, the argument order is ignored, e.g.:: + + Union[int, str] == Union[str, int] + + - When two arguments have a subclass relationship, the least + derived argument is kept, e.g.:: + + class Employee: pass + class Manager(Employee): pass + Union[int, Employee, Manager] == Union[int, Employee] + Union[Manager, int, Employee] == Union[int, Employee] + Union[Employee, Manager] == Employee + + - Similar for object:: + + Union[int, object] == object + + - You cannot subclass or instantiate a union. + + - You can use Optional[X] as a shorthand for Union[X, None]. + """ + + __metaclass__ = UnionMeta + __slots__ = ('__parameters__', '__args__', '__origin__', '__tree_hash__') + + def __new__(cls, parameters=None, origin=None, *args, **kwds): + self = super(_Union, cls).__new__(cls, parameters, origin, *args, **kwds) + if origin is None: + self.__parameters__ = None + self.__args__ = None + self.__origin__ = None + self.__tree_hash__ = hash(frozenset(('Union',))) + return self + if not isinstance(parameters, tuple): + raise TypeError("Expected parameters=") + if origin is Union: + parameters = _remove_dups_flatten(parameters) + # It's not a union if there's only one type left. + if len(parameters) == 1: + return parameters[0] + self.__parameters__ = _type_vars(parameters) + self.__args__ = parameters + self.__origin__ = origin + # Pre-calculate the __hash__ on instantiation. + # This improves speed for complex substitutions. 
+ subs_tree = self._subs_tree() + if isinstance(subs_tree, tuple): + self.__tree_hash__ = hash(frozenset(subs_tree)) + else: + self.__tree_hash__ = hash(subs_tree) + return self + + def _eval_type(self, globalns, localns): + if self.__args__ is None: + return self + ev_args = tuple(_eval_type(t, globalns, localns) for t in self.__args__) + ev_origin = _eval_type(self.__origin__, globalns, localns) + if ev_args == self.__args__ and ev_origin == self.__origin__: + # Everything is already evaluated. + return self + return self.__class__(ev_args, ev_origin, _root=True) + + def _get_type_vars(self, tvars): + if self.__origin__ and self.__parameters__: + _get_type_vars(self.__parameters__, tvars) + + def __repr__(self): + if self.__origin__ is None: + return super(_Union, self).__repr__() + tree = self._subs_tree() + if not isinstance(tree, tuple): + return repr(tree) + return tree[0]._tree_repr(tree) + + def _tree_repr(self, tree): + arg_list = [] + for arg in tree[1:]: + if not isinstance(arg, tuple): + arg_list.append(_type_repr(arg)) + else: + arg_list.append(arg[0]._tree_repr(arg)) + return super(_Union, self).__repr__() + '[%s]' % ', '.join(arg_list) + + @_tp_cache + def __getitem__(self, parameters): + if parameters == (): + raise TypeError("Cannot take a Union of no types.") + if not isinstance(parameters, tuple): + parameters = (parameters,) + if self.__origin__ is None: + msg = "Union[arg, ...]: each arg must be a type." + else: + msg = "Parameters to generic types must be types." 
+ parameters = tuple(_type_check(p, msg) for p in parameters) + if self is not Union: + _check_generic(self, parameters) + return self.__class__(parameters, origin=self, _root=True) + + def _subs_tree(self, tvars=None, args=None): + if self is Union: + return Union # Nothing to substitute + tree_args = _subs_tree(self, tvars, args) + tree_args = _remove_dups_flatten(tree_args) + if len(tree_args) == 1: + return tree_args[0] # Union of a single type is that type + return (Union,) + tree_args + + def __eq__(self, other): + if isinstance(other, _Union): + return self.__tree_hash__ == other.__tree_hash__ + elif self is not Union: + return self._subs_tree() == other + else: + return self is other + + def __hash__(self): + return self.__tree_hash__ + + def __instancecheck__(self, obj): + raise TypeError("Unions cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("Unions cannot be used with issubclass().") + + +Union = _Union(_root=True) + + +class OptionalMeta(TypingMeta): + """Metaclass for Optional.""" + + def __new__(cls, name, bases, namespace): + cls.assert_no_subclassing(bases) + return super(OptionalMeta, cls).__new__(cls, name, bases, namespace) + + +class _Optional(_FinalTypingBase): + """Optional type. + + Optional[X] is equivalent to Union[X, None]. + """ + + __metaclass__ = OptionalMeta + __slots__ = () + + @_tp_cache + def __getitem__(self, arg): + arg = _type_check(arg, "Optional[t] requires a single type.") + return Union[arg, type(None)] + + +Optional = _Optional(_root=True) + + +def _next_in_mro(cls): + """Helper for Generic.__new__. + + Returns the class after the last occurrence of Generic or + Generic[...] in cls.__mro__. + """ + next_in_mro = object + # Look for the last occurrence of Generic or Generic[...]. 
+ for i, c in enumerate(cls.__mro__[:-1]): + if isinstance(c, GenericMeta) and c._gorg is Generic: + next_in_mro = cls.__mro__[i + 1] + return next_in_mro + + +def _make_subclasshook(cls): + """Construct a __subclasshook__ callable that incorporates + the associated __extra__ class in subclass checks performed + against cls. + """ + if isinstance(cls.__extra__, abc.ABCMeta): + # The logic mirrors that of ABCMeta.__subclasscheck__. + # Registered classes need not be checked here because + # cls and its extra share the same _abc_registry. + def __extrahook__(cls, subclass): + res = cls.__extra__.__subclasshook__(subclass) + if res is not NotImplemented: + return res + if cls.__extra__ in getattr(subclass, '__mro__', ()): + return True + for scls in cls.__extra__.__subclasses__(): + if isinstance(scls, GenericMeta): + continue + if issubclass(subclass, scls): + return True + return NotImplemented + else: + # For non-ABC extras we'll just call issubclass(). + def __extrahook__(cls, subclass): + if cls.__extra__ and issubclass(subclass, cls.__extra__): + return True + return NotImplemented + return classmethod(__extrahook__) + + +class GenericMeta(TypingMeta, abc.ABCMeta): + """Metaclass for generic types. + + This is a metaclass for typing.Generic and generic ABCs defined in + typing module. User defined subclasses of GenericMeta can override + __new__ and invoke super().__new__. Note that GenericMeta.__new__ + has strict rules on what is allowed in its bases argument: + * plain Generic is disallowed in bases; + * Generic[...] should appear in bases at most once; + * if Generic[...] is present, then it should list all type variables + that appear in other bases. + In addition, type of all generic bases is erased, e.g., C[int] is + stripped to plain C. + """ + + def __new__(cls, name, bases, namespace, + tvars=None, args=None, origin=None, extra=None, orig_bases=None): + """Create a new generic class. 
GenericMeta.__new__ accepts + keyword arguments that are used for internal bookkeeping, therefore + an override should pass unused keyword arguments to super(). + """ + if tvars is not None: + # Called from __getitem__() below. + assert origin is not None + assert all(isinstance(t, TypeVar) for t in tvars), tvars + else: + # Called from class statement. + assert tvars is None, tvars + assert args is None, args + assert origin is None, origin + + # Get the full set of tvars from the bases. + tvars = _type_vars(bases) + # Look for Generic[T1, ..., Tn]. + # If found, tvars must be a subset of it. + # If not found, tvars is it. + # Also check for and reject plain Generic, + # and reject multiple Generic[...]. + gvars = None + for base in bases: + if base is Generic: + raise TypeError("Cannot inherit from plain Generic") + if (isinstance(base, GenericMeta) and + base.__origin__ is Generic): + if gvars is not None: + raise TypeError( + "Cannot inherit from Generic[...] multiple types.") + gvars = base.__parameters__ + if gvars is None: + gvars = tvars + else: + tvarset = set(tvars) + gvarset = set(gvars) + if not tvarset <= gvarset: + raise TypeError( + "Some type variables (%s) " + "are not listed in Generic[%s]" % + (", ".join(str(t) for t in tvars if t not in gvarset), + ", ".join(str(g) for g in gvars))) + tvars = gvars + + initial_bases = bases + if extra is None: + extra = namespace.get('__extra__') + if extra is not None and type(extra) is abc.ABCMeta and extra not in bases: + bases = (extra,) + bases + bases = tuple(b._gorg if isinstance(b, GenericMeta) else b for b in bases) + + # remove bare Generic from bases if there are other generic bases + if any(isinstance(b, GenericMeta) and b is not Generic for b in bases): + bases = tuple(b for b in bases if b is not Generic) + namespace.update({'__origin__': origin, '__extra__': extra}) + self = super(GenericMeta, cls).__new__(cls, name, bases, namespace) + super(GenericMeta, self).__setattr__('_gorg', + self if not 
origin else origin._gorg) + + self.__parameters__ = tvars + # Be prepared that GenericMeta will be subclassed by TupleMeta + # and CallableMeta, those two allow ..., (), or [] in __args___. + self.__args__ = tuple(Ellipsis if a is _TypingEllipsis else + () if a is _TypingEmpty else + a for a in args) if args else None + # Speed hack (https://github.com/python/typing/issues/196). + self.__next_in_mro__ = _next_in_mro(self) + # Preserve base classes on subclassing (__bases__ are type erased now). + if orig_bases is None: + self.__orig_bases__ = initial_bases + + # This allows unparameterized generic collections to be used + # with issubclass() and isinstance() in the same way as their + # collections.abc counterparts (e.g., isinstance([], Iterable)). + if ( + '__subclasshook__' not in namespace and extra or + # allow overriding + getattr(self.__subclasshook__, '__name__', '') == '__extrahook__' + ): + self.__subclasshook__ = _make_subclasshook(self) + + if origin and hasattr(origin, '__qualname__'): # Fix for Python 3.2. + self.__qualname__ = origin.__qualname__ + self.__tree_hash__ = (hash(self._subs_tree()) if origin else + super(GenericMeta, self).__hash__()) + return self + + def __init__(self, *args, **kwargs): + super(GenericMeta, self).__init__(*args, **kwargs) + if isinstance(self.__extra__, abc.ABCMeta): + self._abc_registry = self.__extra__._abc_registry + self._abc_cache = self.__extra__._abc_cache + elif self.__origin__ is not None: + self._abc_registry = self.__origin__._abc_registry + self._abc_cache = self.__origin__._abc_cache + + # _abc_negative_cache and _abc_negative_cache_version + # realised as descriptors, since GenClass[t1, t2, ...] always + # share subclass info with GenClass. + # This is an important memory optimization. 
+ @property + def _abc_negative_cache(self): + if isinstance(self.__extra__, abc.ABCMeta): + return self.__extra__._abc_negative_cache + return self._gorg._abc_generic_negative_cache + + @_abc_negative_cache.setter + def _abc_negative_cache(self, value): + if self.__origin__ is None: + if isinstance(self.__extra__, abc.ABCMeta): + self.__extra__._abc_negative_cache = value + else: + self._abc_generic_negative_cache = value + + @property + def _abc_negative_cache_version(self): + if isinstance(self.__extra__, abc.ABCMeta): + return self.__extra__._abc_negative_cache_version + return self._gorg._abc_generic_negative_cache_version + + @_abc_negative_cache_version.setter + def _abc_negative_cache_version(self, value): + if self.__origin__ is None: + if isinstance(self.__extra__, abc.ABCMeta): + self.__extra__._abc_negative_cache_version = value + else: + self._abc_generic_negative_cache_version = value + + def _get_type_vars(self, tvars): + if self.__origin__ and self.__parameters__: + _get_type_vars(self.__parameters__, tvars) + + def _eval_type(self, globalns, localns): + ev_origin = (self.__origin__._eval_type(globalns, localns) + if self.__origin__ else None) + ev_args = tuple(_eval_type(a, globalns, localns) for a + in self.__args__) if self.__args__ else None + if ev_origin == self.__origin__ and ev_args == self.__args__: + return self + return self.__class__(self.__name__, + self.__bases__, + dict(self.__dict__), + tvars=_type_vars(ev_args) if ev_args else None, + args=ev_args, + origin=ev_origin, + extra=self.__extra__, + orig_bases=self.__orig_bases__) + + def __repr__(self): + if self.__origin__ is None: + return super(GenericMeta, self).__repr__() + return self._tree_repr(self._subs_tree()) + + def _tree_repr(self, tree): + arg_list = [] + for arg in tree[1:]: + if arg == (): + arg_list.append('()') + elif not isinstance(arg, tuple): + arg_list.append(_type_repr(arg)) + else: + arg_list.append(arg[0]._tree_repr(arg)) + return super(GenericMeta, 
self).__repr__() + '[%s]' % ', '.join(arg_list) + + def _subs_tree(self, tvars=None, args=None): + if self.__origin__ is None: + return self + tree_args = _subs_tree(self, tvars, args) + return (self._gorg,) + tuple(tree_args) + + def __eq__(self, other): + if not isinstance(other, GenericMeta): + return NotImplemented + if self.__origin__ is None or other.__origin__ is None: + return self is other + return self.__tree_hash__ == other.__tree_hash__ + + def __hash__(self): + return self.__tree_hash__ + + @_tp_cache + def __getitem__(self, params): + if not isinstance(params, tuple): + params = (params,) + if not params and self._gorg is not Tuple: + raise TypeError( + "Parameter list to %s[...] cannot be empty" % _qualname(self)) + msg = "Parameters to generic types must be types." + params = tuple(_type_check(p, msg) for p in params) + if self is Generic: + # Generic can only be subscripted with unique type variables. + if not all(isinstance(p, TypeVar) for p in params): + raise TypeError( + "Parameters to Generic[...] must all be type variables") + if len(set(params)) != len(params): + raise TypeError( + "Parameters to Generic[...] must all be unique") + tvars = params + args = params + elif self in (Tuple, Callable): + tvars = _type_vars(params) + args = params + elif self is _Protocol: + # _Protocol is internal, don't check anything. + tvars = params + args = params + elif self.__origin__ in (Generic, _Protocol): + # Can't subscript Generic[...] or _Protocol[...]. + raise TypeError("Cannot subscript already-subscripted %s" % + repr(self)) + else: + # Subscripting a regular Generic subclass. 
+ _check_generic(self, params) + tvars = _type_vars(params) + args = params + + prepend = (self,) if self.__origin__ is None else () + return self.__class__(self.__name__, + prepend + self.__bases__, + dict(self.__dict__), + tvars=tvars, + args=args, + origin=self, + extra=self.__extra__, + orig_bases=self.__orig_bases__) + + def __subclasscheck__(self, cls): + if self.__origin__ is not None: + # This should only be modules within the standard + # library. singledispatch is the only exception, because + # it's a Python 2 backport of functools.singledispatch. + if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools', + 'singledispatch']: + raise TypeError("Parameterized generics cannot be used with class " + "or instance checks") + return False + if self is Generic: + raise TypeError("Class %r cannot be used with class " + "or instance checks" % self) + return super(GenericMeta, self).__subclasscheck__(cls) + + def __instancecheck__(self, instance): + # Since we extend ABC.__subclasscheck__ and + # ABC.__instancecheck__ inlines the cache checking done by the + # latter, we must extend __instancecheck__ too. For simplicity + # we just skip the cache check -- instance checks for generic + # classes are supposed to be rare anyways. 
+ if not isinstance(instance, type): + return issubclass(instance.__class__, self) + return False + + def __setattr__(self, attr, value): + # We consider all the subscripted genrics as proxies for original class + if ( + attr.startswith('__') and attr.endswith('__') or + attr.startswith('_abc_') + ): + super(GenericMeta, self).__setattr__(attr, value) + else: + super(GenericMeta, self._gorg).__setattr__(attr, value) + + +def _copy_generic(self): + """Hack to work around https://bugs.python.org/issue11480 on Python 2""" + return self.__class__(self.__name__, self.__bases__, dict(self.__dict__), + self.__parameters__, self.__args__, self.__origin__, + self.__extra__, self.__orig_bases__) + + +copy._copy_dispatch[GenericMeta] = _copy_generic + + +# Prevent checks for Generic to crash when defining Generic. +Generic = None + + +def _generic_new(base_cls, cls, *args, **kwds): + # Assure type is erased on instantiation, + # but attempt to store it in __orig_class__ + if cls.__origin__ is None: + if (base_cls.__new__ is object.__new__ and + cls.__init__ is not object.__init__): + return base_cls.__new__(cls) + else: + return base_cls.__new__(cls, *args, **kwds) + else: + origin = cls._gorg + if (base_cls.__new__ is object.__new__ and + cls.__init__ is not object.__init__): + obj = base_cls.__new__(origin) + else: + obj = base_cls.__new__(origin, *args, **kwds) + try: + obj.__orig_class__ = cls + except AttributeError: + pass + obj.__init__(*args, **kwds) + return obj + + +class Generic(object): + """Abstract base class for generic types. + + A generic type is typically declared by inheriting from + this class parameterized with one or more type variables. + For example, a generic mapping type might be defined as:: + + class Mapping(Generic[KT, VT]): + def __getitem__(self, key: KT) -> VT: + ... + # Etc. 
+ + This class can then be used as follows:: + + def lookup_name(mapping: Mapping[KT, VT], key: KT, default: VT) -> VT: + try: + return mapping[key] + except KeyError: + return default + """ + + __metaclass__ = GenericMeta + __slots__ = () + + def __new__(cls, *args, **kwds): + if cls._gorg is Generic: + raise TypeError("Type Generic cannot be instantiated; " + "it can be used only as a base class") + return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) + + +class _TypingEmpty(object): + """Internal placeholder for () or []. Used by TupleMeta and CallableMeta + to allow empty list/tuple in specific places, without allowing them + to sneak in where prohibited. + """ + + +class _TypingEllipsis(object): + """Internal placeholder for ... (ellipsis).""" + + +class TupleMeta(GenericMeta): + """Metaclass for Tuple (internal).""" + + @_tp_cache + def __getitem__(self, parameters): + if self.__origin__ is not None or self._gorg is not Tuple: + # Normal generic rules apply if this is not the first subscription + # or a subscription of a subclass. + return super(TupleMeta, self).__getitem__(parameters) + if parameters == (): + return super(TupleMeta, self).__getitem__((_TypingEmpty,)) + if not isinstance(parameters, tuple): + parameters = (parameters,) + if len(parameters) == 2 and parameters[1] is Ellipsis: + msg = "Tuple[t, ...]: t must be a type." + p = _type_check(parameters[0], msg) + return super(TupleMeta, self).__getitem__((p, _TypingEllipsis)) + msg = "Tuple[t0, t1, ...]: each t must be a type." 
+ parameters = tuple(_type_check(p, msg) for p in parameters) + return super(TupleMeta, self).__getitem__(parameters) + + def __instancecheck__(self, obj): + if self.__args__ is None: + return isinstance(obj, tuple) + raise TypeError("Parameterized Tuple cannot be used " + "with isinstance().") + + def __subclasscheck__(self, cls): + if self.__args__ is None: + return issubclass(cls, tuple) + raise TypeError("Parameterized Tuple cannot be used " + "with issubclass().") + + +copy._copy_dispatch[TupleMeta] = _copy_generic + + +class Tuple(tuple): + """Tuple type; Tuple[X, Y] is the cross-product type of X and Y. + + Example: Tuple[T1, T2] is a tuple of two elements corresponding + to type variables T1 and T2. Tuple[int, float, str] is a tuple + of an int, a float and a string. + + To specify a variable-length tuple of homogeneous type, use Tuple[T, ...]. + """ + + __metaclass__ = TupleMeta + __extra__ = tuple + __slots__ = () + + def __new__(cls, *args, **kwds): + if cls._gorg is Tuple: + raise TypeError("Type Tuple cannot be instantiated; " + "use tuple() instead") + return _generic_new(tuple, cls, *args, **kwds) + + +class CallableMeta(GenericMeta): + """ Metaclass for Callable.""" + + def __repr__(self): + if self.__origin__ is None: + return super(CallableMeta, self).__repr__() + return self._tree_repr(self._subs_tree()) + + def _tree_repr(self, tree): + if self._gorg is not Callable: + return super(CallableMeta, self)._tree_repr(tree) + # For actual Callable (not its subclass) we override + # super(CallableMeta, self)._tree_repr() for nice formatting. 
+ arg_list = [] + for arg in tree[1:]: + if not isinstance(arg, tuple): + arg_list.append(_type_repr(arg)) + else: + arg_list.append(arg[0]._tree_repr(arg)) + if arg_list[0] == '...': + return repr(tree[0]) + '[..., %s]' % arg_list[1] + return (repr(tree[0]) + + '[[%s], %s]' % (', '.join(arg_list[:-1]), arg_list[-1])) + + def __getitem__(self, parameters): + """A thin wrapper around __getitem_inner__ to provide the latter + with hashable arguments to improve speed. + """ + + if self.__origin__ is not None or self._gorg is not Callable: + return super(CallableMeta, self).__getitem__(parameters) + if not isinstance(parameters, tuple) or len(parameters) != 2: + raise TypeError("Callable must be used as " + "Callable[[arg, ...], result].") + args, result = parameters + if args is Ellipsis: + parameters = (Ellipsis, result) + else: + if not isinstance(args, list): + raise TypeError("Callable[args, result]: args must be a list." + " Got %.100r." % (args,)) + parameters = (tuple(args), result) + return self.__getitem_inner__(parameters) + + @_tp_cache + def __getitem_inner__(self, parameters): + args, result = parameters + msg = "Callable[args, result]: result must be a type." + result = _type_check(result, msg) + if args is Ellipsis: + return super(CallableMeta, self).__getitem__((_TypingEllipsis, result)) + msg = "Callable[[arg, ...], result]: each arg must be a type." + args = tuple(_type_check(arg, msg) for arg in args) + parameters = args + (result,) + return super(CallableMeta, self).__getitem__(parameters) + + +copy._copy_dispatch[CallableMeta] = _copy_generic + + +class Callable(object): + """Callable type; Callable[[int], str] is a function of (int) -> str. + + The subscription syntax must always be used with exactly two + values: the argument list and the return type. The argument list + must be a list of types or ellipsis; the return type must be a single type. 
+ + There is no syntax to indicate optional or keyword arguments, + such function types are rarely used as callback types. + """ + + __metaclass__ = CallableMeta + __extra__ = collections_abc.Callable + __slots__ = () + + def __new__(cls, *args, **kwds): + if cls._gorg is Callable: + raise TypeError("Type Callable cannot be instantiated; " + "use a non-abstract subclass instead") + return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) + + +def cast(typ, val): + """Cast a value to a type. + + This returns the value unchanged. To the type checker this + signals that the return value has the designated type, but at + runtime we intentionally don't check anything (we want this + to be as fast as possible). + """ + return val + + +def _get_defaults(func): + """Internal helper to extract the default arguments, by name.""" + code = func.__code__ + pos_count = code.co_argcount + arg_names = code.co_varnames + arg_names = arg_names[:pos_count] + defaults = func.__defaults__ or () + kwdefaults = func.__kwdefaults__ + res = dict(kwdefaults) if kwdefaults else {} + pos_offset = pos_count - len(defaults) + for name, value in zip(arg_names[pos_offset:], defaults): + assert name not in res + res[name] = value + return res + + +def get_type_hints(obj, globalns=None, localns=None): + """In Python 2 this is not supported and always returns None.""" + return None + + +def no_type_check(arg): + """Decorator to indicate that annotations are not type hints. + + The argument must be a class or function; if it is a class, it + applies recursively to all methods and classes defined in that class + (but not to methods defined in its superclasses or subclasses). + + This mutates the function(s) or class(es) in place. 
+ """ + if isinstance(arg, type): + arg_attrs = arg.__dict__.copy() + for attr, val in arg.__dict__.items(): + if val in arg.__bases__ + (arg,): + arg_attrs.pop(attr) + for obj in arg_attrs.values(): + if isinstance(obj, types.FunctionType): + obj.__no_type_check__ = True + if isinstance(obj, type): + no_type_check(obj) + try: + arg.__no_type_check__ = True + except TypeError: # built-in classes + pass + return arg + + +def no_type_check_decorator(decorator): + """Decorator to give another decorator the @no_type_check effect. + + This wraps the decorator with something that wraps the decorated + function in @no_type_check. + """ + + @functools.wraps(decorator) + def wrapped_decorator(*args, **kwds): + func = decorator(*args, **kwds) + func = no_type_check(func) + return func + + return wrapped_decorator + + +def _overload_dummy(*args, **kwds): + """Helper for @overload to raise when called.""" + raise NotImplementedError( + "You should not call an overloaded function. " + "A series of @overload-decorated functions " + "outside a stub module should always be followed " + "by an implementation that is not @overload-ed.") + + +def overload(func): + """Decorator for overloaded functions/methods. + + In a stub file, place two or more stub definitions for the same + function in a row, each decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + + In a non-stub file (i.e. a regular .py file), do the same but + follow it with an implementation. The implementation should *not* + be decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + def utf8(value): + # implementation goes here + """ + return _overload_dummy + + +class _ProtocolMeta(GenericMeta): + """Internal metaclass for _Protocol. 
+ + This exists so _Protocol classes can be generic without deriving + from Generic. + """ + + def __instancecheck__(self, obj): + if _Protocol not in self.__bases__: + return super(_ProtocolMeta, self).__instancecheck__(obj) + raise TypeError("Protocols cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + if not self._is_protocol: + # No structural checks since this isn't a protocol. + return NotImplemented + + if self is _Protocol: + # Every class is a subclass of the empty protocol. + return True + + # Find all attributes defined in the protocol. + attrs = self._get_protocol_attrs() + + for attr in attrs: + if not any(attr in d.__dict__ for d in cls.__mro__): + return False + return True + + def _get_protocol_attrs(self): + # Get all Protocol base classes. + protocol_bases = [] + for c in self.__mro__: + if getattr(c, '_is_protocol', False) and c.__name__ != '_Protocol': + protocol_bases.append(c) + + # Get attributes included in protocol. + attrs = set() + for base in protocol_bases: + for attr in base.__dict__.keys(): + # Include attributes not defined in any non-protocol bases. + for c in self.__mro__: + if (c is not base and attr in c.__dict__ and + not getattr(c, '_is_protocol', False)): + break + else: + if (not attr.startswith('_abc_') and + attr != '__abstractmethods__' and + attr != '_is_protocol' and + attr != '_gorg' and + attr != '__dict__' and + attr != '__args__' and + attr != '__slots__' and + attr != '_get_protocol_attrs' and + attr != '__next_in_mro__' and + attr != '__parameters__' and + attr != '__origin__' and + attr != '__orig_bases__' and + attr != '__extra__' and + attr != '__tree_hash__' and + attr != '__module__'): + attrs.add(attr) + + return attrs + + +class _Protocol(object): + """Internal base class for protocol classes. + + This implements a simple-minded structural issubclass check + (similar but more general than the one-offs in collections.abc + such as Hashable). 
+ """ + + __metaclass__ = _ProtocolMeta + __slots__ = () + + _is_protocol = True + + +# Various ABCs mimicking those in collections.abc. +# A few are simply re-exported for completeness. + +Hashable = collections_abc.Hashable # Not generic. + + +class Iterable(Generic[T_co]): + __slots__ = () + __extra__ = collections_abc.Iterable + + +class Iterator(Iterable[T_co]): + __slots__ = () + __extra__ = collections_abc.Iterator + + +class SupportsInt(_Protocol): + __slots__ = () + + @abstractmethod + def __int__(self): + pass + + +class SupportsFloat(_Protocol): + __slots__ = () + + @abstractmethod + def __float__(self): + pass + + +class SupportsComplex(_Protocol): + __slots__ = () + + @abstractmethod + def __complex__(self): + pass + + +class SupportsAbs(_Protocol[T_co]): + __slots__ = () + + @abstractmethod + def __abs__(self): + pass + + +if hasattr(collections_abc, 'Reversible'): + class Reversible(Iterable[T_co]): + __slots__ = () + __extra__ = collections_abc.Reversible +else: + class Reversible(_Protocol[T_co]): + __slots__ = () + + @abstractmethod + def __reversed__(self): + pass + + +Sized = collections_abc.Sized # Not generic. + + +class Container(Generic[T_co]): + __slots__ = () + __extra__ = collections_abc.Container + + +# Callable was defined earlier. + + +class AbstractSet(Sized, Iterable[T_co], Container[T_co]): + __slots__ = () + __extra__ = collections_abc.Set + + +class MutableSet(AbstractSet[T]): + __slots__ = () + __extra__ = collections_abc.MutableSet + + +# NOTE: It is only covariant in the value type. 
class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co]):
    # Generic alias for collections.abc.Mapping; covariant only in the
    # value type (see the NOTE comment preceding this class).
    __slots__ = ()
    __extra__ = collections_abc.Mapping


class MutableMapping(Mapping[KT, VT]):
    __slots__ = ()
    __extra__ = collections_abc.MutableMapping


# Reversible only exists in sufficiently recent collections.abc; fall back
# to a plain Iterable-based Sequence when it is absent.
if hasattr(collections_abc, 'Reversible'):
    class Sequence(Sized, Reversible[T_co], Container[T_co]):
        __slots__ = ()
        __extra__ = collections_abc.Sequence
else:
    class Sequence(Sized, Iterable[T_co], Container[T_co]):
        __slots__ = ()
        __extra__ = collections_abc.Sequence


class MutableSequence(Sequence[T]):
    __slots__ = ()
    __extra__ = collections_abc.MutableSequence


class ByteString(Sequence[int]):
    pass


# On Python 2, str is the byte string type, so both str and bytearray are
# registered as virtual subclasses of ByteString.
ByteString.register(str)
ByteString.register(bytearray)


# The concrete container aliases below follow two patterns, mirroring the
# upstream typing module:
#   * List/Set/FrozenSet/Dict raise TypeError when the bare (unsubscripted
#     and un-subclassed) alias is instantiated, directing users to the
#     builtin instead;
#   * Deque/DefaultDict/Counter silently return the plain collections type
#     in that case.
# Subscripted or subclassed forms go through _generic_new, which erases the
# type parameters and records them on __orig_class__ where possible.

class List(list, MutableSequence[T]):
    __slots__ = ()
    __extra__ = list

    def __new__(cls, *args, **kwds):
        if cls._gorg is List:
            raise TypeError("Type List cannot be instantiated; "
                            "use list() instead")
        return _generic_new(list, cls, *args, **kwds)


class Deque(collections.deque, MutableSequence[T]):
    __slots__ = ()
    __extra__ = collections.deque

    def __new__(cls, *args, **kwds):
        if cls._gorg is Deque:
            return collections.deque(*args, **kwds)
        return _generic_new(collections.deque, cls, *args, **kwds)


class Set(set, MutableSet[T]):
    __slots__ = ()
    __extra__ = set

    def __new__(cls, *args, **kwds):
        if cls._gorg is Set:
            raise TypeError("Type Set cannot be instantiated; "
                            "use set() instead")
        return _generic_new(set, cls, *args, **kwds)


class FrozenSet(frozenset, AbstractSet[T_co]):
    __slots__ = ()
    __extra__ = frozenset

    def __new__(cls, *args, **kwds):
        if cls._gorg is FrozenSet:
            raise TypeError("Type FrozenSet cannot be instantiated; "
                            "use frozenset() instead")
        return _generic_new(frozenset, cls, *args, **kwds)


class MappingView(Sized, Iterable[T_co]):
    __slots__ = ()
    __extra__ = collections_abc.MappingView


class KeysView(MappingView[KT], AbstractSet[KT]):
    __slots__ = ()
    __extra__ = collections_abc.KeysView


class ItemsView(MappingView[Tuple[KT, VT_co]],
                AbstractSet[Tuple[KT, VT_co]],
                Generic[KT, VT_co]):
    __slots__ = ()
    __extra__ = collections_abc.ItemsView


class ValuesView(MappingView[VT_co]):
    __slots__ = ()
    __extra__ = collections_abc.ValuesView


class ContextManager(Generic[T_co]):
    # Fallback ABC: collections.abc has no ContextManager on the Pythons
    # this backport targets, so a structural __subclasshook__ is provided.
    __slots__ = ()

    def __enter__(self):
        return self

    @abc.abstractmethod
    def __exit__(self, exc_type, exc_value, traceback):
        return None

    @classmethod
    def __subclasshook__(cls, C):
        if cls is ContextManager:
            # In Python 3.6+, it is possible to set a method to None to
            # explicitly indicate that the class does not implement an ABC
            # (https://bugs.python.org/issue25958), but we do not support
            # that pattern here because this fallback class is only used
            # in Python 3.5 and earlier.
            if (any("__enter__" in B.__dict__ for B in C.__mro__) and
                    any("__exit__" in B.__dict__ for B in C.__mro__)):
                return True
        return NotImplemented


class Dict(dict, MutableMapping[KT, VT]):
    __slots__ = ()
    __extra__ = dict

    def __new__(cls, *args, **kwds):
        if cls._gorg is Dict:
            raise TypeError("Type Dict cannot be instantiated; "
                            "use dict() instead")
        return _generic_new(dict, cls, *args, **kwds)


class DefaultDict(collections.defaultdict, MutableMapping[KT, VT]):
    __slots__ = ()
    __extra__ = collections.defaultdict

    def __new__(cls, *args, **kwds):
        if cls._gorg is DefaultDict:
            return collections.defaultdict(*args, **kwds)
        return _generic_new(collections.defaultdict, cls, *args, **kwds)


class Counter(collections.Counter, Dict[T, int]):
    __slots__ = ()
    __extra__ = collections.Counter

    def __new__(cls, *args, **kwds):
        if cls._gorg is Counter:
            return collections.Counter(*args, **kwds)
        return _generic_new(collections.Counter, cls, *args, **kwds)


# Determine what base class to use for Generator.
+if hasattr(collections_abc, 'Generator'): + # Sufficiently recent versions of 3.5 have a Generator ABC. + _G_base = collections_abc.Generator +else: + # Fall back on the exact type. + _G_base = types.GeneratorType + + +class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co]): + __slots__ = () + __extra__ = _G_base + + def __new__(cls, *args, **kwds): + if cls._gorg is Generator: + raise TypeError("Type Generator cannot be instantiated; " + "create a subclass instead") + return _generic_new(_G_base, cls, *args, **kwds) + + +# Internal type variable used for Type[]. +CT_co = TypeVar('CT_co', covariant=True, bound=type) + + +# This is not a real generic class. Don't use outside annotations. +class Type(Generic[CT_co]): + """A special construct usable to annotate class objects. + + For example, suppose we have the following classes:: + + class User: ... # Abstract base for User classes + class BasicUser(User): ... + class ProUser(User): ... + class TeamUser(User): ... + + And a function that takes a class argument that's a subclass of + User and returns an instance of the corresponding class:: + + U = TypeVar('U', bound=User) + def new_user(user_class: Type[U]) -> U: + user = user_class() + # (Here we could write the user object to a database) + return user + + joe = new_user(BasicUser) + + At this point the type checker knows that joe has type BasicUser. + """ + __slots__ = () + __extra__ = type + + +def NamedTuple(typename, fields): + """Typed version of namedtuple. + + Usage:: + + Employee = typing.NamedTuple('Employee', [('name', str), ('id', int)]) + + This is equivalent to:: + + Employee = collections.namedtuple('Employee', ['name', 'id']) + + The resulting class has one extra attribute: _field_types, + giving a dict mapping field names to types. (The field names + are in the _fields attribute, which is part of the namedtuple + API.) 
+ """ + fields = [(n, t) for n, t in fields] + cls = collections.namedtuple(typename, [n for n, t in fields]) + cls._field_types = dict(fields) + # Set the module to the caller's module (otherwise it'd be 'typing'). + try: + cls.__module__ = sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + pass + return cls + + +def NewType(name, tp): + """NewType creates simple unique types with almost zero + runtime overhead. NewType(name, tp) is considered a subtype of tp + by static type checkers. At runtime, NewType(name, tp) returns + a dummy function that simply returns its argument. Usage:: + + UserId = NewType('UserId', int) + + def name_by_id(user_id): + # type: (UserId) -> str + ... + + UserId('user') # Fails type check + + name_by_id(42) # Fails type check + name_by_id(UserId(42)) # OK + + num = UserId(5) + 1 # type: int + """ + + def new_type(x): + return x + + # Some versions of Python 2 complain because of making all strings unicode + new_type.__name__ = str(name) + new_type.__supertype__ = tp + return new_type + + +# Python-version-specific alias (Python 2: unicode; Python 3: str) +Text = unicode + + +# Constant that's True when type checking, but False here. +TYPE_CHECKING = False + + +class IO(Generic[AnyStr]): + """Generic base class for TextIO and BinaryIO. + + This is an abstract, generic version of the return of open(). + + NOTE: This does not distinguish between the different possible + classes (text vs. binary, read vs. write vs. read/write, + append-only, unbuffered). The TextIO and BinaryIO subclasses + below capture the distinctions between text vs. binary, which is + pervasive in the interface; however we currently do not offer a + way to track the other distinctions in the type system. 
+ """ + + __slots__ = () + + @abstractproperty + def mode(self): + pass + + @abstractproperty + def name(self): + pass + + @abstractmethod + def close(self): + pass + + @abstractproperty + def closed(self): + pass + + @abstractmethod + def fileno(self): + pass + + @abstractmethod + def flush(self): + pass + + @abstractmethod + def isatty(self): + pass + + @abstractmethod + def read(self, n=-1): + pass + + @abstractmethod + def readable(self): + pass + + @abstractmethod + def readline(self, limit=-1): + pass + + @abstractmethod + def readlines(self, hint=-1): + pass + + @abstractmethod + def seek(self, offset, whence=0): + pass + + @abstractmethod + def seekable(self): + pass + + @abstractmethod + def tell(self): + pass + + @abstractmethod + def truncate(self, size=None): + pass + + @abstractmethod + def writable(self): + pass + + @abstractmethod + def write(self, s): + pass + + @abstractmethod + def writelines(self, lines): + pass + + @abstractmethod + def __enter__(self): + pass + + @abstractmethod + def __exit__(self, type, value, traceback): + pass + + +class BinaryIO(IO[bytes]): + """Typed version of the return of open() in binary mode.""" + + __slots__ = () + + @abstractmethod + def write(self, s): + pass + + @abstractmethod + def __enter__(self): + pass + + +class TextIO(IO[unicode]): + """Typed version of the return of open() in text mode.""" + + __slots__ = () + + @abstractproperty + def buffer(self): + pass + + @abstractproperty + def encoding(self): + pass + + @abstractproperty + def errors(self): + pass + + @abstractproperty + def line_buffering(self): + pass + + @abstractproperty + def newlines(self): + pass + + @abstractmethod + def __enter__(self): + pass + + +class io(object): + """Wrapper namespace for IO generic classes.""" + + __all__ = ['IO', 'TextIO', 'BinaryIO'] + IO = IO + TextIO = TextIO + BinaryIO = BinaryIO + + +io.__name__ = __name__ + b'.io' +sys.modules[io.__name__] = io + + +Pattern = _TypeAlias('Pattern', AnyStr, 
type(stdlib_re.compile('')), + lambda p: p.pattern) +Match = _TypeAlias('Match', AnyStr, type(stdlib_re.match('', '')), + lambda m: m.re.pattern) + + +class re(object): + """Wrapper namespace for re type aliases.""" + + __all__ = ['Pattern', 'Match'] + Pattern = Pattern + Match = Match + + +re.__name__ = __name__ + b'.re' +sys.modules[re.__name__] = re diff --git a/pipenv/vendor/click_completion/__init__.py b/pipenv/vendor/click_completion/__init__.py index 1443c8f27c..b849ae2342 100644 --- a/pipenv/vendor/click_completion/__init__.py +++ b/pipenv/vendor/click_completion/__init__.py @@ -6,7 +6,13 @@ import six from click import ParamType -from enum import Enum +if six.PY3: + try: + from enum import Enum + except ImportError: + from pipenv.vendor.backports.enum import Enum +else: + from pipenv.vendor.backports.enum import Enum from click_completion.core import completion_configuration, get_code, install, shells, resolve_ctx, get_choices, \ startswith, Shell diff --git a/pipenv/vendor/click_completion/core.py b/pipenv/vendor/click_completion/core.py index 2ede6eff41..dc47d471e2 100644 --- a/pipenv/vendor/click_completion/core.py +++ b/pipenv/vendor/click_completion/core.py @@ -10,7 +10,14 @@ import click from click import Option, Argument, MultiCommand, echo -from enum import Enum +import six +if six.PY3: + try: + from enum import Enum + except ImportError: + from pipenv.vendor.backports.enum import Enum +else: + from pipenv.vendor.backports.enum import Enum from click_completion.lib import resolve_ctx, split_args, single_quote, double_quote, get_auto_shell diff --git a/pipenv/vendor/requirements/LICENSE.rst b/pipenv/vendor/requirements/LICENSE.rst deleted file mode 100644 index 20a9573d79..0000000000 --- a/pipenv/vendor/requirements/LICENSE.rst +++ /dev/null @@ -1,29 +0,0 @@ -License -======= - -Requirements Parser is licensed under the BSD license. - -Copyright (c) 2012 - 2013, David Fischer - -All rights reserved. 
- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -- Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. -- Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY David Fischer ''AS IS'' AND ANY -EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL David Fischer BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- diff --git a/pipenv/vendor/requirements/__init__.py b/pipenv/vendor/requirements/__init__.py deleted file mode 100644 index 36349d2a18..0000000000 --- a/pipenv/vendor/requirements/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -from .parser import parse # noqa - -_MAJOR = 0 -_MINOR = 2 -_PATCH = 0 - - -def version_tuple(): - ''' - Returns a 3-tuple of ints that represent the version - ''' - return (_MAJOR, _MINOR, _PATCH) - - -def version(): - ''' - Returns a string representation of the version - ''' - return '%d.%d.%d' % (version_tuple()) - - -__version__ = version() diff --git a/pipenv/vendor/requirements/fragment.py b/pipenv/vendor/requirements/fragment.py deleted file mode 100644 index 2c29ad6618..0000000000 --- a/pipenv/vendor/requirements/fragment.py +++ /dev/null @@ -1,44 +0,0 @@ -import re - -# Copied from pipenv.patched.notpip -# https://github.com/pypa/pip/blob/281eb61b09d87765d7c2b92f6982b3fe76ccb0af/pip/index.py#L947 -HASH_ALGORITHMS = set(['sha1', 'sha224', 'sha384', 'sha256', 'sha512', 'md5']) - -extras_require_search = re.compile( - r'(?P.+)\[(?P[^\]]+)\]').search - - -def parse_fragment(fragment_string): - """Takes a fragment string nd returns a dict of the components""" - fragment_string = fragment_string.lstrip('#') - - try: - return dict( - key_value_string.split('=') - for key_value_string in fragment_string.split('&') - ) - except ValueError: - raise ValueError( - 'Invalid fragment string {fragment_string}'.format( - fragment_string=fragment_string - ) - ) - - -def get_hash_info(d): - """Returns the first matching hashlib name and value from a dict""" - for key in d.keys(): - if key.lower() in HASH_ALGORITHMS: - return key, d[key] - - return None, None - - -def parse_extras_require(egg): - if egg is not None: - match = extras_require_search(egg) - if match is not None: - name = match.group('name') - extras = match.group('extras') - return name, [extra.strip() for extra in extras.split(',')] - return egg, [] diff --git 
a/pipenv/vendor/requirements/parser.py b/pipenv/vendor/requirements/parser.py deleted file mode 100644 index 024c905fcf..0000000000 --- a/pipenv/vendor/requirements/parser.py +++ /dev/null @@ -1,50 +0,0 @@ -import os -import warnings - -from .requirement import Requirement - - -def parse(reqstr): - """ - Parse a requirements file into a list of Requirements - - See: pip/req.py:parse_requirements() - - :param reqstr: a string or file like object containing requirements - :returns: a *generator* of Requirement objects - """ - filename = getattr(reqstr, 'name', None) - try: - # Python 2.x compatibility - if not isinstance(reqstr, basestring): - reqstr = reqstr.read() - except NameError: - # Python 3.x only - if not isinstance(reqstr, str): - reqstr = reqstr.read() - - for line in reqstr.splitlines(): - line = line.strip() - if line == '': - continue - elif not line or line.startswith('#'): - # comments are lines that start with # only - continue - elif line.startswith('-r') or line.startswith('--requirement'): - _, new_filename = line.split() - new_file_path = os.path.join(os.path.dirname(filename or '.'), - new_filename) - with open(new_file_path) as f: - for requirement in parse(f): - yield requirement - elif line.startswith('-f') or line.startswith('--find-links') or \ - line.startswith('-i') or line.startswith('--index-url') or \ - line.startswith('--extra-index-url') or \ - line.startswith('--no-index'): - warnings.warn('Private repos not supported. Skipping.') - continue - elif line.startswith('-Z') or line.startswith('--always-unzip'): - warnings.warn('Unused option --always-unzip. 
Skipping.') - continue - else: - yield Requirement.parse(line) diff --git a/pipenv/vendor/requirements/requirement.py b/pipenv/vendor/requirements/requirement.py deleted file mode 100644 index 61f1c4b764..0000000000 --- a/pipenv/vendor/requirements/requirement.py +++ /dev/null @@ -1,220 +0,0 @@ -from __future__ import unicode_literals -import re -from pkg_resources import Requirement as Req - -from .fragment import get_hash_info, parse_fragment, parse_extras_require -from .vcs import VCS, VCS_SCHEMES - - -URI_REGEX = re.compile( - r'^(?Phttps?|file|ftps?)://(?P[^#]+)' - r'(#(?P\S+))?' -) - -VCS_REGEX = re.compile( - r'^(?P{0})://'.format(r'|'.join( - [scheme.replace('+', r'\+') for scheme in VCS_SCHEMES])) + - r'((?P[^/@]+)@)?' - r'(?P[^#@]+)' - r'(@(?P[^#]+))?' - r'(#(?P\S+))?' -) - -# This matches just about everyting -LOCAL_REGEX = re.compile( - r'^((?Pfile)://)?' - r'(?P[^#]+)' + - r'(#(?P\S+))?' -) - - -class Requirement(object): - """ - Represents a single requirement - - Typically instances of this class are created with ``Requirement.parse``. - For local file requirements, there's no verification that the file - exists. This class attempts to be *dict-like*. - - See: http://www.pip-installer.org/en/latest/logic.html - - **Members**: - - * ``line`` - the actual requirement line being parsed - * ``editable`` - a boolean whether this requirement is "editable" - * ``local_file`` - a boolean whether this requirement is a local file/path - * ``specifier`` - a boolean whether this requirement used a requirement - specifier (eg. 
"django>=1.5" or "requirements") - * ``vcs`` - a string specifying the version control system - * ``revision`` - a version control system specifier - * ``name`` - the name of the requirement - * ``uri`` - the URI if this requirement was specified by URI - * ``subdirectory`` - the subdirectory fragment of the URI - * ``path`` - the local path to the requirement - * ``hash_name`` - the type of hashing algorithm indicated in the line - * ``hash`` - the hash value indicated by the requirement line - * ``extras`` - a list of extras for this requirement - (eg. "mymodule[extra1, extra2]") - * ``specs`` - a list of specs for this requirement - (eg. "mymodule>1.5,<1.6" => [('>', '1.5'), ('<', '1.6')]) - """ - - def __init__(self, line): - # Do not call this private method - self.line = line - self.editable = False - self.local_file = False - self.specifier = False - self.vcs = None - self.name = None - self.subdirectory = None - self.uri = None - self.path = None - self.revision = None - self.hash_name = None - self.hash = None - self.extras = [] - self.specs = [] - - def __repr__(self): - return ''.format(self.line) - - def __getitem__(self, key): - return getattr(self, key) - - def keys(self): - return self.__dict__.keys() - - @classmethod - def parse_editable(cls, line): - """ - Parses a Requirement from an "editable" requirement which is either - a local project path or a VCS project URI. 
- - See: pip/req.py:from_editable() - - :param line: an "editable" requirement - :returns: a Requirement instance for the given line - :raises: ValueError on an invalid requirement - """ - - req = cls('-e {0}'.format(line)) - req.editable = True - vcs_match = VCS_REGEX.match(line) - local_match = LOCAL_REGEX.match(line) - - if vcs_match is not None: - groups = vcs_match.groupdict() - if groups.get('login'): - req.uri = '{scheme}://{login}@{path}'.format(**groups) - else: - req.uri = '{scheme}://{path}'.format(**groups) - req.revision = groups['revision'] - if groups['fragment']: - fragment = parse_fragment(groups['fragment']) - egg = fragment.get('egg') - req.name, req.extras = parse_extras_require(egg) - req.hash_name, req.hash = get_hash_info(fragment) - req.subdirectory = fragment.get('subdirectory') - for vcs in VCS: - if req.uri.startswith(vcs): - req.vcs = vcs - else: - assert local_match is not None, 'This should match everything' - groups = local_match.groupdict() - req.local_file = True - if groups['fragment']: - fragment = parse_fragment(groups['fragment']) - egg = fragment.get('egg') - req.name, req.extras = parse_extras_require(egg) - req.hash_name, req.hash = get_hash_info(fragment) - req.subdirectory = fragment.get('subdirectory') - req.path = groups['path'] - - return req - - @classmethod - def parse_line(cls, line): - """ - Parses a Requirement from a non-editable requirement. 
- - See: pip/req.py:from_line() - - :param line: a "non-editable" requirement - :returns: a Requirement instance for the given line - :raises: ValueError on an invalid requirement - """ - - req = cls(line) - - vcs_match = VCS_REGEX.match(line) - uri_match = URI_REGEX.match(line) - local_match = LOCAL_REGEX.match(line) - - if vcs_match is not None: - groups = vcs_match.groupdict() - if groups.get('login'): - req.uri = '{scheme}://{login}@{path}'.format(**groups) - else: - req.uri = '{scheme}://{path}'.format(**groups) - req.revision = groups['revision'] - if groups['fragment']: - fragment = parse_fragment(groups['fragment']) - egg = fragment.get('egg') - req.name, req.extras = parse_extras_require(egg) - req.hash_name, req.hash = get_hash_info(fragment) - req.subdirectory = fragment.get('subdirectory') - for vcs in VCS: - if req.uri.startswith(vcs): - req.vcs = vcs - elif uri_match is not None: - groups = uri_match.groupdict() - req.uri = '{scheme}://{path}'.format(**groups) - if groups['fragment']: - fragment = parse_fragment(groups['fragment']) - egg = fragment.get('egg') - req.name, req.extras = parse_extras_require(egg) - req.hash_name, req.hash = get_hash_info(fragment) - req.subdirectory = fragment.get('subdirectory') - if groups['scheme'] == 'file': - req.local_file = True - elif '#egg=' in line: - # Assume a local file match - assert local_match is not None, 'This should match everything' - groups = local_match.groupdict() - req.local_file = True - if groups['fragment']: - fragment = parse_fragment(groups['fragment']) - egg = fragment.get('egg') - name, extras = parse_extras_require(egg) - req.name = fragment.get('egg') - req.hash_name, req.hash = get_hash_info(fragment) - req.subdirectory = fragment.get('subdirectory') - req.path = groups['path'] - else: - # This is a requirement specifier. 
- # Delegate to pkg_resources and hope for the best - req.specifier = True - pkg_req = Req.parse(line) - req.name = pkg_req.unsafe_name - req.extras = list(pkg_req.extras) - req.specs = pkg_req.specs - return req - - @classmethod - def parse(cls, line): - """ - Parses a Requirement from a line of a requirement file. - - :param line: a line of a requirement file - :returns: a Requirement instance for the given line - :raises: ValueError on an invalid requirement - """ - - if line.startswith('-e') or line.startswith('--editable'): - # Editable installs are either a local project path - # or a VCS project URI - return cls.parse_editable( - re.sub(r'^(-e|--editable=?)\s*', '', line)) - - return cls.parse_line(line) diff --git a/pipenv/vendor/requirements/vcs.py b/pipenv/vendor/requirements/vcs.py deleted file mode 100644 index f5317b230a..0000000000 --- a/pipenv/vendor/requirements/vcs.py +++ /dev/null @@ -1,30 +0,0 @@ -from __future__ import unicode_literals - -VCS = [ - 'git', - 'hg', - 'svn', - 'bzr', -] - -VCS_SCHEMES = [ - 'git', - 'git+https', - 'git+ssh', - 'git+git', - 'hg+http', - 'hg+https', - 'hg+static-http', - 'hg+ssh', - 'svn', - 'svn+svn', - 'svn+http', - 'svn+https', - 'svn+ssh', - 'bzr+http', - 'bzr+https', - 'bzr+ssh', - 'bzr+sftp', - 'bzr+ftp', - 'bzr+lp', -] diff --git a/pipenv/vendor/requirementslib/__init__.py b/pipenv/vendor/requirementslib/__init__.py index faba3b44ce..910db3d516 100644 --- a/pipenv/vendor/requirementslib/__init__.py +++ b/pipenv/vendor/requirementslib/__init__.py @@ -1,5 +1,5 @@ # -*- coding=utf-8 -*- -__version__ = '1.1.2' +__version__ = '1.1.5' from .exceptions import RequirementError diff --git a/pipenv/vendor/requirementslib/models/lockfile.py b/pipenv/vendor/requirementslib/models/lockfile.py index 92055d6e2c..f9ca97b83e 100644 --- a/pipenv/vendor/requirementslib/models/lockfile.py +++ b/pipenv/vendor/requirementslib/models/lockfile.py @@ -25,8 +25,8 @@ def preferred_newlines(f): class Lockfile(plette.lockfiles.Lockfile): 
def __init__(self, *args, **kwargs): path = kwargs.pop("path", None) - self.requirements = kwargs.pop("requirements", []) - self.dev_requirements = kwargs.pop("dev_requirements", []) + self._requirements = kwargs.pop("requirements", []) + self._dev_requirements = kwargs.pop("dev_requirements", []) self.path = Path(path) if path else None self.newlines = u"\n" super(Lockfile, self).__init__(*args, **kwargs) @@ -56,27 +56,36 @@ def create(cls, project_path, lockfile_name="Pipfile.lock"): if not isinstance(project_path, Path): project_path = Path(project_path) lockfile_path = project_path / lockfile_name - requirements = [] - dev_requirements = [] with lockfile_path.open(encoding="utf-8") as f: lockfile = super(Lockfile, cls).load(f) lockfile.newlines = preferred_newlines(f) - for k in lockfile["develop"].keys(): - dev_requirements.append(Requirement.from_pipfile(k, lockfile.develop[k]._data)) - for k in lockfile["default"].keys(): - requirements.append(Requirement.from_pipfile(k, lockfile.default[k]._data)) - lockfile.requirements = requirements - lockfile.dev_requirements = dev_requirements lockfile.path = lockfile_path return lockfile + def get_requirements(self, dev=False): + section = self.develop if dev else self.default + for k in section.keys(): + yield Requirement.from_pipfile(k, section[k]._data) + + @property + def dev_requirements(self): + if not self._dev_requirements: + self._dev_requirements = list(self.get_requirements(dev=True)) + return self._dev_requirements + + @property + def requirements(self): + if not self._requirements: + self._requirements = list(self.get_requirements(dev=False)) + return self._requirements + @property def dev_requirements_list(self): - return [r.as_pipfile() for r in self.dev_requirements] + return [{name: entry._data} for name, entry in self.develop.items()] @property def requirements_list(self): - return [r.as_pipfile() for r in self.requirements] + return [{name: entry._data} for name, entry in self.default.items()] def
write(self): open_kwargs = {"newline": self.newlines} diff --git a/pipenv/vendor/requirementslib/models/pipfile.py b/pipenv/vendor/requirementslib/models/pipfile.py index f952252498..2bfd8996bc 100644 --- a/pipenv/vendor/requirementslib/models/pipfile.py +++ b/pipenv/vendor/requirementslib/models/pipfile.py @@ -4,6 +4,7 @@ from vistir.compat import Path from .requirements import Requirement +from ..exceptions import RequirementError import plette.pipfiles diff --git a/pipenv/vendor/requirementslib/models/requirements.py b/pipenv/vendor/requirementslib/models/requirements.py index 1baaa265ac..03c351847a 100644 --- a/pipenv/vendor/requirementslib/models/requirements.py +++ b/pipenv/vendor/requirementslib/models/requirements.py @@ -605,7 +605,8 @@ def from_pipfile(cls, name, pipfile): composed_uri = add_ssh_scheme_to_git_uri( "{0}+{1}".format(key, pipfile.get(key)) ).split("+", 1)[1] - is_url = is_valid_url(pipfile.get(key)) or is_valid_url(composed_uri) + url_keys = [pipfile.get(key), composed_uri] + is_url = any(validity_fn(url_key) for url_key in url_keys for validity_fn in [is_valid_url, is_file_url]) target_key = "uri" if is_url else "path" creation_args[target_key] = pipfile.get(key) else: @@ -889,12 +890,15 @@ def as_line(self, sources=None, include_hashes=True, include_extras=True): If `sources` is omitted or falsy, no index information will be included in the requirement line. 
""" + include_specifiers = True if self.specifiers else False if self.is_vcs: include_extras = False + if self.is_file_or_url or self.is_vcs: + include_specifiers = False parts = [ self.req.line_part, self.extras_as_pip if include_extras else "", - self.specifiers if self.specifiers else "", + self.specifiers if include_specifiers else "", self.markers_as_pip, ] if include_hashes: diff --git a/pipenv/vendor/requirementslib/models/utils.py b/pipenv/vendor/requirementslib/models/utils.py index 6fd55b6ff2..6999204c87 100644 --- a/pipenv/vendor/requirementslib/models/utils.py +++ b/pipenv/vendor/requirementslib/models/utils.py @@ -117,7 +117,7 @@ def strip_ssh_from_git_uri(uri): def add_ssh_scheme_to_git_uri(uri): - """Cleans VCS uris from pipenv.patched.notpip format""" + """Cleans VCS uris from pip format""" if isinstance(uri, six.string_types): # Add scheme for parsing purposes, this is also what pip does if uri.startswith("git+") and "://" not in uri: diff --git a/pipenv/vendor/tomlkit/api.py b/pipenv/vendor/tomlkit/api.py index e541c20c17..d36375e867 100644 --- a/pipenv/vendor/tomlkit/api.py +++ b/pipenv/vendor/tomlkit/api.py @@ -1,6 +1,9 @@ import datetime as _datetime -from typing import Tuple +try: + from typing import Tuple +except ImportError: + from pipenv.vendor.backports.typing import Tuple from ._utils import parse_rfc3339 from .container import Container diff --git a/pipenv/vendor/tomlkit/container.py b/pipenv/vendor/tomlkit/container.py index c1d2d7c67e..c7595a5286 100644 --- a/pipenv/vendor/tomlkit/container.py +++ b/pipenv/vendor/tomlkit/container.py @@ -1,12 +1,21 @@ from __future__ import unicode_literals -from typing import Any -from typing import Dict -from typing import Generator -from typing import List -from typing import Optional -from typing import Tuple -from typing import Union +try: + from typing import Any + from typing import Dict + from typing import Generator + from typing import List + from typing import Optional + from typing import 
Tuple + from typing import Union +except ImportError: + from pipenv.vendor.backports.typing import Any + from pipenv.vendor.backports.typing import Dict + from pipenv.vendor.backports.typing import Generator + from pipenv.vendor.backports.typing import List + from pipenv.vendor.backports.typing import Optional + from pipenv.vendor.backports.typing import Tuple + from pipenv.vendor.backports.typing import Union from ._compat import decode from .exceptions import KeyAlreadyPresent diff --git a/pipenv/vendor/tomlkit/exceptions.py b/pipenv/vendor/tomlkit/exceptions.py index 8d48bf198f..dae29f6fd7 100644 --- a/pipenv/vendor/tomlkit/exceptions.py +++ b/pipenv/vendor/tomlkit/exceptions.py @@ -1,4 +1,7 @@ -from typing import Optional +try: + from typing import Optional +except ImportError: + from pipenv.vendor.backports.typing import Optional class TOMLKitError(Exception): diff --git a/pipenv/vendor/tomlkit/items.py b/pipenv/vendor/tomlkit/items.py index 747dbd5090..83b17612b5 100644 --- a/pipenv/vendor/tomlkit/items.py +++ b/pipenv/vendor/tomlkit/items.py @@ -6,13 +6,25 @@ from datetime import date from datetime import datetime from datetime import time -from enum import Enum -from typing import Any -from typing import Dict -from typing import Generator -from typing import List -from typing import Optional -from typing import Union +import sys +if sys.version_info >= (3, 4): + from enum import Enum +else: + from pipenv.vendor.backports.enum import Enum +try: + from typing import Any + from typing import Dict + from typing import Generator + from typing import List + from typing import Optional + from typing import Union +except ImportError: + from pipenv.vendor.backports.typing import Any + from pipenv.vendor.backports.typing import Dict + from pipenv.vendor.backports.typing import Generator + from pipenv.vendor.backports.typing import List + from pipenv.vendor.backports.typing import Optional + from pipenv.vendor.backports.typing import Union from ._compat import PY2 
diff --git a/pipenv/vendor/tomlkit/parser.py b/pipenv/vendor/tomlkit/parser.py index b55a3fe442..28c0a6e01d 100644 --- a/pipenv/vendor/tomlkit/parser.py +++ b/pipenv/vendor/tomlkit/parser.py @@ -7,10 +7,16 @@ import string from copy import copy -from typing import Iterator -from typing import Optional -from typing import Tuple -from typing import Union +try: + from typing import Iterator + from typing import Optional + from typing import Tuple + from typing import Union +except ImportError: + from pipenv.vendor.backports.typing import Iterator + from pipenv.vendor.backports.typing import Optional + from pipenv.vendor.backports.typing import Tuple + from pipenv.vendor.backports.typing import Union from ._compat import PY2 from ._compat import chr diff --git a/pipenv/vendor/tomlkit/toml_file.py b/pipenv/vendor/tomlkit/toml_file.py index 3b416664dd..3e4cc721a7 100644 --- a/pipenv/vendor/tomlkit/toml_file.py +++ b/pipenv/vendor/tomlkit/toml_file.py @@ -1,7 +1,11 @@ import io -from typing import Any -from typing import Dict +try: + from typing import Any + from typing import Dict +except ImportError: + from pipenv.vendor.backports.typing import Any + from pipenv.vendor.backports.typing import Dict from .api import loads from .toml_document import TOMLDocument diff --git a/pipenv/vendor/typing.LICENSE b/pipenv/vendor/typing.LICENSE new file mode 100644 index 0000000000..583f9f6e61 --- /dev/null +++ b/pipenv/vendor/typing.LICENSE @@ -0,0 +1,254 @@ +A. HISTORY OF THE SOFTWARE +========================== + +Python was created in the early 1990s by Guido van Rossum at Stichting +Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands +as a successor of a language called ABC. Guido remains Python's +principal author, although it includes many contributions from others. 
+ +In 1995, Guido continued his work on Python at the Corporation for +National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) +in Reston, Virginia where he released several versions of the +software. + +In May 2000, Guido and the Python core development team moved to +BeOpen.com to form the BeOpen PythonLabs team. In October of the same +year, the PythonLabs team moved to Digital Creations (now Zope +Corporation, see http://www.zope.com). In 2001, the Python Software +Foundation (PSF, see http://www.python.org/psf/) was formed, a +non-profit organization created specifically to own Python-related +Intellectual Property. Zope Corporation is a sponsoring member of +the PSF. + +All Python releases are Open Source (see http://www.opensource.org for +the Open Source Definition). Historically, most, but not all, Python +releases have also been GPL-compatible; the table below summarizes +the various releases. + + Release Derived Year Owner GPL- + from compatible? (1) + + 0.9.0 thru 1.2 1991-1995 CWI yes + 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes + 1.6 1.5.2 2000 CNRI no + 2.0 1.6 2000 BeOpen.com no + 1.6.1 1.6 2001 CNRI yes (2) + 2.1 2.0+1.6.1 2001 PSF no + 2.0.1 2.0+1.6.1 2001 PSF yes + 2.1.1 2.1+2.0.1 2001 PSF yes + 2.1.2 2.1.1 2002 PSF yes + 2.1.3 2.1.2 2002 PSF yes + 2.2 and above 2.1.1 2001-now PSF yes + +Footnotes: + +(1) GPL-compatible doesn't mean that we're distributing Python under + the GPL. All Python licenses, unlike the GPL, let you distribute + a modified version without making your changes open source. The + GPL-compatible licenses make it possible to combine Python with + other software that is released under the GPL; the others don't. + +(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, + because its license has a choice of law clause. According to + CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 + is "not incompatible" with the GPL. 
+ +Thanks to the many outside volunteers who have worked under Guido's +direction to make these releases possible. + + +B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON +=============================================================== + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved" are +retained in Python alone or in any derivative version prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. 
PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 +------------------------------------------- + +BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 + +1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an +office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the +Individual or Organization ("Licensee") accessing and otherwise using +this software in source or binary form and its associated +documentation ("the Software"). + +2. Subject to the terms and conditions of this BeOpen Python License +Agreement, BeOpen hereby grants Licensee a non-exclusive, +royalty-free, world-wide license to reproduce, analyze, test, perform +and/or display publicly, prepare derivative works, distribute, and +otherwise use the Software alone or in any derivative version, +provided, however, that the BeOpen Python License is retained in the +Software, alone or in any derivative version prepared by Licensee. + +3. BeOpen is making the Software available to Licensee on an "AS IS" +basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. 
BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE +SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS +AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY +DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +5. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +6. This License Agreement shall be governed by and interpreted in all +respects by the law of the State of California, excluding conflict of +law provisions. Nothing in this License Agreement shall be deemed to +create any relationship of agency, partnership, or joint venture +between BeOpen and Licensee. This License Agreement does not grant +permission to use BeOpen trademarks or trade names in a trademark +sense to endorse or promote products or services of Licensee, or any +third party. As an exception, the "BeOpen Python" logos available at +http://www.pythonlabs.com/logos.html may be used according to the +permissions granted on that web page. + +7. By copying, installing or otherwise using the software, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 +--------------------------------------- + +1. This LICENSE AGREEMENT is between the Corporation for National +Research Initiatives, having an office at 1895 Preston White Drive, +Reston, VA 20191 ("CNRI"), and the Individual or Organization +("Licensee") accessing and otherwise using Python 1.6.1 software in +source or binary form and its associated documentation. + +2. 
Subject to the terms and conditions of this License Agreement, CNRI +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python 1.6.1 +alone or in any derivative version, provided, however, that CNRI's +License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) +1995-2001 Corporation for National Research Initiatives; All Rights +Reserved" are retained in Python 1.6.1 alone or in any derivative +version prepared by Licensee. Alternately, in lieu of CNRI's License +Agreement, Licensee may substitute the following text (omitting the +quotes): "Python 1.6.1 is made available subject to the terms and +conditions in CNRI's License Agreement. This Agreement together with +Python 1.6.1 may be located on the Internet using the following +unique, persistent identifier (known as a handle): 1895.22/1013. This +Agreement may also be obtained from a proxy server on the Internet +using the following URL: http://hdl.handle.net/1895.22/1013". + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python 1.6.1 or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python 1.6.1. + +4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" +basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. 
CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. This License Agreement shall be governed by the federal +intellectual property law of the United States, including without +limitation the federal copyright law, and, to the extent such +U.S. federal law does not apply, by the law of the Commonwealth of +Virginia, excluding Virginia's conflict of law provisions. +Notwithstanding the foregoing, with regard to derivative works based +on Python 1.6.1 that incorporate non-separable material that was +previously distributed under the GNU General Public License (GPL), the +law of the Commonwealth of Virginia shall govern this License +Agreement only as to issues arising under or with respect to +Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this +License Agreement shall be deemed to create any relationship of +agency, partnership, or joint venture between CNRI and Licensee. This +License Agreement does not grant permission to use CNRI trademarks or +trade name in a trademark sense to endorse or promote products or +services of Licensee, or any third party. + +8. By clicking on the "ACCEPT" button where indicated, or by copying, +installing or otherwise using Python 1.6.1, Licensee agrees to be +bound by the terms and conditions of this License Agreement. + + ACCEPT + + +CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 +-------------------------------------------------- + +Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, +The Netherlands. All rights reserved. 
+ +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of Stichting Mathematisch +Centrum or CWI not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior +permission. + +STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO +THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE +FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index 2ae450272f..c5e68edff3 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -27,7 +27,7 @@ requests==2.19.1 idna==2.7 urllib3==1.23 certifi==2018.8.24 -requirementslib==1.1.2 +requirementslib==1.1.5 attrs==18.1.0 distlib==0.2.7 packaging==17.1 @@ -45,3 +45,5 @@ vistir==0.1.4 pip-shims==0.1.2 modutil==2.0.0 ptyprocess==0.6.0 +enum34==1.1.6 +typing==3.6.4 diff --git a/setup.py b/setup.py index 7e121fb7a5..426483ddd6 100644 --- a/setup.py +++ b/setup.py @@ -29,7 +29,8 @@ "virtualenv", 'requests[security];python_version<"2.7"', 'ordereddict;python_version<"2.7"', - 'enum34; python_version<"3"' + 'enum34; python_version<"3"', + 'typing; python_version<"3"' ] diff --git a/tasks/vendoring/__init__.py b/tasks/vendoring/__init__.py index c09fae7525..84e8b1ad9d 100644 --- a/tasks/vendoring/__init__.py +++ b/tasks/vendoring/__init__.py @@ -2,9 +2,8 @@ """"Vendoring script, python 3.5 needed""" # Taken from pip # see 
https://github.com/pypa/pip/blob/95bcf8c5f6394298035a7332c441868f3b0169f4/tasks/vendoring/__init__.py -from pathlib import Path -from pipenv._compat import TemporaryDirectory -from pipenv.utils import mkdir_p +from vistir.compat import NamedTemporaryFile, TemporaryDirectory, Path +from vistir.path import mkdir_p # from tempfile import TemporaryDirectory import tarfile import zipfile @@ -20,12 +19,14 @@ 'requirements-parser': 'requirements', 'backports.shutil_get_terminal_size': 'backports/shutil_get_terminal_size', 'backports.weakref': 'backports/weakref', + 'typing.py': 'backports/typing.py', 'shutil_backports': 'backports/shutil_get_terminal_size', 'python-dotenv': 'dotenv', 'pip-tools': 'piptools', 'setuptools': 'pkg_resources', 'msgpack-python': 'msgpack', 'attrs': 'attr', + 'enum34': 'backports/enum' } # from time to time, remove the no longer needed ones @@ -66,7 +67,9 @@ } LIBRARY_RENAMES = { - 'pip': 'pipenv.patched.notpip' + 'pip': 'pipenv.patched.notpip', + 'enum': 'backports/enum', + 'typing.py': 'backports/typing.py' } @@ -138,7 +141,7 @@ def rewrite_imports(package_dir, vendored_libs, vendor_dir): def rewrite_file_imports(item, vendored_libs, vendor_dir): """Rewrite 'import xxx' and 'from xxx import' for vendored_libs""" - log('Reading file: %s' % item) + # log('Reading file: %s' % item) try: text = item.read_text(encoding='utf-8') except UnicodeDecodeError: @@ -407,12 +410,59 @@ def rewrite_all_imports(ctx): @invoke.task -def download_licenses(ctx, vendor_dir=None, requirements_file='vendor.txt', package=None, only=False): - log('Downloading licenses') +def packages_missing_licenses(ctx, vendor_dir=None, requirements_file='vendor.txt', package=None): if not vendor_dir: vendor_dir = _get_vendor_dir(ctx) + requirements = vendor_dir.joinpath(requirements_file).read_text().splitlines() + new_requirements = [] + LICENSES = ["LICENSE-MIT", "LICENSE", "LICENSE.txt", "LICENSE.APACHE", "LICENSE.BSD"] + for i, req in enumerate(requirements): + pkg = 
req.strip().split("=")[0] + possible_pkgs = [pkg, pkg.replace('-', '_')] + match_found = False + if pkg in LIBRARY_DIRNAMES: + possible_pkgs.append(LIBRARY_DIRNAMES[pkg]) + for pkgpath in possible_pkgs: + pkgpath = vendor_dir.joinpath(pkgpath) + if pkgpath.exists() and pkgpath.is_dir(): + for licensepath in LICENSES: + licensepath = pkgpath.joinpath(licensepath) + if licensepath.exists(): + match_found = True + log("%s: Trying path %s... FOUND" % (pkg, licensepath)) + break + elif (pkgpath.exists() or pkgpath.parent.joinpath("{0}.py".format(pkgpath.stem)).exists()): + for licensepath in LICENSES: + licensepath = pkgpath.parent.joinpath("{0}.{1}".format(pkgpath.stem, licensepath)) + if licensepath.exists(): + match_found = True + log("%s: Trying path %s... FOUND" % (pkg, licensepath)) + break + if match_found: + break + if match_found: + continue + log("%s: No license found in %s" % (pkg, pkgpath)) + new_requirements.append(req) + return new_requirements + + +@invoke.task +def download_licenses(ctx, vendor_dir=None, requirements_file='vendor.txt', package=None, only=False, patched=False): + log('Downloading licenses') + if not vendor_dir: + if patched: + vendor_dir = _get_patched_dir(ctx) + requirements_file = 'patched.txt' + else: + vendor_dir = _get_vendor_dir(ctx) requirements_file = vendor_dir / requirements_file - requirement = "-r {0}".format(requirements_file.as_posix()) + requirements = packages_missing_licenses(ctx, vendor_dir, requirements_file, package=package) + with NamedTemporaryFile(prefix="pipenv", suffix="vendor-reqs", delete=False, mode="w") as fh: + fh.write("\n".join(requirements)) + new_requirements_file = fh.name + new_requirements_file = Path(new_requirements_file) + requirement = "-r {0}".format(new_requirements_file.as_posix()) if package: if not only: # for packages we want to add to the requirements file @@ -422,15 +472,15 @@ def download_licenses(ctx, vendor_dir=None, requirements_file='vendor.txt', pack requirement = package tmp_dir = 
vendor_dir / '__tmp__' # TODO: Fix this whenever it gets sorted out (see https://github.com/pypa/pip/issues/5739) - ctx.run('pip install flit') ctx.run( - 'pip download --no-binary :all: --no-build-isolation --no-deps -d {0} {1}'.format( + 'pip download --no-binary :all: --only-binary requests_download --no-deps -d {0} {1}'.format( tmp_dir.as_posix(), requirement, ) ) for sdist in tmp_dir.iterdir(): extract_license(vendor_dir, sdist) + new_requirements_file.unlink() drop_dir(tmp_dir) diff --git a/tasks/vendoring/patches/patched/piptools.patch b/tasks/vendoring/patches/patched/piptools.patch index 7d9b64d8ee..7e8a14422d 100644 --- a/tasks/vendoring/patches/patched/piptools.patch +++ b/tasks/vendoring/patches/patched/piptools.patch @@ -89,7 +89,7 @@ index 1c4b943..91902dc 100644 + # hash url WITH fragment + hash_value = self.get(new_location.url) + if not hash_value: -+ hash_value = self._get_file_hash(new_location) ++ hash_value = self._get_file_hash(new_location) if not new_location.url.startswith("ssh") else None + hash_value = hash_value.encode('utf8') + if can_hash: + self.set(new_location.url, hash_value) diff --git a/tasks/vendoring/patches/vendor/click-completion-enum-import.patch b/tasks/vendoring/patches/vendor/click-completion-enum-import.patch new file mode 100644 index 0000000000..20971bf3b6 --- /dev/null +++ b/tasks/vendoring/patches/vendor/click-completion-enum-import.patch @@ -0,0 +1,39 @@ +diff --git a/pipenv/vendor/click_completion/__init__.py b/pipenv/vendor/click_completion/__init__.py +index 1443c8f..b849ae2 100644 +--- a/pipenv/vendor/click_completion/__init__.py ++++ b/pipenv/vendor/click_completion/__init__.py +@@ -6,7 +6,13 @@ from __future__ import print_function, absolute_import + import six + + from click import ParamType +-from enum import Enum ++if six.PY3: ++ try: ++ from enum import Enum ++ except ImportError: ++ from pipenv.vendor.backports.enum import Enum ++else: ++ from pipenv.vendor.backports.enum import Enum + + from 
click_completion.core import completion_configuration, get_code, install, shells, resolve_ctx, get_choices, \ + startswith, Shell +diff --git a/pipenv/vendor/click_completion/core.py b/pipenv/vendor/click_completion/core.py +index 2ede6ef..dc47d47 100644 +--- a/pipenv/vendor/click_completion/core.py ++++ b/pipenv/vendor/click_completion/core.py +@@ -10,7 +10,14 @@ import subprocess + + import click + from click import Option, Argument, MultiCommand, echo +-from enum import Enum ++import six ++if six.PY3: ++ try: ++ from enum import Enum ++ except ImportError: ++ from pipenv.vendor.backports.enum import Enum ++else: ++ from pipenv.vendor.backports.enum import Enum + + from click_completion.lib import resolve_ctx, split_args, single_quote, double_quote, get_auto_shell + diff --git a/tasks/vendoring/patches/vendor/tomlkit-typing-imports.patch b/tasks/vendoring/patches/vendor/tomlkit-typing-imports.patch new file mode 100644 index 0000000000..b7e29fe08e --- /dev/null +++ b/tasks/vendoring/patches/vendor/tomlkit-typing-imports.patch @@ -0,0 +1,141 @@ +diff --git a/pipenv/vendor/tomlkit/api.py b/pipenv/vendor/tomlkit/api.py +index e541c20c..d36375e8 100644 +--- a/pipenv/vendor/tomlkit/api.py ++++ b/pipenv/vendor/tomlkit/api.py +@@ -1,6 +1,9 @@ + import datetime as _datetime + +-from typing import Tuple ++try: ++ from typing import Tuple ++except ImportError: ++ from pipenv.vendor.backports.typing import Tuple + + from ._utils import parse_rfc3339 + from .container import Container +diff --git a/pipenv/vendor/tomlkit/container.py b/pipenv/vendor/tomlkit/container.py +index c1d2d7c6..c7595a52 100644 +--- a/pipenv/vendor/tomlkit/container.py ++++ b/pipenv/vendor/tomlkit/container.py +@@ -1,12 +1,21 @@ + from __future__ import unicode_literals + +-from typing import Any +-from typing import Dict +-from typing import Generator +-from typing import List +-from typing import Optional +-from typing import Tuple +-from typing import Union ++try: ++ from typing import Any ++ from 
typing import Dict ++ from typing import Generator ++ from typing import List ++ from typing import Optional ++ from typing import Tuple ++ from typing import Union ++except ImportError: ++ from pipenv.vendor.backports.typing import Any ++ from pipenv.vendor.backports.typing import Dict ++ from pipenv.vendor.backports.typing import Generator ++ from pipenv.vendor.backports.typing import List ++ from pipenv.vendor.backports.typing import Optional ++ from pipenv.vendor.backports.typing import Tuple ++ from pipenv.vendor.backports.typing import Union + + from ._compat import decode + from .exceptions import KeyAlreadyPresent +diff --git a/pipenv/vendor/tomlkit/exceptions.py b/pipenv/vendor/tomlkit/exceptions.py +index 8d48bf19..dae29f6f 100644 +--- a/pipenv/vendor/tomlkit/exceptions.py ++++ b/pipenv/vendor/tomlkit/exceptions.py +@@ -1,4 +1,7 @@ +-from typing import Optional ++try: ++ from typing import Optional ++except ImportError: ++ from pipenv.vendor.backports.typing import Optional + + + class TOMLKitError(Exception): +diff --git a/pipenv/vendor/tomlkit/items.py b/pipenv/vendor/tomlkit/items.py +index 747dbd50..83b17612 100644 +--- a/pipenv/vendor/tomlkit/items.py ++++ b/pipenv/vendor/tomlkit/items.py +@@ -6,13 +6,25 @@ import string + from datetime import date + from datetime import datetime + from datetime import time +-from enum import Enum +-from typing import Any +-from typing import Dict +-from typing import Generator +-from typing import List +-from typing import Optional +-from typing import Union ++import sys ++if sys.version_info >= (3, 4): ++ from enum import Enum ++else: ++ from pipenv.vendor.backports.enum import Enum ++try: ++ from typing import Any ++ from typing import Dict ++ from typing import Generator ++ from typing import List ++ from typing import Optional ++ from typing import Union ++except ImportError: ++ from pipenv.vendor.backports.typing import Any ++ from pipenv.vendor.backports.typing import Dict ++ from 
pipenv.vendor.backports.typing import Generator ++ from pipenv.vendor.backports.typing import List ++ from pipenv.vendor.backports.typing import Optional ++ from pipenv.vendor.backports.typing import Union + + + from ._compat import PY2 +diff --git a/pipenv/vendor/tomlkit/parser.py b/pipenv/vendor/tomlkit/parser.py +index b55a3fe4..28c0a6e0 100644 +--- a/pipenv/vendor/tomlkit/parser.py ++++ b/pipenv/vendor/tomlkit/parser.py +@@ -7,10 +7,16 @@ import re + import string + + from copy import copy +-from typing import Iterator +-from typing import Optional +-from typing import Tuple +-from typing import Union ++try: ++ from typing import Iterator ++ from typing import Optional ++ from typing import Tuple ++ from typing import Union ++except ImportError: ++ from pipenv.vendor.backports.typing import Iterator ++ from pipenv.vendor.backports.typing import Optional ++ from pipenv.vendor.backports.typing import Tuple ++ from pipenv.vendor.backports.typing import Union + + from ._compat import PY2 + from ._compat import chr +diff --git a/pipenv/vendor/tomlkit/toml_file.py b/pipenv/vendor/tomlkit/toml_file.py +index 3b416664..3e4cc721 100644 +--- a/pipenv/vendor/tomlkit/toml_file.py ++++ b/pipenv/vendor/tomlkit/toml_file.py +@@ -1,7 +1,11 @@ + import io + +-from typing import Any +-from typing import Dict ++try: ++ from typing import Any ++ from typing import Dict ++except ImportError: ++ from pipenv.vendor.backports.typing import Any ++ from pipenv.vendor.backports.typing import Dict + + from .api import loads + from .toml_document import TOMLDocument diff --git a/tests/integration/test_install_markers.py b/tests/integration/test_install_markers.py index ee83179b45..0967026a77 100644 --- a/tests/integration/test_install_markers.py +++ b/tests/integration/test_install_markers.py @@ -164,10 +164,9 @@ def test_environment_variable_value_does_not_change_hash(PipenvInstance, pypi): url = 'https://${PYPI_USERNAME}:${PYPI_PASSWORD}@pypi.org/simple' verify_ssl = true name = 'pypi' 
-[requires] -python_version = '2.7' + [packages] -flask = "==0.12.2" +six = "*" """) project = Project() @@ -176,6 +175,7 @@ def test_environment_variable_value_does_not_change_hash(PipenvInstance, pypi): assert project.get_lockfile_hash() is None c = p.pipenv('install') + assert c.return_code == 0 lock_hash = project.get_lockfile_hash() assert lock_hash is not None assert lock_hash == project.calculate_pipfile_hash() diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index ab15f5cdb9..98c8560203 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -73,6 +73,8 @@ @pytest.mark.utils @pytest.mark.parametrize("deps, expected", DEP_PIP_PAIRS) def test_convert_deps_to_pip(deps, expected): + if expected.startswith("Django"): + expected = expected.lower() assert pipenv.utils.convert_deps_to_pip(deps, r=False) == [expected] @@ -115,7 +117,7 @@ def test_convert_deps_to_pip(deps, expected): ], ) def test_convert_deps_to_pip_one_way(deps, expected): - assert pipenv.utils.convert_deps_to_pip(deps, r=False) == [expected] + assert pipenv.utils.convert_deps_to_pip(deps, r=False) == [expected.lower()] @pytest.mark.skipif(isinstance(u"", str), reason="don't need to test if unicode is str") @@ -163,7 +165,8 @@ def test_is_required_version(self, version, specified_ver, expected): ) @pytest.mark.vcs def test_is_vcs(self, entry, expected): - assert pipenv.utils.is_vcs(entry) is expected + from pipenv.vendor.requirementslib.utils import is_vcs + assert is_vcs(entry) is expected @pytest.mark.utils def test_split_file(self): From ddb6065ee2ec71dbd02644399d684707d576062d Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 26 Aug 2018 19:59:36 -0400 Subject: [PATCH 25/26] Final fixes to vendoring - Add news - upate test runner - Unvendor typing - Fix environment error - Fix click completion patch - update vendoring script - Install flit with vendoring script for wheels - disable build isolation also Signed-off-by: Dan Ryan --- news/2639.bugfix | 2 + 
news/2639.vendor | 2 +- pipenv/utils.py | 3 +- pipenv/vendor/backports/__init__.py | 3 +- pipenv/vendor/backports/typing/__init__.py | 1 - pipenv/vendor/requirementslib/models/utils.py | 2 +- pipenv/vendor/tomlkit/api.py | 5 - pipenv/vendor/tomlkit/container.py | 17 - pipenv/vendor/tomlkit/exceptions.py | 6 - pipenv/vendor/tomlkit/items.py | 15 - pipenv/vendor/tomlkit/parser.py | 10 - pipenv/vendor/tomlkit/toml_file.py | 7 - .../vendor/{backports/typing => }/typing.py | 966 +++++++++++------- pytest.ini | 5 +- run-tests.sh | 20 +- setup.py | 2 +- tasks/vendoring/__init__.py | 30 +- .../vendor/tomlkit-typing-imports.patch | 96 +- 18 files changed, 647 insertions(+), 545 deletions(-) create mode 100644 news/2639.bugfix delete mode 100644 pipenv/vendor/backports/typing/__init__.py rename pipenv/vendor/{backports/typing => }/typing.py (74%) diff --git a/news/2639.bugfix b/news/2639.bugfix new file mode 100644 index 0000000000..c1036f7bac --- /dev/null +++ b/news/2639.bugfix @@ -0,0 +1,2 @@ +Fixed a bug which caused attempted hashing of ``ssh://`` style URIs which could cause failures during installation of private ssh repositories. +- Corrected path conversion issues which caused certain editable VCS paths to be converted to ``ssh://`` URIs improperly. 
diff --git a/news/2639.vendor b/news/2639.vendor index 9b454d9b91..b159c33f98 100644 --- a/news/2639.vendor +++ b/news/2639.vendor @@ -8,7 +8,7 @@ - ``pytoml`` to ``0.1.18`` - ``certifi`` to ``2018.8.24`` - ``ptyprocess`` to ``0.6.0`` - - ``requirementslib`` to ``1.1.2`` + - ``requirementslib`` to ``1.1.5`` - ``pythonfinder`` to ``1.0.2`` - ``pipdeptree`` to ``0.13.0`` - ``python-dotenv`` to ``0.9.1`` diff --git a/pipenv/utils.py b/pipenv/utils.py index cdb375dd26..12750fdac4 100644 --- a/pipenv/utils.py +++ b/pipenv/utils.py @@ -362,7 +362,8 @@ def venv_resolve_deps( "--system" if allow_global else "", ) with temp_environ(): - os.environ["PIPENV_PACKAGES"] = "\n".join(deps) + os.environ = {fs_str(k): fs_str(val) for k, val in os.environ.items()} + os.environ["PIPENV_PACKAGES"] = str("\n".join(deps)) if pypi_mirror: os.environ["PIPENV_PYPI_MIRROR"] = str(pypi_mirror) os.environ["PIPENV_VERBOSITY"] = str(environments.PIPENV_VERBOSITY) diff --git a/pipenv/vendor/backports/__init__.py b/pipenv/vendor/backports/__init__.py index 791e7ec6aa..3cd3096323 100644 --- a/pipenv/vendor/backports/__init__.py +++ b/pipenv/vendor/backports/__init__.py @@ -3,6 +3,5 @@ from pkgutil import extend_path __path__ = extend_path(__path__, __name__) from . import weakref -from . import shutil_get_terminal_size from . import enum - +from . import shutil_get_terminal_size diff --git a/pipenv/vendor/backports/typing/__init__.py b/pipenv/vendor/backports/typing/__init__.py deleted file mode 100644 index ddef31b438..0000000000 --- a/pipenv/vendor/backports/typing/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from . 
import typing diff --git a/pipenv/vendor/requirementslib/models/utils.py b/pipenv/vendor/requirementslib/models/utils.py index 6999204c87..6fd55b6ff2 100644 --- a/pipenv/vendor/requirementslib/models/utils.py +++ b/pipenv/vendor/requirementslib/models/utils.py @@ -117,7 +117,7 @@ def strip_ssh_from_git_uri(uri): def add_ssh_scheme_to_git_uri(uri): - """Cleans VCS uris from pip format""" + """Cleans VCS uris from pipenv.patched.notpip format""" if isinstance(uri, six.string_types): # Add scheme for parsing purposes, this is also what pip does if uri.startswith("git+") and "://" not in uri: diff --git a/pipenv/vendor/tomlkit/api.py b/pipenv/vendor/tomlkit/api.py index d36375e867..0ac2675262 100644 --- a/pipenv/vendor/tomlkit/api.py +++ b/pipenv/vendor/tomlkit/api.py @@ -1,10 +1,5 @@ import datetime as _datetime -try: - from typing import Tuple -except ImportError: - from pipenv.vendor.backports.typing import Tuple - from ._utils import parse_rfc3339 from .container import Container from .items import AoT diff --git a/pipenv/vendor/tomlkit/container.py b/pipenv/vendor/tomlkit/container.py index c7595a5286..a7876ff11c 100644 --- a/pipenv/vendor/tomlkit/container.py +++ b/pipenv/vendor/tomlkit/container.py @@ -1,22 +1,5 @@ from __future__ import unicode_literals -try: - from typing import Any - from typing import Dict - from typing import Generator - from typing import List - from typing import Optional - from typing import Tuple - from typing import Union -except ImportError: - from pipenv.vendor.backports.typing import Any - from pipenv.vendor.backports.typing import Dict - from pipenv.vendor.backports.typing import Generator - from pipenv.vendor.backports.typing import List - from pipenv.vendor.backports.typing import Optional - from pipenv.vendor.backports.typing import Tuple - from pipenv.vendor.backports.typing import Union - from ._compat import decode from .exceptions import KeyAlreadyPresent from .exceptions import NonExistentKey diff --git 
a/pipenv/vendor/tomlkit/exceptions.py b/pipenv/vendor/tomlkit/exceptions.py index dae29f6fd7..d889a924ae 100644 --- a/pipenv/vendor/tomlkit/exceptions.py +++ b/pipenv/vendor/tomlkit/exceptions.py @@ -1,9 +1,3 @@ -try: - from typing import Optional -except ImportError: - from pipenv.vendor.backports.typing import Optional - - class TOMLKitError(Exception): pass diff --git a/pipenv/vendor/tomlkit/items.py b/pipenv/vendor/tomlkit/items.py index 83b17612b5..8807f4b3bc 100644 --- a/pipenv/vendor/tomlkit/items.py +++ b/pipenv/vendor/tomlkit/items.py @@ -11,21 +11,6 @@ from enum import Enum else: from pipenv.vendor.backports.enum import Enum -try: - from typing import Any - from typing import Dict - from typing import Generator - from typing import List - from typing import Optional - from typing import Union -except ImportError: - from pipenv.vendor.backports.typing import Any - from pipenv.vendor.backports.typing import Dict - from pipenv.vendor.backports.typing import Generator - from pipenv.vendor.backports.typing import List - from pipenv.vendor.backports.typing import Optional - from pipenv.vendor.backports.typing import Union - from ._compat import PY2 from ._compat import decode diff --git a/pipenv/vendor/tomlkit/parser.py b/pipenv/vendor/tomlkit/parser.py index 28c0a6e01d..3d4984d126 100644 --- a/pipenv/vendor/tomlkit/parser.py +++ b/pipenv/vendor/tomlkit/parser.py @@ -7,16 +7,6 @@ import string from copy import copy -try: - from typing import Iterator - from typing import Optional - from typing import Tuple - from typing import Union -except ImportError: - from pipenv.vendor.backports.typing import Iterator - from pipenv.vendor.backports.typing import Optional - from pipenv.vendor.backports.typing import Tuple - from pipenv.vendor.backports.typing import Union from ._compat import PY2 from ._compat import chr diff --git a/pipenv/vendor/tomlkit/toml_file.py b/pipenv/vendor/tomlkit/toml_file.py index 3e4cc721a7..631e995958 100644 --- 
a/pipenv/vendor/tomlkit/toml_file.py +++ b/pipenv/vendor/tomlkit/toml_file.py @@ -1,12 +1,5 @@ import io -try: - from typing import Any - from typing import Dict -except ImportError: - from pipenv.vendor.backports.typing import Any - from pipenv.vendor.backports.typing import Dict - from .api import loads from .toml_document import TOMLDocument diff --git a/pipenv/vendor/backports/typing/typing.py b/pipenv/vendor/typing.py similarity index 74% rename from pipenv/vendor/backports/typing/typing.py rename to pipenv/vendor/typing.py index 7d8c7a343e..b5564cc29a 100644 --- a/pipenv/vendor/backports/typing/typing.py +++ b/pipenv/vendor/typing.py @@ -1,17 +1,23 @@ -from __future__ import absolute_import, unicode_literals - import abc from abc import abstractmethod, abstractproperty import collections +import contextlib import functools import re as stdlib_re # Avoid confusion with the re we export. import sys import types -import copy try: import collections.abc as collections_abc except ImportError: import collections as collections_abc # Fallback for PY3.2. +if sys.version_info[:2] >= (3, 6): + import _collections_abc # Needed for private function _check_methods # noqa +try: + from types import WrapperDescriptorType, MethodWrapperType, MethodDescriptorType +except ImportError: + WrapperDescriptorType = type(object.__init__) + MethodWrapperType = type(object().__str__) + MethodDescriptorType = type(str.join) # Please keep __all__ alphabetized within each category. @@ -47,13 +53,24 @@ 'Sequence', 'Sized', 'ValuesView', + # The following are added depending on presence + # of their non-generic counterparts in stdlib: + # Awaitable, + # AsyncIterator, + # AsyncIterable, + # Coroutine, + # Collection, + # AsyncGenerator, + # AsyncContextManager # Structural checks, a.k.a. protocols. 'Reversible', 'SupportsAbs', + 'SupportsBytes', 'SupportsComplex', 'SupportsFloat', 'SupportsInt', + 'SupportsRound', # Concrete collection types. 
'Counter', @@ -73,7 +90,6 @@ 'NewType', 'no_type_check', 'no_type_check_decorator', - 'NoReturn', 'overload', 'Text', 'TYPE_CHECKING', @@ -103,21 +119,22 @@ class TypingMeta(type): """Metaclass for most types defined in typing module (not a part of public API). + This overrides __new__() to require an extra keyword parameter + '_root', which serves as a guard against naive subclassing of the + typing classes. Any legitimate class defined using a metaclass + derived from TypingMeta must pass _root=True. + This also defines a dummy constructor (all the work for most typing constructs is done in __new__) and a nicer repr(). """ _is_protocol = False - def __new__(cls, name, bases, namespace): - return super(TypingMeta, cls).__new__(cls, str(name), bases, namespace) - - @classmethod - def assert_no_subclassing(cls, bases): - for base in bases: - if isinstance(base, cls): - raise TypeError("Cannot subclass %s" % - (', '.join(map(_type_repr, bases)) or '()')) + def __new__(cls, name, bases, namespace, *, _root=False): + if not _root: + raise TypeError("Cannot subclass %s" % + (', '.join(map(_type_repr, bases)) or '()')) + return super().__new__(cls, name, bases, namespace) def __init__(self, *args, **kwds): pass @@ -140,9 +157,9 @@ def __repr__(self): return '%s.%s' % (self.__module__, qname) -class _TypingBase(object): +class _TypingBase(metaclass=TypingMeta, _root=True): """Internal indicator of special typing constructs.""" - __metaclass__ = TypingMeta + __slots__ = ('__weakref__',) def __init__(self, *args, **kwds): @@ -159,7 +176,7 @@ def __new__(cls, *args, **kwds): isinstance(args[1], tuple)): # Close enough. raise TypeError("Cannot subclass %r" % cls) - return super(_TypingBase, cls).__new__(cls) + return super().__new__(cls) # Things that are not classes also need these. 
def _eval_type(self, globalns, localns): @@ -177,7 +194,7 @@ def __call__(self, *args, **kwds): raise TypeError("Cannot instantiate %r" % type(self)) -class _FinalTypingBase(_TypingBase): +class _FinalTypingBase(_TypingBase, _root=True): """Internal mix-in class to prevent instantiation. Prevents instantiation unless _root=True is given in class call. @@ -186,9 +203,9 @@ class _FinalTypingBase(_TypingBase): __slots__ = () - def __new__(cls, *args, **kwds): - self = super(_FinalTypingBase, cls).__new__(cls, *args, **kwds) - if '_root' in kwds and kwds['_root'] is True: + def __new__(cls, *args, _root=False, **kwds): + self = super().__new__(cls, *args, **kwds) + if _root is True: return self raise TypeError("Cannot instantiate %r" % cls) @@ -196,15 +213,15 @@ def __reduce__(self): return _trim_name(type(self).__name__) -class _ForwardRef(_TypingBase): +class _ForwardRef(_TypingBase, _root=True): """Internal wrapper to hold a forward reference.""" __slots__ = ('__forward_arg__', '__forward_code__', '__forward_evaluated__', '__forward_value__') def __init__(self, arg): - super(_ForwardRef, self).__init__(arg) - if not isinstance(arg, basestring): + super().__init__(arg) + if not isinstance(arg, str): raise TypeError('Forward reference must be a string -- got %r' % (arg,)) try: code = compile(arg, '', 'eval') @@ -249,7 +266,7 @@ def __repr__(self): return '_ForwardRef(%r)' % (self.__forward_arg__,) -class _TypeAlias(_TypingBase): +class _TypeAlias(_TypingBase, _root=True): """Internal helper class for defining generic variants of concrete types. Note that this is not a type; let's call it a pseudo-type. It cannot @@ -271,7 +288,7 @@ def __init__(self, name, type_var, impl_type, type_checker): type_checker: Function that takes an impl_type instance. and returns a value that should be a type_var instance. 
""" - assert isinstance(name, basestring), repr(name) + assert isinstance(name, str), repr(name) assert isinstance(impl_type, type), repr(impl_type) assert not isinstance(impl_type, TypingMeta), repr(impl_type) assert isinstance(type_var, (type, _TypingBase)), repr(type_var) @@ -348,7 +365,7 @@ def _type_check(arg, msg): """ if arg is None: return type(None) - if isinstance(arg, basestring): + if isinstance(arg, str): arg = _ForwardRef(arg) if ( isinstance(arg, _TypingBase) and type(arg).__name__ == '_ClassVar' or @@ -374,91 +391,17 @@ def _type_repr(obj): else, we fall back on repr(obj). """ if isinstance(obj, type) and not isinstance(obj, TypingMeta): - if obj.__module__ == '__builtin__': + if obj.__module__ == 'builtins': return _qualname(obj) return '%s.%s' % (obj.__module__, _qualname(obj)) - if obj is Ellipsis: + if obj is ...: return('...') if isinstance(obj, types.FunctionType): return obj.__name__ return repr(obj) -class ClassVarMeta(TypingMeta): - """Metaclass for _ClassVar""" - - def __new__(cls, name, bases, namespace): - cls.assert_no_subclassing(bases) - self = super(ClassVarMeta, cls).__new__(cls, name, bases, namespace) - return self - - -class _ClassVar(_FinalTypingBase): - """Special type construct to mark class variables. - - An annotation wrapped in ClassVar indicates that a given - attribute is intended to be used as a class variable and - should not be set on instances of that class. Usage:: - - class Starship: - stats = {} # type: ClassVar[Dict[str, int]] # class variable - damage = 10 # type: int # instance variable - - ClassVar accepts only types and cannot be further subscribed. - - Note that ClassVar is not a class itself, and should not - be used with isinstance() or issubclass(). 
- """ - - __metaclass__ = ClassVarMeta - __slots__ = ('__type__',) - - def __init__(self, tp=None, _root=False): - self.__type__ = tp - - def __getitem__(self, item): - cls = type(self) - if self.__type__ is None: - return cls(_type_check(item, - '{} accepts only types.'.format(cls.__name__[1:])), - _root=True) - raise TypeError('{} cannot be further subscripted' - .format(cls.__name__[1:])) - - def _eval_type(self, globalns, localns): - return type(self)(_eval_type(self.__type__, globalns, localns), - _root=True) - - def __repr__(self): - r = super(_ClassVar, self).__repr__() - if self.__type__ is not None: - r += '[{}]'.format(_type_repr(self.__type__)) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__type__)) - - def __eq__(self, other): - if not isinstance(other, _ClassVar): - return NotImplemented - if self.__type__ is not None: - return self.__type__ == other.__type__ - return self is other - - -ClassVar = _ClassVar(_root=True) - - -class AnyMeta(TypingMeta): - """Metaclass for Any.""" - - def __new__(cls, name, bases, namespace): - cls.assert_no_subclassing(bases) - self = super(AnyMeta, cls).__new__(cls, name, bases, namespace) - return self - - -class _Any(_FinalTypingBase): +class _Any(_FinalTypingBase, _root=True): """Special type indicating an unconstrained type. - Any is compatible with every type. @@ -469,7 +412,7 @@ class _Any(_FinalTypingBase): static type checkers. At runtime, Any should not be used with instance or class checks. 
""" - __metaclass__ = AnyMeta + __slots__ = () def __instancecheck__(self, obj): @@ -482,16 +425,7 @@ def __subclasscheck__(self, cls): Any = _Any(_root=True) -class NoReturnMeta(TypingMeta): - """Metaclass for NoReturn.""" - - def __new__(cls, name, bases, namespace): - cls.assert_no_subclassing(bases) - self = super(NoReturnMeta, cls).__new__(cls, name, bases, namespace) - return self - - -class _NoReturn(_FinalTypingBase): +class _NoReturn(_FinalTypingBase, _root=True): """Special type indicating functions that never return. Example:: @@ -503,7 +437,7 @@ def stop() -> NoReturn: This type is invalid in other positions, e.g., ``List[NoReturn]`` will fail in static type checkers. """ - __metaclass__ = NoReturnMeta + __slots__ = () def __instancecheck__(self, obj): @@ -516,13 +450,7 @@ def __subclasscheck__(self, cls): NoReturn = _NoReturn(_root=True) -class TypeVarMeta(TypingMeta): - def __new__(cls, name, bases, namespace): - cls.assert_no_subclassing(bases) - return super(TypeVarMeta, cls).__new__(cls, name, bases, namespace) - - -class TypeVar(_TypingBase): +class TypeVar(_TypingBase, _root=True): """Type variable. 
Usage:: @@ -564,15 +492,13 @@ def longest(x: A, y: A) -> A: A.__constraints__ == (str, bytes) """ - __metaclass__ = TypeVarMeta __slots__ = ('__name__', '__bound__', '__constraints__', '__covariant__', '__contravariant__') - def __init__(self, name, *constraints, **kwargs): - super(TypeVar, self).__init__(name, *constraints, **kwargs) - bound = kwargs.get('bound', None) - covariant = kwargs.get('covariant', False) - contravariant = kwargs.get('contravariant', False) + def __init__(self, name, *constraints, bound=None, + covariant=False, contravariant=False): + super().__init__(name, *constraints, bound=bound, + covariant=covariant, contravariant=contravariant) self.__name__ = name if covariant and contravariant: raise ValueError("Bivariant types are not supported.") @@ -621,7 +547,7 @@ def __subclasscheck__(self, cls): # A useful type variable with constraints. This represents string types. # (This one *is* for export!) -AnyStr = TypeVar('AnyStr', bytes, unicode) +AnyStr = TypeVar('AnyStr', bytes, str) def _replace_arg(arg, tvars, args): @@ -740,38 +666,24 @@ def _check_generic(cls, parameters): def _tp_cache(func): - maxsize = 128 - cache = {} - _cleanups.append(cache.clear) + """Internal wrapper caching __getitem__ of generic types with a fallback to + original function for non-hashable arguments. + """ + + cached = functools.lru_cache()(func) + _cleanups.append(cached.cache_clear) @functools.wraps(func) - def inner(*args): - key = args + def inner(*args, **kwds): try: - return cache[key] + return cached(*args, **kwds) except TypeError: - # Assume it's an unhashable argument. - return func(*args) - except KeyError: - value = func(*args) - if len(cache) >= maxsize: - # If the cache grows too much, just start over. - cache.clear() - cache[key] = value - return value - + pass # All real errors (not unhashable args) are raised below. 
+ return func(*args, **kwds) return inner -class UnionMeta(TypingMeta): - """Metaclass for Union.""" - - def __new__(cls, name, bases, namespace): - cls.assert_no_subclassing(bases) - return super(UnionMeta, cls).__new__(cls, name, bases, namespace) - - -class _Union(_FinalTypingBase): +class _Union(_FinalTypingBase, _root=True): """Union type; Union[X, Y] means either X or Y. To define a union, use e.g. Union[int, str]. Details: @@ -815,11 +727,10 @@ class Manager(Employee): pass - You can use Optional[X] as a shorthand for Union[X, None]. """ - __metaclass__ = UnionMeta __slots__ = ('__parameters__', '__args__', '__origin__', '__tree_hash__') - def __new__(cls, parameters=None, origin=None, *args, **kwds): - self = super(_Union, cls).__new__(cls, parameters, origin, *args, **kwds) + def __new__(cls, parameters=None, origin=None, *args, _root=False): + self = super().__new__(cls, parameters, origin, *args, _root=_root) if origin is None: self.__parameters__ = None self.__args__ = None @@ -861,7 +772,7 @@ def _get_type_vars(self, tvars): def __repr__(self): if self.__origin__ is None: - return super(_Union, self).__repr__() + return super().__repr__() tree = self._subs_tree() if not isinstance(tree, tuple): return repr(tree) @@ -874,7 +785,7 @@ def _tree_repr(self, tree): arg_list.append(_type_repr(arg)) else: arg_list.append(arg[0]._tree_repr(arg)) - return super(_Union, self).__repr__() + '[%s]' % ', '.join(arg_list) + return super().__repr__() + '[%s]' % ', '.join(arg_list) @_tp_cache def __getitem__(self, parameters): @@ -921,21 +832,12 @@ def __subclasscheck__(self, cls): Union = _Union(_root=True) -class OptionalMeta(TypingMeta): - """Metaclass for Optional.""" - - def __new__(cls, name, bases, namespace): - cls.assert_no_subclassing(bases) - return super(OptionalMeta, cls).__new__(cls, name, bases, namespace) - - -class _Optional(_FinalTypingBase): +class _Optional(_FinalTypingBase, _root=True): """Optional type. Optional[X] is equivalent to Union[X, None]. 
""" - __metaclass__ = OptionalMeta __slots__ = () @_tp_cache @@ -970,11 +872,11 @@ def _make_subclasshook(cls): # The logic mirrors that of ABCMeta.__subclasscheck__. # Registered classes need not be checked here because # cls and its extra share the same _abc_registry. - def __extrahook__(cls, subclass): + def __extrahook__(subclass): res = cls.__extra__.__subclasshook__(subclass) if res is not NotImplemented: return res - if cls.__extra__ in getattr(subclass, '__mro__', ()): + if cls.__extra__ in subclass.__mro__: return True for scls in cls.__extra__.__subclasses__(): if isinstance(scls, GenericMeta): @@ -984,11 +886,22 @@ def __extrahook__(cls, subclass): return NotImplemented else: # For non-ABC extras we'll just call issubclass(). - def __extrahook__(cls, subclass): + def __extrahook__(subclass): if cls.__extra__ and issubclass(subclass, cls.__extra__): return True return NotImplemented - return classmethod(__extrahook__) + return __extrahook__ + + +def _no_slots_copy(dct): + """Internal helper: copy class __dict__ and clean slots class variables. + (They will be re-created if necessary by normal class machinery.) 
+ """ + dict_copy = dict(dct) + if '__slots__' in dict_copy: + for slot in dict_copy['__slots__']: + dict_copy.pop(slot, None) + return dict_copy class GenericMeta(TypingMeta, abc.ABCMeta): @@ -1053,8 +966,6 @@ def __new__(cls, name, bases, namespace, tvars = gvars initial_bases = bases - if extra is None: - extra = namespace.get('__extra__') if extra is not None and type(extra) is abc.ABCMeta and extra not in bases: bases = (extra,) + bases bases = tuple(b._gorg if isinstance(b, GenericMeta) else b for b in bases) @@ -1062,15 +973,15 @@ def __new__(cls, name, bases, namespace, # remove bare Generic from bases if there are other generic bases if any(isinstance(b, GenericMeta) and b is not Generic for b in bases): bases = tuple(b for b in bases if b is not Generic) - namespace.update({'__origin__': origin, '__extra__': extra}) - self = super(GenericMeta, cls).__new__(cls, name, bases, namespace) + namespace.update({'__origin__': origin, '__extra__': extra, + '_gorg': None if not origin else origin._gorg}) + self = super().__new__(cls, name, bases, namespace, _root=True) super(GenericMeta, self).__setattr__('_gorg', self if not origin else origin._gorg) - self.__parameters__ = tvars # Be prepared that GenericMeta will be subclassed by TupleMeta # and CallableMeta, those two allow ..., (), or [] in __args___. - self.__args__ = tuple(Ellipsis if a is _TypingEllipsis else + self.__args__ = tuple(... if a is _TypingEllipsis else () if a is _TypingEmpty else a for a in args) if args else None # Speed hack (https://github.com/python/typing/issues/196). 
@@ -1088,6 +999,12 @@ def __new__(cls, name, bases, namespace, getattr(self.__subclasshook__, '__name__', '') == '__extrahook__' ): self.__subclasshook__ = _make_subclasshook(self) + if isinstance(extra, abc.ABCMeta): + self._abc_registry = extra._abc_registry + self._abc_cache = extra._abc_cache + elif origin is not None: + self._abc_registry = origin._abc_registry + self._abc_cache = origin._abc_cache if origin and hasattr(origin, '__qualname__'): # Fix for Python 3.2. self.__qualname__ = origin.__qualname__ @@ -1095,15 +1012,6 @@ def __new__(cls, name, bases, namespace, super(GenericMeta, self).__hash__()) return self - def __init__(self, *args, **kwargs): - super(GenericMeta, self).__init__(*args, **kwargs) - if isinstance(self.__extra__, abc.ABCMeta): - self._abc_registry = self.__extra__._abc_registry - self._abc_cache = self.__extra__._abc_cache - elif self.__origin__ is not None: - self._abc_registry = self.__origin__._abc_registry - self._abc_cache = self.__origin__._abc_cache - # _abc_negative_cache and _abc_negative_cache_version # realised as descriptors, since GenClass[t1, t2, ...] always # share subclass info with GenClass. 
@@ -1149,7 +1057,7 @@ def _eval_type(self, globalns, localns): return self return self.__class__(self.__name__, self.__bases__, - dict(self.__dict__), + _no_slots_copy(self.__dict__), tvars=_type_vars(ev_args) if ev_args else None, args=ev_args, origin=ev_origin, @@ -1158,7 +1066,7 @@ def _eval_type(self, globalns, localns): def __repr__(self): if self.__origin__ is None: - return super(GenericMeta, self).__repr__() + return super().__repr__() return self._tree_repr(self._subs_tree()) def _tree_repr(self, tree): @@ -1170,7 +1078,7 @@ def _tree_repr(self, tree): arg_list.append(_type_repr(arg)) else: arg_list.append(arg[0]._tree_repr(arg)) - return super(GenericMeta, self).__repr__() + '[%s]' % ', '.join(arg_list) + return super().__repr__() + '[%s]' % ', '.join(arg_list) def _subs_tree(self, tvars=None, args=None): if self.__origin__ is None: @@ -1227,7 +1135,7 @@ def __getitem__(self, params): prepend = (self,) if self.__origin__ is None else () return self.__class__(self.__name__, prepend + self.__bases__, - dict(self.__dict__), + _no_slots_copy(self.__dict__), tvars=tvars, args=args, origin=self, @@ -1236,18 +1144,14 @@ def __getitem__(self, params): def __subclasscheck__(self, cls): if self.__origin__ is not None: - # This should only be modules within the standard - # library. singledispatch is the only exception, because - # it's a Python 2 backport of functools.singledispatch. 
- if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools', - 'singledispatch']: + if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']: raise TypeError("Parameterized generics cannot be used with class " "or instance checks") return False if self is Generic: raise TypeError("Class %r cannot be used with class " "or instance checks" % self) - return super(GenericMeta, self).__subclasscheck__(cls) + return super().__subclasscheck__(cls) def __instancecheck__(self, instance): # Since we extend ABC.__subclasscheck__ and @@ -1255,31 +1159,20 @@ def __instancecheck__(self, instance): # latter, we must extend __instancecheck__ too. For simplicity # we just skip the cache check -- instance checks for generic # classes are supposed to be rare anyways. - if not isinstance(instance, type): - return issubclass(instance.__class__, self) - return False + return issubclass(instance.__class__, self) def __setattr__(self, attr, value): - # We consider all the subscripted genrics as proxies for original class + # We consider all the subscripted generics as proxies for original class if ( attr.startswith('__') and attr.endswith('__') or - attr.startswith('_abc_') + attr.startswith('_abc_') or + self._gorg is None # The class is not fully created, see #typing/506 ): super(GenericMeta, self).__setattr__(attr, value) else: super(GenericMeta, self._gorg).__setattr__(attr, value) -def _copy_generic(self): - """Hack to work around https://bugs.python.org/issue11480 on Python 2""" - return self.__class__(self.__name__, self.__bases__, dict(self.__dict__), - self.__parameters__, self.__args__, self.__origin__, - self.__extra__, self.__orig_bases__) - - -copy._copy_dispatch[GenericMeta] = _copy_generic - - # Prevent checks for Generic to crash when defining Generic. 
Generic = None @@ -1288,18 +1181,10 @@ def _generic_new(base_cls, cls, *args, **kwds): # Assure type is erased on instantiation, # but attempt to store it in __orig_class__ if cls.__origin__ is None: - if (base_cls.__new__ is object.__new__ and - cls.__init__ is not object.__init__): - return base_cls.__new__(cls) - else: - return base_cls.__new__(cls, *args, **kwds) + return base_cls.__new__(cls) else: origin = cls._gorg - if (base_cls.__new__ is object.__new__ and - cls.__init__ is not object.__init__): - obj = base_cls.__new__(origin) - else: - obj = base_cls.__new__(origin, *args, **kwds) + obj = base_cls.__new__(origin) try: obj.__orig_class__ = cls except AttributeError: @@ -1308,7 +1193,7 @@ def _generic_new(base_cls, cls, *args, **kwds): return obj -class Generic(object): +class Generic(metaclass=GenericMeta): """Abstract base class for generic types. A generic type is typically declared by inheriting from @@ -1329,7 +1214,6 @@ def lookup_name(mapping: Mapping[KT, VT], key: KT, default: VT) -> VT: return default """ - __metaclass__ = GenericMeta __slots__ = () def __new__(cls, *args, **kwds): @@ -1339,14 +1223,14 @@ def __new__(cls, *args, **kwds): return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) -class _TypingEmpty(object): +class _TypingEmpty: """Internal placeholder for () or []. Used by TupleMeta and CallableMeta to allow empty list/tuple in specific places, without allowing them to sneak in where prohibited. """ -class _TypingEllipsis(object): +class _TypingEllipsis: """Internal placeholder for ... (ellipsis).""" @@ -1358,18 +1242,18 @@ def __getitem__(self, parameters): if self.__origin__ is not None or self._gorg is not Tuple: # Normal generic rules apply if this is not the first subscription # or a subscription of a subclass. 
- return super(TupleMeta, self).__getitem__(parameters) + return super().__getitem__(parameters) if parameters == (): - return super(TupleMeta, self).__getitem__((_TypingEmpty,)) + return super().__getitem__((_TypingEmpty,)) if not isinstance(parameters, tuple): parameters = (parameters,) - if len(parameters) == 2 and parameters[1] is Ellipsis: + if len(parameters) == 2 and parameters[1] is ...: msg = "Tuple[t, ...]: t must be a type." p = _type_check(parameters[0], msg) - return super(TupleMeta, self).__getitem__((p, _TypingEllipsis)) + return super().__getitem__((p, _TypingEllipsis)) msg = "Tuple[t0, t1, ...]: each t must be a type." parameters = tuple(_type_check(p, msg) for p in parameters) - return super(TupleMeta, self).__getitem__(parameters) + return super().__getitem__(parameters) def __instancecheck__(self, obj): if self.__args__ is None: @@ -1384,10 +1268,7 @@ def __subclasscheck__(self, cls): "with issubclass().") -copy._copy_dispatch[TupleMeta] = _copy_generic - - -class Tuple(tuple): +class Tuple(tuple, extra=tuple, metaclass=TupleMeta): """Tuple type; Tuple[X, Y] is the cross-product type of X and Y. Example: Tuple[T1, T2] is a tuple of two elements corresponding @@ -1397,8 +1278,6 @@ class Tuple(tuple): To specify a variable-length tuple of homogeneous type, use Tuple[T, ...]. 
""" - __metaclass__ = TupleMeta - __extra__ = tuple __slots__ = () def __new__(cls, *args, **kwds): @@ -1409,18 +1288,18 @@ def __new__(cls, *args, **kwds): class CallableMeta(GenericMeta): - """ Metaclass for Callable.""" + """Metaclass for Callable (internal).""" def __repr__(self): if self.__origin__ is None: - return super(CallableMeta, self).__repr__() + return super().__repr__() return self._tree_repr(self._subs_tree()) def _tree_repr(self, tree): if self._gorg is not Callable: - return super(CallableMeta, self)._tree_repr(tree) + return super()._tree_repr(tree) # For actual Callable (not its subclass) we override - # super(CallableMeta, self)._tree_repr() for nice formatting. + # super()._tree_repr() for nice formatting. arg_list = [] for arg in tree[1:]: if not isinstance(arg, tuple): @@ -1438,7 +1317,7 @@ def __getitem__(self, parameters): """ if self.__origin__ is not None or self._gorg is not Callable: - return super(CallableMeta, self).__getitem__(parameters) + return super().__getitem__(parameters) if not isinstance(parameters, tuple) or len(parameters) != 2: raise TypeError("Callable must be used as " "Callable[[arg, ...], result].") @@ -1458,17 +1337,14 @@ def __getitem_inner__(self, parameters): msg = "Callable[args, result]: result must be a type." result = _type_check(result, msg) if args is Ellipsis: - return super(CallableMeta, self).__getitem__((_TypingEllipsis, result)) + return super().__getitem__((_TypingEllipsis, result)) msg = "Callable[[arg, ...], result]: each arg must be a type." args = tuple(_type_check(arg, msg) for arg in args) parameters = args + (result,) - return super(CallableMeta, self).__getitem__(parameters) - + return super().__getitem__(parameters) -copy._copy_dispatch[CallableMeta] = _copy_generic - -class Callable(object): +class Callable(extra=collections_abc.Callable, metaclass=CallableMeta): """Callable type; Callable[[int], str] is a function of (int) -> str. 
The subscription syntax must always be used with exactly two @@ -1479,8 +1355,6 @@ class Callable(object): such function types are rarely used as callback types. """ - __metaclass__ = CallableMeta - __extra__ = collections_abc.Callable __slots__ = () def __new__(cls, *args, **kwds): @@ -1490,6 +1364,63 @@ def __new__(cls, *args, **kwds): return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) +class _ClassVar(_FinalTypingBase, _root=True): + """Special type construct to mark class variables. + + An annotation wrapped in ClassVar indicates that a given + attribute is intended to be used as a class variable and + should not be set on instances of that class. Usage:: + + class Starship: + stats: ClassVar[Dict[str, int]] = {} # class variable + damage: int = 10 # instance variable + + ClassVar accepts only types and cannot be further subscribed. + + Note that ClassVar is not a class itself, and should not + be used with isinstance() or issubclass(). + """ + + __slots__ = ('__type__',) + + def __init__(self, tp=None, **kwds): + self.__type__ = tp + + def __getitem__(self, item): + cls = type(self) + if self.__type__ is None: + return cls(_type_check(item, + '{} accepts only single type.'.format(cls.__name__[1:])), + _root=True) + raise TypeError('{} cannot be further subscripted' + .format(cls.__name__[1:])) + + def _eval_type(self, globalns, localns): + new_tp = _eval_type(self.__type__, globalns, localns) + if new_tp == self.__type__: + return self + return type(self)(new_tp, _root=True) + + def __repr__(self): + r = super().__repr__() + if self.__type__ is not None: + r += '[{}]'.format(_type_repr(self.__type__)) + return r + + def __hash__(self): + return hash((type(self).__name__, self.__type__)) + + def __eq__(self, other): + if not isinstance(other, _ClassVar): + return NotImplemented + if self.__type__ is not None: + return self.__type__ == other.__type__ + return self is other + + +ClassVar = _ClassVar(_root=True) + + def cast(typ, val): """Cast a value to 
a type. @@ -1503,7 +1434,11 @@ def cast(typ, val): def _get_defaults(func): """Internal helper to extract the default arguments, by name.""" - code = func.__code__ + try: + code = func.__code__ + except AttributeError: + # Some built-in functions don't have __code__, __defaults__, etc. + return {} pos_count = code.co_argcount arg_names = code.co_varnames arg_names = arg_names[:pos_count] @@ -1517,9 +1452,91 @@ def _get_defaults(func): return res +_allowed_types = (types.FunctionType, types.BuiltinFunctionType, + types.MethodType, types.ModuleType, + WrapperDescriptorType, MethodWrapperType, MethodDescriptorType) + + def get_type_hints(obj, globalns=None, localns=None): - """In Python 2 this is not supported and always returns None.""" - return None + """Return type hints for an object. + + This is often the same as obj.__annotations__, but it handles + forward references encoded as string literals, and if necessary + adds Optional[t] if a default value equal to None is set. + + The argument may be a module, class, method, or function. The annotations + are returned as a dictionary. For classes, annotations include also + inherited members. + + TypeError is raised if the argument is not of a type that can contain + annotations, and an empty dictionary is returned if no annotations are + present. + + BEWARE -- the behavior of globalns and localns is counterintuitive + (unless you are familiar with how eval() and exec() work). The + search order is locals first, then globals. + + - If no dict arguments are passed, an attempt is made to use the + globals from obj (or the respective module's globals for classes), + and these are also used as the locals. If the object does not appear + to have globals, an empty dictionary is used. + + - If one dict argument is passed, it is used for both globals and + locals. + + - If two dict arguments are passed, they specify globals and + locals, respectively. 
+ """ + + if getattr(obj, '__no_type_check__', None): + return {} + # Classes require a special treatment. + if isinstance(obj, type): + hints = {} + for base in reversed(obj.__mro__): + if globalns is None: + base_globals = sys.modules[base.__module__].__dict__ + else: + base_globals = globalns + ann = base.__dict__.get('__annotations__', {}) + for name, value in ann.items(): + if value is None: + value = type(None) + if isinstance(value, str): + value = _ForwardRef(value) + value = _eval_type(value, base_globals, localns) + hints[name] = value + return hints + + if globalns is None: + if isinstance(obj, types.ModuleType): + globalns = obj.__dict__ + else: + globalns = getattr(obj, '__globals__', {}) + if localns is None: + localns = globalns + elif localns is None: + localns = globalns + hints = getattr(obj, '__annotations__', None) + if hints is None: + # Return empty annotations for something that _could_ have them. + if isinstance(obj, _allowed_types): + return {} + else: + raise TypeError('{!r} is not a module, class, method, ' + 'or function.'.format(obj)) + defaults = _get_defaults(obj) + hints = dict(hints) + for name, value in hints.items(): + if value is None: + value = type(None) + if isinstance(value, str): + value = _ForwardRef(value) + value = _eval_type(value, globalns, localns) + if name in defaults and defaults[name] is None: + value = Optional[value] + hints[name] = value + return hints def no_type_check(arg): @@ -1611,7 +1628,7 @@ class _ProtocolMeta(GenericMeta): def __instancecheck__(self, obj): if _Protocol not in self.__bases__: - return super(_ProtocolMeta, self).__instancecheck__(obj) + return super().__instancecheck__(obj) raise TypeError("Protocols cannot be used with isinstance().") def __subclasscheck__(self, cls): @@ -1650,6 +1667,8 @@ def _get_protocol_attrs(self): else: if (not attr.startswith('_abc_') and attr != '__abstractmethods__' and + attr != '__annotations__' and + attr != '__weakref__' and attr != '_is_protocol' and attr != 
'_gorg' and attr != '__dict__' and @@ -1668,7 +1687,7 @@ def _get_protocol_attrs(self): return attrs -class _Protocol(object): +class _Protocol(metaclass=_ProtocolMeta): """Internal base class for protocol classes. This implements a simple-minded structural issubclass check @@ -1676,7 +1695,6 @@ class _Protocol(object): such as Hashable). """ - __metaclass__ = _ProtocolMeta __slots__ = () _is_protocol = True @@ -1688,21 +1706,47 @@ class _Protocol(object): Hashable = collections_abc.Hashable # Not generic. -class Iterable(Generic[T_co]): +if hasattr(collections_abc, 'Awaitable'): + class Awaitable(Generic[T_co], extra=collections_abc.Awaitable): + __slots__ = () + + __all__.append('Awaitable') + + +if hasattr(collections_abc, 'Coroutine'): + class Coroutine(Awaitable[V_co], Generic[T_co, T_contra, V_co], + extra=collections_abc.Coroutine): + __slots__ = () + + __all__.append('Coroutine') + + +if hasattr(collections_abc, 'AsyncIterable'): + + class AsyncIterable(Generic[T_co], extra=collections_abc.AsyncIterable): + __slots__ = () + + class AsyncIterator(AsyncIterable[T_co], + extra=collections_abc.AsyncIterator): + __slots__ = () + + __all__.append('AsyncIterable') + __all__.append('AsyncIterator') + + +class Iterable(Generic[T_co], extra=collections_abc.Iterable): __slots__ = () - __extra__ = collections_abc.Iterable -class Iterator(Iterable[T_co]): +class Iterator(Iterable[T_co], extra=collections_abc.Iterator): __slots__ = () - __extra__ = collections_abc.Iterator class SupportsInt(_Protocol): __slots__ = () @abstractmethod - def __int__(self): + def __int__(self) -> int: pass @@ -1710,7 +1754,7 @@ class SupportsFloat(_Protocol): __slots__ = () @abstractmethod - def __float__(self): + def __float__(self) -> float: pass @@ -1718,7 +1762,15 @@ class SupportsComplex(_Protocol): __slots__ = () @abstractmethod - def __complex__(self): + def __complex__(self) -> complex: + pass + + +class SupportsBytes(_Protocol): + __slots__ = () + + @abstractmethod + def 
__bytes__(self) -> bytes: pass @@ -1726,81 +1778,102 @@ class SupportsAbs(_Protocol[T_co]): __slots__ = () @abstractmethod - def __abs__(self): + def __abs__(self) -> T_co: + pass + + +class SupportsRound(_Protocol[T_co]): + __slots__ = () + + @abstractmethod + def __round__(self, ndigits: int = 0) -> T_co: pass if hasattr(collections_abc, 'Reversible'): - class Reversible(Iterable[T_co]): + class Reversible(Iterable[T_co], extra=collections_abc.Reversible): __slots__ = () - __extra__ = collections_abc.Reversible else: class Reversible(_Protocol[T_co]): __slots__ = () @abstractmethod - def __reversed__(self): + def __reversed__(self) -> 'Iterator[T_co]': pass Sized = collections_abc.Sized # Not generic. -class Container(Generic[T_co]): +class Container(Generic[T_co], extra=collections_abc.Container): __slots__ = () - __extra__ = collections_abc.Container -# Callable was defined earlier. +if hasattr(collections_abc, 'Collection'): + class Collection(Sized, Iterable[T_co], Container[T_co], + extra=collections_abc.Collection): + __slots__ = () + __all__.append('Collection') -class AbstractSet(Sized, Iterable[T_co], Container[T_co]): - __slots__ = () - __extra__ = collections_abc.Set + +# Callable was defined earlier. + +if hasattr(collections_abc, 'Collection'): + class AbstractSet(Collection[T_co], + extra=collections_abc.Set): + __slots__ = () +else: + class AbstractSet(Sized, Iterable[T_co], Container[T_co], + extra=collections_abc.Set): + __slots__ = () -class MutableSet(AbstractSet[T]): +class MutableSet(AbstractSet[T], extra=collections_abc.MutableSet): __slots__ = () - __extra__ = collections_abc.MutableSet # NOTE: It is only covariant in the value type. 
-class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co]): - __slots__ = () - __extra__ = collections_abc.Mapping +if hasattr(collections_abc, 'Collection'): + class Mapping(Collection[KT], Generic[KT, VT_co], + extra=collections_abc.Mapping): + __slots__ = () +else: + class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co], + extra=collections_abc.Mapping): + __slots__ = () -class MutableMapping(Mapping[KT, VT]): +class MutableMapping(Mapping[KT, VT], extra=collections_abc.MutableMapping): __slots__ = () - __extra__ = collections_abc.MutableMapping if hasattr(collections_abc, 'Reversible'): - class Sequence(Sized, Reversible[T_co], Container[T_co]): - __slots__ = () - __extra__ = collections_abc.Sequence + if hasattr(collections_abc, 'Collection'): + class Sequence(Reversible[T_co], Collection[T_co], + extra=collections_abc.Sequence): + __slots__ = () + else: + class Sequence(Sized, Reversible[T_co], Container[T_co], + extra=collections_abc.Sequence): + __slots__ = () else: - class Sequence(Sized, Iterable[T_co], Container[T_co]): + class Sequence(Sized, Iterable[T_co], Container[T_co], + extra=collections_abc.Sequence): __slots__ = () - __extra__ = collections_abc.Sequence -class MutableSequence(Sequence[T]): +class MutableSequence(Sequence[T], extra=collections_abc.MutableSequence): __slots__ = () - __extra__ = collections_abc.MutableSequence - -class ByteString(Sequence[int]): - pass +class ByteString(Sequence[int], extra=collections_abc.ByteString): + __slots__ = () -ByteString.register(str) -ByteString.register(bytearray) +class List(list, MutableSequence[T], extra=list): -class List(list, MutableSequence[T]): __slots__ = () - __extra__ = list def __new__(cls, *args, **kwds): if cls._gorg is List: @@ -1809,9 +1882,9 @@ def __new__(cls, *args, **kwds): return _generic_new(list, cls, *args, **kwds) -class Deque(collections.deque, MutableSequence[T]): +class Deque(collections.deque, MutableSequence[T], extra=collections.deque): + 
__slots__ = () - __extra__ = collections.deque def __new__(cls, *args, **kwds): if cls._gorg is Deque: @@ -1819,9 +1892,9 @@ def __new__(cls, *args, **kwds): return _generic_new(collections.deque, cls, *args, **kwds) -class Set(set, MutableSet[T]): +class Set(set, MutableSet[T], extra=set): + __slots__ = () - __extra__ = set def __new__(cls, *args, **kwds): if cls._gorg is Set: @@ -1830,9 +1903,8 @@ def __new__(cls, *args, **kwds): return _generic_new(set, cls, *args, **kwds) -class FrozenSet(frozenset, AbstractSet[T_co]): +class FrozenSet(frozenset, AbstractSet[T_co], extra=frozenset): __slots__ = () - __extra__ = frozenset def __new__(cls, *args, **kwds): if cls._gorg is FrozenSet: @@ -1841,55 +1913,89 @@ def __new__(cls, *args, **kwds): return _generic_new(frozenset, cls, *args, **kwds) -class MappingView(Sized, Iterable[T_co]): +class MappingView(Sized, Iterable[T_co], extra=collections_abc.MappingView): __slots__ = () - __extra__ = collections_abc.MappingView -class KeysView(MappingView[KT], AbstractSet[KT]): +class KeysView(MappingView[KT], AbstractSet[KT], + extra=collections_abc.KeysView): __slots__ = () - __extra__ = collections_abc.KeysView class ItemsView(MappingView[Tuple[KT, VT_co]], AbstractSet[Tuple[KT, VT_co]], - Generic[KT, VT_co]): + Generic[KT, VT_co], + extra=collections_abc.ItemsView): __slots__ = () - __extra__ = collections_abc.ItemsView -class ValuesView(MappingView[VT_co]): +class ValuesView(MappingView[VT_co], extra=collections_abc.ValuesView): __slots__ = () - __extra__ = collections_abc.ValuesView -class ContextManager(Generic[T_co]): +if hasattr(contextlib, 'AbstractContextManager'): + class ContextManager(Generic[T_co], extra=contextlib.AbstractContextManager): + __slots__ = () +else: + class ContextManager(Generic[T_co]): + __slots__ = () + + def __enter__(self): + return self + + @abc.abstractmethod + def __exit__(self, exc_type, exc_value, traceback): + return None + + @classmethod + def __subclasshook__(cls, C): + if cls is 
ContextManager: + # In Python 3.6+, it is possible to set a method to None to + # explicitly indicate that the class does not implement an ABC + # (https://bugs.python.org/issue25958), but we do not support + # that pattern here because this fallback class is only used + # in Python 3.5 and earlier. + if (any("__enter__" in B.__dict__ for B in C.__mro__) and + any("__exit__" in B.__dict__ for B in C.__mro__)): + return True + return NotImplemented + + +if hasattr(contextlib, 'AbstractAsyncContextManager'): + class AsyncContextManager(Generic[T_co], + extra=contextlib.AbstractAsyncContextManager): + __slots__ = () + + __all__.append('AsyncContextManager') +elif sys.version_info[:2] >= (3, 5): + exec(""" +class AsyncContextManager(Generic[T_co]): __slots__ = () - def __enter__(self): + async def __aenter__(self): return self @abc.abstractmethod - def __exit__(self, exc_type, exc_value, traceback): + async def __aexit__(self, exc_type, exc_value, traceback): return None @classmethod def __subclasshook__(cls, C): - if cls is ContextManager: - # In Python 3.6+, it is possible to set a method to None to - # explicitly indicate that the class does not implement an ABC - # (https://bugs.python.org/issue25958), but we do not support - # that pattern here because this fallback class is only used - # in Python 3.5 and earlier. 
- if (any("__enter__" in B.__dict__ for B in C.__mro__) and - any("__exit__" in B.__dict__ for B in C.__mro__)): + if cls is AsyncContextManager: + if sys.version_info[:2] >= (3, 6): + return _collections_abc._check_methods(C, "__aenter__", "__aexit__") + if (any("__aenter__" in B.__dict__ for B in C.__mro__) and + any("__aexit__" in B.__dict__ for B in C.__mro__)): return True return NotImplemented +__all__.append('AsyncContextManager') +""") + + +class Dict(dict, MutableMapping[KT, VT], extra=dict): -class Dict(dict, MutableMapping[KT, VT]): __slots__ = () - __extra__ = dict def __new__(cls, *args, **kwds): if cls._gorg is Dict: @@ -1898,9 +2004,10 @@ def __new__(cls, *args, **kwds): return _generic_new(dict, cls, *args, **kwds) -class DefaultDict(collections.defaultdict, MutableMapping[KT, VT]): +class DefaultDict(collections.defaultdict, MutableMapping[KT, VT], + extra=collections.defaultdict): + __slots__ = () - __extra__ = collections.defaultdict def __new__(cls, *args, **kwds): if cls._gorg is DefaultDict: @@ -1908,9 +2015,9 @@ def __new__(cls, *args, **kwds): return _generic_new(collections.defaultdict, cls, *args, **kwds) -class Counter(collections.Counter, Dict[T, int]): +class Counter(collections.Counter, Dict[T, int], extra=collections.Counter): + __slots__ = () - __extra__ = collections.Counter def __new__(cls, *args, **kwds): if cls._gorg is Counter: @@ -1918,6 +2025,21 @@ def __new__(cls, *args, **kwds): return _generic_new(collections.Counter, cls, *args, **kwds) +if hasattr(collections, 'ChainMap'): + # ChainMap only exists in 3.3+ + __all__.append('ChainMap') + + class ChainMap(collections.ChainMap, MutableMapping[KT, VT], + extra=collections.ChainMap): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if cls._gorg is ChainMap: + return collections.ChainMap(*args, **kwds) + return _generic_new(collections.ChainMap, cls, *args, **kwds) + + # Determine what base class to use for Generator. 
if hasattr(collections_abc, 'Generator'): # Sufficiently recent versions of 3.5 have a Generator ABC. @@ -1927,9 +2049,9 @@ def __new__(cls, *args, **kwds): _G_base = types.GeneratorType -class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co]): +class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co], + extra=_G_base): __slots__ = () - __extra__ = _G_base def __new__(cls, *args, **kwds): if cls._gorg is Generator: @@ -1938,12 +2060,20 @@ def __new__(cls, *args, **kwds): return _generic_new(_G_base, cls, *args, **kwds) +if hasattr(collections_abc, 'AsyncGenerator'): + class AsyncGenerator(AsyncIterator[T_co], Generic[T_co, T_contra], + extra=collections_abc.AsyncGenerator): + __slots__ = () + + __all__.append('AsyncGenerator') + + # Internal type variable used for Type[]. CT_co = TypeVar('CT_co', covariant=True, bound=type) # This is not a real generic class. Don't use outside annotations. -class Type(Generic[CT_co]): +class Type(Generic[CT_co], extra=type): """A special construct usable to annotate class objects. For example, suppose we have the following classes:: @@ -1966,35 +2096,105 @@ def new_user(user_class: Type[U]) -> U: At this point the type checker knows that joe has type BasicUser. """ + __slots__ = () - __extra__ = type -def NamedTuple(typename, fields): +def _make_nmtuple(name, types): + msg = "NamedTuple('Name', [(f0, t0), (f1, t1), ...]); each t must be a type" + types = [(n, _type_check(t, msg)) for n, t in types] + nm_tpl = collections.namedtuple(name, [n for n, t in types]) + # Prior to PEP 526, only _field_types attribute was assigned. + # Now, both __annotations__ and _field_types are used to maintain compatibility. 
+ nm_tpl.__annotations__ = nm_tpl._field_types = collections.OrderedDict(types) + try: + nm_tpl.__module__ = sys._getframe(2).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + pass + return nm_tpl + + +_PY36 = sys.version_info[:2] >= (3, 6) + +# attributes prohibited to set in NamedTuple class syntax +_prohibited = ('__new__', '__init__', '__slots__', '__getnewargs__', + '_fields', '_field_defaults', '_field_types', + '_make', '_replace', '_asdict', '_source') + +_special = ('__module__', '__name__', '__qualname__', '__annotations__') + + +class NamedTupleMeta(type): + + def __new__(cls, typename, bases, ns): + if ns.get('_root', False): + return super().__new__(cls, typename, bases, ns) + if not _PY36: + raise TypeError("Class syntax for NamedTuple is only supported" + " in Python 3.6+") + types = ns.get('__annotations__', {}) + nm_tpl = _make_nmtuple(typename, types.items()) + defaults = [] + defaults_dict = {} + for field_name in types: + if field_name in ns: + default_value = ns[field_name] + defaults.append(default_value) + defaults_dict[field_name] = default_value + elif defaults: + raise TypeError("Non-default namedtuple field {field_name} cannot " + "follow default field(s) {default_names}" + .format(field_name=field_name, + default_names=', '.join(defaults_dict.keys()))) + nm_tpl.__new__.__defaults__ = tuple(defaults) + nm_tpl._field_defaults = defaults_dict + # update from user namespace without overriding special namedtuple attributes + for key in ns: + if key in _prohibited: + raise AttributeError("Cannot overwrite NamedTuple attribute " + key) + elif key not in _special and key not in nm_tpl._fields: + setattr(nm_tpl, key, ns[key]) + return nm_tpl + + +class NamedTuple(metaclass=NamedTupleMeta): """Typed version of namedtuple. 
- Usage:: + Usage in Python versions >= 3.6:: - Employee = typing.NamedTuple('Employee', [('name', str), ('id', int)]) + class Employee(NamedTuple): + name: str + id: int This is equivalent to:: Employee = collections.namedtuple('Employee', ['name', 'id']) - The resulting class has one extra attribute: _field_types, - giving a dict mapping field names to types. (The field names + The resulting class has extra __annotations__ and _field_types + attributes, giving an ordered dict mapping field names to types. + __annotations__ should be preferred, while _field_types + is kept to maintain pre PEP 526 compatibility. (The field names are in the _fields attribute, which is part of the namedtuple - API.) + API.) Alternative equivalent keyword syntax is also accepted:: + + Employee = NamedTuple('Employee', name=str, id=int) + + In Python versions <= 3.5 use:: + + Employee = NamedTuple('Employee', [('name', str), ('id', int)]) """ - fields = [(n, t) for n, t in fields] - cls = collections.namedtuple(typename, [n for n, t in fields]) - cls._field_types = dict(fields) - # Set the module to the caller's module (otherwise it'd be 'typing'). - try: - cls.__module__ = sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - pass - return cls + _root = True + + def __new__(self, typename, fields=None, **kwargs): + if kwargs and not _PY36: + raise TypeError("Keyword syntax for NamedTuple is only supported" + " in Python 3.6+") + if fields is None: + fields = kwargs.items() + elif kwargs: + raise TypeError("Either list of fields or keywords" + " can be provided to NamedTuple, not both") + return _make_nmtuple(typename, fields) def NewType(name, tp): @@ -2005,8 +2205,7 @@ def NewType(name, tp): UserId = NewType('UserId', int) - def name_by_id(user_id): - # type: (UserId) -> str + def name_by_id(user_id: UserId) -> str: ... 
UserId('user') # Fails type check @@ -2020,14 +2219,13 @@ def name_by_id(user_id): def new_type(x): return x - # Some versions of Python 2 complain because of making all strings unicode - new_type.__name__ = str(name) + new_type.__name__ = name new_type.__supertype__ = tp return new_type # Python-version-specific alias (Python 2: unicode; Python 3: str) -Text = unicode +Text = str # Constant that's True when type checking, but False here. @@ -2050,83 +2248,83 @@ class IO(Generic[AnyStr]): __slots__ = () @abstractproperty - def mode(self): + def mode(self) -> str: pass @abstractproperty - def name(self): + def name(self) -> str: pass @abstractmethod - def close(self): + def close(self) -> None: pass - @abstractproperty - def closed(self): + @abstractmethod + def closed(self) -> bool: pass @abstractmethod - def fileno(self): + def fileno(self) -> int: pass @abstractmethod - def flush(self): + def flush(self) -> None: pass @abstractmethod - def isatty(self): + def isatty(self) -> bool: pass @abstractmethod - def read(self, n=-1): + def read(self, n: int = -1) -> AnyStr: pass @abstractmethod - def readable(self): + def readable(self) -> bool: pass @abstractmethod - def readline(self, limit=-1): + def readline(self, limit: int = -1) -> AnyStr: pass @abstractmethod - def readlines(self, hint=-1): + def readlines(self, hint: int = -1) -> List[AnyStr]: pass @abstractmethod - def seek(self, offset, whence=0): + def seek(self, offset: int, whence: int = 0) -> int: pass @abstractmethod - def seekable(self): + def seekable(self) -> bool: pass @abstractmethod - def tell(self): + def tell(self) -> int: pass @abstractmethod - def truncate(self, size=None): + def truncate(self, size: int = None) -> int: pass @abstractmethod - def writable(self): + def writable(self) -> bool: pass @abstractmethod - def write(self, s): + def write(self, s: AnyStr) -> int: pass @abstractmethod - def writelines(self, lines): + def writelines(self, lines: List[AnyStr]) -> None: pass @abstractmethod - 
def __enter__(self): + def __enter__(self) -> 'IO[AnyStr]': pass @abstractmethod - def __exit__(self, type, value, traceback): + def __exit__(self, type, value, traceback) -> None: pass @@ -2136,45 +2334,45 @@ class BinaryIO(IO[bytes]): __slots__ = () @abstractmethod - def write(self, s): + def write(self, s: Union[bytes, bytearray]) -> int: pass @abstractmethod - def __enter__(self): + def __enter__(self) -> 'BinaryIO': pass -class TextIO(IO[unicode]): +class TextIO(IO[str]): """Typed version of the return of open() in text mode.""" __slots__ = () @abstractproperty - def buffer(self): + def buffer(self) -> BinaryIO: pass @abstractproperty - def encoding(self): + def encoding(self) -> str: pass @abstractproperty - def errors(self): + def errors(self) -> Optional[str]: pass @abstractproperty - def line_buffering(self): + def line_buffering(self) -> bool: pass @abstractproperty - def newlines(self): + def newlines(self) -> Any: pass @abstractmethod - def __enter__(self): + def __enter__(self) -> 'TextIO': pass -class io(object): +class io: """Wrapper namespace for IO generic classes.""" __all__ = ['IO', 'TextIO', 'BinaryIO'] @@ -2183,7 +2381,7 @@ class io(object): BinaryIO = BinaryIO -io.__name__ = __name__ + b'.io' +io.__name__ = __name__ + '.io' sys.modules[io.__name__] = io @@ -2193,7 +2391,7 @@ class io(object): lambda m: m.re.pattern) -class re(object): +class re: """Wrapper namespace for re type aliases.""" __all__ = ['Pattern', 'Match'] @@ -2201,5 +2399,5 @@ class re(object): Match = Match -re.__name__ = __name__ + b'.re' +re.__name__ = __name__ + '.re' sys.modules[re.__name__] = re diff --git a/pytest.ini b/pytest.ini index 80c3e3a399..eee579da2c 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,4 +1,5 @@ [pytest] -addopts = -n auto +addopts = -ra -n auto +testpaths = tests/ ; Add vendor and patched in addition to the default list of ignored dirs -norecursedirs = .* build dist CVS _darcs {arch} *.egg vendor patched +norecursedirs = .* build dist CVS _darcs 
{arch} *.egg vendor patched news tasks docs diff --git a/run-tests.sh b/run-tests.sh index 64d7964895..cc5252d14c 100755 --- a/run-tests.sh +++ b/run-tests.sh @@ -28,8 +28,10 @@ fi # pip uninstall -y pipenv echo "Path: $PATH" echo "Installing Pipenv…" -pip install -e "$(pwd)" --upgrade -pipenv install --deploy --dev +PIP_USER="1" python -m pip install --upgrade setuptools +PIP_USER="1" python3 -m pip install --upgrade setuptools +python -m pip install -e "$(pwd)" --upgrade && python3 -m pip install -e "$(pwd)" --upgrade +python3 -m pipenv install --deploy --dev --system # Otherwise, we're on a development machine. # First, try MacOS… @@ -47,20 +49,20 @@ else fi echo "Installing dependencies…" -PIPENV_PYTHON=2.7 pipenv run pip install -e . --upgrade -PIPENV_PYTHON=3.7 pipenv run pip install -e . --upgrade -PIPENV_PYTHON=2.7 pipenv install --dev -PIPENV_PYTHON=3.7 pipenv install --dev +PIPENV_PYTHON=2.7 python3 -m pipenv --venv && pipenv --rm && pipenv install --dev +PIPENV_PYTHON=3.7 python3 -m pipenv --venv && pipenv --rm && pipenv install --dev +PIPENV_PYTHON=2.7 python3 -m pipenv run pip install --upgrade -e . +PIPENV_PYTHON=3.7 python3 -m pipenv run pip install --upgrade -e . echo "$ pipenv run time pytest -v -n auto tests -m \"$TEST_SUITE\"" # PIPENV_PYTHON=2.7 pipenv run time pytest -v -n auto tests -m "$TEST_SUITE" | prefix 2.7 & # PIPENV_PYTHON=3.6 pipenv run time pytest -v -n auto tests -m "$TEST_SUITE" | prefix 3.6 # Better to run them sequentially. -PIPENV_PYTHON=2.7 pipenv run time pytest -PIPENV_PYTHON=3.7 pipenv run time pytest +PIPENV_PYTHON=2.7 python3 -m pipenv run time pytest +PIPENV_PYTHON=3.7 python3 -m pipenv run time pytest # test revendoring -pip3 install --upgrade invoke requests parver +pip3 install --upgrade invoke requests parver vistir python3 -m invoke vendoring.update # Cleanup junk. 
rm -fr .venv diff --git a/setup.py b/setup.py index 426483ddd6..ea5af031de 100644 --- a/setup.py +++ b/setup.py @@ -30,7 +30,7 @@ 'requests[security];python_version<"2.7"', 'ordereddict;python_version<"2.7"', 'enum34; python_version<"3"', - 'typing; python_version<"3"' + 'typing; python_version<"3.5"' ] diff --git a/tasks/vendoring/__init__.py b/tasks/vendoring/__init__.py index 84e8b1ad9d..9e36723bf5 100644 --- a/tasks/vendoring/__init__.py +++ b/tasks/vendoring/__init__.py @@ -2,8 +2,9 @@ """"Vendoring script, python 3.5 needed""" # Taken from pip # see https://github.com/pypa/pip/blob/95bcf8c5f6394298035a7332c441868f3b0169f4/tasks/vendoring/__init__.py -from vistir.compat import NamedTemporaryFile, TemporaryDirectory, Path -from vistir.path import mkdir_p +from pipenv._compat import NamedTemporaryFile, TemporaryDirectory +from pathlib import Path +from pipenv.utils import mkdir_p # from tempfile import TemporaryDirectory import tarfile import zipfile @@ -19,16 +20,17 @@ 'requirements-parser': 'requirements', 'backports.shutil_get_terminal_size': 'backports/shutil_get_terminal_size', 'backports.weakref': 'backports/weakref', - 'typing.py': 'backports/typing.py', 'shutil_backports': 'backports/shutil_get_terminal_size', 'python-dotenv': 'dotenv', 'pip-tools': 'piptools', 'setuptools': 'pkg_resources', 'msgpack-python': 'msgpack', 'attrs': 'attr', - 'enum34': 'backports/enum' + 'enum': 'backports/enum' } +PY2_DOWNLOAD = ['enum34',] + # from time to time, remove the no longer needed ones HARDCODED_LICENSE_URLS = { 'pytoml': 'https://github.com/avakar/pytoml/raw/master/LICENSE', @@ -68,8 +70,7 @@ LIBRARY_RENAMES = { 'pip': 'pipenv.patched.notpip', - 'enum': 'backports/enum', - 'typing.py': 'backports/typing.py' + 'enum34': 'enum', } @@ -420,6 +421,9 @@ def packages_missing_licenses(ctx, vendor_dir=None, requirements_file='vendor.tx pkg = req.strip().split("=")[0] possible_pkgs = [pkg, pkg.replace('-', '_')] match_found = False + if pkg in PY2_DOWNLOAD: + match_found 
= True + # print("pkg ===> %s" % pkg) if pkg in LIBRARY_DIRNAMES: possible_pkgs.append(LIBRARY_DIRNAMES[pkg]) for pkgpath in possible_pkgs: @@ -429,21 +433,22 @@ def packages_missing_licenses(ctx, vendor_dir=None, requirements_file='vendor.tx licensepath = pkgpath.joinpath(licensepath) if licensepath.exists(): match_found = True - log("%s: Trying path %s... FOUND" % (pkg, licensepath)) + # log("%s: Trying path %s... FOUND" % (pkg, licensepath)) break elif (pkgpath.exists() or pkgpath.parent.joinpath("{0}.py".format(pkgpath.stem)).exists()): for licensepath in LICENSES: licensepath = pkgpath.parent.joinpath("{0}.{1}".format(pkgpath.stem, licensepath)) if licensepath.exists(): match_found = True - log("%s: Trying path %s... FOUND" % (pkg, licensepath)) + # log("%s: Trying path %s... FOUND" % (pkg, licensepath)) break if match_found: break if match_found: continue - log("%s: No license found in %s" % (pkg, pkgpath)) - new_requirements.append(req) + else: + # log("%s: No license found in %s" % (pkg, pkgpath)) + new_requirements.append(req) return new_requirements @@ -458,10 +463,12 @@ def download_licenses(ctx, vendor_dir=None, requirements_file='vendor.txt', pack vendor_dir = _get_vendor_dir(ctx) requirements_file = vendor_dir / requirements_file requirements = packages_missing_licenses(ctx, vendor_dir, requirements_file, package=package) + with NamedTemporaryFile(prefix="pipenv", suffix="vendor-reqs", delete=False, mode="w") as fh: fh.write("\n".join(requirements)) new_requirements_file = fh.name new_requirements_file = Path(new_requirements_file) + log(requirements) requirement = "-r {0}".format(new_requirements_file.as_posix()) if package: if not only: @@ -472,8 +479,9 @@ def download_licenses(ctx, vendor_dir=None, requirements_file='vendor.txt', pack requirement = package tmp_dir = vendor_dir / '__tmp__' # TODO: Fix this whenever it gets sorted out (see https://github.com/pypa/pip/issues/5739) + ctx.run('pip install flit') # needed for the next step ctx.run( - 
'pip download --no-binary :all: --only-binary requests_download --no-deps -d {0} {1}'.format( + 'pip download --no-binary :all: --only-binary requests_download --no-build-isolation --no-deps -d {0} {1}'.format( tmp_dir.as_posix(), requirement, ) diff --git a/tasks/vendoring/patches/vendor/tomlkit-typing-imports.patch b/tasks/vendoring/patches/vendor/tomlkit-typing-imports.patch index b7e29fe08e..2288b51333 100644 --- a/tasks/vendoring/patches/vendor/tomlkit-typing-imports.patch +++ b/tasks/vendoring/patches/vendor/tomlkit-typing-imports.patch @@ -1,23 +1,20 @@ diff --git a/pipenv/vendor/tomlkit/api.py b/pipenv/vendor/tomlkit/api.py -index e541c20c..d36375e8 100644 +index e541c20c..0ac26752 100644 --- a/pipenv/vendor/tomlkit/api.py +++ b/pipenv/vendor/tomlkit/api.py -@@ -1,6 +1,9 @@ +@@ -1,7 +1,5 @@ import datetime as _datetime -from typing import Tuple -+try: -+ from typing import Tuple -+except ImportError: -+ from pipenv.vendor.backports.typing import Tuple - +- from ._utils import parse_rfc3339 from .container import Container + from .items import AoT diff --git a/pipenv/vendor/tomlkit/container.py b/pipenv/vendor/tomlkit/container.py -index c1d2d7c6..c7595a52 100644 +index c1d2d7c6..a7876ff1 100644 --- a/pipenv/vendor/tomlkit/container.py +++ b/pipenv/vendor/tomlkit/container.py -@@ -1,12 +1,21 @@ +@@ -1,13 +1,5 @@ from __future__ import unicode_literals -from typing import Any @@ -27,43 +24,26 @@ index c1d2d7c6..c7595a52 100644 -from typing import Optional -from typing import Tuple -from typing import Union -+try: -+ from typing import Any -+ from typing import Dict -+ from typing import Generator -+ from typing import List -+ from typing import Optional -+ from typing import Tuple -+ from typing import Union -+except ImportError: -+ from pipenv.vendor.backports.typing import Any -+ from pipenv.vendor.backports.typing import Dict -+ from pipenv.vendor.backports.typing import Generator -+ from pipenv.vendor.backports.typing import List -+ from 
pipenv.vendor.backports.typing import Optional -+ from pipenv.vendor.backports.typing import Tuple -+ from pipenv.vendor.backports.typing import Union - +- from ._compat import decode from .exceptions import KeyAlreadyPresent + from .exceptions import NonExistentKey diff --git a/pipenv/vendor/tomlkit/exceptions.py b/pipenv/vendor/tomlkit/exceptions.py -index 8d48bf19..dae29f6f 100644 +index 8d48bf19..d889a924 100644 --- a/pipenv/vendor/tomlkit/exceptions.py +++ b/pipenv/vendor/tomlkit/exceptions.py -@@ -1,4 +1,7 @@ +@@ -1,6 +1,3 @@ -from typing import Optional -+try: -+ from typing import Optional -+except ImportError: -+ from pipenv.vendor.backports.typing import Optional - - +- +- class TOMLKitError(Exception): + + pass diff --git a/pipenv/vendor/tomlkit/items.py b/pipenv/vendor/tomlkit/items.py -index 747dbd50..83b17612 100644 +index 747dbd50..8807f4b3 100644 --- a/pipenv/vendor/tomlkit/items.py +++ b/pipenv/vendor/tomlkit/items.py -@@ -6,13 +6,25 @@ import string +@@ -6,14 +6,11 @@ import string from datetime import date from datetime import datetime from datetime import time @@ -74,33 +54,20 @@ index 747dbd50..83b17612 100644 -from typing import List -from typing import Optional -from typing import Union +- +import sys +if sys.version_info >= (3, 4): + from enum import Enum +else: + from pipenv.vendor.backports.enum import Enum -+try: -+ from typing import Any -+ from typing import Dict -+ from typing import Generator -+ from typing import List -+ from typing import Optional -+ from typing import Union -+except ImportError: -+ from pipenv.vendor.backports.typing import Any -+ from pipenv.vendor.backports.typing import Dict -+ from pipenv.vendor.backports.typing import Generator -+ from pipenv.vendor.backports.typing import List -+ from pipenv.vendor.backports.typing import Optional -+ from pipenv.vendor.backports.typing import Union - from ._compat import PY2 + from ._compat import decode diff --git a/pipenv/vendor/tomlkit/parser.py 
b/pipenv/vendor/tomlkit/parser.py -index b55a3fe4..28c0a6e0 100644 +index b55a3fe4..3d4984d1 100644 --- a/pipenv/vendor/tomlkit/parser.py +++ b/pipenv/vendor/tomlkit/parser.py -@@ -7,10 +7,16 @@ import re +@@ -7,10 +7,6 @@ import re import string from copy import copy @@ -108,34 +75,19 @@ index b55a3fe4..28c0a6e0 100644 -from typing import Optional -from typing import Tuple -from typing import Union -+try: -+ from typing import Iterator -+ from typing import Optional -+ from typing import Tuple -+ from typing import Union -+except ImportError: -+ from pipenv.vendor.backports.typing import Iterator -+ from pipenv.vendor.backports.typing import Optional -+ from pipenv.vendor.backports.typing import Tuple -+ from pipenv.vendor.backports.typing import Union from ._compat import PY2 from ._compat import chr diff --git a/pipenv/vendor/tomlkit/toml_file.py b/pipenv/vendor/tomlkit/toml_file.py -index 3b416664..3e4cc721 100644 +index 3b416664..631e9959 100644 --- a/pipenv/vendor/tomlkit/toml_file.py +++ b/pipenv/vendor/tomlkit/toml_file.py -@@ -1,7 +1,11 @@ +@@ -1,8 +1,5 @@ import io -from typing import Any -from typing import Dict -+try: -+ from typing import Any -+ from typing import Dict -+except ImportError: -+ from pipenv.vendor.backports.typing import Any -+ from pipenv.vendor.backports.typing import Dict - +- from .api import loads from .toml_document import TOMLDocument + From 8c776425fdc1217cdfd88f58f5c618234392da5e Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 27 Aug 2018 16:03:55 -0400 Subject: [PATCH 26/26] Remove typing Signed-off-by: Dan Ryan --- pipenv/vendor/typing.LICENSE | 254 ---- pipenv/vendor/typing.py | 2403 ---------------------------------- pipenv/vendor/vendor.txt | 1 - 3 files changed, 2658 deletions(-) delete mode 100644 pipenv/vendor/typing.LICENSE delete mode 100644 pipenv/vendor/typing.py diff --git a/pipenv/vendor/typing.LICENSE b/pipenv/vendor/typing.LICENSE deleted file mode 100644 index 583f9f6e61..0000000000 --- 
a/pipenv/vendor/typing.LICENSE +++ /dev/null @@ -1,254 +0,0 @@ -A. HISTORY OF THE SOFTWARE -========================== - -Python was created in the early 1990s by Guido van Rossum at Stichting -Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands -as a successor of a language called ABC. Guido remains Python's -principal author, although it includes many contributions from others. - -In 1995, Guido continued his work on Python at the Corporation for -National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) -in Reston, Virginia where he released several versions of the -software. - -In May 2000, Guido and the Python core development team moved to -BeOpen.com to form the BeOpen PythonLabs team. In October of the same -year, the PythonLabs team moved to Digital Creations (now Zope -Corporation, see http://www.zope.com). In 2001, the Python Software -Foundation (PSF, see http://www.python.org/psf/) was formed, a -non-profit organization created specifically to own Python-related -Intellectual Property. Zope Corporation is a sponsoring member of -the PSF. - -All Python releases are Open Source (see http://www.opensource.org for -the Open Source Definition). Historically, most, but not all, Python -releases have also been GPL-compatible; the table below summarizes -the various releases. - - Release Derived Year Owner GPL- - from compatible? (1) - - 0.9.0 thru 1.2 1991-1995 CWI yes - 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes - 1.6 1.5.2 2000 CNRI no - 2.0 1.6 2000 BeOpen.com no - 1.6.1 1.6 2001 CNRI yes (2) - 2.1 2.0+1.6.1 2001 PSF no - 2.0.1 2.0+1.6.1 2001 PSF yes - 2.1.1 2.1+2.0.1 2001 PSF yes - 2.1.2 2.1.1 2002 PSF yes - 2.1.3 2.1.2 2002 PSF yes - 2.2 and above 2.1.1 2001-now PSF yes - -Footnotes: - -(1) GPL-compatible doesn't mean that we're distributing Python under - the GPL. All Python licenses, unlike the GPL, let you distribute - a modified version without making your changes open source. 
The - GPL-compatible licenses make it possible to combine Python with - other software that is released under the GPL; the others don't. - -(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, - because its license has a choice of law clause. According to - CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 - is "not incompatible" with the GPL. - -Thanks to the many outside volunteers who have worked under Guido's -direction to make these releases possible. - - -B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON -=============================================================== - -PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 --------------------------------------------- - -1. This LICENSE AGREEMENT is between the Python Software Foundation -("PSF"), and the Individual or Organization ("Licensee") accessing and -otherwise using this software ("Python") in source or binary form and -its associated documentation. - -2. Subject to the terms and conditions of this License Agreement, PSF hereby -grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, -analyze, test, perform and/or display publicly, prepare derivative works, -distribute, and otherwise use Python alone or in any derivative version, -provided, however, that PSF's License Agreement and PSF's notice of copyright, -i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved" are -retained in Python alone or in any derivative version prepared by Licensee. - -3. In the event Licensee prepares a derivative work that is based on -or incorporates Python or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python. - -4. PSF is making Python available to Licensee on an "AS IS" -basis. 
PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. Nothing in this License Agreement shall be deemed to create any -relationship of agency, partnership, or joint venture between PSF and -Licensee. This License Agreement does not grant permission to use PSF -trademarks or trade name in a trademark sense to endorse or promote -products or services of Licensee, or any third party. - -8. By copying, installing or otherwise using Python, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. - - -BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 -------------------------------------------- - -BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 - -1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an -office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the -Individual or Organization ("Licensee") accessing and otherwise using -this software in source or binary form and its associated -documentation ("the Software"). - -2. 
Subject to the terms and conditions of this BeOpen Python License -Agreement, BeOpen hereby grants Licensee a non-exclusive, -royalty-free, world-wide license to reproduce, analyze, test, perform -and/or display publicly, prepare derivative works, distribute, and -otherwise use the Software alone or in any derivative version, -provided, however, that the BeOpen Python License is retained in the -Software, alone or in any derivative version prepared by Licensee. - -3. BeOpen is making the Software available to Licensee on an "AS IS" -basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE -SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS -AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY -DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -5. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -6. This License Agreement shall be governed by and interpreted in all -respects by the law of the State of California, excluding conflict of -law provisions. Nothing in this License Agreement shall be deemed to -create any relationship of agency, partnership, or joint venture -between BeOpen and Licensee. This License Agreement does not grant -permission to use BeOpen trademarks or trade names in a trademark -sense to endorse or promote products or services of Licensee, or any -third party. As an exception, the "BeOpen Python" logos available at -http://www.pythonlabs.com/logos.html may be used according to the -permissions granted on that web page. - -7. 
By copying, installing or otherwise using the software, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. - - -CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 ---------------------------------------- - -1. This LICENSE AGREEMENT is between the Corporation for National -Research Initiatives, having an office at 1895 Preston White Drive, -Reston, VA 20191 ("CNRI"), and the Individual or Organization -("Licensee") accessing and otherwise using Python 1.6.1 software in -source or binary form and its associated documentation. - -2. Subject to the terms and conditions of this License Agreement, CNRI -hereby grants Licensee a nonexclusive, royalty-free, world-wide -license to reproduce, analyze, test, perform and/or display publicly, -prepare derivative works, distribute, and otherwise use Python 1.6.1 -alone or in any derivative version, provided, however, that CNRI's -License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) -1995-2001 Corporation for National Research Initiatives; All Rights -Reserved" are retained in Python 1.6.1 alone or in any derivative -version prepared by Licensee. Alternately, in lieu of CNRI's License -Agreement, Licensee may substitute the following text (omitting the -quotes): "Python 1.6.1 is made available subject to the terms and -conditions in CNRI's License Agreement. This Agreement together with -Python 1.6.1 may be located on the Internet using the following -unique, persistent identifier (known as a handle): 1895.22/1013. This -Agreement may also be obtained from a proxy server on the Internet -using the following URL: http://hdl.handle.net/1895.22/1013". - -3. In the event Licensee prepares a derivative work that is based on -or incorporates Python 1.6.1 or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python 1.6.1. - -4. 
CNRI is making Python 1.6.1 available to Licensee on an "AS IS" -basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. This License Agreement shall be governed by the federal -intellectual property law of the United States, including without -limitation the federal copyright law, and, to the extent such -U.S. federal law does not apply, by the law of the Commonwealth of -Virginia, excluding Virginia's conflict of law provisions. -Notwithstanding the foregoing, with regard to derivative works based -on Python 1.6.1 that incorporate non-separable material that was -previously distributed under the GNU General Public License (GPL), the -law of the Commonwealth of Virginia shall govern this License -Agreement only as to issues arising under or with respect to -Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this -License Agreement shall be deemed to create any relationship of -agency, partnership, or joint venture between CNRI and Licensee. This -License Agreement does not grant permission to use CNRI trademarks or -trade name in a trademark sense to endorse or promote products or -services of Licensee, or any third party. - -8. 
By clicking on the "ACCEPT" button where indicated, or by copying, -installing or otherwise using Python 1.6.1, Licensee agrees to be -bound by the terms and conditions of this License Agreement. - - ACCEPT - - -CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 --------------------------------------------------- - -Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, -The Netherlands. All rights reserved. - -Permission to use, copy, modify, and distribute this software and its -documentation for any purpose and without fee is hereby granted, -provided that the above copyright notice appear in all copies and that -both that copyright notice and this permission notice appear in -supporting documentation, and that the name of Stichting Mathematisch -Centrum or CWI not be used in advertising or publicity pertaining to -distribution of the software without specific, written prior -permission. - -STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO -THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE -FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/pipenv/vendor/typing.py b/pipenv/vendor/typing.py deleted file mode 100644 index b5564cc29a..0000000000 --- a/pipenv/vendor/typing.py +++ /dev/null @@ -1,2403 +0,0 @@ -import abc -from abc import abstractmethod, abstractproperty -import collections -import contextlib -import functools -import re as stdlib_re # Avoid confusion with the re we export. -import sys -import types -try: - import collections.abc as collections_abc -except ImportError: - import collections as collections_abc # Fallback for PY3.2. 
-if sys.version_info[:2] >= (3, 6): - import _collections_abc # Needed for private function _check_methods # noqa -try: - from types import WrapperDescriptorType, MethodWrapperType, MethodDescriptorType -except ImportError: - WrapperDescriptorType = type(object.__init__) - MethodWrapperType = type(object().__str__) - MethodDescriptorType = type(str.join) - - -# Please keep __all__ alphabetized within each category. -__all__ = [ - # Super-special typing primitives. - 'Any', - 'Callable', - 'ClassVar', - 'Generic', - 'Optional', - 'Tuple', - 'Type', - 'TypeVar', - 'Union', - - # ABCs (from collections.abc). - 'AbstractSet', # collections.abc.Set. - 'GenericMeta', # subclass of abc.ABCMeta and a metaclass - # for 'Generic' and ABCs below. - 'ByteString', - 'Container', - 'ContextManager', - 'Hashable', - 'ItemsView', - 'Iterable', - 'Iterator', - 'KeysView', - 'Mapping', - 'MappingView', - 'MutableMapping', - 'MutableSequence', - 'MutableSet', - 'Sequence', - 'Sized', - 'ValuesView', - # The following are added depending on presence - # of their non-generic counterparts in stdlib: - # Awaitable, - # AsyncIterator, - # AsyncIterable, - # Coroutine, - # Collection, - # AsyncGenerator, - # AsyncContextManager - - # Structural checks, a.k.a. protocols. - 'Reversible', - 'SupportsAbs', - 'SupportsBytes', - 'SupportsComplex', - 'SupportsFloat', - 'SupportsInt', - 'SupportsRound', - - # Concrete collection types. - 'Counter', - 'Deque', - 'Dict', - 'DefaultDict', - 'List', - 'Set', - 'FrozenSet', - 'NamedTuple', # Not really a type. - 'Generator', - - # One-off things. - 'AnyStr', - 'cast', - 'get_type_hints', - 'NewType', - 'no_type_check', - 'no_type_check_decorator', - 'overload', - 'Text', - 'TYPE_CHECKING', -] - -# The pseudo-submodules 're' and 'io' are part of the public -# namespace, but excluded from __all__ because they might stomp on -# legitimate imports of those modules. 
- - -def _qualname(x): - if sys.version_info[:2] >= (3, 3): - return x.__qualname__ - else: - # Fall back to just name. - return x.__name__ - - -def _trim_name(nm): - whitelist = ('_TypeAlias', '_ForwardRef', '_TypingBase', '_FinalTypingBase') - if nm.startswith('_') and nm not in whitelist: - nm = nm[1:] - return nm - - -class TypingMeta(type): - """Metaclass for most types defined in typing module - (not a part of public API). - - This overrides __new__() to require an extra keyword parameter - '_root', which serves as a guard against naive subclassing of the - typing classes. Any legitimate class defined using a metaclass - derived from TypingMeta must pass _root=True. - - This also defines a dummy constructor (all the work for most typing - constructs is done in __new__) and a nicer repr(). - """ - - _is_protocol = False - - def __new__(cls, name, bases, namespace, *, _root=False): - if not _root: - raise TypeError("Cannot subclass %s" % - (', '.join(map(_type_repr, bases)) or '()')) - return super().__new__(cls, name, bases, namespace) - - def __init__(self, *args, **kwds): - pass - - def _eval_type(self, globalns, localns): - """Override this in subclasses to interpret forward references. - - For example, List['C'] is internally stored as - List[_ForwardRef('C')], which should evaluate to List[C], - where C is an object found in globalns or localns (searching - localns first, of course). - """ - return self - - def _get_type_vars(self, tvars): - pass - - def __repr__(self): - qname = _trim_name(_qualname(self)) - return '%s.%s' % (self.__module__, qname) - - -class _TypingBase(metaclass=TypingMeta, _root=True): - """Internal indicator of special typing constructs.""" - - __slots__ = ('__weakref__',) - - def __init__(self, *args, **kwds): - pass - - def __new__(cls, *args, **kwds): - """Constructor. - - This only exists to give a better error message in case - someone tries to subclass a special typing object (not a good idea). 
- """ - if (len(args) == 3 and - isinstance(args[0], str) and - isinstance(args[1], tuple)): - # Close enough. - raise TypeError("Cannot subclass %r" % cls) - return super().__new__(cls) - - # Things that are not classes also need these. - def _eval_type(self, globalns, localns): - return self - - def _get_type_vars(self, tvars): - pass - - def __repr__(self): - cls = type(self) - qname = _trim_name(_qualname(cls)) - return '%s.%s' % (cls.__module__, qname) - - def __call__(self, *args, **kwds): - raise TypeError("Cannot instantiate %r" % type(self)) - - -class _FinalTypingBase(_TypingBase, _root=True): - """Internal mix-in class to prevent instantiation. - - Prevents instantiation unless _root=True is given in class call. - It is used to create pseudo-singleton instances Any, Union, Optional, etc. - """ - - __slots__ = () - - def __new__(cls, *args, _root=False, **kwds): - self = super().__new__(cls, *args, **kwds) - if _root is True: - return self - raise TypeError("Cannot instantiate %r" % cls) - - def __reduce__(self): - return _trim_name(type(self).__name__) - - -class _ForwardRef(_TypingBase, _root=True): - """Internal wrapper to hold a forward reference.""" - - __slots__ = ('__forward_arg__', '__forward_code__', - '__forward_evaluated__', '__forward_value__') - - def __init__(self, arg): - super().__init__(arg) - if not isinstance(arg, str): - raise TypeError('Forward reference must be a string -- got %r' % (arg,)) - try: - code = compile(arg, '', 'eval') - except SyntaxError: - raise SyntaxError('Forward reference must be an expression -- got %r' % - (arg,)) - self.__forward_arg__ = arg - self.__forward_code__ = code - self.__forward_evaluated__ = False - self.__forward_value__ = None - - def _eval_type(self, globalns, localns): - if not self.__forward_evaluated__ or localns is not globalns: - if globalns is None and localns is None: - globalns = localns = {} - elif globalns is None: - globalns = localns - elif localns is None: - localns = globalns - 
self.__forward_value__ = _type_check( - eval(self.__forward_code__, globalns, localns), - "Forward references must evaluate to types.") - self.__forward_evaluated__ = True - return self.__forward_value__ - - def __eq__(self, other): - if not isinstance(other, _ForwardRef): - return NotImplemented - return (self.__forward_arg__ == other.__forward_arg__ and - self.__forward_value__ == other.__forward_value__) - - def __hash__(self): - return hash((self.__forward_arg__, self.__forward_value__)) - - def __instancecheck__(self, obj): - raise TypeError("Forward references cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Forward references cannot be used with issubclass().") - - def __repr__(self): - return '_ForwardRef(%r)' % (self.__forward_arg__,) - - -class _TypeAlias(_TypingBase, _root=True): - """Internal helper class for defining generic variants of concrete types. - - Note that this is not a type; let's call it a pseudo-type. It cannot - be used in instance and subclass checks in parameterized form, i.e. - ``isinstance(42, Match[str])`` raises ``TypeError`` instead of returning - ``False``. - """ - - __slots__ = ('name', 'type_var', 'impl_type', 'type_checker') - - def __init__(self, name, type_var, impl_type, type_checker): - """Initializer. - - Args: - name: The name, e.g. 'Pattern'. - type_var: The type parameter, e.g. AnyStr, or the - specific type, e.g. str. - impl_type: The implementation type. - type_checker: Function that takes an impl_type instance. - and returns a value that should be a type_var instance. 
- """ - assert isinstance(name, str), repr(name) - assert isinstance(impl_type, type), repr(impl_type) - assert not isinstance(impl_type, TypingMeta), repr(impl_type) - assert isinstance(type_var, (type, _TypingBase)), repr(type_var) - self.name = name - self.type_var = type_var - self.impl_type = impl_type - self.type_checker = type_checker - - def __repr__(self): - return "%s[%s]" % (self.name, _type_repr(self.type_var)) - - def __getitem__(self, parameter): - if not isinstance(self.type_var, TypeVar): - raise TypeError("%s cannot be further parameterized." % self) - if self.type_var.__constraints__ and isinstance(parameter, type): - if not issubclass(parameter, self.type_var.__constraints__): - raise TypeError("%s is not a valid substitution for %s." % - (parameter, self.type_var)) - if isinstance(parameter, TypeVar) and parameter is not self.type_var: - raise TypeError("%s cannot be re-parameterized." % self) - return self.__class__(self.name, parameter, - self.impl_type, self.type_checker) - - def __eq__(self, other): - if not isinstance(other, _TypeAlias): - return NotImplemented - return self.name == other.name and self.type_var == other.type_var - - def __hash__(self): - return hash((self.name, self.type_var)) - - def __instancecheck__(self, obj): - if not isinstance(self.type_var, TypeVar): - raise TypeError("Parameterized type aliases cannot be used " - "with isinstance().") - return isinstance(obj, self.impl_type) - - def __subclasscheck__(self, cls): - if not isinstance(self.type_var, TypeVar): - raise TypeError("Parameterized type aliases cannot be used " - "with issubclass().") - return issubclass(cls, self.impl_type) - - -def _get_type_vars(types, tvars): - for t in types: - if isinstance(t, TypingMeta) or isinstance(t, _TypingBase): - t._get_type_vars(tvars) - - -def _type_vars(types): - tvars = [] - _get_type_vars(types, tvars) - return tuple(tvars) - - -def _eval_type(t, globalns, localns): - if isinstance(t, TypingMeta) or isinstance(t, 
_TypingBase): - return t._eval_type(globalns, localns) - return t - - -def _type_check(arg, msg): - """Check that the argument is a type, and return it (internal helper). - - As a special case, accept None and return type(None) instead. - Also, _TypeAlias instances (e.g. Match, Pattern) are acceptable. - - The msg argument is a human-readable error message, e.g. - - "Union[arg, ...]: arg should be a type." - - We append the repr() of the actual value (truncated to 100 chars). - """ - if arg is None: - return type(None) - if isinstance(arg, str): - arg = _ForwardRef(arg) - if ( - isinstance(arg, _TypingBase) and type(arg).__name__ == '_ClassVar' or - not isinstance(arg, (type, _TypingBase)) and not callable(arg) - ): - raise TypeError(msg + " Got %.100r." % (arg,)) - # Bare Union etc. are not valid as type arguments - if ( - type(arg).__name__ in ('_Union', '_Optional') and - not getattr(arg, '__origin__', None) or - isinstance(arg, TypingMeta) and arg._gorg in (Generic, _Protocol) - ): - raise TypeError("Plain %s is not valid as type argument" % arg) - return arg - - -def _type_repr(obj): - """Return the repr() of an object, special-casing types (internal helper). - - If obj is a type, we return a shorter version than the default - type.__repr__, based on the module and qualified name, which is - typically enough to uniquely identify a type. For everything - else, we fall back on repr(obj). - """ - if isinstance(obj, type) and not isinstance(obj, TypingMeta): - if obj.__module__ == 'builtins': - return _qualname(obj) - return '%s.%s' % (obj.__module__, _qualname(obj)) - if obj is ...: - return('...') - if isinstance(obj, types.FunctionType): - return obj.__name__ - return repr(obj) - - -class _Any(_FinalTypingBase, _root=True): - """Special type indicating an unconstrained type. - - - Any is compatible with every type. - - Any assumed to have all methods. - - All values assumed to be instances of Any. 
- - Note that all the above statements are true from the point of view of - static type checkers. At runtime, Any should not be used with instance - or class checks. - """ - - __slots__ = () - - def __instancecheck__(self, obj): - raise TypeError("Any cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Any cannot be used with issubclass().") - - -Any = _Any(_root=True) - - -class _NoReturn(_FinalTypingBase, _root=True): - """Special type indicating functions that never return. - Example:: - - from typing import NoReturn - - def stop() -> NoReturn: - raise Exception('no way') - - This type is invalid in other positions, e.g., ``List[NoReturn]`` - will fail in static type checkers. - """ - - __slots__ = () - - def __instancecheck__(self, obj): - raise TypeError("NoReturn cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("NoReturn cannot be used with issubclass().") - - -NoReturn = _NoReturn(_root=True) - - -class TypeVar(_TypingBase, _root=True): - """Type variable. - - Usage:: - - T = TypeVar('T') # Can be anything - A = TypeVar('A', str, bytes) # Must be str or bytes - - Type variables exist primarily for the benefit of static type - checkers. They serve as the parameters for generic types as well - as for generic function definitions. See class Generic for more - information on generic types. Generic functions work as follows: - - def repeat(x: T, n: int) -> List[T]: - '''Return a list containing n references to x.''' - return [x]*n - - def longest(x: A, y: A) -> A: - '''Return the longest of two strings.''' - return x if len(x) >= len(y) else y - - The latter example's signature is essentially the overloading - of (str, str) -> str and (bytes, bytes) -> bytes. Also note - that if the arguments are instances of some subclass of str, - the return type is still plain str. - - At runtime, isinstance(x, T) and issubclass(C, T) will raise TypeError. 
- - Type variables defined with covariant=True or contravariant=True - can be used do declare covariant or contravariant generic types. - See PEP 484 for more details. By default generic types are invariant - in all type variables. - - Type variables can be introspected. e.g.: - - T.__name__ == 'T' - T.__constraints__ == () - T.__covariant__ == False - T.__contravariant__ = False - A.__constraints__ == (str, bytes) - """ - - __slots__ = ('__name__', '__bound__', '__constraints__', - '__covariant__', '__contravariant__') - - def __init__(self, name, *constraints, bound=None, - covariant=False, contravariant=False): - super().__init__(name, *constraints, bound=bound, - covariant=covariant, contravariant=contravariant) - self.__name__ = name - if covariant and contravariant: - raise ValueError("Bivariant types are not supported.") - self.__covariant__ = bool(covariant) - self.__contravariant__ = bool(contravariant) - if constraints and bound is not None: - raise TypeError("Constraints cannot be combined with bound=...") - if constraints and len(constraints) == 1: - raise TypeError("A single constraint is not allowed") - msg = "TypeVar(name, constraint, ...): constraints must be types." - self.__constraints__ = tuple(_type_check(t, msg) for t in constraints) - if bound: - self.__bound__ = _type_check(bound, "Bound must be a type.") - else: - self.__bound__ = None - - def _get_type_vars(self, tvars): - if self not in tvars: - tvars.append(self) - - def __repr__(self): - if self.__covariant__: - prefix = '+' - elif self.__contravariant__: - prefix = '-' - else: - prefix = '~' - return prefix + self.__name__ - - def __instancecheck__(self, instance): - raise TypeError("Type variables cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Type variables cannot be used with issubclass().") - - -# Some unconstrained type variables. These are used by the container types. -# (These are not for export.) -T = TypeVar('T') # Any type. 
-KT = TypeVar('KT') # Key type. -VT = TypeVar('VT') # Value type. -T_co = TypeVar('T_co', covariant=True) # Any type covariant containers. -V_co = TypeVar('V_co', covariant=True) # Any type covariant containers. -VT_co = TypeVar('VT_co', covariant=True) # Value type covariant containers. -T_contra = TypeVar('T_contra', contravariant=True) # Ditto contravariant. - -# A useful type variable with constraints. This represents string types. -# (This one *is* for export!) -AnyStr = TypeVar('AnyStr', bytes, str) - - -def _replace_arg(arg, tvars, args): - """An internal helper function: replace arg if it is a type variable - found in tvars with corresponding substitution from args or - with corresponding substitution sub-tree if arg is a generic type. - """ - - if tvars is None: - tvars = [] - if hasattr(arg, '_subs_tree') and isinstance(arg, (GenericMeta, _TypingBase)): - return arg._subs_tree(tvars, args) - if isinstance(arg, TypeVar): - for i, tvar in enumerate(tvars): - if arg == tvar: - return args[i] - return arg - - -# Special typing constructs Union, Optional, Generic, Callable and Tuple -# use three special attributes for internal bookkeeping of generic types: -# * __parameters__ is a tuple of unique free type parameters of a generic -# type, for example, Dict[T, T].__parameters__ == (T,); -# * __origin__ keeps a reference to a type that was subscripted, -# e.g., Union[T, int].__origin__ == Union; -# * __args__ is a tuple of all arguments used in subscripting, -# e.g., Dict[T, int].__args__ == (T, int). - - -def _subs_tree(cls, tvars=None, args=None): - """An internal helper function: calculate substitution tree - for generic cls after replacing its type parameters with - substitutions in tvars -> args (if any). - Repeat the same following __origin__'s. - - Return a list of arguments with all possible substitutions - performed. Arguments that are generic classes themselves are represented - as tuples (so that no new classes are created by this function). 
- For example: _subs_tree(List[Tuple[int, T]][str]) == [(Tuple, int, str)] - """ - - if cls.__origin__ is None: - return cls - # Make of chain of origins (i.e. cls -> cls.__origin__) - current = cls.__origin__ - orig_chain = [] - while current.__origin__ is not None: - orig_chain.append(current) - current = current.__origin__ - # Replace type variables in __args__ if asked ... - tree_args = [] - for arg in cls.__args__: - tree_args.append(_replace_arg(arg, tvars, args)) - # ... then continue replacing down the origin chain. - for ocls in orig_chain: - new_tree_args = [] - for arg in ocls.__args__: - new_tree_args.append(_replace_arg(arg, ocls.__parameters__, tree_args)) - tree_args = new_tree_args - return tree_args - - -def _remove_dups_flatten(parameters): - """An internal helper for Union creation and substitution: flatten Union's - among parameters, then remove duplicates and strict subclasses. - """ - - # Flatten out Union[Union[...], ...]. - params = [] - for p in parameters: - if isinstance(p, _Union) and p.__origin__ is Union: - params.extend(p.__args__) - elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union: - params.extend(p[1:]) - else: - params.append(p) - # Weed out strict duplicates, preserving the first of each occurrence. - all_params = set(params) - if len(all_params) < len(params): - new_params = [] - for t in params: - if t in all_params: - new_params.append(t) - all_params.remove(t) - params = new_params - assert not all_params, all_params - # Weed out subclasses. - # E.g. Union[int, Employee, Manager] == Union[int, Employee]. - # If object is present it will be sole survivor among proper classes. - # Never discard type variables. - # (In particular, Union[str, AnyStr] != AnyStr.) 
- all_params = set(params) - for t1 in params: - if not isinstance(t1, type): - continue - if any(isinstance(t2, type) and issubclass(t1, t2) - for t2 in all_params - {t1} - if not (isinstance(t2, GenericMeta) and - t2.__origin__ is not None)): - all_params.remove(t1) - return tuple(t for t in params if t in all_params) - - -def _check_generic(cls, parameters): - # Check correct count for parameters of a generic cls (internal helper). - if not cls.__parameters__: - raise TypeError("%s is not a generic class" % repr(cls)) - alen = len(parameters) - elen = len(cls.__parameters__) - if alen != elen: - raise TypeError("Too %s parameters for %s; actual %s, expected %s" % - ("many" if alen > elen else "few", repr(cls), alen, elen)) - - -_cleanups = [] - - -def _tp_cache(func): - """Internal wrapper caching __getitem__ of generic types with a fallback to - original function for non-hashable arguments. - """ - - cached = functools.lru_cache()(func) - _cleanups.append(cached.cache_clear) - - @functools.wraps(func) - def inner(*args, **kwds): - try: - return cached(*args, **kwds) - except TypeError: - pass # All real errors (not unhashable args) are raised below. - return func(*args, **kwds) - return inner - - -class _Union(_FinalTypingBase, _root=True): - """Union type; Union[X, Y] means either X or Y. - - To define a union, use e.g. Union[int, str]. Details: - - - The arguments must be types and there must be at least one. - - - None as an argument is a special case and is replaced by - type(None). 
- - - Unions of unions are flattened, e.g.:: - - Union[Union[int, str], float] == Union[int, str, float] - - - Unions of a single argument vanish, e.g.:: - - Union[int] == int # The constructor actually returns int - - - Redundant arguments are skipped, e.g.:: - - Union[int, str, int] == Union[int, str] - - - When comparing unions, the argument order is ignored, e.g.:: - - Union[int, str] == Union[str, int] - - - When two arguments have a subclass relationship, the least - derived argument is kept, e.g.:: - - class Employee: pass - class Manager(Employee): pass - Union[int, Employee, Manager] == Union[int, Employee] - Union[Manager, int, Employee] == Union[int, Employee] - Union[Employee, Manager] == Employee - - - Similar for object:: - - Union[int, object] == object - - - You cannot subclass or instantiate a union. - - - You can use Optional[X] as a shorthand for Union[X, None]. - """ - - __slots__ = ('__parameters__', '__args__', '__origin__', '__tree_hash__') - - def __new__(cls, parameters=None, origin=None, *args, _root=False): - self = super().__new__(cls, parameters, origin, *args, _root=_root) - if origin is None: - self.__parameters__ = None - self.__args__ = None - self.__origin__ = None - self.__tree_hash__ = hash(frozenset(('Union',))) - return self - if not isinstance(parameters, tuple): - raise TypeError("Expected parameters=") - if origin is Union: - parameters = _remove_dups_flatten(parameters) - # It's not a union if there's only one type left. - if len(parameters) == 1: - return parameters[0] - self.__parameters__ = _type_vars(parameters) - self.__args__ = parameters - self.__origin__ = origin - # Pre-calculate the __hash__ on instantiation. - # This improves speed for complex substitutions. 
- subs_tree = self._subs_tree() - if isinstance(subs_tree, tuple): - self.__tree_hash__ = hash(frozenset(subs_tree)) - else: - self.__tree_hash__ = hash(subs_tree) - return self - - def _eval_type(self, globalns, localns): - if self.__args__ is None: - return self - ev_args = tuple(_eval_type(t, globalns, localns) for t in self.__args__) - ev_origin = _eval_type(self.__origin__, globalns, localns) - if ev_args == self.__args__ and ev_origin == self.__origin__: - # Everything is already evaluated. - return self - return self.__class__(ev_args, ev_origin, _root=True) - - def _get_type_vars(self, tvars): - if self.__origin__ and self.__parameters__: - _get_type_vars(self.__parameters__, tvars) - - def __repr__(self): - if self.__origin__ is None: - return super().__repr__() - tree = self._subs_tree() - if not isinstance(tree, tuple): - return repr(tree) - return tree[0]._tree_repr(tree) - - def _tree_repr(self, tree): - arg_list = [] - for arg in tree[1:]: - if not isinstance(arg, tuple): - arg_list.append(_type_repr(arg)) - else: - arg_list.append(arg[0]._tree_repr(arg)) - return super().__repr__() + '[%s]' % ', '.join(arg_list) - - @_tp_cache - def __getitem__(self, parameters): - if parameters == (): - raise TypeError("Cannot take a Union of no types.") - if not isinstance(parameters, tuple): - parameters = (parameters,) - if self.__origin__ is None: - msg = "Union[arg, ...]: each arg must be a type." - else: - msg = "Parameters to generic types must be types." 
- parameters = tuple(_type_check(p, msg) for p in parameters) - if self is not Union: - _check_generic(self, parameters) - return self.__class__(parameters, origin=self, _root=True) - - def _subs_tree(self, tvars=None, args=None): - if self is Union: - return Union # Nothing to substitute - tree_args = _subs_tree(self, tvars, args) - tree_args = _remove_dups_flatten(tree_args) - if len(tree_args) == 1: - return tree_args[0] # Union of a single type is that type - return (Union,) + tree_args - - def __eq__(self, other): - if isinstance(other, _Union): - return self.__tree_hash__ == other.__tree_hash__ - elif self is not Union: - return self._subs_tree() == other - else: - return self is other - - def __hash__(self): - return self.__tree_hash__ - - def __instancecheck__(self, obj): - raise TypeError("Unions cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Unions cannot be used with issubclass().") - - -Union = _Union(_root=True) - - -class _Optional(_FinalTypingBase, _root=True): - """Optional type. - - Optional[X] is equivalent to Union[X, None]. - """ - - __slots__ = () - - @_tp_cache - def __getitem__(self, arg): - arg = _type_check(arg, "Optional[t] requires a single type.") - return Union[arg, type(None)] - - -Optional = _Optional(_root=True) - - -def _next_in_mro(cls): - """Helper for Generic.__new__. - - Returns the class after the last occurrence of Generic or - Generic[...] in cls.__mro__. - """ - next_in_mro = object - # Look for the last occurrence of Generic or Generic[...]. - for i, c in enumerate(cls.__mro__[:-1]): - if isinstance(c, GenericMeta) and c._gorg is Generic: - next_in_mro = cls.__mro__[i + 1] - return next_in_mro - - -def _make_subclasshook(cls): - """Construct a __subclasshook__ callable that incorporates - the associated __extra__ class in subclass checks performed - against cls. - """ - if isinstance(cls.__extra__, abc.ABCMeta): - # The logic mirrors that of ABCMeta.__subclasscheck__. 
- # Registered classes need not be checked here because - # cls and its extra share the same _abc_registry. - def __extrahook__(subclass): - res = cls.__extra__.__subclasshook__(subclass) - if res is not NotImplemented: - return res - if cls.__extra__ in subclass.__mro__: - return True - for scls in cls.__extra__.__subclasses__(): - if isinstance(scls, GenericMeta): - continue - if issubclass(subclass, scls): - return True - return NotImplemented - else: - # For non-ABC extras we'll just call issubclass(). - def __extrahook__(subclass): - if cls.__extra__ and issubclass(subclass, cls.__extra__): - return True - return NotImplemented - return __extrahook__ - - -def _no_slots_copy(dct): - """Internal helper: copy class __dict__ and clean slots class variables. - (They will be re-created if necessary by normal class machinery.) - """ - dict_copy = dict(dct) - if '__slots__' in dict_copy: - for slot in dict_copy['__slots__']: - dict_copy.pop(slot, None) - return dict_copy - - -class GenericMeta(TypingMeta, abc.ABCMeta): - """Metaclass for generic types. - - This is a metaclass for typing.Generic and generic ABCs defined in - typing module. User defined subclasses of GenericMeta can override - __new__ and invoke super().__new__. Note that GenericMeta.__new__ - has strict rules on what is allowed in its bases argument: - * plain Generic is disallowed in bases; - * Generic[...] should appear in bases at most once; - * if Generic[...] is present, then it should list all type variables - that appear in other bases. - In addition, type of all generic bases is erased, e.g., C[int] is - stripped to plain C. - """ - - def __new__(cls, name, bases, namespace, - tvars=None, args=None, origin=None, extra=None, orig_bases=None): - """Create a new generic class. GenericMeta.__new__ accepts - keyword arguments that are used for internal bookkeeping, therefore - an override should pass unused keyword arguments to super(). 
- """ - if tvars is not None: - # Called from __getitem__() below. - assert origin is not None - assert all(isinstance(t, TypeVar) for t in tvars), tvars - else: - # Called from class statement. - assert tvars is None, tvars - assert args is None, args - assert origin is None, origin - - # Get the full set of tvars from the bases. - tvars = _type_vars(bases) - # Look for Generic[T1, ..., Tn]. - # If found, tvars must be a subset of it. - # If not found, tvars is it. - # Also check for and reject plain Generic, - # and reject multiple Generic[...]. - gvars = None - for base in bases: - if base is Generic: - raise TypeError("Cannot inherit from plain Generic") - if (isinstance(base, GenericMeta) and - base.__origin__ is Generic): - if gvars is not None: - raise TypeError( - "Cannot inherit from Generic[...] multiple types.") - gvars = base.__parameters__ - if gvars is None: - gvars = tvars - else: - tvarset = set(tvars) - gvarset = set(gvars) - if not tvarset <= gvarset: - raise TypeError( - "Some type variables (%s) " - "are not listed in Generic[%s]" % - (", ".join(str(t) for t in tvars if t not in gvarset), - ", ".join(str(g) for g in gvars))) - tvars = gvars - - initial_bases = bases - if extra is not None and type(extra) is abc.ABCMeta and extra not in bases: - bases = (extra,) + bases - bases = tuple(b._gorg if isinstance(b, GenericMeta) else b for b in bases) - - # remove bare Generic from bases if there are other generic bases - if any(isinstance(b, GenericMeta) and b is not Generic for b in bases): - bases = tuple(b for b in bases if b is not Generic) - namespace.update({'__origin__': origin, '__extra__': extra, - '_gorg': None if not origin else origin._gorg}) - self = super().__new__(cls, name, bases, namespace, _root=True) - super(GenericMeta, self).__setattr__('_gorg', - self if not origin else origin._gorg) - self.__parameters__ = tvars - # Be prepared that GenericMeta will be subclassed by TupleMeta - # and CallableMeta, those two allow ..., (), or [] 
in __args___. - self.__args__ = tuple(... if a is _TypingEllipsis else - () if a is _TypingEmpty else - a for a in args) if args else None - # Speed hack (https://github.com/python/typing/issues/196). - self.__next_in_mro__ = _next_in_mro(self) - # Preserve base classes on subclassing (__bases__ are type erased now). - if orig_bases is None: - self.__orig_bases__ = initial_bases - - # This allows unparameterized generic collections to be used - # with issubclass() and isinstance() in the same way as their - # collections.abc counterparts (e.g., isinstance([], Iterable)). - if ( - '__subclasshook__' not in namespace and extra or - # allow overriding - getattr(self.__subclasshook__, '__name__', '') == '__extrahook__' - ): - self.__subclasshook__ = _make_subclasshook(self) - if isinstance(extra, abc.ABCMeta): - self._abc_registry = extra._abc_registry - self._abc_cache = extra._abc_cache - elif origin is not None: - self._abc_registry = origin._abc_registry - self._abc_cache = origin._abc_cache - - if origin and hasattr(origin, '__qualname__'): # Fix for Python 3.2. - self.__qualname__ = origin.__qualname__ - self.__tree_hash__ = (hash(self._subs_tree()) if origin else - super(GenericMeta, self).__hash__()) - return self - - # _abc_negative_cache and _abc_negative_cache_version - # realised as descriptors, since GenClass[t1, t2, ...] always - # share subclass info with GenClass. - # This is an important memory optimization. 
- @property - def _abc_negative_cache(self): - if isinstance(self.__extra__, abc.ABCMeta): - return self.__extra__._abc_negative_cache - return self._gorg._abc_generic_negative_cache - - @_abc_negative_cache.setter - def _abc_negative_cache(self, value): - if self.__origin__ is None: - if isinstance(self.__extra__, abc.ABCMeta): - self.__extra__._abc_negative_cache = value - else: - self._abc_generic_negative_cache = value - - @property - def _abc_negative_cache_version(self): - if isinstance(self.__extra__, abc.ABCMeta): - return self.__extra__._abc_negative_cache_version - return self._gorg._abc_generic_negative_cache_version - - @_abc_negative_cache_version.setter - def _abc_negative_cache_version(self, value): - if self.__origin__ is None: - if isinstance(self.__extra__, abc.ABCMeta): - self.__extra__._abc_negative_cache_version = value - else: - self._abc_generic_negative_cache_version = value - - def _get_type_vars(self, tvars): - if self.__origin__ and self.__parameters__: - _get_type_vars(self.__parameters__, tvars) - - def _eval_type(self, globalns, localns): - ev_origin = (self.__origin__._eval_type(globalns, localns) - if self.__origin__ else None) - ev_args = tuple(_eval_type(a, globalns, localns) for a - in self.__args__) if self.__args__ else None - if ev_origin == self.__origin__ and ev_args == self.__args__: - return self - return self.__class__(self.__name__, - self.__bases__, - _no_slots_copy(self.__dict__), - tvars=_type_vars(ev_args) if ev_args else None, - args=ev_args, - origin=ev_origin, - extra=self.__extra__, - orig_bases=self.__orig_bases__) - - def __repr__(self): - if self.__origin__ is None: - return super().__repr__() - return self._tree_repr(self._subs_tree()) - - def _tree_repr(self, tree): - arg_list = [] - for arg in tree[1:]: - if arg == (): - arg_list.append('()') - elif not isinstance(arg, tuple): - arg_list.append(_type_repr(arg)) - else: - arg_list.append(arg[0]._tree_repr(arg)) - return super().__repr__() + '[%s]' % ', 
'.join(arg_list) - - def _subs_tree(self, tvars=None, args=None): - if self.__origin__ is None: - return self - tree_args = _subs_tree(self, tvars, args) - return (self._gorg,) + tuple(tree_args) - - def __eq__(self, other): - if not isinstance(other, GenericMeta): - return NotImplemented - if self.__origin__ is None or other.__origin__ is None: - return self is other - return self.__tree_hash__ == other.__tree_hash__ - - def __hash__(self): - return self.__tree_hash__ - - @_tp_cache - def __getitem__(self, params): - if not isinstance(params, tuple): - params = (params,) - if not params and self._gorg is not Tuple: - raise TypeError( - "Parameter list to %s[...] cannot be empty" % _qualname(self)) - msg = "Parameters to generic types must be types." - params = tuple(_type_check(p, msg) for p in params) - if self is Generic: - # Generic can only be subscripted with unique type variables. - if not all(isinstance(p, TypeVar) for p in params): - raise TypeError( - "Parameters to Generic[...] must all be type variables") - if len(set(params)) != len(params): - raise TypeError( - "Parameters to Generic[...] must all be unique") - tvars = params - args = params - elif self in (Tuple, Callable): - tvars = _type_vars(params) - args = params - elif self is _Protocol: - # _Protocol is internal, don't check anything. - tvars = params - args = params - elif self.__origin__ in (Generic, _Protocol): - # Can't subscript Generic[...] or _Protocol[...]. - raise TypeError("Cannot subscript already-subscripted %s" % - repr(self)) - else: - # Subscripting a regular Generic subclass. 
- _check_generic(self, params) - tvars = _type_vars(params) - args = params - - prepend = (self,) if self.__origin__ is None else () - return self.__class__(self.__name__, - prepend + self.__bases__, - _no_slots_copy(self.__dict__), - tvars=tvars, - args=args, - origin=self, - extra=self.__extra__, - orig_bases=self.__orig_bases__) - - def __subclasscheck__(self, cls): - if self.__origin__ is not None: - if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']: - raise TypeError("Parameterized generics cannot be used with class " - "or instance checks") - return False - if self is Generic: - raise TypeError("Class %r cannot be used with class " - "or instance checks" % self) - return super().__subclasscheck__(cls) - - def __instancecheck__(self, instance): - # Since we extend ABC.__subclasscheck__ and - # ABC.__instancecheck__ inlines the cache checking done by the - # latter, we must extend __instancecheck__ too. For simplicity - # we just skip the cache check -- instance checks for generic - # classes are supposed to be rare anyways. - return issubclass(instance.__class__, self) - - def __setattr__(self, attr, value): - # We consider all the subscripted generics as proxies for original class - if ( - attr.startswith('__') and attr.endswith('__') or - attr.startswith('_abc_') or - self._gorg is None # The class is not fully created, see #typing/506 - ): - super(GenericMeta, self).__setattr__(attr, value) - else: - super(GenericMeta, self._gorg).__setattr__(attr, value) - - -# Prevent checks for Generic to crash when defining Generic. 
-Generic = None - - -def _generic_new(base_cls, cls, *args, **kwds): - # Assure type is erased on instantiation, - # but attempt to store it in __orig_class__ - if cls.__origin__ is None: - return base_cls.__new__(cls) - else: - origin = cls._gorg - obj = base_cls.__new__(origin) - try: - obj.__orig_class__ = cls - except AttributeError: - pass - obj.__init__(*args, **kwds) - return obj - - -class Generic(metaclass=GenericMeta): - """Abstract base class for generic types. - - A generic type is typically declared by inheriting from - this class parameterized with one or more type variables. - For example, a generic mapping type might be defined as:: - - class Mapping(Generic[KT, VT]): - def __getitem__(self, key: KT) -> VT: - ... - # Etc. - - This class can then be used as follows:: - - def lookup_name(mapping: Mapping[KT, VT], key: KT, default: VT) -> VT: - try: - return mapping[key] - except KeyError: - return default - """ - - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is Generic: - raise TypeError("Type Generic cannot be instantiated; " - "it can be used only as a base class") - return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) - - -class _TypingEmpty: - """Internal placeholder for () or []. Used by TupleMeta and CallableMeta - to allow empty list/tuple in specific places, without allowing them - to sneak in where prohibited. - """ - - -class _TypingEllipsis: - """Internal placeholder for ... (ellipsis).""" - - -class TupleMeta(GenericMeta): - """Metaclass for Tuple (internal).""" - - @_tp_cache - def __getitem__(self, parameters): - if self.__origin__ is not None or self._gorg is not Tuple: - # Normal generic rules apply if this is not the first subscription - # or a subscription of a subclass. 
- return super().__getitem__(parameters) - if parameters == (): - return super().__getitem__((_TypingEmpty,)) - if not isinstance(parameters, tuple): - parameters = (parameters,) - if len(parameters) == 2 and parameters[1] is ...: - msg = "Tuple[t, ...]: t must be a type." - p = _type_check(parameters[0], msg) - return super().__getitem__((p, _TypingEllipsis)) - msg = "Tuple[t0, t1, ...]: each t must be a type." - parameters = tuple(_type_check(p, msg) for p in parameters) - return super().__getitem__(parameters) - - def __instancecheck__(self, obj): - if self.__args__ is None: - return isinstance(obj, tuple) - raise TypeError("Parameterized Tuple cannot be used " - "with isinstance().") - - def __subclasscheck__(self, cls): - if self.__args__ is None: - return issubclass(cls, tuple) - raise TypeError("Parameterized Tuple cannot be used " - "with issubclass().") - - -class Tuple(tuple, extra=tuple, metaclass=TupleMeta): - """Tuple type; Tuple[X, Y] is the cross-product type of X and Y. - - Example: Tuple[T1, T2] is a tuple of two elements corresponding - to type variables T1 and T2. Tuple[int, float, str] is a tuple - of an int, a float and a string. - - To specify a variable-length tuple of homogeneous type, use Tuple[T, ...]. - """ - - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is Tuple: - raise TypeError("Type Tuple cannot be instantiated; " - "use tuple() instead") - return _generic_new(tuple, cls, *args, **kwds) - - -class CallableMeta(GenericMeta): - """Metaclass for Callable (internal).""" - - def __repr__(self): - if self.__origin__ is None: - return super().__repr__() - return self._tree_repr(self._subs_tree()) - - def _tree_repr(self, tree): - if self._gorg is not Callable: - return super()._tree_repr(tree) - # For actual Callable (not its subclass) we override - # super()._tree_repr() for nice formatting. 
- arg_list = [] - for arg in tree[1:]: - if not isinstance(arg, tuple): - arg_list.append(_type_repr(arg)) - else: - arg_list.append(arg[0]._tree_repr(arg)) - if arg_list[0] == '...': - return repr(tree[0]) + '[..., %s]' % arg_list[1] - return (repr(tree[0]) + - '[[%s], %s]' % (', '.join(arg_list[:-1]), arg_list[-1])) - - def __getitem__(self, parameters): - """A thin wrapper around __getitem_inner__ to provide the latter - with hashable arguments to improve speed. - """ - - if self.__origin__ is not None or self._gorg is not Callable: - return super().__getitem__(parameters) - if not isinstance(parameters, tuple) or len(parameters) != 2: - raise TypeError("Callable must be used as " - "Callable[[arg, ...], result].") - args, result = parameters - if args is Ellipsis: - parameters = (Ellipsis, result) - else: - if not isinstance(args, list): - raise TypeError("Callable[args, result]: args must be a list." - " Got %.100r." % (args,)) - parameters = (tuple(args), result) - return self.__getitem_inner__(parameters) - - @_tp_cache - def __getitem_inner__(self, parameters): - args, result = parameters - msg = "Callable[args, result]: result must be a type." - result = _type_check(result, msg) - if args is Ellipsis: - return super().__getitem__((_TypingEllipsis, result)) - msg = "Callable[[arg, ...], result]: each arg must be a type." - args = tuple(_type_check(arg, msg) for arg in args) - parameters = args + (result,) - return super().__getitem__(parameters) - - -class Callable(extra=collections_abc.Callable, metaclass=CallableMeta): - """Callable type; Callable[[int], str] is a function of (int) -> str. - - The subscription syntax must always be used with exactly two - values: the argument list and the return type. The argument list - must be a list of types or ellipsis; the return type must be a single type. - - There is no syntax to indicate optional or keyword arguments, - such function types are rarely used as callback types. 
- """ - - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is Callable: - raise TypeError("Type Callable cannot be instantiated; " - "use a non-abstract subclass instead") - return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) - - -class _ClassVar(_FinalTypingBase, _root=True): - """Special type construct to mark class variables. - - An annotation wrapped in ClassVar indicates that a given - attribute is intended to be used as a class variable and - should not be set on instances of that class. Usage:: - - class Starship: - stats: ClassVar[Dict[str, int]] = {} # class variable - damage: int = 10 # instance variable - - ClassVar accepts only types and cannot be further subscribed. - - Note that ClassVar is not a class itself, and should not - be used with isinstance() or issubclass(). - """ - - __slots__ = ('__type__',) - - def __init__(self, tp=None, **kwds): - self.__type__ = tp - - def __getitem__(self, item): - cls = type(self) - if self.__type__ is None: - return cls(_type_check(item, - '{} accepts only single type.'.format(cls.__name__[1:])), - _root=True) - raise TypeError('{} cannot be further subscripted' - .format(cls.__name__[1:])) - - def _eval_type(self, globalns, localns): - new_tp = _eval_type(self.__type__, globalns, localns) - if new_tp == self.__type__: - return self - return type(self)(new_tp, _root=True) - - def __repr__(self): - r = super().__repr__() - if self.__type__ is not None: - r += '[{}]'.format(_type_repr(self.__type__)) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__type__)) - - def __eq__(self, other): - if not isinstance(other, _ClassVar): - return NotImplemented - if self.__type__ is not None: - return self.__type__ == other.__type__ - return self is other - - -ClassVar = _ClassVar(_root=True) - - -def cast(typ, val): - """Cast a value to a type. - - This returns the value unchanged. 
To the type checker this - signals that the return value has the designated type, but at - runtime we intentionally don't check anything (we want this - to be as fast as possible). - """ - return val - - -def _get_defaults(func): - """Internal helper to extract the default arguments, by name.""" - try: - code = func.__code__ - except AttributeError: - # Some built-in functions don't have __code__, __defaults__, etc. - return {} - pos_count = code.co_argcount - arg_names = code.co_varnames - arg_names = arg_names[:pos_count] - defaults = func.__defaults__ or () - kwdefaults = func.__kwdefaults__ - res = dict(kwdefaults) if kwdefaults else {} - pos_offset = pos_count - len(defaults) - for name, value in zip(arg_names[pos_offset:], defaults): - assert name not in res - res[name] = value - return res - - -_allowed_types = (types.FunctionType, types.BuiltinFunctionType, - types.MethodType, types.ModuleType, - WrapperDescriptorType, MethodWrapperType, MethodDescriptorType) - - -def get_type_hints(obj, globalns=None, localns=None): - """Return type hints for an object. - - This is often the same as obj.__annotations__, but it handles - forward references encoded as string literals, and if necessary - adds Optional[t] if a default value equal to None is set. - - The argument may be a module, class, method, or function. The annotations - are returned as a dictionary. For classes, annotations include also - inherited members. - - TypeError is raised if the argument is not of a type that can contain - annotations, and an empty dictionary is returned if no annotations are - present. - - BEWARE -- the behavior of globalns and localns is counterintuitive - (unless you are familiar with how eval() and exec() work). The - search order is locals first, then globals. - - - If no dict arguments are passed, an attempt is made to use the - globals from obj (or the respective module's globals for classes), - and these are also used as the locals. 
If the object does not appear - to have globals, an empty dictionary is used. - - - If one dict argument is passed, it is used for both globals and - locals. - - - If two dict arguments are passed, they specify globals and - locals, respectively. - """ - - if getattr(obj, '__no_type_check__', None): - return {} - # Classes require a special treatment. - if isinstance(obj, type): - hints = {} - for base in reversed(obj.__mro__): - if globalns is None: - base_globals = sys.modules[base.__module__].__dict__ - else: - base_globals = globalns - ann = base.__dict__.get('__annotations__', {}) - for name, value in ann.items(): - if value is None: - value = type(None) - if isinstance(value, str): - value = _ForwardRef(value) - value = _eval_type(value, base_globals, localns) - hints[name] = value - return hints - - if globalns is None: - if isinstance(obj, types.ModuleType): - globalns = obj.__dict__ - else: - globalns = getattr(obj, '__globals__', {}) - if localns is None: - localns = globalns - elif localns is None: - localns = globalns - hints = getattr(obj, '__annotations__', None) - if hints is None: - # Return empty annotations for something that _could_ have them. - if isinstance(obj, _allowed_types): - return {} - else: - raise TypeError('{!r} is not a module, class, method, ' - 'or function.'.format(obj)) - defaults = _get_defaults(obj) - hints = dict(hints) - for name, value in hints.items(): - if value is None: - value = type(None) - if isinstance(value, str): - value = _ForwardRef(value) - value = _eval_type(value, globalns, localns) - if name in defaults and defaults[name] is None: - value = Optional[value] - hints[name] = value - return hints - - -def no_type_check(arg): - """Decorator to indicate that annotations are not type hints. - - The argument must be a class or function; if it is a class, it - applies recursively to all methods and classes defined in that class - (but not to methods defined in its superclasses or subclasses). 
- - This mutates the function(s) or class(es) in place. - """ - if isinstance(arg, type): - arg_attrs = arg.__dict__.copy() - for attr, val in arg.__dict__.items(): - if val in arg.__bases__ + (arg,): - arg_attrs.pop(attr) - for obj in arg_attrs.values(): - if isinstance(obj, types.FunctionType): - obj.__no_type_check__ = True - if isinstance(obj, type): - no_type_check(obj) - try: - arg.__no_type_check__ = True - except TypeError: # built-in classes - pass - return arg - - -def no_type_check_decorator(decorator): - """Decorator to give another decorator the @no_type_check effect. - - This wraps the decorator with something that wraps the decorated - function in @no_type_check. - """ - - @functools.wraps(decorator) - def wrapped_decorator(*args, **kwds): - func = decorator(*args, **kwds) - func = no_type_check(func) - return func - - return wrapped_decorator - - -def _overload_dummy(*args, **kwds): - """Helper for @overload to raise when called.""" - raise NotImplementedError( - "You should not call an overloaded function. " - "A series of @overload-decorated functions " - "outside a stub module should always be followed " - "by an implementation that is not @overload-ed.") - - -def overload(func): - """Decorator for overloaded functions/methods. - - In a stub file, place two or more stub definitions for the same - function in a row, each decorated with @overload. For example: - - @overload - def utf8(value: None) -> None: ... - @overload - def utf8(value: bytes) -> bytes: ... - @overload - def utf8(value: str) -> bytes: ... - - In a non-stub file (i.e. a regular .py file), do the same but - follow it with an implementation. The implementation should *not* - be decorated with @overload. For example: - - @overload - def utf8(value: None) -> None: ... - @overload - def utf8(value: bytes) -> bytes: ... - @overload - def utf8(value: str) -> bytes: ... 
- def utf8(value): - # implementation goes here - """ - return _overload_dummy - - -class _ProtocolMeta(GenericMeta): - """Internal metaclass for _Protocol. - - This exists so _Protocol classes can be generic without deriving - from Generic. - """ - - def __instancecheck__(self, obj): - if _Protocol not in self.__bases__: - return super().__instancecheck__(obj) - raise TypeError("Protocols cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - if not self._is_protocol: - # No structural checks since this isn't a protocol. - return NotImplemented - - if self is _Protocol: - # Every class is a subclass of the empty protocol. - return True - - # Find all attributes defined in the protocol. - attrs = self._get_protocol_attrs() - - for attr in attrs: - if not any(attr in d.__dict__ for d in cls.__mro__): - return False - return True - - def _get_protocol_attrs(self): - # Get all Protocol base classes. - protocol_bases = [] - for c in self.__mro__: - if getattr(c, '_is_protocol', False) and c.__name__ != '_Protocol': - protocol_bases.append(c) - - # Get attributes included in protocol. - attrs = set() - for base in protocol_bases: - for attr in base.__dict__.keys(): - # Include attributes not defined in any non-protocol bases. 
- for c in self.__mro__: - if (c is not base and attr in c.__dict__ and - not getattr(c, '_is_protocol', False)): - break - else: - if (not attr.startswith('_abc_') and - attr != '__abstractmethods__' and - attr != '__annotations__' and - attr != '__weakref__' and - attr != '_is_protocol' and - attr != '_gorg' and - attr != '__dict__' and - attr != '__args__' and - attr != '__slots__' and - attr != '_get_protocol_attrs' and - attr != '__next_in_mro__' and - attr != '__parameters__' and - attr != '__origin__' and - attr != '__orig_bases__' and - attr != '__extra__' and - attr != '__tree_hash__' and - attr != '__module__'): - attrs.add(attr) - - return attrs - - -class _Protocol(metaclass=_ProtocolMeta): - """Internal base class for protocol classes. - - This implements a simple-minded structural issubclass check - (similar but more general than the one-offs in collections.abc - such as Hashable). - """ - - __slots__ = () - - _is_protocol = True - - -# Various ABCs mimicking those in collections.abc. -# A few are simply re-exported for completeness. - -Hashable = collections_abc.Hashable # Not generic. 
- - -if hasattr(collections_abc, 'Awaitable'): - class Awaitable(Generic[T_co], extra=collections_abc.Awaitable): - __slots__ = () - - __all__.append('Awaitable') - - -if hasattr(collections_abc, 'Coroutine'): - class Coroutine(Awaitable[V_co], Generic[T_co, T_contra, V_co], - extra=collections_abc.Coroutine): - __slots__ = () - - __all__.append('Coroutine') - - -if hasattr(collections_abc, 'AsyncIterable'): - - class AsyncIterable(Generic[T_co], extra=collections_abc.AsyncIterable): - __slots__ = () - - class AsyncIterator(AsyncIterable[T_co], - extra=collections_abc.AsyncIterator): - __slots__ = () - - __all__.append('AsyncIterable') - __all__.append('AsyncIterator') - - -class Iterable(Generic[T_co], extra=collections_abc.Iterable): - __slots__ = () - - -class Iterator(Iterable[T_co], extra=collections_abc.Iterator): - __slots__ = () - - -class SupportsInt(_Protocol): - __slots__ = () - - @abstractmethod - def __int__(self) -> int: - pass - - -class SupportsFloat(_Protocol): - __slots__ = () - - @abstractmethod - def __float__(self) -> float: - pass - - -class SupportsComplex(_Protocol): - __slots__ = () - - @abstractmethod - def __complex__(self) -> complex: - pass - - -class SupportsBytes(_Protocol): - __slots__ = () - - @abstractmethod - def __bytes__(self) -> bytes: - pass - - -class SupportsAbs(_Protocol[T_co]): - __slots__ = () - - @abstractmethod - def __abs__(self) -> T_co: - pass - - -class SupportsRound(_Protocol[T_co]): - __slots__ = () - - @abstractmethod - def __round__(self, ndigits: int = 0) -> T_co: - pass - - -if hasattr(collections_abc, 'Reversible'): - class Reversible(Iterable[T_co], extra=collections_abc.Reversible): - __slots__ = () -else: - class Reversible(_Protocol[T_co]): - __slots__ = () - - @abstractmethod - def __reversed__(self) -> 'Iterator[T_co]': - pass - - -Sized = collections_abc.Sized # Not generic. 
- - -class Container(Generic[T_co], extra=collections_abc.Container): - __slots__ = () - - -if hasattr(collections_abc, 'Collection'): - class Collection(Sized, Iterable[T_co], Container[T_co], - extra=collections_abc.Collection): - __slots__ = () - - __all__.append('Collection') - - -# Callable was defined earlier. - -if hasattr(collections_abc, 'Collection'): - class AbstractSet(Collection[T_co], - extra=collections_abc.Set): - __slots__ = () -else: - class AbstractSet(Sized, Iterable[T_co], Container[T_co], - extra=collections_abc.Set): - __slots__ = () - - -class MutableSet(AbstractSet[T], extra=collections_abc.MutableSet): - __slots__ = () - - -# NOTE: It is only covariant in the value type. -if hasattr(collections_abc, 'Collection'): - class Mapping(Collection[KT], Generic[KT, VT_co], - extra=collections_abc.Mapping): - __slots__ = () -else: - class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co], - extra=collections_abc.Mapping): - __slots__ = () - - -class MutableMapping(Mapping[KT, VT], extra=collections_abc.MutableMapping): - __slots__ = () - - -if hasattr(collections_abc, 'Reversible'): - if hasattr(collections_abc, 'Collection'): - class Sequence(Reversible[T_co], Collection[T_co], - extra=collections_abc.Sequence): - __slots__ = () - else: - class Sequence(Sized, Reversible[T_co], Container[T_co], - extra=collections_abc.Sequence): - __slots__ = () -else: - class Sequence(Sized, Iterable[T_co], Container[T_co], - extra=collections_abc.Sequence): - __slots__ = () - - -class MutableSequence(Sequence[T], extra=collections_abc.MutableSequence): - __slots__ = () - - -class ByteString(Sequence[int], extra=collections_abc.ByteString): - __slots__ = () - - -class List(list, MutableSequence[T], extra=list): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is List: - raise TypeError("Type List cannot be instantiated; " - "use list() instead") - return _generic_new(list, cls, *args, **kwds) - - -class Deque(collections.deque, 
MutableSequence[T], extra=collections.deque): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is Deque: - return collections.deque(*args, **kwds) - return _generic_new(collections.deque, cls, *args, **kwds) - - -class Set(set, MutableSet[T], extra=set): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is Set: - raise TypeError("Type Set cannot be instantiated; " - "use set() instead") - return _generic_new(set, cls, *args, **kwds) - - -class FrozenSet(frozenset, AbstractSet[T_co], extra=frozenset): - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is FrozenSet: - raise TypeError("Type FrozenSet cannot be instantiated; " - "use frozenset() instead") - return _generic_new(frozenset, cls, *args, **kwds) - - -class MappingView(Sized, Iterable[T_co], extra=collections_abc.MappingView): - __slots__ = () - - -class KeysView(MappingView[KT], AbstractSet[KT], - extra=collections_abc.KeysView): - __slots__ = () - - -class ItemsView(MappingView[Tuple[KT, VT_co]], - AbstractSet[Tuple[KT, VT_co]], - Generic[KT, VT_co], - extra=collections_abc.ItemsView): - __slots__ = () - - -class ValuesView(MappingView[VT_co], extra=collections_abc.ValuesView): - __slots__ = () - - -if hasattr(contextlib, 'AbstractContextManager'): - class ContextManager(Generic[T_co], extra=contextlib.AbstractContextManager): - __slots__ = () -else: - class ContextManager(Generic[T_co]): - __slots__ = () - - def __enter__(self): - return self - - @abc.abstractmethod - def __exit__(self, exc_type, exc_value, traceback): - return None - - @classmethod - def __subclasshook__(cls, C): - if cls is ContextManager: - # In Python 3.6+, it is possible to set a method to None to - # explicitly indicate that the class does not implement an ABC - # (https://bugs.python.org/issue25958), but we do not support - # that pattern here because this fallback class is only used - # in Python 3.5 and earlier. 
- if (any("__enter__" in B.__dict__ for B in C.__mro__) and - any("__exit__" in B.__dict__ for B in C.__mro__)): - return True - return NotImplemented - - -if hasattr(contextlib, 'AbstractAsyncContextManager'): - class AsyncContextManager(Generic[T_co], - extra=contextlib.AbstractAsyncContextManager): - __slots__ = () - - __all__.append('AsyncContextManager') -elif sys.version_info[:2] >= (3, 5): - exec(""" -class AsyncContextManager(Generic[T_co]): - __slots__ = () - - async def __aenter__(self): - return self - - @abc.abstractmethod - async def __aexit__(self, exc_type, exc_value, traceback): - return None - - @classmethod - def __subclasshook__(cls, C): - if cls is AsyncContextManager: - if sys.version_info[:2] >= (3, 6): - return _collections_abc._check_methods(C, "__aenter__", "__aexit__") - if (any("__aenter__" in B.__dict__ for B in C.__mro__) and - any("__aexit__" in B.__dict__ for B in C.__mro__)): - return True - return NotImplemented - -__all__.append('AsyncContextManager') -""") - - -class Dict(dict, MutableMapping[KT, VT], extra=dict): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is Dict: - raise TypeError("Type Dict cannot be instantiated; " - "use dict() instead") - return _generic_new(dict, cls, *args, **kwds) - - -class DefaultDict(collections.defaultdict, MutableMapping[KT, VT], - extra=collections.defaultdict): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is DefaultDict: - return collections.defaultdict(*args, **kwds) - return _generic_new(collections.defaultdict, cls, *args, **kwds) - - -class Counter(collections.Counter, Dict[T, int], extra=collections.Counter): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is Counter: - return collections.Counter(*args, **kwds) - return _generic_new(collections.Counter, cls, *args, **kwds) - - -if hasattr(collections, 'ChainMap'): - # ChainMap only exists in 3.3+ - __all__.append('ChainMap') - - class ChainMap(collections.ChainMap, 
MutableMapping[KT, VT], - extra=collections.ChainMap): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is ChainMap: - return collections.ChainMap(*args, **kwds) - return _generic_new(collections.ChainMap, cls, *args, **kwds) - - -# Determine what base class to use for Generator. -if hasattr(collections_abc, 'Generator'): - # Sufficiently recent versions of 3.5 have a Generator ABC. - _G_base = collections_abc.Generator -else: - # Fall back on the exact type. - _G_base = types.GeneratorType - - -class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co], - extra=_G_base): - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is Generator: - raise TypeError("Type Generator cannot be instantiated; " - "create a subclass instead") - return _generic_new(_G_base, cls, *args, **kwds) - - -if hasattr(collections_abc, 'AsyncGenerator'): - class AsyncGenerator(AsyncIterator[T_co], Generic[T_co, T_contra], - extra=collections_abc.AsyncGenerator): - __slots__ = () - - __all__.append('AsyncGenerator') - - -# Internal type variable used for Type[]. -CT_co = TypeVar('CT_co', covariant=True, bound=type) - - -# This is not a real generic class. Don't use outside annotations. -class Type(Generic[CT_co], extra=type): - """A special construct usable to annotate class objects. - - For example, suppose we have the following classes:: - - class User: ... # Abstract base for User classes - class BasicUser(User): ... - class ProUser(User): ... - class TeamUser(User): ... - - And a function that takes a class argument that's a subclass of - User and returns an instance of the corresponding class:: - - U = TypeVar('U', bound=User) - def new_user(user_class: Type[U]) -> U: - user = user_class() - # (Here we could write the user object to a database) - return user - - joe = new_user(BasicUser) - - At this point the type checker knows that joe has type BasicUser. 
- """ - - __slots__ = () - - -def _make_nmtuple(name, types): - msg = "NamedTuple('Name', [(f0, t0), (f1, t1), ...]); each t must be a type" - types = [(n, _type_check(t, msg)) for n, t in types] - nm_tpl = collections.namedtuple(name, [n for n, t in types]) - # Prior to PEP 526, only _field_types attribute was assigned. - # Now, both __annotations__ and _field_types are used to maintain compatibility. - nm_tpl.__annotations__ = nm_tpl._field_types = collections.OrderedDict(types) - try: - nm_tpl.__module__ = sys._getframe(2).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - pass - return nm_tpl - - -_PY36 = sys.version_info[:2] >= (3, 6) - -# attributes prohibited to set in NamedTuple class syntax -_prohibited = ('__new__', '__init__', '__slots__', '__getnewargs__', - '_fields', '_field_defaults', '_field_types', - '_make', '_replace', '_asdict', '_source') - -_special = ('__module__', '__name__', '__qualname__', '__annotations__') - - -class NamedTupleMeta(type): - - def __new__(cls, typename, bases, ns): - if ns.get('_root', False): - return super().__new__(cls, typename, bases, ns) - if not _PY36: - raise TypeError("Class syntax for NamedTuple is only supported" - " in Python 3.6+") - types = ns.get('__annotations__', {}) - nm_tpl = _make_nmtuple(typename, types.items()) - defaults = [] - defaults_dict = {} - for field_name in types: - if field_name in ns: - default_value = ns[field_name] - defaults.append(default_value) - defaults_dict[field_name] = default_value - elif defaults: - raise TypeError("Non-default namedtuple field {field_name} cannot " - "follow default field(s) {default_names}" - .format(field_name=field_name, - default_names=', '.join(defaults_dict.keys()))) - nm_tpl.__new__.__defaults__ = tuple(defaults) - nm_tpl._field_defaults = defaults_dict - # update from user namespace without overriding special namedtuple attributes - for key in ns: - if key in _prohibited: - raise AttributeError("Cannot overwrite NamedTuple 
attribute " + key) - elif key not in _special and key not in nm_tpl._fields: - setattr(nm_tpl, key, ns[key]) - return nm_tpl - - -class NamedTuple(metaclass=NamedTupleMeta): - """Typed version of namedtuple. - - Usage in Python versions >= 3.6:: - - class Employee(NamedTuple): - name: str - id: int - - This is equivalent to:: - - Employee = collections.namedtuple('Employee', ['name', 'id']) - - The resulting class has extra __annotations__ and _field_types - attributes, giving an ordered dict mapping field names to types. - __annotations__ should be preferred, while _field_types - is kept to maintain pre PEP 526 compatibility. (The field names - are in the _fields attribute, which is part of the namedtuple - API.) Alternative equivalent keyword syntax is also accepted:: - - Employee = NamedTuple('Employee', name=str, id=int) - - In Python versions <= 3.5 use:: - - Employee = NamedTuple('Employee', [('name', str), ('id', int)]) - """ - _root = True - - def __new__(self, typename, fields=None, **kwargs): - if kwargs and not _PY36: - raise TypeError("Keyword syntax for NamedTuple is only supported" - " in Python 3.6+") - if fields is None: - fields = kwargs.items() - elif kwargs: - raise TypeError("Either list of fields or keywords" - " can be provided to NamedTuple, not both") - return _make_nmtuple(typename, fields) - - -def NewType(name, tp): - """NewType creates simple unique types with almost zero - runtime overhead. NewType(name, tp) is considered a subtype of tp - by static type checkers. At runtime, NewType(name, tp) returns - a dummy function that simply returns its argument. Usage:: - - UserId = NewType('UserId', int) - - def name_by_id(user_id: UserId) -> str: - ... 
- - UserId('user') # Fails type check - - name_by_id(42) # Fails type check - name_by_id(UserId(42)) # OK - - num = UserId(5) + 1 # type: int - """ - - def new_type(x): - return x - - new_type.__name__ = name - new_type.__supertype__ = tp - return new_type - - -# Python-version-specific alias (Python 2: unicode; Python 3: str) -Text = str - - -# Constant that's True when type checking, but False here. -TYPE_CHECKING = False - - -class IO(Generic[AnyStr]): - """Generic base class for TextIO and BinaryIO. - - This is an abstract, generic version of the return of open(). - - NOTE: This does not distinguish between the different possible - classes (text vs. binary, read vs. write vs. read/write, - append-only, unbuffered). The TextIO and BinaryIO subclasses - below capture the distinctions between text vs. binary, which is - pervasive in the interface; however we currently do not offer a - way to track the other distinctions in the type system. - """ - - __slots__ = () - - @abstractproperty - def mode(self) -> str: - pass - - @abstractproperty - def name(self) -> str: - pass - - @abstractmethod - def close(self) -> None: - pass - - @abstractmethod - def closed(self) -> bool: - pass - - @abstractmethod - def fileno(self) -> int: - pass - - @abstractmethod - def flush(self) -> None: - pass - - @abstractmethod - def isatty(self) -> bool: - pass - - @abstractmethod - def read(self, n: int = -1) -> AnyStr: - pass - - @abstractmethod - def readable(self) -> bool: - pass - - @abstractmethod - def readline(self, limit: int = -1) -> AnyStr: - pass - - @abstractmethod - def readlines(self, hint: int = -1) -> List[AnyStr]: - pass - - @abstractmethod - def seek(self, offset: int, whence: int = 0) -> int: - pass - - @abstractmethod - def seekable(self) -> bool: - pass - - @abstractmethod - def tell(self) -> int: - pass - - @abstractmethod - def truncate(self, size: int = None) -> int: - pass - - @abstractmethod - def writable(self) -> bool: - pass - - @abstractmethod - def 
write(self, s: AnyStr) -> int: - pass - - @abstractmethod - def writelines(self, lines: List[AnyStr]) -> None: - pass - - @abstractmethod - def __enter__(self) -> 'IO[AnyStr]': - pass - - @abstractmethod - def __exit__(self, type, value, traceback) -> None: - pass - - -class BinaryIO(IO[bytes]): - """Typed version of the return of open() in binary mode.""" - - __slots__ = () - - @abstractmethod - def write(self, s: Union[bytes, bytearray]) -> int: - pass - - @abstractmethod - def __enter__(self) -> 'BinaryIO': - pass - - -class TextIO(IO[str]): - """Typed version of the return of open() in text mode.""" - - __slots__ = () - - @abstractproperty - def buffer(self) -> BinaryIO: - pass - - @abstractproperty - def encoding(self) -> str: - pass - - @abstractproperty - def errors(self) -> Optional[str]: - pass - - @abstractproperty - def line_buffering(self) -> bool: - pass - - @abstractproperty - def newlines(self) -> Any: - pass - - @abstractmethod - def __enter__(self) -> 'TextIO': - pass - - -class io: - """Wrapper namespace for IO generic classes.""" - - __all__ = ['IO', 'TextIO', 'BinaryIO'] - IO = IO - TextIO = TextIO - BinaryIO = BinaryIO - - -io.__name__ = __name__ + '.io' -sys.modules[io.__name__] = io - - -Pattern = _TypeAlias('Pattern', AnyStr, type(stdlib_re.compile('')), - lambda p: p.pattern) -Match = _TypeAlias('Match', AnyStr, type(stdlib_re.match('', '')), - lambda m: m.re.pattern) - - -class re: - """Wrapper namespace for re type aliases.""" - - __all__ = ['Pattern', 'Match'] - Pattern = Pattern - Match = Match - - -re.__name__ = __name__ + '.re' -sys.modules[re.__name__] = re diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index c5e68edff3..9b6d7bea8a 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -46,4 +46,3 @@ pip-shims==0.1.2 modutil==2.0.0 ptyprocess==0.6.0 enum34==1.1.6 -typing==3.6.4