diff --git a/src/rez/cli/_util.py b/src/rez/cli/_util.py
index 592706faa..d80674848 100644
--- a/src/rez/cli/_util.py
+++ b/src/rez/cli/_util.py
@@ -39,6 +39,7 @@
"interpret": {},
"memcache": {},
"pip": {},
+ "wheel": {},
"plugins": {},
"python": {
"arg_mode": "passthrough"
diff --git a/src/rez/cli/pip.py b/src/rez/cli/pip.py
index 2d391dec3..8af792d0d 100644
--- a/src/rez/cli/pip.py
+++ b/src/rez/cli/pip.py
@@ -85,4 +85,4 @@ def print_variant(v):
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
-# License along with this library.  If not, see <http://www.gnu.org/licenses/>.
+# License along with this library.  If not, see <http://www.gnu.org/licenses/>.
\ No newline at end of file
diff --git a/src/rez/cli/wheel.py b/src/rez/cli/wheel.py
new file mode 100644
index 000000000..c6d1ce6c0
--- /dev/null
+++ b/src/rez/cli/wheel.py
@@ -0,0 +1,250 @@
+"""
+Install a pip-compatible python package, incl. dependencies, as rez packages.
+"""
+
+import time
+import contextlib
+
+quiet = False
+
+
+def setup_parser(parser, completions=False):
+ parser.add_argument(
+ "-i", "--install", nargs="+",
+ help="Install the package")
+ parser.add_argument(
+ "-s", "--search", nargs="+",
+ help="Search for the package on PyPi")
+ parser.add_argument(
+ "-r", "--release", action="store_true",
+ help="Install as released package; if not set, package is installed "
+ "locally only")
+ parser.add_argument(
+ "-va", "--variant", action="append",
+ help="Install package as variant, may be called multiple times.")
+ parser.add_argument(
+ "-p", "--prefix", type=str, metavar="PATH",
+ help="Install to a custom package repository path.")
+ parser.add_argument(
+ "-y", "--yes", action="store_true",
+ help="Pre-emptively answer the question to continue")
+ parser.add_argument(
+ "-q", "--quiet", action="store_true",
+        help="Do not output anything to stdout, overridden with -vv")
+
+ # Additional pip-specific arguments
+ parser.add_argument(
+ "--no-deps", action="store_true", help="Do not install dependencies")
+ parser.add_argument(
+ "--index-url", default="https://pypi.org/simple",
+ help="Provide a custom PyPI index")
+
+
+def tell(msg, newlines=1):
+ if quiet:
+ return
+
+ import sys
+ sys.stdout.write("%s%s" % (msg, "\n" * newlines))
+
+
+def error(msg, newlines=1):
+ import sys
+ sys.stderr.write("ERROR: %s\n" % msg)
+
+
+def ask(msg):
+ from rez.vendor.six.six.moves import input
+
+ try:
+ return input(msg).lower() in ("", "y", "yes", "ok")
+ except EOFError:
+ return True # On just hitting enter
+ except KeyboardInterrupt:
+ return False
+
+
+@contextlib.contextmanager
+def stage(msg, timing=True):
+ tell(msg, 0)
+ t0 = time.time()
+
+ try:
+ yield
+ except Exception:
+ tell("fail")
+ raise
+ else:
+ if timing:
+ tell("ok - %.2fs" % (time.time() - t0))
+ else:
+ tell("ok")
+
+
+def command(opts, parser, extra_arg_groups=None):
+ import os
+ import shutil
+ import tempfile
+
+ global quiet
+ quiet = (opts.verbose < 2) and opts.quiet
+
+ if opts.search:
+ _search(opts)
+
+ if opts.install:
+ t0 = time.time()
+ tmpdir = tempfile.mkdtemp(suffix="-rez", prefix="wheel-")
+ tempdir = os.path.join(tmpdir, "rez_staging", "python")
+ success = False
+
+ try:
+ _install(opts, tempdir)
+ success = True
+
+ finally:
+ shutil.rmtree(tmpdir)
+
+ tell(
+ ("Completed in %.2fs" % (time.time() - t0))
+ if success else "Failed"
+ )
+
+
+def _install(opts, tempdir):
+ import os
+ from rez import wheel
+ from rez.config import config
+
+ python_version = wheel.python_version()
+ pip_version = wheel.pip_version()
+
+ if not python_version:
+ error("Python could not be found")
+ exit(1)
+
+ if not pip_version:
+ error("pip could not be found")
+ exit(1)
+
+    if int(pip_version.split(".")[0]) < 19:
+ error("Requires pip>=19")
+ exit(1)
+
+ tell("Using python-%s" % python_version)
+ tell("Using pip-%s" % pip_version)
+
+ try:
+ with stage("Reading package lists... "):
+ distributions = wheel.download(
+ opts.install,
+ tempdir=tempdir,
+ no_deps=opts.no_deps,
+ index_url=opts.index_url,
+ )
+ except OSError as e:
+ tell(e)
+ exit(1)
+
+ packagesdir = opts.prefix or (
+ config.release_packages_path if opts.release
+ else config.local_packages_path
+ )
+
+ with stage("Discovering existing packages... "):
+ new, exists = list(), list()
+ for dist in distributions:
+ package = wheel.convert(dist, variants=opts.variant)
+
+ if wheel.exists(package, packagesdir):
+ exists.append(package)
+ else:
+ new.append(package)
+
+ if not new:
+ for package in exists:
+ tell("%s-%s was already installed" % (
+ package.name, package.version
+ ))
+
+ return tell("No new packages were installed")
+
+ size = sum(
+ os.path.getsize(os.path.join(dirpath, filename))
+ for dirpath, dirnames, filenames in os.walk(tempdir)
+ for filename in filenames
+ ) / (10.0 ** 6) # mb
+
+ # Determine column width for upcoming printing
+ all_ = new + exists
+ max_name = max((i.name for i in all_), key=len)
+ max_version = max((str(i.version) for i in all_), key=len)
+ row_line = " {:<%d}{:<%d}{}" % (len(max_name) + 4, len(max_version) + 2)
+
+ def format_variants(package):
+ return (
+ "/".join(str(v) for v in package.variants[0])
+ if package.variants else ""
+ )
+
+ tell("The following NEW packages will be installed:")
+ for package in new:
+ tell(row_line.format(
+ package.name,
+ package.version,
+ format_variants(package)
+ ))
+
+ if exists:
+ tell("The following packages will be SKIPPED:")
+ for package in exists:
+ tell(row_line.format(
+ package.name,
+ package.version,
+ format_variants(package)
+ ))
+
+ tell("Packages will be installed to %s" % packagesdir)
+ tell("After this operation, %.2f mb will be used." % size)
+
+ if not opts.yes and not opts.quiet:
+ if not ask("Do you want to continue? [Y/n] "):
+ return
+
+ for index, package in enumerate(new):
+ msg = "(%d/%d) Installing %s-%s... " % (
+ index + 1, len(new),
+ package.name,
+ package.version,
+ )
+
+ with stage(msg, timing=False):
+ wheel.deploy(
+ package,
+ path=packagesdir
+ )
+
+ tell("%d installed, %d skipped" % (len(new), len(exists)))
+
+
+def _search(opts):
+ import subprocess
+    subprocess.check_call([
+        "python", "-m", "pip", "search"
+    ] + opts.search)
+
+
+# Copyright 2013-2016 Allan Johns.
+#
+# This library is free software: you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation, either
+# version 3 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.  If not, see <http://www.gnu.org/licenses/>.
diff --git a/src/rez/package_resources_.py b/src/rez/package_resources_.py
index e613edfe1..bfdd976ad 100644
--- a/src/rez/package_resources_.py
+++ b/src/rez/package_resources_.py
@@ -461,6 +461,7 @@ def _subpath(self, ignore_shortlinks=False):
return hashdir
else:
dirs = [x.safe_str() for x in self.variant_requires]
+ dirs = dirs or [""]
subpath = os.path.join(*dirs)
return subpath
diff --git a/src/rez/pip.py b/src/rez/pip.py
index 014e586e9..26b02092f 100644
--- a/src/rez/pip.py
+++ b/src/rez/pip.py
@@ -381,4 +381,4 @@ def _log(msg):
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
-# License along with this library.  If not, see <http://www.gnu.org/licenses/>.
+# License along with this library.  If not, see <http://www.gnu.org/licenses/>.
\ No newline at end of file
diff --git a/src/rez/serialise.py b/src/rez/serialise.py
index 1e218eee7..0de6b7a28 100644
--- a/src/rez/serialise.py
+++ b/src/rez/serialise.py
@@ -5,6 +5,7 @@
from inspect import isfunction, ismodule, getargspec
from StringIO import StringIO
import sys
+import stat
import os
import os.path
import threading
@@ -64,8 +65,21 @@ def open_file_for_write(filepath, mode=None):
debug_print("Writing to %s (local cache of %s)", cache_filepath, filepath)
- with atomic_write(filepath, overwrite=True) as f:
- f.write(content)
+ for attempt in range(2):
+ try:
+ with atomic_write(filepath, overwrite=True) as f:
+ f.write(content)
+                break
+ except WindowsError as e:
+ if attempt == 0:
+ # `overwrite=True` of atomic_write doesn't restore
+ # writability to the file being written to.
+ os.chmod(filepath, stat.S_IWRITE | stat.S_IREAD)
+
+ else:
+ # Under Windows, atomic_write doesn't tell you about
+ # which file actually failed.
+ raise WindowsError("%s: '%s'" % (e, filepath))
if mode is not None:
os.chmod(filepath, mode)
diff --git a/src/rez/tests/test_wheel.py b/src/rez/tests/test_wheel.py
new file mode 100644
index 000000000..6a7801f9b
--- /dev/null
+++ b/src/rez/tests/test_wheel.py
@@ -0,0 +1,259 @@
+"""
+test rez wheel
+"""
+import os
+import stat
+import shutil
+import tempfile
+import subprocess
+
+from rez.tests.util import TempdirMixin, TestBase
+from rez import wheel
+from rez.resolved_context import ResolvedContext
+from rez.package_maker__ import make_package
+from rez.packages_ import iter_packages
+from rez.util import which
+
+
+def rmtree(path):
+ # Rez write-protects the package.py files
+ def del_rw(action, name, exc):
+ os.chmod(name, stat.S_IWRITE)
+ os.remove(name)
+
+ shutil.rmtree(path, onerror=del_rw)
+
+
+class TestWheel(TestBase, TempdirMixin):
+ @classmethod
+ def setUpClass(cls):
+ TempdirMixin.setUpClass()
+ cls.settings = dict()
+ cls.tempdir = tempfile.mkdtemp()
+
+ python = which("python")
+ assert python, "No Python found"
+
+ result = subprocess.check_output(
+ [python, "--version"],
+ universal_newlines=True,
+ stderr=subprocess.STDOUT,
+ )
+ _, version = result.rstrip().split(" ", 1)
+ version = version.split()[-1]
+ version = int(version[0])
+
+ with make_package("python", cls.tempdir) as maker:
+ PATH = os.path.dirname(python)
+ maker.version = str(version)
+ maker.commands = "\n".join([
+ "env.PATH.prepend('%s')" % PATH
+ ])
+
+ cls.context = ResolvedContext(
+ ["python"],
+ package_paths=[cls.tempdir]
+ )
+
+ cls.python_version = version
+
+ @classmethod
+ def tearDownClass(cls):
+ TempdirMixin.tearDownClass()
+ rmtree(cls.tempdir)
+
+ def setUp(self):
+ """Called for each test"""
+ self.temprepo = tempfile.mkdtemp()
+
+ def tearDown(self):
+ rmtree(self.temprepo)
+
+ def _execute(self, cmd):
+ assert self.context.execute_shell(command=cmd).wait() == 0
+
+ def _install(self, *packages, **kwargs):
+ return wheel.install(packages, prefix=self.temprepo, **kwargs)
+
+ def _installed_packages(self, name):
+ return list(iter_packages(name, paths=[self.temprepo]))
+
+ def _test_install(self, package, version):
+ installed = self._install("%s==%s" % (package, version))
+ assert installed, "Something should have been installed"
+
+ names = [pkg.name for pkg in installed]
+ versions = {
+ package.name: str(package.version)
+ for package in installed
+ }
+
+ self.assertIn(package, names)
+ self.assertEqual(versions[package], version)
+
+ def test_wheel_to_variants1(self):
+ """Test wheel_to_variants with pure-Python wheel"""
+ WHEEL = """\
+Wheel-Version: 1.0
+Generator: bdist_wheel 1.0
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+"""
+
+ variants = wheel.wheel_to_variants(WHEEL)
+ self.assertEqual(variants, [])
+
+ def test_wheel_to_variants2(self):
+ """Test wheel_to_variants with compiled wheel"""
+ WHEEL = """\
+Wheel-Version: 1.0
+Generator: bdist_wheel 1.0
+Root-Is-Purelib: false
+Tag: cp36-cp36m-win_amd64
+"""
+
+ variants = wheel.wheel_to_variants(WHEEL)
+ self.assertEqual(variants, [
+ "platform-%s" % wheel.platform_name(),
+ "os-%s" % wheel.os_name(),
+ "python-3.6",
+ ])
+
+ def test_wheel_to_variants3(self):
+ """Test wheel_to_variants with unsupported WHEEL"""
+ WHEEL = """\
+Wheel-Version: 2.0
+Generator: bdist_wheel 1.0
+Root-Is-Purelib: false
+Tag: cp36-cp36m-win_amd64
+"""
+
+ self.assertRaises(Exception, wheel.wheel_to_variants, WHEEL)
+
+ def test_wheel_to_variants4(self):
+ """Test wheel_to_variants with pure-Python, solo-version wheel"""
+ WHEEL = """\
+Wheel-Version: 1.0
+Generator: bdist_wheel 1.0
+Root-Is-Purelib: true
+Tag: py2-none-any
+"""
+
+ variants = wheel.wheel_to_variants(WHEEL)
+ self.assertEqual(variants, ["python-2"])
+
+ def test_wheel_to_variants5(self):
+ """Test wheel_to_variants with badly formatted WHEEL"""
+ WHEEL = """\
+I am b'a'd
+"""
+
+ self.assertRaises(Exception, wheel.wheel_to_variants, WHEEL)
+
+ def test_purepython_23(self):
+ """Install a pure-Python package compatible with both Python 2 and 3"""
+ self._test_install("six", "1.12.0")
+
+ def test_purepython_2(self):
+ """Install a pure-Python package only compatible with Python 2"""
+ self._test_install("futures", "3.2.0")
+
+ def test_compiled(self):
+ """Install a compiled Python package"""
+ self._test_install("pyyaml", "5.1")
+
+ def test_dependencies(self):
+ """Install mkdocs, which carries lots of dependencies"""
+ installed = self._install("mkdocs==1.0.4")
+ assert installed, "Something should have been installed"
+
+ names = [pkg.name for pkg in installed]
+ package = {package.name: package for package in installed}["mkdocs"]
+ versions = {
+ package.name: str(package.version)
+ for package in installed
+ }
+
+ self.assertEqual(versions["mkdocs"], "1.0.4")
+
+ # From https://github.com/mkdocs/mkdocs/blob/1.0.4/setup.py#L58
+ dependencies = (
+ "click",
+ "jinja2",
+ "livereload",
+ "markdown",
+ "pyyaml",
+ "tornado",
+ )
+
+ for name in dependencies:
+ self.assertIn(name.lower(), names)
+
+ # All requirements have been installed
+ for req in package.requires:
+ self.assertIn(req.name.lower(), names)
+
+ def test_override_variant(self):
+ """Test overriding variant"""
+ installed = self._install("six", variants=["python-2"])
+ assert installed, "Something should have been installed"
+ package = installed[0].variants[0][0]
+ self.assertEqual(str(package), "python-2")
+
+ def test_existing_variant(self):
+ """Test installing another variant"""
+
+ # Package does not exist prior to install it
+ self.assertEqual(self._installed_packages(name="six"), [])
+
+ self._install("six", variants=["python-2"])
+ package = self._installed_packages("six")[0]
+ variants = [str(v[0]) for v in package.variants]
+ self.assertEqual(variants, ["python-2"])
+
+ # Make sure an install doesn't break or remove a prior variant
+ # This normally happens when installing the same package
+ # on another platform.
+ self._install("six", variants=["python-3"])
+ package = self._installed_packages("six")[0]
+ variants = [str(v[0]) for v in package.variants]
+ self.assertEqual(variants, ["python-2", "python-3"])
+
+ def test_battery(self):
+ """Install a variety of packages"""
+ packages = [
+ "Cython",
+ "Jinja2",
+ "MarkupSafe",
+ "Pillow",
+ "Qt.py",
+ "blockdiag",
+ "certifi",
+ "excel",
+ "funcparserlib",
+ "lockfile",
+ "lxml",
+ "ordereddict",
+ "pyblish-base",
+ "pyblish-lite",
+ "pyblish-maya",
+ "setuptools",
+ "urllib3",
+ "webcolors",
+ "xlrd",
+ "six",
+ ]
+
+ if os.name == "nt":
+ packages += [
+ "pywin32",
+ "pythonnet",
+ ]
+
+ self._install(*packages)
+
+ def test_pyside2(self):
+ """Install PySide2"""
+ if self.python_version != 3:
+ self.skipTest("PySide2 is not available on PyPI for Python 2")
diff --git a/src/rez/vendor/distlib/__init__.py b/src/rez/vendor/distlib/__init__.py
index 35af72fdb..a786b4d3b 100644
--- a/src/rez/vendor/distlib/__init__.py
+++ b/src/rez/vendor/distlib/__init__.py
@@ -1,12 +1,12 @@
# -*- coding: utf-8 -*-
#
-# Copyright (C) 2012-2016 Vinay Sajip.
+# Copyright (C) 2012-2017 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
import logging
-__version__ = '0.2.4.dev0'
+__version__ = '0.2.8'
class DistlibException(Exception):
pass
diff --git a/src/rez/vendor/distlib/_backport/shutil.py b/src/rez/vendor/distlib/_backport/shutil.py
index 9e2e234d4..159e49ee8 100644
--- a/src/rez/vendor/distlib/_backport/shutil.py
+++ b/src/rez/vendor/distlib/_backport/shutil.py
@@ -55,8 +55,8 @@ class ReadError(EnvironmentError):
"""Raised when an archive cannot be read"""
class RegistryError(Exception):
- """Raised when a registery operation with the archiving
- and unpacking registeries fails"""
+ """Raised when a registry operation with the archiving
+ and unpacking registries fails"""
try:
@@ -648,7 +648,7 @@ def register_unpack_format(name, extensions, function, extra_args=None,
_UNPACK_FORMATS[name] = extensions, function, extra_args, description
def unregister_unpack_format(name):
- """Removes the pack format from the registery."""
+ """Removes the pack format from the registry."""
del _UNPACK_FORMATS[name]
def _ensure_directory(path):
diff --git a/src/rez/vendor/distlib/_backport/sysconfig.py b/src/rez/vendor/distlib/_backport/sysconfig.py
index 1d3132679..1df3aba14 100644
--- a/src/rez/vendor/distlib/_backport/sysconfig.py
+++ b/src/rez/vendor/distlib/_backport/sysconfig.py
@@ -221,7 +221,7 @@ def _parse_makefile(filename, vars=None):
"""
# Regexes needed for parsing Makefile (and similar syntaxes,
# like old-style Setup files).
- _variable_rx = re.compile("([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
+ _variable_rx = re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
_findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)")
_findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}")
@@ -537,7 +537,7 @@ def get_config_vars(*args):
# patched up as well.
'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
flags = _CONFIG_VARS[key]
- flags = re.sub('-arch\s+\w+\s', ' ', flags)
+ flags = re.sub(r'-arch\s+\w+\s', ' ', flags)
flags = re.sub('-isysroot [^ \t]*', ' ', flags)
_CONFIG_VARS[key] = flags
else:
@@ -554,7 +554,7 @@ def get_config_vars(*args):
'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
flags = _CONFIG_VARS[key]
- flags = re.sub('-arch\s+\w+\s', ' ', flags)
+ flags = re.sub(r'-arch\s+\w+\s', ' ', flags)
flags = flags + ' ' + arch
_CONFIG_VARS[key] = flags
@@ -569,7 +569,7 @@ def get_config_vars(*args):
# when you install Xcode.
#
CFLAGS = _CONFIG_VARS.get('CFLAGS', '')
- m = re.search('-isysroot\s+(\S+)', CFLAGS)
+ m = re.search(r'-isysroot\s+(\S+)', CFLAGS)
if m is not None:
sdk = m.group(1)
if not os.path.exists(sdk):
@@ -579,7 +579,7 @@ def get_config_vars(*args):
'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
flags = _CONFIG_VARS[key]
- flags = re.sub('-isysroot\s+\S+(\s|$)', ' ', flags)
+ flags = re.sub(r'-isysroot\s+\S+(\s|$)', ' ', flags)
_CONFIG_VARS[key] = flags
if args:
@@ -725,7 +725,7 @@ def get_platform():
machine = 'fat'
cflags = get_config_vars().get('CFLAGS')
- archs = re.findall('-arch\s+(\S+)', cflags)
+ archs = re.findall(r'-arch\s+(\S+)', cflags)
archs = tuple(sorted(set(archs)))
if len(archs) == 1:
diff --git a/src/rez/vendor/distlib/_backport/tarfile.py b/src/rez/vendor/distlib/_backport/tarfile.py
index 0580fb795..d66d85663 100644
--- a/src/rez/vendor/distlib/_backport/tarfile.py
+++ b/src/rez/vendor/distlib/_backport/tarfile.py
@@ -331,7 +331,7 @@ class ExtractError(TarError):
"""General exception for extract errors."""
pass
class ReadError(TarError):
- """Exception for unreadble tar archives."""
+ """Exception for unreadable tar archives."""
pass
class CompressionError(TarError):
"""Exception for unavailable compression methods."""
diff --git a/src/rez/vendor/distlib/compat.py b/src/rez/vendor/distlib/compat.py
index 1dae5f374..ff328c8ee 100644
--- a/src/rez/vendor/distlib/compat.py
+++ b/src/rez/vendor/distlib/compat.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright (C) 2013-2016 Vinay Sajip.
+# Copyright (C) 2013-2017 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
@@ -12,7 +12,7 @@
try:
import ssl
-except ImportError:
+except ImportError: # pragma: no cover
ssl = None
if sys.version_info[0] < 3: # pragma: no cover
@@ -110,7 +110,7 @@ def _dnsname_match(dn, hostname, max_wildcards=1):
wildcards = leftmost.count('*')
if wildcards > max_wildcards:
# Issue #17980: avoid denials of service by refusing more
- # than one wildcard per fragment. A survery of established
+ # than one wildcard per fragment. A survey of established
# policy among SSL implementations showed it to be a
# reasonable choice.
raise CertificateError(
@@ -272,7 +272,7 @@ def _access_check(fn, mode):
if hasattr(BaseZipFile, '__enter__'): # pragma: no cover
ZipFile = BaseZipFile
-else:
+else: # pragma: no cover
from zipfile import ZipExtFile as BaseZipExtFile
class ZipExtFile(BaseZipExtFile):
@@ -329,7 +329,13 @@ def callable(obj):
fsencode = os.fsencode
fsdecode = os.fsdecode
except AttributeError: # pragma: no cover
- _fsencoding = sys.getfilesystemencoding()
+ # Issue #99: on some systems (e.g. containerised),
+ # sys.getfilesystemencoding() returns None, and we need a real value,
+ # so fall back to utf-8. From the CPython 2.7 docs relating to Unix and
+ # sys.getfilesystemencoding(): the return value is "the user’s preference
+ # according to the result of nl_langinfo(CODESET), or None if the
+ # nl_langinfo(CODESET) failed."
+ _fsencoding = sys.getfilesystemencoding() or 'utf-8'
if _fsencoding == 'mbcs':
_fserrors = 'strict'
else:
@@ -359,7 +365,7 @@ def fsdecode(filename):
from codecs import BOM_UTF8, lookup
import re
- cookie_re = re.compile("coding[:=]\s*([-\w.]+)")
+ cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)")
def _get_normal_name(orig_enc):
"""Imitates get_normal_name in tokenizer.c."""
@@ -375,7 +381,7 @@ def _get_normal_name(orig_enc):
def detect_encoding(readline):
"""
The detect_encoding() function is used to detect the encoding that should
- be used to decode a Python source file. It requires one argment, readline,
+ be used to decode a Python source file. It requires one argument, readline,
in the same way as the tokenize() generator.
It will call readline a maximum of twice, and return the encoding used
@@ -608,17 +614,20 @@ def clear(self):
self.maps[0].clear()
try:
- from imp import cache_from_source
-except ImportError: # pragma: no cover
- def cache_from_source(path, debug_override=None):
- assert path.endswith('.py')
- if debug_override is None:
- debug_override = __debug__
- if debug_override:
- suffix = 'c'
- else:
- suffix = 'o'
- return path + suffix
+ from importlib.util import cache_from_source # Python >= 3.4
+except ImportError: # pragma: no cover
+ try:
+ from imp import cache_from_source
+ except ImportError: # pragma: no cover
+ def cache_from_source(path, debug_override=None):
+ assert path.endswith('.py')
+ if debug_override is None:
+ debug_override = __debug__
+ if debug_override:
+ suffix = 'c'
+ else:
+ suffix = 'o'
+ return path + suffix
try:
from collections import OrderedDict
diff --git a/src/rez/vendor/distlib/database.py b/src/rez/vendor/distlib/database.py
index c6fb3a166..b13cdac92 100644
--- a/src/rez/vendor/distlib/database.py
+++ b/src/rez/vendor/distlib/database.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright (C) 2012-2016 The Python Software Foundation.
+# Copyright (C) 2012-2017 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""PEP 376 implementation."""
@@ -20,7 +20,8 @@
from . import DistlibException, resources
from .compat import StringIO
from .version import get_scheme, UnsupportedVersionError
-from .metadata import Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME
+from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME,
+ LEGACY_METADATA_FILENAME)
from .util import (parse_requirement, cached_property, parse_name_and_version,
read_exports, write_exports, CSVReader, CSVWriter)
@@ -132,7 +133,9 @@ def _yield_distributions(self):
if not r or r.path in seen:
continue
if self._include_dist and entry.endswith(DISTINFO_EXT):
- possible_filenames = [METADATA_FILENAME, WHEEL_METADATA_FILENAME]
+ possible_filenames = [METADATA_FILENAME,
+ WHEEL_METADATA_FILENAME,
+ LEGACY_METADATA_FILENAME]
for metadata_filename in possible_filenames:
metadata_path = posixpath.join(entry, metadata_filename)
pydist = finder.find(metadata_path)
@@ -257,7 +260,7 @@ def provides_distribution(self, name, version=None):
:type version: string
"""
matcher = None
- if not version is None:
+ if version is not None:
try:
matcher = self._scheme.matcher('%s (%s)' % (name, version))
except ValueError:
@@ -265,18 +268,23 @@ def provides_distribution(self, name, version=None):
(name, version))
for dist in self.get_distributions():
- provided = dist.provides
+ # We hit a problem on Travis where enum34 was installed and doesn't
+ # have a provides attribute ...
+ if not hasattr(dist, 'provides'):
+ logger.debug('No "provides": %s', dist)
+ else:
+ provided = dist.provides
- for p in provided:
- p_name, p_ver = parse_name_and_version(p)
- if matcher is None:
- if p_name == name:
- yield dist
- break
- else:
- if p_name == name and matcher.match(p_ver):
- yield dist
- break
+ for p in provided:
+ p_name, p_ver = parse_name_and_version(p)
+ if matcher is None:
+ if p_name == name:
+ yield dist
+ break
+ else:
+ if p_name == name and matcher.match(p_ver):
+ yield dist
+ break
def get_file_path(self, name, relative_path):
"""
@@ -529,9 +537,10 @@ class InstalledDistribution(BaseInstalledDistribution):
hasher = 'sha256'
def __init__(self, path, metadata=None, env=None):
+ self.modules = []
self.finder = finder = resources.finder_for_path(path)
if finder is None:
- import pdb; pdb.set_trace ()
+ raise ValueError('finder unavailable for %s' % path)
if env and env._cache_enabled and path in env._cache.path:
metadata = env._cache.path[path].metadata
elif metadata is None:
@@ -553,11 +562,13 @@ def __init__(self, path, metadata=None, env=None):
if env and env._cache_enabled:
env._cache.add(self)
- try:
- r = finder.find('REQUESTED')
- except AttributeError:
- import pdb; pdb.set_trace ()
+ r = finder.find('REQUESTED')
self.requested = r is not None
+ p = os.path.join(path, 'top_level.txt')
+ if os.path.exists(p):
+ with open(p, 'rb') as f:
+ data = f.read()
+ self.modules = data.splitlines()
def __repr__(self):
return '' % (
@@ -917,11 +928,14 @@ def parse_requires_path(req_path):
pass
return reqs
+ tl_path = tl_data = None
if path.endswith('.egg'):
if os.path.isdir(path):
- meta_path = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
+ p = os.path.join(path, 'EGG-INFO')
+ meta_path = os.path.join(p, 'PKG-INFO')
metadata = Metadata(path=meta_path, scheme='legacy')
- req_path = os.path.join(path, 'EGG-INFO', 'requires.txt')
+ req_path = os.path.join(p, 'requires.txt')
+ tl_path = os.path.join(p, 'top_level.txt')
requires = parse_requires_path(req_path)
else:
# FIXME handle the case where zipfile is not available
@@ -931,6 +945,7 @@ def parse_requires_path(req_path):
metadata = Metadata(fileobj=fileobj, scheme='legacy')
try:
data = zipf.get_data('EGG-INFO/requires.txt')
+ tl_data = zipf.get_data('EGG-INFO/top_level.txt').decode('utf-8')
requires = parse_requires_data(data.decode('utf-8'))
except IOError:
requires = None
@@ -939,6 +954,7 @@ def parse_requires_path(req_path):
req_path = os.path.join(path, 'requires.txt')
requires = parse_requires_path(req_path)
path = os.path.join(path, 'PKG-INFO')
+ tl_path = os.path.join(path, 'top_level.txt')
metadata = Metadata(path=path, scheme='legacy')
else:
raise DistlibException('path must end with .egg-info or .egg, '
@@ -946,6 +962,16 @@ def parse_requires_path(req_path):
if requires:
metadata.add_requirements(requires)
+ # look for top-level modules in top_level.txt, if present
+ if tl_data is None:
+ if tl_path is not None and os.path.exists(tl_path):
+ with open(tl_path, 'rb') as f:
+ tl_data = f.read().decode('utf-8')
+ if not tl_data:
+ tl_data = []
+ else:
+ tl_data = tl_data.splitlines()
+ self.modules = tl_data
return metadata
def __repr__(self):
@@ -974,7 +1000,7 @@ def check_installed_files(self):
mismatches.append((path, 'exists', True, False))
return mismatches
- def list_installed_files(self, allow_fail=False):
+ def list_installed_files(self):
"""
Iterates over the ``installed-files.txt`` entries and returns a tuple
``(path, hash, size)`` for each line.
@@ -1004,9 +1030,9 @@ def _size(path):
# and installation metadata files
if not os.path.exists(p):
logger.warning('Non-existent file: %s', p)
- if allow_fail or p.endswith(('.pyc', '.pyo')):
+ if p.endswith(('.pyc', '.pyo')):
continue
- # #otherwise fall through and fail
+ #otherwise fall through and fail
if not os.path.isdir(p):
result.append((p, _md5(p), _size(p)))
result.append((record_path, None, None))
@@ -1025,20 +1051,21 @@ def list_distinfo_files(self, absolute=False):
:returns: iterator of paths
"""
record_path = os.path.join(self.path, 'installed-files.txt')
- skip = True
- with codecs.open(record_path, 'r', encoding='utf-8') as f:
- for line in f:
- line = line.strip()
- if line == './':
- skip = False
- continue
- if not skip:
- p = os.path.normpath(os.path.join(self.path, line))
- if p.startswith(self.path):
- if absolute:
- yield p
- else:
- yield line
+ if os.path.exists(record_path):
+ skip = True
+ with codecs.open(record_path, 'r', encoding='utf-8') as f:
+ for line in f:
+ line = line.strip()
+ if line == './':
+ skip = False
+ continue
+ if not skip:
+ p = os.path.normpath(os.path.join(self.path, line))
+ if p.startswith(self.path):
+ if absolute:
+ yield p
+ else:
+ yield line
def __eq__(self, other):
return (isinstance(other, EggInfoDistribution) and
@@ -1308,5 +1335,5 @@ def make_dist(name, version, **kwargs):
md = Metadata(**kwargs)
md.name = name
md.version = version
- md.summary = summary or 'Plaeholder for summary'
+ md.summary = summary or 'Placeholder for summary'
return Distribution(md)
diff --git a/src/rez/vendor/distlib/index.py b/src/rez/vendor/distlib/index.py
index 73037c97b..2406be216 100644
--- a/src/rez/vendor/distlib/index.py
+++ b/src/rez/vendor/distlib/index.py
@@ -49,9 +49,10 @@ def __init__(self, url=None):
self.ssl_verifier = None
self.gpg = None
self.gpg_home = None
- self.rpc_proxy = None
with open(os.devnull, 'w') as sink:
- for s in ('gpg2', 'gpg'):
+ # Use gpg by default rather than gpg2, as gpg2 insists on
+ # prompting for passwords
+ for s in ('gpg', 'gpg2'):
try:
rc = subprocess.check_call([s, '--version'], stdout=sink,
stderr=sink)
@@ -74,7 +75,7 @@ def _get_pypirc_command(self):
def read_configuration(self):
"""
Read the PyPI access configuration as supported by distutils, getting
- PyPI to do the acutal work. This populates ``username``, ``password``,
+ PyPI to do the actual work. This populates ``username``, ``password``,
``realm`` and ``url`` attributes from the configuration.
"""
# get distutils to do the work
@@ -276,7 +277,7 @@ def upload_file(self, metadata, filename, signer=None, sign_password=None,
sha256_digest = hashlib.sha256(file_data).hexdigest()
d.update({
':action': 'file_upload',
- 'protcol_version': '1',
+ 'protocol_version': '1',
'filetype': filetype,
'pyversion': pyversion,
'md5_digest': md5_digest,
@@ -508,6 +509,8 @@ def encode_request(self, fields, files):
def search(self, terms, operator=None):
if isinstance(terms, string_types):
terms = {'name': terms}
- if self.rpc_proxy is None:
- self.rpc_proxy = ServerProxy(self.url, timeout=3.0)
- return self.rpc_proxy.search(terms, operator or 'and')
+ rpc_proxy = ServerProxy(self.url, timeout=3.0)
+ try:
+ return rpc_proxy.search(terms, operator or 'and')
+ finally:
+ rpc_proxy('close')()
diff --git a/src/rez/vendor/distlib/locators.py b/src/rez/vendor/distlib/locators.py
index d66256258..5c655c3e5 100644
--- a/src/rez/vendor/distlib/locators.py
+++ b/src/rez/vendor/distlib/locators.py
@@ -21,10 +21,10 @@
from . import DistlibException
from .compat import (urljoin, urlparse, urlunparse, url2pathname, pathname2url,
queue, quote, unescape, string_types, build_opener,
- HTTPRedirectHandler as BaseRedirectHandler,
+ HTTPRedirectHandler as BaseRedirectHandler, text_type,
Request, HTTPError, URLError)
from .database import Distribution, DistributionPath, make_dist
-from .metadata import Metadata
+from .metadata import Metadata, MetadataInvalidError
from .util import (cached_property, parse_credentials, ensure_slash,
split_filename, get_project_data, parse_requirement,
parse_name_and_version, ServerProxy, normalize_name)
@@ -33,7 +33,7 @@
logger = logging.getLogger(__name__)
-HASHER_HASH = re.compile('^(\w+)=([a-f0-9]+)')
+HASHER_HASH = re.compile(r'^(\w+)=([a-f0-9]+)')
CHARSET = re.compile(r';\s*charset\s*=\s*(.*)\s*$', re.I)
HTML_CONTENT_TYPE = re.compile('text/html|application/x(ht)?ml')
DEFAULT_INDEX = 'https://pypi.python.org/pypi'
@@ -47,7 +47,10 @@ def get_all_distribution_names(url=None):
if url is None:
url = DEFAULT_INDEX
client = ServerProxy(url, timeout=3.0)
- return client.list_packages()
+ try:
+ return client.list_packages()
+ finally:
+ client('close')()
class RedirectHandler(BaseRedirectHandler):
"""
@@ -66,7 +69,7 @@ def http_error_302(self, req, fp, code, msg, headers):
if key in headers:
newurl = headers[key]
break
- if newurl is None:
+ if newurl is None: # pragma: no cover
return
urlparts = urlparse(newurl)
if urlparts.scheme == '':
@@ -113,6 +116,28 @@ def __init__(self, scheme='default'):
# is set from the requirement passed to locate(). See issue #18 for
# why this can be useful to know.
self.matcher = None
+ self.errors = queue.Queue()
+
+ def get_errors(self):
+ """
+ Return any errors which have occurred.
+ """
+ result = []
+ while not self.errors.empty(): # pragma: no cover
+ try:
+ e = self.errors.get(False)
+ result.append(e)
+ except self.errors.Empty:
+ continue
+ self.errors.task_done()
+ return result
+
+ def clear_errors(self):
+ """
+ Clear any errors which may have been logged.
+ """
+ # Just get the errors and throw them away
+ self.get_errors()
def clear_cache(self):
self._cache.clear()
@@ -150,11 +175,12 @@ def get_project(self, name):
This calls _get_project to do all the work, and just implements a caching layer on top.
"""
- if self._cache is None:
+ if self._cache is None: # pragma: no cover
result = self._get_project(name)
elif name in self._cache:
result = self._cache[name]
else:
+ self.clear_errors()
result = self._get_project(name)
self._cache[name] = result
return result
@@ -168,10 +194,11 @@ def score_url(self, url):
basename = posixpath.basename(t.path)
compatible = True
is_wheel = basename.endswith('.whl')
+ is_downloadable = basename.endswith(self.downloadable_extensions)
if is_wheel:
compatible = is_compatible(Wheel(basename), self.wheel_tags)
- return (t.scheme != 'https', 'pypi.python.org' in t.netloc,
- is_wheel, compatible, basename)
+ return (t.scheme == 'https', 'pypi.python.org' in t.netloc,
+ is_downloadable, is_wheel, compatible, basename)
def prefer_url(self, url1, url2):
"""
@@ -214,7 +241,7 @@ def same_project(name1, name2):
result = None
scheme, netloc, path, params, query, frag = urlparse(url)
- if frag.lower().startswith('egg='):
+ if frag.lower().startswith('egg='): # pragma: no cover
logger.debug('%s: version hint in fragment: %r',
project_name, frag)
m = HASHER_HASH.match(frag)
@@ -223,12 +250,14 @@ def same_project(name1, name2):
else:
algo, digest = None, None
origpath = path
- if path and path[-1] == '/':
+ if path and path[-1] == '/': # pragma: no cover
path = path[:-1]
if path.endswith('.whl'):
try:
wheel = Wheel(path)
- if is_compatible(wheel, self.wheel_tags):
+ if not is_compatible(wheel, self.wheel_tags):
+ logger.debug('Wheel not compatible: %s', path)
+ else:
if project_name is None:
include = True
else:
@@ -243,15 +272,17 @@ def same_project(name1, name2):
'python-version': ', '.join(
['.'.join(list(v[2:])) for v in wheel.pyver]),
}
- except Exception as e:
+ except Exception as e: # pragma: no cover
logger.warning('invalid path for wheel: %s', path)
- elif path.endswith(self.downloadable_extensions):
+ elif not path.endswith(self.downloadable_extensions): # pragma: no cover
+ logger.debug('Not downloadable: %s', path)
+ else: # downloadable extension
path = filename = posixpath.basename(path)
for ext in self.downloadable_extensions:
if path.endswith(ext):
path = path[:-len(ext)]
t = self.split_filename(path, project_name)
- if not t:
+ if not t: # pragma: no cover
logger.debug('No match for project/version: %s', path)
else:
name, version, pyver = t
@@ -264,7 +295,7 @@ def same_project(name1, name2):
params, query, '')),
#'packagetype': 'sdist',
}
- if pyver:
+ if pyver: # pragma: no cover
result['python-version'] = pyver
break
if result and algo:
@@ -325,7 +356,7 @@ def locate(self, requirement, prereleases=False):
"""
result = None
r = parse_requirement(requirement)
- if r is None:
+ if r is None: # pragma: no cover
raise DistlibException('Not a valid requirement: %r' % requirement)
scheme = get_scheme(self.scheme)
self.matcher = matcher = scheme.matcher(r.requirement)
@@ -363,7 +394,7 @@ def locate(self, requirement, prereleases=False):
d = {}
sd = versions.get('digests', {})
for url in result.download_urls:
- if url in sd:
+ if url in sd: # pragma: no cover
d[url] = sd[url]
result.digests = d
self.matcher = None
@@ -482,6 +513,7 @@ def _get_project(self, name):
# result['urls'].setdefault(md.version, set()).add(url)
# result['digests'][url] = self._get_digest(info)
except Exception as e:
+ self.errors.put(text_type(e))
logger.exception('JSON fetch failed: %s', e)
return result
@@ -496,9 +528,9 @@ class Page(object):
# declared with double quotes, single quotes or no quotes - which leads to
# the length of the expression.
_href = re.compile("""
-(rel\s*=\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\s\n]*))\s+)?
-href\s*=\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\s\n]*))
-(\s+rel\s*=\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\s\n]*)))?
+(rel\\s*=\\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\\s\n]*))\\s+)?
+href\\s*=\\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\\s\n]*))
+(\\s+rel\\s*=\\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\\s\n]*)))?
""", re.I | re.S | re.X)
_base = re.compile(r"""]+)""", re.I | re.S)
@@ -583,6 +615,7 @@ def __init__(self, url, timeout=None, num_workers=10, **kwargs):
# as it is for coordinating our internal threads - the ones created
# in _prepare_threads.
self._gplock = threading.RLock()
+ self.platform_check = False # See issue #112
def _prepare_threads(self):
"""
@@ -628,8 +661,8 @@ def _get_project(self, name):
del self.result
return result
- platform_dependent = re.compile(r'\b(linux-(i\d86|x86_64|arm\w+)|'
- r'win(32|-amd64)|macosx-?\d+)\b', re.I)
+ platform_dependent = re.compile(r'\b(linux_(i\d86|x86_64|arm\w+)|'
+ r'win(32|_amd64)|macosx_?\d+)\b', re.I)
def _is_platform_dependent(self, url):
"""
@@ -647,7 +680,7 @@ def _process_download(self, url):
Note that the return value isn't actually used other than as a boolean
value.
"""
- if self._is_platform_dependent(url):
+ if self.platform_check and self._is_platform_dependent(url):
info = None
else:
info = self.convert_url_to_download_info(url, self.project_name)
@@ -702,11 +735,16 @@ def _fetch(self):
continue
for link, rel in page.links:
if link not in self._seen:
- self._seen.add(link)
- if (not self._process_download(link) and
- self._should_queue(link, url, rel)):
- logger.debug('Queueing %s from %s', link, url)
- self._to_fetch.put(link)
+ try:
+ self._seen.add(link)
+ if (not self._process_download(link) and
+ self._should_queue(link, url, rel)):
+ logger.debug('Queueing %s from %s', link, url)
+ self._to_fetch.put(link)
+ except MetadataInvalidError: # e.g. invalid versions
+ pass
+ except Exception as e: # pragma: no cover
+ self.errors.put(text_type(e))
finally:
# always do this, to avoid hangs :-)
self._to_fetch.task_done()
@@ -1210,7 +1248,7 @@ def find(self, requirement, meta_extras=None, prereleases=False):
ireqts = dist.run_requires | dist.meta_requires
sreqts = dist.build_requires
ereqts = set()
- if dist in install_dists:
+ if meta_extras and dist in install_dists:
for key in ('test', 'build', 'dev'):
e = ':%s:' % key
if e in meta_extras:
diff --git a/src/rez/vendor/distlib/manifest.py b/src/rez/vendor/distlib/manifest.py
index 21cff45e3..ca0fe442d 100644
--- a/src/rez/vendor/distlib/manifest.py
+++ b/src/rez/vendor/distlib/manifest.py
@@ -12,6 +12,7 @@
import logging
import os
import re
+import sys
from . import DistlibException
from .compat import fsdecode
@@ -23,9 +24,15 @@
logger = logging.getLogger(__name__)
# a \ followed by some spaces + EOL
-_COLLAPSE_PATTERN = re.compile('\\\w*\n', re.M)
+_COLLAPSE_PATTERN = re.compile('\\\\w*\n', re.M)
_COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S)
+#
+# Due to the different results returned by fnmatch.translate, we need
+# to do slightly different processing for Python 2.7 and 3.2 ... this needed
+# to be brought in for Python 3.6 onwards.
+#
+_PYTHON_VERSION = sys.version_info[:2]
class Manifest(object):
"""A list of files built by on exploring the filesystem and filtered by
@@ -322,24 +329,43 @@ def _translate_pattern(self, pattern, anchor=True, prefix=None,
else:
return pattern
+ if _PYTHON_VERSION > (3, 2):
+ # ditch start and end characters
+ start, _, end = self._glob_to_re('_').partition('_')
+
if pattern:
pattern_re = self._glob_to_re(pattern)
+ if _PYTHON_VERSION > (3, 2):
+ assert pattern_re.startswith(start) and pattern_re.endswith(end)
else:
pattern_re = ''
base = re.escape(os.path.join(self.base, ''))
if prefix is not None:
# ditch end of pattern character
- empty_pattern = self._glob_to_re('')
- prefix_re = self._glob_to_re(prefix)[:-len(empty_pattern)]
+ if _PYTHON_VERSION <= (3, 2):
+ empty_pattern = self._glob_to_re('')
+ prefix_re = self._glob_to_re(prefix)[:-len(empty_pattern)]
+ else:
+ prefix_re = self._glob_to_re(prefix)
+ assert prefix_re.startswith(start) and prefix_re.endswith(end)
+ prefix_re = prefix_re[len(start): len(prefix_re) - len(end)]
sep = os.sep
if os.sep == '\\':
sep = r'\\'
- pattern_re = '^' + base + sep.join((prefix_re,
- '.*' + pattern_re))
- else: # no prefix -- respect anchor flag
+ if _PYTHON_VERSION <= (3, 2):
+ pattern_re = '^' + base + sep.join((prefix_re,
+ '.*' + pattern_re))
+ else:
+ pattern_re = pattern_re[len(start): len(pattern_re) - len(end)]
+ pattern_re = r'%s%s%s%s.*%s%s' % (start, base, prefix_re, sep,
+ pattern_re, end)
+ else: # no prefix -- respect anchor flag
if anchor:
- pattern_re = '^' + base + pattern_re
+ if _PYTHON_VERSION <= (3, 2):
+ pattern_re = '^' + base + pattern_re
+ else:
+ pattern_re = r'%s%s%s' % (start, base, pattern_re[len(start):])
return re.compile(pattern_re)
diff --git a/src/rez/vendor/distlib/markers.py b/src/rez/vendor/distlib/markers.py
index afb19c62e..ee1f3e236 100644
--- a/src/rez/vendor/distlib/markers.py
+++ b/src/rez/vendor/distlib/markers.py
@@ -1,182 +1,114 @@
# -*- coding: utf-8 -*-
#
-# Copyright (C) 2012-2013 Vinay Sajip.
+# Copyright (C) 2012-2017 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
-"""Parser for the environment markers micro-language defined in PEP 345."""
+"""
+Parser for the environment markers micro-language defined in PEP 508.
+"""
+
+# Note: In PEP 345, the micro-language was Python compatible, so the ast
+# module could be used to parse it. However, PEP 508 introduced operators such
+# as ~= and === which aren't in Python, necessitating a different approach.
-import ast
import os
import sys
import platform
+import re
-from .compat import python_implementation, string_types
-from .util import in_venv
+from .compat import python_implementation, urlparse, string_types
+from .util import in_venv, parse_marker
__all__ = ['interpret']
+def _is_literal(o):
+ if not isinstance(o, string_types) or not o:
+ return False
+ return o[0] in '\'"'
class Evaluator(object):
"""
- A limited evaluator for Python expressions.
+    This class is used to evaluate marker expressions.
"""
- operators = {
- 'eq': lambda x, y: x == y,
- 'gt': lambda x, y: x > y,
- 'gte': lambda x, y: x >= y,
+ operations = {
+ '==': lambda x, y: x == y,
+ '===': lambda x, y: x == y,
+ '~=': lambda x, y: x == y or x > y,
+ '!=': lambda x, y: x != y,
+ '<': lambda x, y: x < y,
+ '<=': lambda x, y: x == y or x < y,
+ '>': lambda x, y: x > y,
+ '>=': lambda x, y: x == y or x > y,
+ 'and': lambda x, y: x and y,
+ 'or': lambda x, y: x or y,
'in': lambda x, y: x in y,
- 'lt': lambda x, y: x < y,
- 'lte': lambda x, y: x <= y,
- 'not': lambda x: not x,
- 'noteq': lambda x, y: x != y,
- 'notin': lambda x, y: x not in y,
- }
-
- allowed_values = {
- 'sys_platform': sys.platform,
- 'python_version': '%s.%s' % sys.version_info[:2],
- # parsing sys.platform is not reliable, but there is no other
- # way to get e.g. 2.7.2+, and the PEP is defined with sys.version
- 'python_full_version': sys.version.split(' ', 1)[0],
- 'os_name': os.name,
- 'platform_in_venv': str(in_venv()),
- 'platform_release': platform.release(),
- 'platform_version': platform.version(),
- 'platform_machine': platform.machine(),
- 'platform_python_implementation': python_implementation(),
+ 'not in': lambda x, y: x not in y,
}
- def __init__(self, context=None):
- """
- Initialise an instance.
-
- :param context: If specified, names are looked up in this mapping.
- """
- self.context = context or {}
- self.source = None
-
- def get_fragment(self, offset):
- """
- Get the part of the source which is causing a problem.
- """
- fragment_len = 10
- s = '%r' % (self.source[offset:offset + fragment_len])
- if offset + fragment_len < len(self.source):
- s += '...'
- return s
-
- def get_handler(self, node_type):
+ def evaluate(self, expr, context):
"""
- Get a handler for the specified AST node type.
+ Evaluate a marker expression returned by the :func:`parse_requirement`
+ function in the specified context.
"""
- return getattr(self, 'do_%s' % node_type, None)
-
- def evaluate(self, node, filename=None):
- """
- Evaluate a source string or node, using ``filename`` when
- displaying errors.
- """
- if isinstance(node, string_types):
- self.source = node
- kwargs = {'mode': 'eval'}
- if filename:
- kwargs['filename'] = filename
- try:
- node = ast.parse(node, **kwargs)
- except SyntaxError as e:
- s = self.get_fragment(e.offset)
- raise SyntaxError('syntax error %s' % s)
- node_type = node.__class__.__name__.lower()
- handler = self.get_handler(node_type)
- if handler is None:
- if self.source is None:
- s = '(source not available)'
+ if isinstance(expr, string_types):
+ if expr[0] in '\'"':
+ result = expr[1:-1]
else:
- s = self.get_fragment(node.col_offset)
- raise SyntaxError("don't know how to evaluate %r %s" % (
- node_type, s))
- return handler(node)
-
- def get_attr_key(self, node):
- assert isinstance(node, ast.Attribute), 'attribute node expected'
- return '%s.%s' % (node.value.id, node.attr)
-
- def do_attribute(self, node):
- if not isinstance(node.value, ast.Name):
- valid = False
- else:
- key = self.get_attr_key(node)
- valid = key in self.context or key in self.allowed_values
- if not valid:
- raise SyntaxError('invalid expression: %s' % key)
- if key in self.context:
- result = self.context[key]
+ if expr not in context:
+ raise SyntaxError('unknown variable: %s' % expr)
+ result = context[expr]
else:
- result = self.allowed_values[key]
- return result
-
- def do_boolop(self, node):
- result = self.evaluate(node.values[0])
- is_or = node.op.__class__ is ast.Or
- is_and = node.op.__class__ is ast.And
- assert is_or or is_and
- if (is_and and result) or (is_or and not result):
- for n in node.values[1:]:
- result = self.evaluate(n)
- if (is_or and result) or (is_and and not result):
- break
- return result
-
- def do_compare(self, node):
- def sanity_check(lhsnode, rhsnode):
- valid = True
- if isinstance(lhsnode, ast.Str) and isinstance(rhsnode, ast.Str):
- valid = False
- #elif (isinstance(lhsnode, ast.Attribute)
- # and isinstance(rhsnode, ast.Attribute)):
- # klhs = self.get_attr_key(lhsnode)
- # krhs = self.get_attr_key(rhsnode)
- # valid = klhs != krhs
- if not valid:
- s = self.get_fragment(node.col_offset)
- raise SyntaxError('Invalid comparison: %s' % s)
-
- lhsnode = node.left
- lhs = self.evaluate(lhsnode)
- result = True
- for op, rhsnode in zip(node.ops, node.comparators):
- sanity_check(lhsnode, rhsnode)
- op = op.__class__.__name__.lower()
- if op not in self.operators:
- raise SyntaxError('unsupported operation: %r' % op)
- rhs = self.evaluate(rhsnode)
- result = self.operators[op](lhs, rhs)
- if not result:
- break
- lhs = rhs
- lhsnode = rhsnode
+ assert isinstance(expr, dict)
+ op = expr['op']
+ if op not in self.operations:
+ raise NotImplementedError('op not implemented: %s' % op)
+ elhs = expr['lhs']
+ erhs = expr['rhs']
+ if _is_literal(expr['lhs']) and _is_literal(expr['rhs']):
+ raise SyntaxError('invalid comparison: %s %s %s' % (elhs, op, erhs))
+
+ lhs = self.evaluate(elhs, context)
+ rhs = self.evaluate(erhs, context)
+ result = self.operations[op](lhs, rhs)
return result
- def do_expression(self, node):
- return self.evaluate(node.body)
-
- def do_name(self, node):
- valid = False
- if node.id in self.context:
- valid = True
- result = self.context[node.id]
- elif node.id in self.allowed_values:
- valid = True
- result = self.allowed_values[node.id]
- if not valid:
- raise SyntaxError('invalid expression: %s' % node.id)
- return result
+def default_context():
+ def format_full_version(info):
+ version = '%s.%s.%s' % (info.major, info.minor, info.micro)
+ kind = info.releaselevel
+ if kind != 'final':
+ version += kind[0] + str(info.serial)
+ return version
+
+ if hasattr(sys, 'implementation'):
+ implementation_version = format_full_version(sys.implementation.version)
+ implementation_name = sys.implementation.name
+ else:
+ implementation_version = '0'
+ implementation_name = ''
+
+ result = {
+ 'implementation_name': implementation_name,
+ 'implementation_version': implementation_version,
+ 'os_name': os.name,
+ 'platform_machine': platform.machine(),
+ 'platform_python_implementation': platform.python_implementation(),
+ 'platform_release': platform.release(),
+ 'platform_system': platform.system(),
+ 'platform_version': platform.version(),
+ 'platform_in_venv': str(in_venv()),
+ 'python_full_version': platform.python_version(),
+ 'python_version': platform.python_version()[:3],
+ 'sys_platform': sys.platform,
+ }
+ return result
- def do_str(self, node):
- return node.s
+DEFAULT_CONTEXT = default_context()
+del default_context
+evaluator = Evaluator()
def interpret(marker, execution_context=None):
"""
@@ -187,4 +119,13 @@ def interpret(marker, execution_context=None):
:param execution_context: The context used for name lookup.
:type execution_context: mapping
"""
- return Evaluator(execution_context).evaluate(marker.strip())
+ try:
+ expr, rest = parse_marker(marker)
+ except Exception as e:
+ raise SyntaxError('Unable to interpret marker syntax: %s: %s' % (marker, e))
+ if rest and rest[0] != '#':
+ raise SyntaxError('unexpected trailing data in marker: %s: %s' % (marker, rest))
+ context = dict(DEFAULT_CONTEXT)
+ if execution_context:
+ context.update(execution_context)
+ return evaluator.evaluate(expr, context)
diff --git a/src/rez/vendor/distlib/metadata.py b/src/rez/vendor/distlib/metadata.py
index 71525dd9e..77eed7f96 100644
--- a/src/rez/vendor/distlib/metadata.py
+++ b/src/rez/vendor/distlib/metadata.py
@@ -50,7 +50,7 @@ class MetadataInvalidError(DistlibException):
# to 1.2 once PEP 345 is supported everywhere
PKG_INFO_PREFERRED_VERSION = '1.1'
-_LINE_PREFIX_1_2 = re.compile('\n \|')
+_LINE_PREFIX_1_2 = re.compile('\n \\|')
_LINE_PREFIX_PRE_1_2 = re.compile('\n ')
_241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
'Summary', 'Description',
@@ -91,11 +91,18 @@ class MetadataInvalidError(DistlibException):
_426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By',
'Setup-Requires-Dist', 'Extension')
+# See issue #106: Sometimes 'Requires' occurs wrongly in the metadata. Include
+# it in the tuple literal below to allow it (for now)
+_566_FIELDS = _426_FIELDS + ('Description-Content-Type', 'Requires')
+
+_566_MARKERS = ('Description-Content-Type',)
+
_ALL_FIELDS = set()
_ALL_FIELDS.update(_241_FIELDS)
_ALL_FIELDS.update(_314_FIELDS)
_ALL_FIELDS.update(_345_FIELDS)
_ALL_FIELDS.update(_426_FIELDS)
+_ALL_FIELDS.update(_566_FIELDS)
EXTRA_RE = re.compile(r'''extra\s*==\s*("([^"]+)"|'([^']+)')''')
@@ -107,6 +114,8 @@ def _version2fieldlist(version):
return _314_FIELDS
elif version == '1.2':
return _345_FIELDS
+ elif version in ('1.3', '2.1'):
+ return _345_FIELDS + _566_FIELDS
elif version == '2.0':
return _426_FIELDS
raise MetadataUnrecognizedVersionError(version)
@@ -126,38 +135,51 @@ def _has_marker(keys, markers):
continue
keys.append(key)
- possible_versions = ['1.0', '1.1', '1.2', '2.0']
+ possible_versions = ['1.0', '1.1', '1.2', '1.3', '2.0', '2.1']
# first let's try to see if a field is not part of one of the version
for key in keys:
if key not in _241_FIELDS and '1.0' in possible_versions:
possible_versions.remove('1.0')
+ logger.debug('Removed 1.0 due to %s', key)
if key not in _314_FIELDS and '1.1' in possible_versions:
possible_versions.remove('1.1')
+ logger.debug('Removed 1.1 due to %s', key)
if key not in _345_FIELDS and '1.2' in possible_versions:
possible_versions.remove('1.2')
+ logger.debug('Removed 1.2 due to %s', key)
+ if key not in _566_FIELDS and '1.3' in possible_versions:
+ possible_versions.remove('1.3')
+ logger.debug('Removed 1.3 due to %s', key)
+ if key not in _566_FIELDS and '2.1' in possible_versions:
+ if key != 'Description': # In 2.1, description allowed after headers
+ possible_versions.remove('2.1')
+ logger.debug('Removed 2.1 due to %s', key)
if key not in _426_FIELDS and '2.0' in possible_versions:
possible_versions.remove('2.0')
+ logger.debug('Removed 2.0 due to %s', key)
# possible_version contains qualified versions
if len(possible_versions) == 1:
return possible_versions[0] # found !
elif len(possible_versions) == 0:
+ logger.debug('Out of options - unknown metadata set: %s', fields)
raise MetadataConflictError('Unknown metadata set')
# let's see if one unique marker is found
is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS)
is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS)
+ is_2_1 = '2.1' in possible_versions and _has_marker(keys, _566_MARKERS)
is_2_0 = '2.0' in possible_versions and _has_marker(keys, _426_MARKERS)
- if int(is_1_1) + int(is_1_2) + int(is_2_0) > 1:
- raise MetadataConflictError('You used incompatible 1.1/1.2/2.0 fields')
+ if int(is_1_1) + int(is_1_2) + int(is_2_1) + int(is_2_0) > 1:
+ raise MetadataConflictError('You used incompatible 1.1/1.2/2.0/2.1 fields')
# we have the choice, 1.0, or 1.2, or 2.0
# - 1.0 has a broken Summary field but works with all tools
# - 1.1 is to avoid
# - 1.2 fixes Summary but has little adoption
# - 2.0 adds more features and is very new
- if not is_1_1 and not is_1_2 and not is_2_0:
+ if not is_1_1 and not is_1_2 and not is_2_1 and not is_2_0:
# we couldn't find any specific marker
if PKG_INFO_PREFERRED_VERSION in possible_versions:
return PKG_INFO_PREFERRED_VERSION
@@ -165,6 +187,8 @@ def _has_marker(keys, markers):
return '1.1'
if is_1_2:
return '1.2'
+ if is_2_1:
+ return '2.1'
return '2.0'
@@ -355,7 +379,8 @@ def read_file(self, fileob):
value = msg[field]
if value is not None and value != 'UNKNOWN':
self.set(field, value)
- self.set_metadata_version()
+ # logger.debug('Attempting to set metadata for %s', self)
+ # self.set_metadata_version()
def write(self, filepath, skip_unknown=False):
"""Write the metadata fields to filepath."""
@@ -444,16 +469,16 @@ def set(self, name, value):
# check that the values are valid
if not scheme.is_valid_matcher(v.split(';')[0]):
logger.warning(
- '%r: %r is not valid (field %r)',
+ "'%s': '%s' is not valid (field '%s')",
project_name, v, name)
# FIXME this rejects UNKNOWN, is that right?
elif name in _VERSIONS_FIELDS and value is not None:
if not scheme.is_valid_constraint_list(value):
- logger.warning('%r: %r is not a valid version (field %r)',
+ logger.warning("'%s': '%s' is not a valid version (field '%s')",
project_name, value, name)
elif name in _VERSION_FIELDS and value is not None:
if not scheme.is_valid_version(value):
- logger.warning('%r: %r is not a valid version (field %r)',
+ logger.warning("'%s': '%s' is not a valid version (field '%s')",
project_name, value, name)
if name in _UNICODEFIELDS:
@@ -531,7 +556,7 @@ def are_valid_constraints(value):
for field in fields:
value = self.get(field, None)
if value is not None and not controller(value):
- warnings.append('Wrong value for %r: %s' % (field, value))
+ warnings.append("Wrong value for '%s': %s" % (field, value))
return missing, warnings
@@ -625,6 +650,7 @@ def __repr__(self):
METADATA_FILENAME = 'pydist.json'
WHEEL_METADATA_FILENAME = 'metadata.json'
+LEGACY_METADATA_FILENAME = 'METADATA'
class Metadata(object):
@@ -634,7 +660,7 @@ class Metadata(object):
instance which handles the key-value metadata format.
"""
- METADATA_VERSION_MATCHER = re.compile('^\d+(\.\d+)*$')
+ METADATA_VERSION_MATCHER = re.compile(r'^\d+(\.\d+)*$')
NAME_MATCHER = re.compile('^[0-9A-Z]([0-9A-Z_.-]*[0-9A-Z])?$', re.I)
@@ -766,6 +792,8 @@ def __getattribute__(self, key):
result = d.get(key, value)
else:
d = d.get('python.exports')
+ if not d:
+ d = self._data.get('python.exports')
if d:
result = d.get(key, value)
if result is sentinel:
@@ -784,8 +812,8 @@ def _validate_value(self, key, value, scheme=None):
if (scheme or self.scheme) not in exclusions:
m = pattern.match(value)
if not m:
- raise MetadataInvalidError('%r is an invalid value for '
- 'the %r property' % (value,
+ raise MetadataInvalidError("'%s' is an invalid value for "
+ "the '%s' property" % (value,
key))
def __setattr__(self, key, value):
diff --git a/src/rez/vendor/distlib/resources.py b/src/rez/vendor/distlib/resources.py
index 9dd8ca016..18840167a 100644
--- a/src/rez/vendor/distlib/resources.py
+++ b/src/rez/vendor/distlib/resources.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright (C) 2013-2016 Vinay Sajip.
+# Copyright (C) 2013-2017 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
@@ -289,9 +289,14 @@ def _is_directory(self, path):
}
try:
- import _frozen_importlib
- _finder_registry[_frozen_importlib.SourceFileLoader] = ResourceFinder
- _finder_registry[_frozen_importlib.FileFinder] = ResourceFinder
+ # In Python 3.6, _frozen_importlib -> _frozen_importlib_external
+ try:
+ import _frozen_importlib_external as _fi
+ except ImportError:
+ import _frozen_importlib as _fi
+ _finder_registry[_fi.SourceFileLoader] = ResourceFinder
+ _finder_registry[_fi.FileFinder] = ResourceFinder
+ del _fi
except (ImportError, AttributeError):
pass
diff --git a/src/rez/vendor/distlib/scripts.py b/src/rez/vendor/distlib/scripts.py
index c9996d598..8e22cb916 100644
--- a/src/rez/vendor/distlib/scripts.py
+++ b/src/rez/vendor/distlib/scripts.py
@@ -38,7 +38,7 @@
# check if Python is called on the first line with this expression
FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$')
-SCRIPT_TEMPLATE = '''# -*- coding: utf-8 -*-
+SCRIPT_TEMPLATE = r'''# -*- coding: utf-8 -*-
if __name__ == '__main__':
import sys, re
@@ -52,12 +52,12 @@ def _resolve(module, func):
return result
try:
- sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+ sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
func = _resolve('%(module)s', '%(func)s')
rc = func() # None interpreted as 0
except Exception as e: # only supporting Python >= 2.6
- sys.stderr.write('%%s\\n' %% e)
+ sys.stderr.write('%%s\n' %% e)
rc = 1
sys.exit(rc)
'''
@@ -136,6 +136,37 @@ def _fix_jython_executable(self, executable):
return executable
return '/usr/bin/env %s' % executable
+ def _build_shebang(self, executable, post_interp):
+ """
+ Build a shebang line. In the simple case (on Windows, or a shebang line
+ which is not too long or contains spaces) use a simple formulation for
+ the shebang. Otherwise, use /bin/sh as the executable, with a contrived
+ shebang which allows the script to run either under Python or sh, using
+ suitable quoting. Thanks to Harald Nordgren for his input.
+
+ See also: http://www.in-ulm.de/~mascheck/various/shebang/#length
+ https://hg.mozilla.org/mozilla-central/file/tip/mach
+ """
+ if os.name != 'posix':
+ simple_shebang = True
+ else:
+ # Add 3 for '#!' prefix and newline suffix.
+ shebang_length = len(executable) + len(post_interp) + 3
+ if sys.platform == 'darwin':
+ max_shebang_length = 512
+ else:
+ max_shebang_length = 127
+ simple_shebang = ((b' ' not in executable) and
+ (shebang_length <= max_shebang_length))
+
+ if simple_shebang:
+ result = b'#!' + executable + post_interp + b'\n'
+ else:
+ result = b'#!/bin/sh\n'
+ result += b"'''exec' " + executable + post_interp + b' "$0" "$@"\n'
+ result += b"' '''"
+ return result
+
def _get_shebang(self, encoding, post_interp=b'', options=None):
enquote = True
if self.executable:
@@ -169,7 +200,7 @@ def _get_shebang(self, encoding, post_interp=b'', options=None):
if (sys.platform == 'cli' and '-X:Frames' not in post_interp
and '-X:FullFrames' not in post_interp): # pragma: no cover
post_interp += b' -X:Frames'
- shebang = b'#!' + executable + post_interp + b'\n'
+ shebang = self._build_shebang(executable, post_interp)
# Python parser starts to read a script using UTF-8 until
# it gets a #coding:xxx cookie. The shebang has to be the
# first line of a file, the #coding:xxx cookie cannot be
@@ -205,8 +236,10 @@ def get_manifest(self, exename):
def _write_script(self, names, shebang, script_bytes, filenames, ext):
use_launcher = self.add_launchers and self._is_nt
linesep = os.linesep.encode('utf-8')
+ if not shebang.endswith(linesep):
+ shebang += linesep
if not use_launcher:
- script_bytes = shebang + linesep + script_bytes
+ script_bytes = shebang + script_bytes
else: # pragma: no cover
if ext == 'py':
launcher = self._get_launcher('t')
@@ -216,7 +249,7 @@ def _write_script(self, names, shebang, script_bytes, filenames, ext):
with ZipFile(stream, 'w') as zf:
zf.writestr('__main__.py', script_bytes)
zip_data = stream.getvalue()
- script_bytes = launcher + shebang + linesep + zip_data
+ script_bytes = launcher + shebang + zip_data
for name in names:
outname = os.path.join(self.target_dir, name)
if use_launcher: # pragma: no cover
diff --git a/src/rez/vendor/distlib/t32.exe b/src/rez/vendor/distlib/t32.exe
index 836211d84..a09d92687 100644
Binary files a/src/rez/vendor/distlib/t32.exe and b/src/rez/vendor/distlib/t32.exe differ
diff --git a/src/rez/vendor/distlib/t64.exe b/src/rez/vendor/distlib/t64.exe
index a401b59d6..9da9b40de 100644
Binary files a/src/rez/vendor/distlib/t64.exe and b/src/rez/vendor/distlib/t64.exe differ
diff --git a/src/rez/vendor/distlib/util.py b/src/rez/vendor/distlib/util.py
index ba2273b7d..9d4bfd3be 100644
--- a/src/rez/vendor/distlib/util.py
+++ b/src/rez/vendor/distlib/util.py
@@ -1,5 +1,5 @@
#
-# Copyright (C) 2012-2016 The Python Software Foundation.
+# Copyright (C) 2012-2017 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
import codecs
@@ -13,11 +13,10 @@
import os
import py_compile
import re
-import shutil
import socket
try:
import ssl
-except ImportError:
+except ImportError: # pragma: no cover
ssl = None
import subprocess
import sys
@@ -27,7 +26,7 @@
try:
import threading
-except ImportError:
+except ImportError: # pragma: no cover
import dummy_threading as threading
import time
@@ -35,101 +34,244 @@
from .compat import (string_types, text_type, shutil, raw_input, StringIO,
cache_from_source, urlopen, urljoin, httplib, xmlrpclib,
splittype, HTTPHandler, BaseConfigurator, valid_ident,
- Container, configparser, URLError, ZipFile, fsdecode)
+ Container, configparser, URLError, ZipFile, fsdecode,
+ unquote, urlparse)
logger = logging.getLogger(__name__)
#
-# Requirement parsing code for name + optional constraints + optional extras
+# Requirement parsing code as per PEP 508
#
-# e.g. 'foo >= 1.2, < 2.0 [bar, baz]'
-#
-# The regex can seem a bit hairy, so we build it up out of smaller pieces
-# which are manageable.
-#
-
-COMMA = r'\s*,\s*'
-COMMA_RE = re.compile(COMMA)
-
-IDENT = r'(\w|[.-])+'
-EXTRA_IDENT = r'(\*|:(\*|\w+):|' + IDENT + ')'
-VERSPEC = IDENT + r'\*?'
-RELOP = '([<>=!~]=)|[<>]'
-
-#
-# The first relop is optional - if absent, will be taken as '~='
-#
-BARE_CONSTRAINTS = ('(' + RELOP + r')?\s*(' + VERSPEC + ')(' + COMMA + '(' +
- RELOP + r')\s*(' + VERSPEC + '))*')
+IDENTIFIER = re.compile(r'^([\w\.-]+)\s*')
+VERSION_IDENTIFIER = re.compile(r'^([\w\.*+-]+)\s*')
+COMPARE_OP = re.compile(r'^(<=?|>=?|={2,3}|[~!]=)\s*')
+MARKER_OP = re.compile(r'^((<=?)|(>=?)|={2,3}|[~!]=|in|not\s+in)\s*')
+OR = re.compile(r'^or\b\s*')
+AND = re.compile(r'^and\b\s*')
+NON_SPACE = re.compile(r'(\S+)\s*')
+STRING_CHUNK = re.compile(r'([\s\w\.{}()*+#:;,/?!~`@$%^&=|<>\[\]-]+)')
-DIRECT_REF = '(from\s+(?P.*))'
-#
-# Either the bare constraints or the bare constraints in parentheses
-#
-CONSTRAINTS = (r'\(\s*(?P' + BARE_CONSTRAINTS + '|' + DIRECT_REF +
- r')\s*\)|(?P' + BARE_CONSTRAINTS + '\s*)')
-
-EXTRA_LIST = EXTRA_IDENT + '(' + COMMA + EXTRA_IDENT + ')*'
-EXTRAS = r'\[\s*(?P' + EXTRA_LIST + r')?\s*\]'
-REQUIREMENT = ('(?P' + IDENT + r')\s*(' + EXTRAS + r'\s*)?(\s*' +
- CONSTRAINTS + ')?$')
-REQUIREMENT_RE = re.compile(REQUIREMENT)
+def parse_marker(marker_string):
+ """
+ Parse a marker string and return a dictionary containing a marker expression.
-#
-# Used to scan through the constraints
-#
-RELOP_IDENT = '(?P' + RELOP + r')\s*(?P' + VERSPEC + ')'
-RELOP_IDENT_RE = re.compile(RELOP_IDENT)
+ The dictionary will contain keys "op", "lhs" and "rhs" for non-terminals in
+ the expression grammar, or strings. A string contained in quotes is to be
+ interpreted as a literal string, and a string not contained in quotes is a
+ variable (such as os_name).
+ """
+ def marker_var(remaining):
+ # either identifier, or literal string
+ m = IDENTIFIER.match(remaining)
+ if m:
+ result = m.groups()[0]
+ remaining = remaining[m.end():]
+ elif not remaining:
+ raise SyntaxError('unexpected end of input')
+ else:
+ q = remaining[0]
+ if q not in '\'"':
+ raise SyntaxError('invalid expression: %s' % remaining)
+ oq = '\'"'.replace(q, '')
+ remaining = remaining[1:]
+ parts = [q]
+ while remaining:
+ # either a string chunk, or oq, or q to terminate
+ if remaining[0] == q:
+ break
+ elif remaining[0] == oq:
+ parts.append(oq)
+ remaining = remaining[1:]
+ else:
+ m = STRING_CHUNK.match(remaining)
+ if not m:
+ raise SyntaxError('error in string literal: %s' % remaining)
+ parts.append(m.groups()[0])
+ remaining = remaining[m.end():]
+ else:
+ s = ''.join(parts)
+ raise SyntaxError('unterminated string: %s' % s)
+ parts.append(q)
+ result = ''.join(parts)
+ remaining = remaining[1:].lstrip() # skip past closing quote
+ return result, remaining
+
+ def marker_expr(remaining):
+ if remaining and remaining[0] == '(':
+ result, remaining = marker(remaining[1:].lstrip())
+ if remaining[0] != ')':
+ raise SyntaxError('unterminated parenthesis: %s' % remaining)
+ remaining = remaining[1:].lstrip()
+ else:
+ lhs, remaining = marker_var(remaining)
+ while remaining:
+ m = MARKER_OP.match(remaining)
+ if not m:
+ break
+ op = m.groups()[0]
+ remaining = remaining[m.end():]
+ rhs, remaining = marker_var(remaining)
+ lhs = {'op': op, 'lhs': lhs, 'rhs': rhs}
+ result = lhs
+ return result, remaining
+
+ def marker_and(remaining):
+ lhs, remaining = marker_expr(remaining)
+ while remaining:
+ m = AND.match(remaining)
+ if not m:
+ break
+ remaining = remaining[m.end():]
+ rhs, remaining = marker_expr(remaining)
+ lhs = {'op': 'and', 'lhs': lhs, 'rhs': rhs}
+ return lhs, remaining
+
+ def marker(remaining):
+ lhs, remaining = marker_and(remaining)
+ while remaining:
+ m = OR.match(remaining)
+ if not m:
+ break
+ remaining = remaining[m.end():]
+ rhs, remaining = marker_and(remaining)
+ lhs = {'op': 'or', 'lhs': lhs, 'rhs': rhs}
+ return lhs, remaining
-def parse_requirement(s):
+ return marker(marker_string)
- def get_constraint(m):
- d = m.groupdict()
- return d['op'], d['vn']
- result = None
- m = REQUIREMENT_RE.match(s)
- if m:
- d = m.groupdict()
- name = d['dn']
- cons = d['c1'] or d['c2']
- if not d['diref']:
- url = None
- else:
- # direct reference
- cons = None
- url = d['diref'].strip()
- if not cons:
- cons = None
- constr = ''
- rs = d['dn']
- else:
- if cons[0] not in '<>!=':
- cons = '~=' + cons
- iterator = RELOP_IDENT_RE.finditer(cons)
- cons = [get_constraint(m) for m in iterator]
- rs = '%s (%s)' % (name, ', '.join(['%s %s' % con for con in cons]))
- if not d['ex']:
+def parse_requirement(req):
+ """
+ Parse a requirement passed in as a string. Return a Container
+ whose attributes contain the various parts of the requirement.
+ """
+ remaining = req.strip()
+ if not remaining or remaining.startswith('#'):
+ return None
+ m = IDENTIFIER.match(remaining)
+ if not m:
+ raise SyntaxError('name expected: %s' % remaining)
+ distname = m.groups()[0]
+ remaining = remaining[m.end():]
+ extras = mark_expr = versions = uri = None
+ if remaining and remaining[0] == '[':
+ i = remaining.find(']', 1)
+ if i < 0:
+ raise SyntaxError('unterminated extra: %s' % remaining)
+ s = remaining[1:i]
+ remaining = remaining[i + 1:].lstrip()
+ extras = []
+ while s:
+ m = IDENTIFIER.match(s)
+ if not m:
+ raise SyntaxError('malformed extra: %s' % s)
+ extras.append(m.groups()[0])
+ s = s[m.end():]
+ if not s:
+ break
+ if s[0] != ',':
+ raise SyntaxError('comma expected in extras: %s' % s)
+ s = s[1:].lstrip()
+ if not extras:
extras = None
+ if remaining:
+ if remaining[0] == '@':
+ # it's a URI
+ remaining = remaining[1:].lstrip()
+ m = NON_SPACE.match(remaining)
+ if not m:
+ raise SyntaxError('invalid URI: %s' % remaining)
+ uri = m.groups()[0]
+ t = urlparse(uri)
+ # there are issues with Python and URL parsing, so this test
+ # is a bit crude. See bpo-20271, bpo-23505. Python doesn't
+ # always parse invalid URLs correctly - it should raise
+ # exceptions for malformed URLs
+ if not (t.scheme and t.netloc):
+ raise SyntaxError('Invalid URL: %s' % uri)
+ remaining = remaining[m.end():].lstrip()
else:
- extras = COMMA_RE.split(d['ex'])
- result = Container(name=name, constraints=cons, extras=extras,
- requirement=rs, source=s, url=url)
- return result
+
+ def get_versions(ver_remaining):
+ """
+ Return a list of operator, version tuples if any are
+ specified, else None.
+ """
+ m = COMPARE_OP.match(ver_remaining)
+ versions = None
+ if m:
+ versions = []
+ while True:
+ op = m.groups()[0]
+ ver_remaining = ver_remaining[m.end():]
+ m = VERSION_IDENTIFIER.match(ver_remaining)
+ if not m:
+ raise SyntaxError('invalid version: %s' % ver_remaining)
+ v = m.groups()[0]
+ versions.append((op, v))
+ ver_remaining = ver_remaining[m.end():]
+ if not ver_remaining or ver_remaining[0] != ',':
+ break
+ ver_remaining = ver_remaining[1:].lstrip()
+ m = COMPARE_OP.match(ver_remaining)
+ if not m:
+ raise SyntaxError('invalid constraint: %s' % ver_remaining)
+ if not versions:
+ versions = None
+ return versions, ver_remaining
+
+ if remaining[0] != '(':
+ versions, remaining = get_versions(remaining)
+ else:
+ i = remaining.find(')', 1)
+ if i < 0:
+ raise SyntaxError('unterminated parenthesis: %s' % remaining)
+ s = remaining[1:i]
+ remaining = remaining[i + 1:].lstrip()
+ # As a special diversion from PEP 508, allow a version number
+ # a.b.c in parentheses as a synonym for ~= a.b.c (because this
+ # is allowed in earlier PEPs)
+ if COMPARE_OP.match(s):
+ versions, _ = get_versions(s)
+ else:
+ m = VERSION_IDENTIFIER.match(s)
+ if not m:
+ raise SyntaxError('invalid constraint: %s' % s)
+ v = m.groups()[0]
+ s = s[m.end():].lstrip()
+ if s:
+ raise SyntaxError('invalid constraint: %s' % s)
+ versions = [('~=', v)]
+
+ if remaining:
+ if remaining[0] != ';':
+ raise SyntaxError('invalid requirement: %s' % remaining)
+ remaining = remaining[1:].lstrip()
+
+ mark_expr, remaining = parse_marker(remaining)
+
+ if remaining and remaining[0] != '#':
+ raise SyntaxError('unexpected trailing data: %s' % remaining)
+
+ if not versions:
+ rs = distname
+ else:
+ rs = '%s %s' % (distname, ', '.join(['%s %s' % con for con in versions]))
+ return Container(name=distname, extras=extras, constraints=versions,
+ marker=mark_expr, url=uri, requirement=rs)
def get_resources_dests(resources_root, rules):
"""Find destinations for resources files"""
- def get_rel_path(base, path):
+ def get_rel_path(root, path):
# normalizes and returns a lstripped-/-separated path
- base = base.replace(os.path.sep, '/')
+ root = root.replace(os.path.sep, '/')
path = path.replace(os.path.sep, '/')
- assert path.startswith(base)
- return path[len(base):].lstrip('/')
-
+ assert path.startswith(root)
+ return path[len(root):].lstrip('/')
destinations = {}
for base, suffix, dest in rules:
@@ -403,16 +545,14 @@ def copy_stream(self, instream, outfile, encoding=None):
def write_binary_file(self, path, data):
self.ensure_dir(os.path.dirname(path))
if not self.dry_run:
+ if os.path.exists(path):
+ os.remove(path)
with open(path, 'wb') as f:
f.write(data)
self.record_as_written(path)
def write_text_file(self, path, data, encoding):
- self.ensure_dir(os.path.dirname(path))
- if not self.dry_run:
- with open(path, 'wb') as f:
- f.write(data.encode(encoding))
- self.record_as_written(path)
+ self.write_binary_file(path, data.encode(encoding))
def set_mode(self, bits, mask, files):
if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'):
@@ -440,7 +580,7 @@ def ensure_dir(self, path):
if self.record:
self.dirs_created.add(path)
- def byte_compile(self, path, optimize=False, force=False, prefix=None):
+ def byte_compile(self, path, optimize=False, force=False, prefix=None, hashed_invalidation=False):
dpath = cache_from_source(path, not optimize)
logger.info('Byte-compiling %s to %s', path, dpath)
if not self.dry_run:
@@ -450,7 +590,10 @@ def byte_compile(self, path, optimize=False, force=False, prefix=None):
else:
assert path.startswith(prefix)
diagpath = path[len(prefix):]
- py_compile.compile(path, dpath, diagpath, True) # raise error
+ compile_kwargs = {}
+ if hashed_invalidation and hasattr(py_compile, 'PycInvalidationMode'):
+ compile_kwargs['invalidation_mode'] = py_compile.PycInvalidationMode.CHECKED_HASH
+ py_compile.compile(path, dpath, diagpath, True, **compile_kwargs) # raise error
self.record_as_written(dpath)
return dpath
@@ -541,7 +684,7 @@ def __init__(self, name, prefix, suffix, flags):
def value(self):
return resolve(self.prefix, self.suffix)
- def __repr__(self):
+ def __repr__(self): # pragma: no cover
return '' % (self.name, self.prefix,
self.suffix, self.flags)
@@ -568,8 +711,8 @@ def get_export_entry(specification):
if not m:
result = None
if '[' in specification or ']' in specification:
- raise DistlibException('Invalid specification '
- '%r' % specification)
+ raise DistlibException("Invalid specification "
+ "'%s'" % specification)
else:
d = m.groupdict()
name = d['name']
@@ -579,14 +722,14 @@ def get_export_entry(specification):
prefix, suffix = path, None
else:
if colons != 1:
- raise DistlibException('Invalid specification '
- '%r' % specification)
+ raise DistlibException("Invalid specification "
+ "'%s'" % specification)
prefix, suffix = path.split(':')
flags = d['flags']
if flags is None:
if '[' in specification or ']' in specification:
- raise DistlibException('Invalid specification '
- '%r' % specification)
+ raise DistlibException("Invalid specification "
+ "'%s'" % specification)
flags = []
else:
flags = [f.strip() for f in flags.split(',')]
@@ -697,6 +840,7 @@ def split_filename(filename, project_name=None):
"""
result = None
pyver = None
+ filename = unquote(filename).replace(' ', '-')
m = PYTHON_VERSION.search(filename)
if m:
pyver = m.group(1)
@@ -805,7 +949,7 @@ def __init__(self, base):
"""
# we use 'isdir' instead of 'exists', because we want to
# fail if there's a file with that name
- if not os.path.isdir(base):
+ if not os.path.isdir(base): # pragma: no cover
os.makedirs(base)
if (os.stat(base).st_mode & 0o77) != 0:
logger.warning('Directory \'%s\' is not private', base)
@@ -941,12 +1085,12 @@ def remove(self, pred, succ):
try:
preds = self._preds[succ]
succs = self._succs[pred]
- except KeyError:
+ except KeyError: # pragma: no cover
raise ValueError('%r not a successor of anything' % succ)
try:
preds.remove(pred)
succs.remove(succ)
- except KeyError:
+ except KeyError: # pragma: no cover
raise ValueError('%r not a successor of %r' % (succ, pred))
def is_step(self, step):
@@ -1072,7 +1216,7 @@ def check_path(path):
elif archive_filename.endswith('.tar'):
format = 'tar'
mode = 'r'
- else:
+ else: # pragma: no cover
raise ValueError('Unknown format for %r' % archive_filename)
try:
if format == 'zip':
@@ -1288,7 +1432,7 @@ def connect(self):
cert_reqs=cert_reqs,
ssl_version=ssl.PROTOCOL_SSLv23,
ca_certs=self.ca_certs)
- else:
+ else: # pragma: no cover
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
context.options |= ssl.OP_NO_SSLv2
if self.cert_file:
@@ -1304,7 +1448,7 @@ def connect(self):
try:
match_hostname(self.sock.getpeercert(), self.host)
logger.debug('Host verified: %s', self.host)
- except CertificateError:
+ except CertificateError: # pragma: no cover
self.sock.shutdown(socket.SHUT_RDWR)
self.sock.close()
raise
@@ -1441,6 +1585,9 @@ def _csv_open(fn, mode, **kwargs):
mode += 'b'
else:
kwargs['newline'] = ''
+ # Python 3 determines encoding from locale. Force 'utf-8'
+ # file encoding to match other forced utf-8 encoding
+ kwargs['encoding'] = 'utf-8'
return open(fn, mode, **kwargs)
@@ -1556,11 +1703,11 @@ def inc_convert(self, value):
result = json.load(f)
return result
-#
-# Mixin for running subprocesses and capturing their output
-#
class SubprocessMixin(object):
+ """
+ Mixin for running subprocesses and capturing their output
+ """
def __init__(self, verbose=False, progress=None):
self.verbose = verbose
self.progress = progress
@@ -1606,4 +1753,4 @@ def run_command(self, cmd, **kwargs):
def normalize_name(name):
"""Normalize a python package name a la PEP 503"""
# https://www.python.org/dev/peps/pep-0503/#normalized-names
- return re.sub(r"[-_.]+", "-", name).lower()
+ return re.sub('[-_.]+', '-', name).lower()
diff --git a/src/rez/vendor/distlib/version.py b/src/rez/vendor/distlib/version.py
index d3dcfa006..3eebe18ee 100644
--- a/src/rez/vendor/distlib/version.py
+++ b/src/rez/vendor/distlib/version.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright (C) 2012-2016 The Python Software Foundation.
+# Copyright (C) 2012-2017 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""
@@ -12,6 +12,7 @@
import re
from .compat import string_types
+from .util import parse_requirement
__all__ = ['NormalizedVersion', 'NormalizedMatcher',
'LegacyVersion', 'LegacyMatcher',
@@ -78,10 +79,6 @@ def is_prerelease(self):
class Matcher(object):
version_class = None
- dist_re = re.compile(r"^(\w[\s\w'.-]*)(\((.*)\))?")
- comp_re = re.compile(r'^(<=|>=|<|>|!=|={2,3}|~=)?\s*([^\s,]+)$')
- num_re = re.compile(r'^\d+(\.\d+)*$')
-
# value is either a callable or the name of a method
_operators = {
'<': lambda v, c, p: v < c,
@@ -95,26 +92,24 @@ class Matcher(object):
'!=': lambda v, c, p: v != c,
}
+ # this is a method only to support alternative implementations
+ # via overriding
+ def parse_requirement(self, s):
+ return parse_requirement(s)
+
def __init__(self, s):
if self.version_class is None:
raise ValueError('Please specify a version class')
self._string = s = s.strip()
- m = self.dist_re.match(s)
- if not m:
+ r = self.parse_requirement(s)
+ if not r:
raise ValueError('Not valid: %r' % s)
- groups = m.groups('')
- self.name = groups[0].strip()
+ self.name = r.name
self.key = self.name.lower() # for case-insensitive comparisons
clist = []
- if groups[2]:
- constraints = [c.strip() for c in groups[2].split(',')]
- for c in constraints:
- m = self.comp_re.match(c)
- if not m:
- raise ValueError('Invalid %r in %r' % (c, s))
- groups = m.groups()
- op = groups[0] or '~='
- s = groups[1]
+ if r.constraints:
+ # import pdb; pdb.set_trace()
+ for op, s in r.constraints:
if s.endswith('.*'):
if op not in ('==', '!='):
raise ValueError('\'.*\' not allowed for '
@@ -122,9 +117,8 @@ def __init__(self, s):
# Could be a partial version (e.g. for '2.*') which
# won't parse as a version, so keep it as a string
vn, prefix = s[:-2], True
- if not self.num_re.match(vn):
- # Just to check that vn is a valid version
- self.version_class(vn)
+ # Just to check that vn is a valid version
+ self.version_class(vn)
else:
# Should parse as a version, so we can create an
# instance for the comparison
@@ -137,7 +131,7 @@ def match(self, version):
Check if the provided version matches the constraints.
:param version: The version to match against this instance.
- :type version: Strring or :class:`Version` instance.
+ :type version: String or :class:`Version` instance.
"""
if isinstance(version, string_types):
version = self.version_class(version)
@@ -265,7 +259,7 @@ class NormalizedVersion(Version):
TODO: fill this out
Bad:
- 1 # mininum two numbers
+ 1 # minimum two numbers
1.2a # release level must have a release serial
1.2.3b
"""
@@ -400,7 +394,7 @@ def _match_compatible(self, version, constraint, prefix):
_SUFFIX_REPLACEMENTS = (
(re.compile('^[:~._+-]+'), ''), # remove leading puncts
- (re.compile('[,*")([\]]'), ''), # remove unwanted chars
+ (re.compile('[,*")([\\]]'), ''), # remove unwanted chars
(re.compile('[~:+_ -]'), '.'), # replace illegal chars
(re.compile('[.]{2,}'), '.'), # multiple runs of '.'
(re.compile(r'\.$'), ''), # trailing '.'
@@ -494,7 +488,7 @@ def _suggest_normalized_version(s):
rs = re.sub(r"dev$", r"dev0", rs)
# if we have something like "b-2" or "a.2" at the end of the
- # version, that is pobably beta, alpha, etc
+ # version, that is probably beta, alpha, etc
# let's remove the dash or dot
rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs)
@@ -628,7 +622,7 @@ class LegacyMatcher(Matcher):
_operators = dict(Matcher._operators)
_operators['~='] = '_match_compatible'
- numeric_re = re.compile('^(\d+(\.\d+)*)')
+ numeric_re = re.compile(r'^(\d+(\.\d+)*)')
def _match_compatible(self, version, constraint, prefix):
if version < constraint:
diff --git a/src/rez/vendor/distlib/w32.exe b/src/rez/vendor/distlib/w32.exe
index 85a90a5f5..732215a9d 100644
Binary files a/src/rez/vendor/distlib/w32.exe and b/src/rez/vendor/distlib/w32.exe differ
diff --git a/src/rez/vendor/distlib/w64.exe b/src/rez/vendor/distlib/w64.exe
index b3aea316f..c41bd0a01 100644
Binary files a/src/rez/vendor/distlib/w64.exe and b/src/rez/vendor/distlib/w64.exe differ
diff --git a/src/rez/vendor/distlib/wheel.py b/src/rez/vendor/distlib/wheel.py
index 2952b8e0b..b04bfaefe 100644
--- a/src/rez/vendor/distlib/wheel.py
+++ b/src/rez/vendor/distlib/wheel.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright (C) 2013-2016 Vinay Sajip.
+# Copyright (C) 2013-2017 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
@@ -26,7 +26,7 @@
from . import __version__, DistlibException
from .compat import sysconfig, ZipFile, fsdecode, text_type, filter
from .database import InstalledDistribution
-from .metadata import Metadata, METADATA_FILENAME
+from .metadata import Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME
from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache,
cached_property, get_cache_base, read_exports, tempdir)
from .version import NormalizedVersion, UnsupportedVersionError
@@ -35,11 +35,11 @@
cache = None # created when needed
-if hasattr(sys, 'pypy_version_info'):
+if hasattr(sys, 'pypy_version_info'): # pragma: no cover
IMP_PREFIX = 'pp'
-elif sys.platform.startswith('java'):
+elif sys.platform.startswith('java'): # pragma: no cover
IMP_PREFIX = 'jy'
-elif sys.platform == 'cli':
+elif sys.platform == 'cli': # pragma: no cover
IMP_PREFIX = 'ip'
else:
IMP_PREFIX = 'cp'
@@ -222,17 +222,23 @@ def metadata(self):
wv = wheel_metadata['Wheel-Version'].split('.', 1)
file_version = tuple([int(i) for i in wv])
if file_version < (1, 1):
- fn = 'METADATA'
+ fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME, 'METADATA']
else:
- fn = METADATA_FILENAME
- try:
- metadata_filename = posixpath.join(info_dir, fn)
- with zf.open(metadata_filename) as bf:
- wf = wrapper(bf)
- result = Metadata(fileobj=wf)
- except KeyError:
- raise ValueError('Invalid wheel, because %s is '
- 'missing' % fn)
+ fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME]
+ result = None
+ for fn in fns:
+ try:
+ metadata_filename = posixpath.join(info_dir, fn)
+ with zf.open(metadata_filename) as bf:
+ wf = wrapper(bf)
+ result = Metadata(fileobj=wf)
+ if result:
+ break
+ except KeyError:
+ pass
+ if not result:
+ raise ValueError('Invalid wheel, because metadata is '
+ 'missing: looked in %s' % ', '.join(fns))
return result
def get_wheel_metadata(self, zf):
@@ -436,7 +442,9 @@ def install(self, paths, maker, **kwargs):
This can be used to issue any warnings to raise any exceptions.
If kwarg ``lib_only`` is True, only the purelib/platlib files are
installed, and the headers, scripts, data and dist-info metadata are
- not written.
+ not written. If kwarg ``bytecode_hashed_invalidation`` is True, written
+ bytecode will try to use file-hash based invalidation (PEP-552) on
+ supported interpreter versions (CPython 2.7+).
The return value is a :class:`InstalledDistribution` instance unless
``options.lib_only`` is True, in which case the return value is ``None``.
@@ -445,6 +453,7 @@ def install(self, paths, maker, **kwargs):
dry_run = maker.dry_run
warner = kwargs.get('warner')
lib_only = kwargs.get('lib_only', False)
+ bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation', False)
pathname = os.path.join(self.dirname, self.filename)
name_ver = '%s-%s' % (self.name, self.version)
@@ -551,7 +560,8 @@ def install(self, paths, maker, **kwargs):
'%s' % outfile)
if bc and outfile.endswith('.py'):
try:
- pyc = fileop.byte_compile(outfile)
+ pyc = fileop.byte_compile(outfile,
+ hashed_invalidation=bc_hashed_invalidation)
outfiles.append(pyc)
except Exception:
# Don't give up if byte-compilation fails,
@@ -919,7 +929,7 @@ def compatible_tags():
arches = [ARCH]
if sys.platform == 'darwin':
- m = re.match('(\w+)_(\d+)_(\d+)_(\w+)$', ARCH)
+ m = re.match(r'(\w+)_(\d+)_(\d+)_(\w+)$', ARCH)
if m:
name, major, minor, arch = m.groups()
minor = int(minor)
diff --git a/src/rez/wheel.py b/src/rez/wheel.py
new file mode 100644
index 000000000..1969b10b5
--- /dev/null
+++ b/src/rez/wheel.py
@@ -0,0 +1,570 @@
+"""Install pip-package are rez-package
+
+Algorithm:
+ 1. Install with pip --install six --target STAGING_DIR
+ 2. Scan STAGING_DIR for installed packages and report
+ 3. Convert pip-package requirements to rez-requirements
+ 4. Convert pip-package to rez-package
+
+"""
+
+from rez.vendor.distlib import DistlibException
+from rez.vendor.distlib.database import DistributionPath
+from rez.vendor.distlib.markers import interpret
+from rez.vendor.distlib.util import parse_name_and_version
+from rez.utils.logging_ import print_debug
+from rez.package_maker__ import PackageMaker
+from rez.config import config
+from rez.vendor.six import six
+from rez.utils.platform_ import platform_
+from rez.utils.filesystem import retain_cwd
+from rez.backport.lru_cache import lru_cache
+
+import os
+import errno
+import shutil
+import logging
+import tempfile
+import subprocess
+
+# Public API
+__all__ = [
+ "install",
+ "download",
+ "convert",
+ "deploy",
+]
+
+# Mute unnecessary messages
+logging.getLogger("rez.vendor.distlib").setLevel(logging.CRITICAL)
+_basestring = six.string_types[0]
+_files = {}
+
+
+def install(names,
+ prefix=None,
+ no_deps=False,
+ release=False,
+ variants=None,
+ index_url=None):
+ """Convenience function to below functions
+
+ Arguments:
+ names (list): pip-formatted package names, e.g. six=1.12
+ prefix (str, optional): Absolute path to destination repository
+ no_deps (bool, optional): Do not install dependencies,
+ equivalent to pip --no-deps
+ release (bool, optional): Install onto REZ_RELEASE_PACKAGES_PATH
+ variants (list, optional): Override variants detected by WHEEL
+ index (str, optional): Override PyPI index. This should point to a
+ repository compliant with PEP 503 (the simple repository API)
+ or a local directory laid out in the same format.
+
+ """
+
+ assert prefix is None or isinstance(prefix, _basestring), (
+ "%s was not str" % prefix)
+ assert isinstance(names, (tuple, list)), "%s was not list or tuple" % names
+
+ tempdir = tempfile.mkdtemp(suffix="-rez", prefix="pip-")
+
+ distributions = download(
+ names,
+ tempdir=tempdir,
+ no_deps=no_deps,
+ index_url=index_url,
+ )
+
+ packagesdir = prefix or (
+ config.release_packages_path if release
+ else config.local_packages_path
+ )
+
+ new, existing = list(), list()
+ for dist in distributions:
+ package = convert(dist, variants=variants)
+
+ if exists(package, packagesdir):
+ existing.append(package)
+ else:
+ new.append(package)
+
+ if not new:
+ return []
+
+ for package in new:
+ deploy(package, path=packagesdir)
+
+ shutil.rmtree(tempdir)
+ return new
+
+
+def download(names, tempdir=None, no_deps=False, index_url=None):
+ """Gather pip packages in `tempdir`
+
+ Arguments:
+ names (list): Names of packages to install, in pip-format,
+ e.g. ["six==1"]
+ tempdir (str, optional): Absolute path to where pip packages go until
+ they've been installed as Rez packages, defaults to the cwd
+ no_deps (bool, optional): Equivalent to pip --no-deps, default to False
+ index_url (str, optional): Custom PyPI index
+
+ Returns:
+ distributions (list): Downloaded distlib.database.InstalledDistribution
+
+ Raises:
+ OSError: On anything gone wrong with subprocess and pip
+
+ """
+
+ assert isinstance(names, (list, tuple)), (
+ "%s was not a tuple or list" % names
+ )
+ assert all(isinstance(name, _basestring) for name in names), (
+ "%s contained non-string" % names
+ )
+
+ tempdir = tempdir or os.getcwd()
+
+ # Build pip commandline
+ cmd = [
+ "python", "-m", "pip", "install",
+ "--target", tempdir,
+
+ # Only ever consider wheels, anything else is ancient
+ "--use-pep517",
+
+ # Handle case where the Python distribution used alongside
+ # pip already has a package installed in its `site-packages/` dir.
+ "--ignore-installed",
+
+ # rez pip users don't have to see this
+ "--disable-pip-version-check",
+ ]
+
+ if index_url:
+ cmd += ["--index-url", index_url]
+
+ else:
+ # Prevent user-settings from interfering with install
+ cmd += ["--isolated"]
+
+ if no_deps:
+ # Delegate the installation of dependencies to the user
+ # This is important, as each dependency may have different
+ # requirements of its own, and variants to go with it.
+ cmd += ["--no-deps"]
+
+ cmd += names
+
+ popen = subprocess.Popen(cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ universal_newlines=True,
+ shell=True)
+
+ output = []
+ for line in iter(popen.stdout.readline, ""):
+
+ if line.startswith("DEPRECATION"):
+ # Mute warnings about Python 2 being deprecated.
+ # It's out-of-band for the casual Rez user.
+ continue
+
+ output.append(line.rstrip())
+
+ popen.wait()
+
+ if popen.returncode != 0:
+ raise OSError(
+ # pip output -------
+ # Some error here
+ # ------------------
+ "\n".join([
+ "pip output ".ljust(70, "-"),
+ "",
+ "\n".join(output),
+ "",
+ "-" * 70,
+ ])
+ )
+
+ distribution_path = DistributionPath([tempdir])
+ distributions = list(distribution_path.get_distributions())
+
+ return sorted(
+ distributions,
+
+ # Upper-case characters typically come first
+ key=lambda d: d.name.lower()
+ )
+
+
+def exists(package, path):
+ """Does `distribution` already exists as a Rez-package in `path`?
+
+ Arguments:
+ package (rez.Package):
+ path (str): Absolute path of where to look
+
+ """
+
+ try:
+ variant = next(package.iter_variants())
+ except StopIteration:
+ return False
+
+ return variant.install(path, dry_run=True) is not None
+
+
+def convert(distribution, variants=None):
+ """Make a Rez package out of `distribution`
+
+ Arguments:
+ distribution (distlib.database.InstalledDistribution): Source
+ variants (list, optional): Explicitly provide variants, defaults
+ to automatically detecting the correct variants using the
+ WHEEL metadata of `distribution`.
+
+ """
+
+ name, _ = parse_name_and_version(distribution.name_and_version)
+ name = _rez_name(distribution.name[:len(name)])
+
+ # determine variant requirements
+ variants_ = variants or []
+
+ if not variants_:
+ wheen_fname = os.path.join(distribution.path, "WHEEL")
+ with open(wheen_fname) as f:
+ variants_.extend(wheel_to_variants(f.read()))
+
+ requirements = _pip_to_rez_requirements(distribution)
+
+ maker = PackageMaker(name)
+ maker.version = distribution.version
+
+ if requirements:
+ maker.requires = requirements
+
+ if distribution.metadata.summary:
+ maker.description = distribution.metadata.summary
+
+ if variants_:
+ maker.variants = [variants_]
+
+ maker.commands = '\n'.join([
+ "env.PYTHONPATH.append('{root}/python')"
+ ])
+
+ # Store files from distribution for deployment
+ files = list()
+ for relpath, md5, size in distribution.list_installed_files():
+ root = os.path.dirname(distribution.path)
+ files += [(root, relpath)]
+
+ _files[name] = files
+
+ package = maker.get_package()
+ return package
+
+
+def deploy(package, path):
+ """Deploy `distribution` as `package` at `path`
+
+ Arguments:
+ package (rez.Package): Source package
+ path (str): Path to install directory, e.g. "~/packages"
+
+ """
+
+ def make_root(variant, destination_root):
+ for source_root, relpath in _files.pop(package.name):
+ src = os.path.join(source_root, relpath)
+ src = os.path.normpath(src)
+
+ if not os.path.exists(src):
+ continue
+
+ dst = os.path.join(root, "python", relpath)
+ dst = os.path.normpath(dst)
+
+ if not os.path.exists(os.path.dirname(dst)):
+ os.makedirs(os.path.dirname(dst))
+
+ shutil.copyfile(src, dst)
+
+ variant = next(package.iter_variants())
+ variant_ = variant.install(path)
+
+ root = variant_.root
+ if make_root and root:
+ try:
+ os.makedirs(root)
+ except OSError as e:
+ if e.errno == errno.EEXIST:
+ # That's ok
+ pass
+ else:
+ raise
+
+ with retain_cwd():
+ os.chdir(root)
+ make_root(variant_, root)
+
+ return variant_
+
+
+def wheel_to_variants(wheel):
+ """Parse WHEEL file of `distribution` as per PEP427
+
+ https://www.python.org/dev/peps/pep-0427/#file-contents
+
+ Arguments:
+ wheel (str): Contents of a WHEEL file
+
+ Returns:
+ variants (dict): With keys {"platform", "os", "python"}
+
+ """
+
+ variants = {
+ "platform": None,
+ "os": None,
+ "python": None,
+ }
+
+ py = {
+ "2": False,
+ "3": False,
+ "minor": False,
+ }
+
+ for line in wheel.splitlines():
+ line = line.rstrip()
+
+ if not line:
+ # Empty lines are allowed
+ continue
+
+ line = line.replace(" ", "")
+ key, value = line.lower().split(":")
+
+ if key == "wheel-version":
+ if value[0] != "1":
+ raise ValueError("Unsupported WHEEL format")
+
+ if key == "root-is-purelib" and value == "false":
+ variants["platform"] = platform_name()
+
+ if key == "tag":
+ # May occur multiple times
+ #
+ # Example:
+ # py2-none-any
+ # py3-none-any
+ # cp36-cp36m-win_amd64
+ #
+ py_tag, abi_tag, plat_tag = value.split("-")
+ major_ver = py_tag[2]
+
+ py[major_ver] = True
+
+ if plat_tag != "any":
+ # We could convert e.g. `win_amd64` to a Rez platform
+ # and os version, such as `platform-windows` and
+ # `os-windows.10.0.1800` but it's safe to assume that if
+ # this package was provided by pip, it must be specific
+ # to the currently running platform and os.
+
+ variants["os"] = os_name()
+ variants["platform"] = platform_name() # e.g. windows
+
+            # Indicate that this wheel depends on the Python minor version
+ # which is true of any compiled Python package.
+ py["minor"] = True
+
+ if py["minor"]:
+ # Use the actual version from the running Python
+        # rather than what's coming out of the WHEEL
+        # NOTE(review): reference URL missing here — restore before merge
+ variants["python"] = python_version()
+
+ elif py["2"] and py["3"]:
+ variants["python"] = None
+
+ elif py["2"]:
+ variants["python"] = "2"
+
+ elif py["3"]:
+ variants["python"] = "3"
+
+ return [
+ k + "-" + variants[k]
+
+ # Order is important
+ for k in ("platform",
+ "os",
+ "python")
+
+ if variants[k] is not None
+ ]
+
+
+def os_name():
+ """Return pip-compatible OS, e.g. windows-10.0 and Debian-7.6"""
+ # pip packages are no more specific than minor/major of an os
+ # E.g. windows-10.0.18362 -> windows-10.0
+ return ".".join(platform_.os.split(".")[:2])
+
+
+def platform_name():
+ return platform_.name
+
+
+@lru_cache()
+def python_version():
+ """Return major.minor version of Python, prefer current context"""
+
+ import subprocess
+ from rez.status import status
+ context = status.context
+
+ try:
+ # Use supplied Python
+ package = context.get_resolved_package("python")
+ return ".".join(str(v) for v in package.version[:2])
+
+ except AttributeError:
+        # Either not in a resolved context, or no "python" package in it
+ pass
+
+ except IndexError:
+ # We'll need this for almost every package on PyPI
+ raise IndexError("%s didn't have a minor version" % package.uri)
+
+ # Try system Python
+ popen = subprocess.Popen(
+ """\
+ python -c "import sys;print('.'.join(map(str, sys.version_info[:2])))"
+ """,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ universal_newlines=True,
+ bufsize=10 ** 4, # Enough to capture the version
+ shell=True,
+ )
+
+ if popen.wait() == 0:
+ version = popen.stdout.read().rstrip()
+ return version # 3.7
+
+
+@lru_cache()
+def pip_version():
+ """Return version of pip"""
+ import subprocess
+ from rez.status import status
+ context = status.context
+
+ try:
+ # Use supplied Python
+ package = context.get_resolved_package("pip")
+ return str(package.version)
+ except AttributeError:
+        # Either not in a resolved context, or no "pip" package in it
+ pass
+
+ # Try system Python
+ popen = subprocess.Popen(
+ "python -c \"import pip;print(pip.__version__)\"",
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ universal_newlines=True,
+ bufsize=10 ** 4, # Enough to capture the version
+ shell=True,
+ )
+
+ if popen.wait() == 0:
+ version = popen.stdout.read().rstrip()
+ return version
+
+
+_verbose = config.debug("package_release")
+
+
+def _log(msg):
+ if _verbose:
+ print_debug(msg)
+
+
+def _rez_name(pip_name):
+ return pip_name.replace("-", "_")
+
+
+def _get_dependencies(requirement):
+ requirements = ([requirement] if isinstance(requirement, basestring)
+ else requirement["requires"])
+
+ result = []
+ for package in requirements:
+ if "(" in package:
+ try:
+ name, version = parse_name_and_version(package)
+ version = version.replace("==", "")
+ name = _rez_name(name)
+ except DistlibException:
+ n, vs = package.split(' (')
+ vs = vs[:-1]
+ versions = []
+ for v in vs.split(','):
+ package = "%s (%s)" % (n, v)
+ name, version = parse_name_and_version(package)
+ version = version.replace("==", "")
+ versions.append(version)
+ version = "".join(versions)
+ name = _rez_name(name)
+
+ result.append("-".join([name, version]))
+ else:
+ name = _rez_name(package)
+ result.append(name)
+
+ return result
+
+
+def _pip_to_rez_requirements(distribution):
+ """Convert pip-requirements --> rez-requirements"""
+
+ requirements = []
+ for req in (distribution.metadata.run_requires or []):
+ if "environment" in req:
+ if interpret(req["environment"]):
+ requirements += _get_dependencies(req)
+
+ elif "extra" in req:
+ # TODO: Handle optional requirements
+ # e.g. requests[security]
+ pass
+
+ else:
+ requirements += _get_dependencies(req)
+
+ return requirements
+
+
+# Copyright 2013-2016 Allan Johns.
+#
+# This library is free software: you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation, either
+# version 3 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <https://www.gnu.org/licenses/>.
diff --git a/src/rezplugins/package_repository/filesystem.py b/src/rezplugins/package_repository/filesystem.py
index 56955911a..20aab129c 100644
--- a/src/rezplugins/package_repository/filesystem.py
+++ b/src/rezplugins/package_repository/filesystem.py
@@ -442,7 +442,13 @@ class FileSystemPackageRepository(PackageRepository):
building_prefix = ".building"
ignore_prefix = ".ignore"
- package_file_mode = (stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
+ package_file_mode = (
+ None if os.name == "nt" else
+
+ # These aren't supported on Windows
+ # https://docs.python.org/2/library/os.html#os.chmod
+ (stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
+ )
@classmethod
def name(cls):
@@ -826,6 +832,10 @@ def _create_variant(self, variant, dry_run=False, overrides=None):
# find or create the package family
family = self.get_package_family(variant_name)
+
+ if not family and dry_run:
+ return None
+
if not family:
family = self._create_family(variant_name)