From 52e2b57b880249f1ad3eb912a9d1b0e2c2472328 Mon Sep 17 00:00:00 2001 From: Tal Einat Date: Tue, 19 Jun 2018 21:21:02 +0300 Subject: [PATCH] feat: support projects using pipenv and Pipfile --- .travis.yml | 6 +- README.md | 11 +- dev-requirements.txt | 2 + lib/index.js | 55 +- lib/sub-process.js | 20 +- package.json | 2 +- plug/pip_resolve.py | 68 +- plug/pipfile.py | 56 ++ plug/pytoml/LICENSE | 16 + plug/pytoml/README.txt | 6 + plug/pytoml/__init__.py | 3 + plug/pytoml/core.py | 13 + plug/pytoml/parser.py | 374 ++++++++ plug/pytoml/writer.py | 127 +++ plug/requirements/parser.py | 2 +- plug/utils.py | 10 + test/inspect.test.js | 810 ++++++++++++------ test/test-utils.js | 163 ++++ test/workspaces/pipenv-app/Pipfile | 14 + test/workspaces/pipenv-app/README | 2 + test/workspaces/pipfile-pipapp-pinned/Pipfile | 16 + .../pipfile-pipapp-pinned/Pipfile.lock | 232 +++++ test/workspaces/pipfile-pipapp-pinned/README | 10 + test/workspaces/pipfile-pipapp/Pipfile | 16 + test/workspaces/pipfile-pipapp/README | 2 + 25 files changed, 1708 insertions(+), 328 deletions(-) create mode 100644 dev-requirements.txt create mode 100644 plug/pipfile.py create mode 100644 plug/pytoml/LICENSE create mode 100644 plug/pytoml/README.txt create mode 100644 plug/pytoml/__init__.py create mode 100644 plug/pytoml/core.py create mode 100644 plug/pytoml/parser.py create mode 100644 plug/pytoml/writer.py create mode 100644 test/test-utils.js create mode 100644 test/workspaces/pipenv-app/Pipfile create mode 100644 test/workspaces/pipenv-app/README create mode 100644 test/workspaces/pipfile-pipapp-pinned/Pipfile create mode 100644 test/workspaces/pipfile-pipapp-pinned/Pipfile.lock create mode 100644 test/workspaces/pipfile-pipapp-pinned/README create mode 100644 test/workspaces/pipfile-pipapp/Pipfile create mode 100644 test/workspaces/pipfile-pipapp/README diff --git a/.travis.yml b/.travis.yml index 8dd2681b..932ab893 100644 --- a/.travis.yml +++ b/.travis.yml @@ -15,10 +15,8 @@ cache: - node_modules before_script: - export PATH=$HOME/.local/bin:$PATH - - pip install virtualenv --user `whoami` - - virtualenv ./env - - source ./env/bin/activate - - pip install pip==$PIP_VER + - pip install --user pip==$PIP_VER + - pip install --user -r dev-requirements.txt --disable-pip-version-check script: npm test jobs: include: diff --git a/README.md b/README.md index 5c51a5fd..e124ef30 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,10 @@ Snyk helps you find, fix and monitor for known vulnerabilities in your dependenc ## Snyk Python CLI Plugin -This plugin provides dependency metadata for Python projects that use `pip` and have a `requirements.txt` file. 
+This plugin provides dependency metadata for Python projects that use one of the following dependency management methods:
+
+* `pip` with a `requirements.txt` file
+* `pipenv` with a `Pipfile` file
 
 ## Contributing
 
@@ -17,6 +20,12 @@ This plugin provides dependency metadata for Python projects that use `pip` and
 Prerequisites:
 - Node 6 (or 8)
 - Python (preferably 2.7)
+- Installed outside of any virtualenv:
+  - [pip](https://pip.pypa.io/en/stable/installing/)
+  - the contents of `dev-requirements.txt`:
+    ```
+    pip install --user -r dev-requirements.txt
+    ```
 
 Linting and testing:
 ```
diff --git a/dev-requirements.txt b/dev-requirements.txt
new file mode 100644
index 00000000..e4314238
--- /dev/null
+++ b/dev-requirements.txt
@@ -0,0 +1,2 @@
+pipenv
+virtualenv
diff --git a/lib/index.js b/lib/index.js
index a661b686..96956863 100644
--- a/lib/index.js
+++ b/lib/index.js
@@ -14,10 +14,22 @@ module.exports.__tests = {
 function inspect(root, targetFile, options) {
   if (!options) { options = {}; }
   var command = options.command || 'python';
+  var baseargs = [];
+
+  if (path.basename(targetFile) === 'Pipfile') {
+    // Check that pipenv is available by running it.
+    var pipenvCheckProc = subProcess.executeSync('pipenv', ['--version']);
+    if (pipenvCheckProc.status !== 0) {
+      throw new Error('Failed to run `pipenv`; please make sure it is installed.');
+    }
+    command = 'pipenv';
+    baseargs = ['run', 'python'];
+  }
+
   return Promise.all([
-    getMetaData(command, root),
+    getMetaData(command, baseargs, root),
     getDependencies(
-      command, root, targetFile, options.allowMissing, options.args),
+      command, baseargs, root, targetFile, options.allowMissing, options.args),
   ])
   .then(function (result) {
     return {
@@ -27,8 +39,12 @@ function inspect(root, targetFile, options) {
   });
 }
 
-function getMetaData(command, root) {
-  return subProcess.execute(command, ['--version'], { cwd: root })
+function getMetaData(command, baseargs, root) {
+  return subProcess.execute(
+    command,
+    [].concat(baseargs, ['--version']),
+    { cwd: root }
+  )
   .then(function (output) {
     return {
       name: 'snyk-python-plugin',
@@ -38,17 +54,18 @@ function getMetaData(command, root) {
 }
 
 // Hack:
-// We're using Zeit assets feature in order to support Python and Go testing 
+// We're using Zeit assets feature in order to support Python and Go testing
 // within a binary release. By doing "path.join(__dirname, 'PATH'), Zeit adds
 // PATH file auto to the assets. Sadly, Zeit doesn't support (as far as I
 // understand) adding a full folder as an asset, and this is why we're adding
-// the required files this way. In addition, Zeit doesn't support
+// the required files this way.
In addition, Zeit doesn't support // path.resolve(), and this is why I'm using path.join() function createAssets(){ assets = []; assets.push(path.join(__dirname, '../plug/pip_resolve.py')); assets.push(path.join(__dirname, '../plug/distPackage.py')); assets.push(path.join(__dirname, '../plug/package.py')); + assets.push(path.join(__dirname, '../plug/pipfile.py')); assets.push(path.join(__dirname, '../plug/reqPackage.py')); assets.push(path.join(__dirname, '../plug/utils.py')); @@ -58,6 +75,11 @@ function createAssets(){ assets.push(path.join(__dirname, '../plug/requirements/vcs.py')); assets.push(path.join(__dirname, '../plug/requirements/__init__.py')); + assets.push(path.join(__dirname, '../plug/pytoml/__init__.py')); + assets.push(path.join(__dirname, '../plug/pytoml/core.py')); + assets.push(path.join(__dirname, '../plug/pytoml/parser.py')); + assets.push(path.join(__dirname, '../plug/pytoml/writer.py')); + return assets; } @@ -81,7 +103,7 @@ function getFilePathRelativeToDumpDir(filePath) { // Unix pathParts = filePath.split('/plug/'); - return pathParts[1]; + return pathParts[1]; } function dumpAllFilesInTempDir(tempDirName) { @@ -90,10 +112,10 @@ function dumpAllFilesInTempDir(tempDirName) { { throw new Error('The file `' + currentReadFilePath + '` is missing'); } - - var relFilePathToDumpDir = + + var relFilePathToDumpDir = getFilePathRelativeToDumpDir(currentReadFilePath); - + var writeFilePath = path.join(tempDirName, relFilePathToDumpDir); var contents = fs.readFileSync(currentReadFilePath); @@ -101,7 +123,8 @@ function dumpAllFilesInTempDir(tempDirName) { }); } -function getDependencies(command, root, targetFile, allowMissing, args) { +function getDependencies(command, baseargs, root, targetFile, + allowMissing, args) { var tempDirObj = tmp.dirSync({ unsafeCleanup: true }); @@ -110,7 +133,8 @@ function getDependencies(command, root, targetFile, allowMissing, args) { return subProcess.execute( command, - buildArgs(targetFile, allowMissing, tempDirObj.name, args), + [].concat(baseargs, + buildArgs(targetFile, allowMissing, tempDirObj.name, args)), { cwd: root } ) .then(function (output) { @@ -121,8 +145,11 @@ function getDependencies(command, root, targetFile, allowMissing, args) { tempDirObj.removeCallback(); if (typeof error === 'string') { if (error.indexOf('Required package missing') !== -1) { - // TODO: this should be checked in the CLI, not here - throw new Error('Please run `pip install -r ' + targetFile + '`'); + var errMsg = 'Please run `pip install -r ' + targetFile + '`'; + if (path.basename(targetFile) === 'Pipfile') { + errMsg = 'Please run `pipenv update`'; + } + throw new Error(errMsg); } throw new Error(error); } diff --git a/lib/sub-process.js b/lib/sub-process.js index ba12de24..dde0ff85 100644 --- a/lib/sub-process.js +++ b/lib/sub-process.js @@ -1,10 +1,20 @@ var childProcess = require('child_process'); +var path = require('path'); +var process = require('process'); -module.exports.execute = function (command, args, options) { - var spawnOptions = { shell: true }; +var _makeSpawnOptions = function(options) { + var spawnOptions = {shell: true}; if (options && options.cwd) { spawnOptions.cwd = options.cwd; } + if (options && options.env) { + spawnOptions.env = options.env; + } + return spawnOptions; +} + +module.exports.execute = function (command, args, options) { + var spawnOptions = _makeSpawnOptions(options); return new Promise(function (resolve, reject) { var stdout = ''; @@ -22,3 +32,9 @@ module.exports.execute = function (command, args, options) { }); }); 
}; + +module.exports.executeSync = function (command, args, options) { + var spawnOptions = _makeSpawnOptions(options); + + return childProcess.spawnSync(command, args, spawnOptions); +}; diff --git a/package.json b/package.json index 9200e053..3f1231e9 100644 --- a/package.json +++ b/package.json @@ -8,7 +8,7 @@ }, "main": "lib/index.js", "scripts": { - "test": "tap `find ./test -name '*.test.js'` --timeout=300", + "test": "tap `find ./test -name '*.test.js'` --timeout=900", "lint": "jscs `find ./lib -name '*.js'` -v && jscs `find ./test -name '*.js'` -v", "semantic-release": "semantic-release pre && npm publish && semantic-release post" }, diff --git a/plug/pip_resolve.py b/plug/pip_resolve.py index c0c5beaa..0ff1b1fb 100644 --- a/plug/pip_resolve.py +++ b/plug/pip_resolve.py @@ -1,3 +1,4 @@ +import io import sys import os import json @@ -5,6 +6,7 @@ import argparse import utils import requirements +import pipfile # pip >= 10.0.0 moved all APIs to the _internal package reflecting the fact # that pip does not currently have any public APIs. This is a temporary fix. @@ -14,11 +16,13 @@ except ImportError: from pip._internal import get_installed_distributions + def create_tree_of_packages_dependencies(dist_tree, packages_names, req_file_path, allow_missing=False): """Create packages dependencies tree :param dict tree: the package tree :param set packages_names: set of select packages to be shown in the output. - :param req_file_path: the path to requirements.txt file + :param req_file_path: the path to the dependencies file + (e.g. requirements.txt) :rtype: dict """ DEPENDENCIES = 'dependencies' @@ -99,17 +103,33 @@ def matches_environment(requirement): This should be expanded to include other environment markers. See: https://www.python.org/dev/peps/pep-0508/#environment-markers """ - if 'sys_platform' in requirement.line: - match = sys_platform_re.findall(requirement.line) + # TODO: refactor this out into the Requirement classes + if isinstance(requirement, pipfile.PipfileRequirement): + markers_text = requirement.markers + else: + markers_text = requirement.line + if markers_text is not None and 'sys_platform' in markers_text: + match = sys_platform_re.findall(markers_text) if len(match) > 0: return match[0].lower() == sys_platform return True def is_testable(requirement): - return requirement.editable == False and requirement.vcs == None + return requirement.editable == False and requirement.vcs is None + +def get_requirements_list(requirements_file_path): + # TODO: refactor recognizing the dependency manager to a single place + if os.path.basename(requirements_file_path) == 'Pipfile': + with io.open(requirements_file_path, 'r', encoding='utf-8') as f: + requirements_data = f.read() + req_list = list(pipfile.parse(requirements_data).get('packages', [])) + else: + # assume this is a requirements.txt formatted file + # Note: requirements.txt files are unicode and can be in any encoding. 
+ with open(requirements_file_path, 'r') as f: + requirements_data = f.read() + req_list = list(requirements.parse(requirements_data)) -def get_requirements_list(requirements_file): - req_list = list(requirements.parse(requirements_file)) req_list = filter(matches_environment, req_list) req_list = filter(is_testable, req_list) required = [req.name.replace('_', '-') for req in req_list] @@ -125,31 +145,33 @@ def create_dependencies_tree_by_req_file_path(requirements_file_path, allow_miss # get all installed distributions tree dist_tree = utils.construct_tree(dist_index) - # open the requirements.txt file and create dependencies tree out of it - with open(requirements_file_path, 'r') as requirements_file: - required = get_requirements_list(requirements_file) - installed = [p for p in dist_index] - packages = [] - for r in required: - if r.lower() not in installed: - msg = 'Required package missing: ' + r.lower() - if allow_missing: - sys.stderr.write(msg + "\n") - else: - sys.exit(msg) + # create a list of dependencies from the dependencies file + required = get_requirements_list(requirements_file_path) + installed = [p for p in dist_index] + packages = [] + for r in required: + if r.lower() not in installed: + msg = 'Required package missing: ' + r.lower() + if allow_missing: + sys.stderr.write(msg + "\n") else: - packages.append(r); + sys.exit(msg) + else: + packages.append(r) - package_tree = create_tree_of_packages_dependencies( - dist_tree, packages, requirements_file_path, allow_missing) + # build a tree of dependencies + package_tree = create_tree_of_packages_dependencies( + dist_tree, packages, requirements_file_path, allow_missing) print(json.dumps(package_tree)) def main(): parser = argparse.ArgumentParser() - parser.add_argument("requirements", help="requirements.txt path") + parser.add_argument("requirements", + help="dependencies file path (requirements.txt or Pipfile)") parser.add_argument("--allow-missing", action="store_true", - help="don't fail if some packages listed in requirements.txt are missing") + help="don't fail if some packages listed in the dependencies file " + + "are not installed") args = parser.parse_args() create_dependencies_tree_by_req_file_path( diff --git a/plug/pipfile.py b/plug/pipfile.py new file mode 100644 index 00000000..97562b29 --- /dev/null +++ b/plug/pipfile.py @@ -0,0 +1,56 @@ +"""Simplistic parsing of Pipfile dependency files + +This only extracts a small subset of the information present in a Pipfile, +as needed for the purposes of this library. 
+""" +from utils import is_string + +import pytoml + + +class PipfileRequirement(object): + def __init__(self, name): + self.name = name + + self.editable = False + self.vcs = None + self.vcs_uri = None + self.version = None + self.markers = None + + @classmethod + def from_dict(cls, name, requirement_dict): + req = cls(name) + + req.version = requirement_dict.get('version') + req.editable = requirement_dict.get('editable', False) + for vcs in ['git', 'hg', 'svn', 'bzr']: + if vcs in requirement_dict: + req.vcs = vcs + req.vcs_uri = requirement_dict[vcs] + break + req.markers = requirement_dict.get('markers') + + return req + + +def parse(file_contents): + data = pytoml.loads(file_contents) + + sections = ['packages', 'dev-packages'] + res = dict.fromkeys(sections) + for section in sections: + if section not in data: + continue + + section_data = data[section] + + res[section] = [ + PipfileRequirement.from_dict( + name, + value if not is_string(value) else {'version': value}, + ) + for name, value in sorted(section_data.items()) + ] + + return res diff --git a/plug/pytoml/LICENSE b/plug/pytoml/LICENSE new file mode 100644 index 00000000..9739fc67 --- /dev/null +++ b/plug/pytoml/LICENSE @@ -0,0 +1,16 @@ +No-notice MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/plug/pytoml/README.txt b/plug/pytoml/README.txt new file mode 100644 index 00000000..3c9ce749 --- /dev/null +++ b/plug/pytoml/README.txt @@ -0,0 +1,6 @@ +This is pytoml v0.1.16, taken from the avakar/pytoml GitHub repo. + +See: https://github.com/avakar/pytoml/releases/tag/v0.1.16 + +It is bundled out of necessity due to constraints of the +Snyk CLI plugin architecture. 
diff --git a/plug/pytoml/__init__.py b/plug/pytoml/__init__.py new file mode 100644 index 00000000..8dc73155 --- /dev/null +++ b/plug/pytoml/__init__.py @@ -0,0 +1,3 @@ +from .core import TomlError +from .parser import load, loads +from .writer import dump, dumps diff --git a/plug/pytoml/core.py b/plug/pytoml/core.py new file mode 100644 index 00000000..c182734e --- /dev/null +++ b/plug/pytoml/core.py @@ -0,0 +1,13 @@ +class TomlError(RuntimeError): + def __init__(self, message, line, col, filename): + RuntimeError.__init__(self, message, line, col, filename) + self.message = message + self.line = line + self.col = col + self.filename = filename + + def __str__(self): + return '{}({}, {}): {}'.format(self.filename, self.line, self.col, self.message) + + def __repr__(self): + return 'TomlError({!r}, {!r}, {!r}, {!r})'.format(self.message, self.line, self.col, self.filename) diff --git a/plug/pytoml/parser.py b/plug/pytoml/parser.py new file mode 100644 index 00000000..e03a03fb --- /dev/null +++ b/plug/pytoml/parser.py @@ -0,0 +1,374 @@ +import string, re, sys, datetime +from .core import TomlError + +if sys.version_info[0] == 2: + _chr = unichr +else: + _chr = chr + +def load(fin, translate=lambda t, x, v: v, object_pairs_hook=dict): + return loads(fin.read(), translate=translate, object_pairs_hook=object_pairs_hook, filename=getattr(fin, 'name', repr(fin))) + +def loads(s, filename='', translate=lambda t, x, v: v, object_pairs_hook=dict): + if isinstance(s, bytes): + s = s.decode('utf-8') + + s = s.replace('\r\n', '\n') + + root = object_pairs_hook() + tables = object_pairs_hook() + scope = root + + src = _Source(s, filename=filename) + ast = _p_toml(src, object_pairs_hook=object_pairs_hook) + + def error(msg): + raise TomlError(msg, pos[0], pos[1], filename) + + def process_value(v, object_pairs_hook): + kind, text, value, pos = v + if kind == 'str' and value.startswith('\n'): + value = value[1:] + if kind == 'array': + if value and any(k != value[0][0] for k, t, v, p in value[1:]): + error('array-type-mismatch') + value = [process_value(item, object_pairs_hook=object_pairs_hook) for item in value] + elif kind == 'table': + value = object_pairs_hook([(k, process_value(value[k], object_pairs_hook=object_pairs_hook)) for k in value]) + return translate(kind, text, value) + + for kind, value, pos in ast: + if kind == 'kv': + k, v = value + if k in scope: + error('duplicate_keys. 
Key "{0}" was used more than once.'.format(k)) + scope[k] = process_value(v, object_pairs_hook=object_pairs_hook) + else: + is_table_array = (kind == 'table_array') + cur = tables + for name in value[:-1]: + if isinstance(cur.get(name), list): + d, cur = cur[name][-1] + else: + d, cur = cur.setdefault(name, (None, object_pairs_hook())) + + scope = object_pairs_hook() + name = value[-1] + if name not in cur: + if is_table_array: + cur[name] = [(scope, object_pairs_hook())] + else: + cur[name] = (scope, object_pairs_hook()) + elif isinstance(cur[name], list): + if not is_table_array: + error('table_type_mismatch') + cur[name].append((scope, object_pairs_hook())) + else: + if is_table_array: + error('table_type_mismatch') + old_scope, next_table = cur[name] + if old_scope is not None: + error('duplicate_tables') + cur[name] = (scope, next_table) + + def merge_tables(scope, tables): + if scope is None: + scope = object_pairs_hook() + for k in tables: + if k in scope: + error('key_table_conflict') + v = tables[k] + if isinstance(v, list): + scope[k] = [merge_tables(sc, tbl) for sc, tbl in v] + else: + scope[k] = merge_tables(v[0], v[1]) + return scope + + return merge_tables(root, tables) + +class _Source: + def __init__(self, s, filename=None): + self.s = s + self._pos = (1, 1) + self._last = None + self._filename = filename + self.backtrack_stack = [] + + def last(self): + return self._last + + def pos(self): + return self._pos + + def fail(self): + return self._expect(None) + + def consume_dot(self): + if self.s: + self._last = self.s[0] + self.s = self[1:] + self._advance(self._last) + return self._last + return None + + def expect_dot(self): + return self._expect(self.consume_dot()) + + def consume_eof(self): + if not self.s: + self._last = '' + return True + return False + + def expect_eof(self): + return self._expect(self.consume_eof()) + + def consume(self, s): + if self.s.startswith(s): + self.s = self.s[len(s):] + self._last = s + self._advance(s) + return True + return False + + def expect(self, s): + return self._expect(self.consume(s)) + + def consume_re(self, re): + m = re.match(self.s) + if m: + self.s = self.s[len(m.group(0)):] + self._last = m + self._advance(m.group(0)) + return m + return None + + def expect_re(self, re): + return self._expect(self.consume_re(re)) + + def __enter__(self): + self.backtrack_stack.append((self.s, self._pos)) + + def __exit__(self, type, value, traceback): + if type is None: + self.backtrack_stack.pop() + else: + self.s, self._pos = self.backtrack_stack.pop() + return type == TomlError + + def commit(self): + self.backtrack_stack[-1] = (self.s, self._pos) + + def _expect(self, r): + if not r: + raise TomlError('msg', self._pos[0], self._pos[1], self._filename) + return r + + def _advance(self, s): + suffix_pos = s.rfind('\n') + if suffix_pos == -1: + self._pos = (self._pos[0], self._pos[1] + len(s)) + else: + self._pos = (self._pos[0] + s.count('\n'), len(s) - suffix_pos) + +_ews_re = re.compile(r'(?:[ \t]|#[^\n]*\n|#[^\n]*\Z|\n)*') +def _p_ews(s): + s.expect_re(_ews_re) + +_ws_re = re.compile(r'[ \t]*') +def _p_ws(s): + s.expect_re(_ws_re) + +_escapes = { 'b': '\b', 'n': '\n', 'r': '\r', 't': '\t', '"': '"', '\'': '\'', + '\\': '\\', '/': '/', 'f': '\f' } + +_basicstr_re = re.compile(r'[^"\\\000-\037]*') +_short_uni_re = re.compile(r'u([0-9a-fA-F]{4})') +_long_uni_re = re.compile(r'U([0-9a-fA-F]{8})') +_escapes_re = re.compile('[bnrt"\'\\\\/f]') +_newline_esc_re = re.compile('\n[ \t\n]*') +def _p_basicstr_content(s, content=_basicstr_re): + res 
= [] + while True: + res.append(s.expect_re(content).group(0)) + if not s.consume('\\'): + break + if s.consume_re(_newline_esc_re): + pass + elif s.consume_re(_short_uni_re) or s.consume_re(_long_uni_re): + res.append(_chr(int(s.last().group(1), 16))) + else: + s.expect_re(_escapes_re) + res.append(_escapes[s.last().group(0)]) + return ''.join(res) + +_key_re = re.compile(r'[0-9a-zA-Z-_]+') +def _p_key(s): + with s: + s.expect('"') + r = _p_basicstr_content(s, _basicstr_re) + s.expect('"') + return r + if s.consume('\''): + if s.consume('\'\''): + r = s.expect_re(_litstr_ml_re).group(0) + s.expect('\'\'\'') + else: + r = s.expect_re(_litstr_re).group(0) + s.expect('\'') + return r + return s.expect_re(_key_re).group(0) + +_float_re = re.compile(r'[+-]?(?:0|[1-9](?:_?\d)*)(?:\.\d(?:_?\d)*)?(?:[eE][+-]?(?:\d(?:_?\d)*))?') +_datetime_re = re.compile(r'(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})(\.\d+)?(?:Z|([+-]\d{2}):(\d{2}))') + +_basicstr_ml_re = re.compile(r'(?:(?:|"|"")[^"\\\000-\011\013-\037])*') +_litstr_re = re.compile(r"[^'\000-\037]*") +_litstr_ml_re = re.compile(r"(?:(?:|'|'')(?:[^'\000-\011\013-\037]))*") +def _p_value(s, object_pairs_hook): + pos = s.pos() + + if s.consume('true'): + return 'bool', s.last(), True, pos + if s.consume('false'): + return 'bool', s.last(), False, pos + + if s.consume('"'): + if s.consume('""'): + r = _p_basicstr_content(s, _basicstr_ml_re) + s.expect('"""') + else: + r = _p_basicstr_content(s, _basicstr_re) + s.expect('"') + return 'str', r, r, pos + + if s.consume('\''): + if s.consume('\'\''): + r = s.expect_re(_litstr_ml_re).group(0) + s.expect('\'\'\'') + else: + r = s.expect_re(_litstr_re).group(0) + s.expect('\'') + return 'str', r, r, pos + + if s.consume_re(_datetime_re): + m = s.last() + s0 = m.group(0) + r = map(int, m.groups()[:6]) + if m.group(7): + micro = float(m.group(7)) + else: + micro = 0 + + if m.group(8): + g = int(m.group(8), 10) * 60 + int(m.group(9), 10) + tz = _TimeZone(datetime.timedelta(0, g * 60)) + else: + tz = _TimeZone(datetime.timedelta(0, 0)) + + y, m, d, H, M, S = r + dt = datetime.datetime(y, m, d, H, M, S, int(micro * 1000000), tz) + return 'datetime', s0, dt, pos + + if s.consume_re(_float_re): + m = s.last().group(0) + r = m.replace('_','') + if '.' 
in m or 'e' in m or 'E' in m: + return 'float', m, float(r), pos + else: + return 'int', m, int(r, 10), pos + + if s.consume('['): + items = [] + with s: + while True: + _p_ews(s) + items.append(_p_value(s, object_pairs_hook=object_pairs_hook)) + s.commit() + _p_ews(s) + s.expect(',') + s.commit() + _p_ews(s) + s.expect(']') + return 'array', None, items, pos + + if s.consume('{'): + _p_ws(s) + items = object_pairs_hook() + if not s.consume('}'): + k = _p_key(s) + _p_ws(s) + s.expect('=') + _p_ws(s) + items[k] = _p_value(s, object_pairs_hook=object_pairs_hook) + _p_ws(s) + while s.consume(','): + _p_ws(s) + k = _p_key(s) + _p_ws(s) + s.expect('=') + _p_ws(s) + items[k] = _p_value(s, object_pairs_hook=object_pairs_hook) + _p_ws(s) + s.expect('}') + return 'table', None, items, pos + + s.fail() + +def _p_stmt(s, object_pairs_hook): + pos = s.pos() + if s.consume( '['): + is_array = s.consume('[') + _p_ws(s) + keys = [_p_key(s)] + _p_ws(s) + while s.consume('.'): + _p_ws(s) + keys.append(_p_key(s)) + _p_ws(s) + s.expect(']') + if is_array: + s.expect(']') + return 'table_array' if is_array else 'table', keys, pos + + key = _p_key(s) + _p_ws(s) + s.expect('=') + _p_ws(s) + value = _p_value(s, object_pairs_hook=object_pairs_hook) + return 'kv', (key, value), pos + +_stmtsep_re = re.compile(r'(?:[ \t]*(?:#[^\n]*)?\n)+[ \t]*') +def _p_toml(s, object_pairs_hook): + stmts = [] + _p_ews(s) + with s: + stmts.append(_p_stmt(s, object_pairs_hook=object_pairs_hook)) + while True: + s.commit() + s.expect_re(_stmtsep_re) + stmts.append(_p_stmt(s, object_pairs_hook=object_pairs_hook)) + _p_ews(s) + s.expect_eof() + return stmts + +class _TimeZone(datetime.tzinfo): + def __init__(self, offset): + self._offset = offset + + def utcoffset(self, dt): + return self._offset + + def dst(self, dt): + return None + + def tzname(self, dt): + m = self._offset.total_seconds() // 60 + if m < 0: + res = '-' + m = -m + else: + res = '+' + h = m // 60 + m = m - h * 60 + return '{}{:.02}{:.02}'.format(res, h, m) diff --git a/plug/pytoml/writer.py b/plug/pytoml/writer.py new file mode 100644 index 00000000..6eaf5d76 --- /dev/null +++ b/plug/pytoml/writer.py @@ -0,0 +1,127 @@ +from __future__ import unicode_literals +import io, datetime, math, sys + +if sys.version_info[0] == 3: + long = int + unicode = str + + +def dumps(obj, sort_keys=False): + fout = io.StringIO() + dump(obj, fout, sort_keys=sort_keys) + return fout.getvalue() + + +_escapes = {'\n': 'n', '\r': 'r', '\\': '\\', '\t': 't', '\b': 'b', '\f': 'f', '"': '"'} + + +def _escape_string(s): + res = [] + start = 0 + + def flush(): + if start != i: + res.append(s[start:i]) + return i + 1 + + i = 0 + while i < len(s): + c = s[i] + if c in '"\\\n\r\t\b\f': + start = flush() + res.append('\\' + _escapes[c]) + elif ord(c) < 0x20: + start = flush() + res.append('\\u%04x' % ord(c)) + i += 1 + + flush() + return '"' + ''.join(res) + '"' + + +def _escape_id(s): + if any(not c.isalnum() and c not in '-_' for c in s): + return _escape_string(s) + return s + + +def _format_list(v): + return '[{0}]'.format(', '.join(_format_value(obj) for obj in v)) + +# Formula from: +# https://docs.python.org/2/library/datetime.html#datetime.timedelta.total_seconds +# Once support for py26 is dropped, this can be replaced by td.total_seconds() +def _total_seconds(td): + return ((td.microseconds + + (td.seconds + td.days * 24 * 3600) * 10**6) / 10.0**6) + +def _format_value(v): + if isinstance(v, bool): + return 'true' if v else 'false' + if isinstance(v, int) or isinstance(v, long): + return 
unicode(v) + if isinstance(v, float): + if math.isnan(v) or math.isinf(v): + raise ValueError("{0} is not a valid TOML value".format(v)) + else: + return repr(v) + elif isinstance(v, unicode) or isinstance(v, bytes): + return _escape_string(v) + elif isinstance(v, datetime.datetime): + offs = v.utcoffset() + offs = _total_seconds(offs) // 60 if offs is not None else 0 + + if offs == 0: + suffix = 'Z' + else: + if offs > 0: + suffix = '+' + else: + suffix = '-' + offs = -offs + suffix = '{0}{1:.02}{2:.02}'.format(suffix, offs // 60, offs % 60) + + if v.microsecond: + return v.strftime('%Y-%m-%dT%H:%M:%S.%f') + suffix + else: + return v.strftime('%Y-%m-%dT%H:%M:%S') + suffix + elif isinstance(v, list): + return _format_list(v) + else: + raise RuntimeError(v) + + +def dump(obj, fout, sort_keys=False): + tables = [((), obj, False)] + + while tables: + name, table, is_array = tables.pop() + if name: + section_name = '.'.join(_escape_id(c) for c in name) + if is_array: + fout.write('[[{0}]]\n'.format(section_name)) + else: + fout.write('[{0}]\n'.format(section_name)) + + table_keys = sorted(table.keys()) if sort_keys else table.keys() + new_tables = [] + has_kv = False + for k in table_keys: + v = table[k] + if isinstance(v, dict): + new_tables.append((name + (k,), v, False)) + elif isinstance(v, list) and v and all(isinstance(o, dict) for o in v): + new_tables.extend((name + (k,), d, True) for d in v) + elif v is None: + # based on mojombo's comment: https://github.com/toml-lang/toml/issues/146#issuecomment-25019344 + fout.write( + '#{} = null # To use: uncomment and replace null with value\n'.format(_escape_id(k))) + has_kv = True + else: + fout.write('{0} = {1}\n'.format(_escape_id(k), _format_value(v))) + has_kv = True + + tables.extend(reversed(new_tables)) + + if (name or has_kv) and tables: + fout.write('\n') diff --git a/plug/requirements/parser.py b/plug/requirements/parser.py index 881d94c2..c8ff26d3 100644 --- a/plug/requirements/parser.py +++ b/plug/requirements/parser.py @@ -26,7 +26,7 @@ def parse(reqstr): reqstr = reqstr.read() # combine consecutive lines seperated by a backslash - reqstr = reqstr.replace('\\\n', ' '); + reqstr = reqstr.replace('\\\n', ' ') for line in reqstr.splitlines(): line = line.strip() diff --git a/plug/utils.py b/plug/utils.py index 45addd1e..33347c3a 100644 --- a/plug/utils.py +++ b/plug/utils.py @@ -4,6 +4,7 @@ from collections import OrderedDict except ImportError: from ordereddict import OrderedDict +import sys from reqPackage import ReqPackage from distPackage import DistPackage __version__ = '0.10.1' @@ -58,3 +59,12 @@ def guess_version(pkg_key, default='?'): return default else: return getattr(m, '__version__', default) + + +def is_string(obj): + """Check whether an object is a string""" + if sys.version_info < (3,): + # Python 2.x only + return isinstance(obj, basestring) + else: + return isinstance(obj, str) diff --git a/test/inspect.test.js b/test/inspect.test.js index 9bc35039..edbb3d8c 100644 --- a/test/inspect.test.js +++ b/test/inspect.test.js @@ -1,27 +1,194 @@ var test = require('tap').test; +var fs = require('fs'); +var os = require('os'); var path = require('path'); +var process = require('process'); var sinon = require('sinon'); var plugin = require('../lib'); var subProcess = require('../lib/sub-process'); +var testUtils = require('./test-utils'); -test('install requirements (may take a while)', function (t) { + +test('install requirements in "pip-app" venv (may take a while)', function (t) { chdirWorkspaces('pip-app'); - return 
subProcess.execute('pip', - ['install', '-r', 'requirements.txt', '--disable-pip-version-check'] - ) - .then(function () { + testUtils.ensureVirtualenv('pip-app'); + t.teardown(testUtils.activateVirtualenv('pip-app')); + try { + testUtils.pipInstall(); t.pass('installed pip packages'); - }) - .catch(function (error) { + t.end(); + } + catch (error) { t.bailout(error); - }); + } }); +var pipAppExpectedDependencies = { + django: { + data: { + name: 'django', + version: '1.6.1', + }, + msg: 'django looks ok', + }, + jinja2: { + data: { + name: 'jinja2', + version: '2.7.2', + dependencies: { + markupsafe: { + name: 'markupsafe', + version: /.+$/, + }, + }, + }, + msg: 'jinja2 looks ok', + }, + 'python-etcd': { + data: { + name: 'python-etcd', + version: '0.4.5', + dependencies: { + dnspython: { + name: 'dnspython', + version: /.+$/, + }, + urllib3: { + name: 'urllib3', + version: /.+$/, + }, + }, + }, + msg: 'python-etcd is ok', + }, + 'django-select2': { + data: { + name: 'django-select2', + version: '6.0.1', + dependencies: { + 'django-appconf': { + name: 'django-appconf', + }, + }, + }, + msg: 'django-select2 looks ok', + }, + irc: { + data: { + name: 'irc', + version: '16.2', + dependencies: { + 'more-itertools': {}, + 'jaraco.functools': {}, + 'jaraco.collections': { + dependencies: { + 'jaraco.text': {}, + } + }, + 'jaraco.text': { + dependencies: { + 'jaraco.collections': {} + } + }, + } + }, + msg: 'irc ok, even though it has a cyclic dep, yay!', + }, + testtools: { + data: { + name: 'testtools', + version: '2.3.0', + dependencies: { + 'pbr': {}, + 'extras': {}, + 'fixtures': {}, + 'unittest2': {}, + 'traceback2': {}, + 'python-mimeparse': {}, + } + }, + msg: 'testtools ok, even though it\'s cyclic, yay!', + }, +}; + test('inspect', function (t) { - chdirWorkspaces('pip-app'); + return Promise.resolve().then(function () { + chdirWorkspaces('pip-app'); + t.teardown(testUtils.activateVirtualenv('pip-app')); + }) + .then(function () { + return plugin.inspect('.', 'requirements.txt') + }) + .then(function (result) { + var plugin = result.plugin; + var pkg = result.package; + + t.test('plugin', function (t) { + t.ok(plugin, 'plugin'); + t.equal(plugin.name, 'snyk-python-plugin', 'name'); + t.match(plugin.runtime, 'Python', 'runtime'); + t.end(); + }); + + t.test('package', function (t) { + t.ok(pkg, 'package'); + t.equal(pkg.name, 'pip-app', 'name'); + t.equal(pkg.version, '0.0.0', 'version'); + t.end(); + }); + + t.test('package dependencies', function (t) { + Object.keys(pipAppExpectedDependencies).forEach(function (depName) { + t.match( + pkg.dependencies[depName], + pipAppExpectedDependencies[depName].data, + pipAppExpectedDependencies[depName].msg + ); + }); + + t.end(); + }); + + t.end(); + }); +}); - return plugin.inspect('.', 'requirements.txt') +test('transitive dep not installed', function (t) { + return Promise.resolve().then(function () { + chdirWorkspaces('pip-app'); + var venvCreated = testUtils.ensureVirtualenv('pip-app-without-markupsafe'); + t.teardown(testUtils.activateVirtualenv('pip-app-without-markupsafe')); + if (venvCreated) { + testUtils.pipInstall(); + testUtils.pipUninstall('MarkupSafe'); + } + }) + .then(function () { + return plugin.inspect('.', 'requirements.txt') + .then(function () { + t.fail('should have failed'); + }) + .catch(function (error) { + t.equal(error.message, 'Please run `pip install -r requirements.txt`'); + t.end(); + }) + }); +}); + +test('transitive dep not installed, but with allowMissing option', function (t) { + return 
Promise.resolve().then(function () { + chdirWorkspaces('pip-app'); + var venvCreated = testUtils.ensureVirtualenv('pip-app-without-markupsafe'); + t.teardown(testUtils.activateVirtualenv('pip-app-without-markupsafe')); + if (venvCreated) { + testUtils.pipInstall(); + testUtils.pipUninstall('MarkupSafe'); + } + }) + .then(function () { + return plugin.inspect('.', 'requirements.txt', {allowMissing: true}) + }) .then(function (result) { var plugin = result.plugin; var pkg = result.package; @@ -37,6 +204,7 @@ test('inspect', function (t) { t.ok(pkg, 'package'); t.equal(pkg.name, 'pip-app', 'name'); t.equal(pkg.version, '0.0.0', 'version'); + // t.equal(pkg.full, 'pip-app@0.0.0', 'full'); // do we need this? t.end(); }); @@ -49,12 +217,7 @@ test('inspect', function (t) { t.match(pkg.dependencies.jinja2, { name: 'jinja2', version: '2.7.2', - dependencies: { - markupsafe: { - name: 'markupsafe', - version: /.+$/, - }, - }, + dependencies: {}, }, 'jinja2 looks ok'); t.match(pkg.dependencies['python-etcd'], { @@ -82,38 +245,6 @@ test('inspect', function (t) { }, }, 'django-select2 looks ok'); - t.match(pkg.dependencies['irc'], { - name: 'irc', - version: '16.2', - dependencies: { - 'more-itertools': {}, - 'jaraco.functools': {}, - 'jaraco.collections': { - dependencies: { - 'jaraco.text': {}, - } - }, - 'jaraco.text': { - dependencies: { - 'jaraco.collections': {} - } - }, - } - }, 'irc ok, even though it has a cyclic dep, yay!') - - t.match(pkg.dependencies['testtools'], { - name: 'testtools', - version: '2.3.0', - dependencies: { - 'pbr': {}, - 'extras': {}, - 'fixtures': {}, - 'unittest2': {}, - 'traceback2': {}, - 'python-mimeparse': {}, - } - }, 'testtools ok, even though it\'s cyclic, yay!') - t.end(); }); @@ -121,275 +252,390 @@ test('inspect', function (t) { }); }); -test('transitive dep not installed', function (t) { - chdirWorkspaces('pip-app'); - return pipUninstall('MarkupSafe') +test('deps not installed', function (t) { + return Promise.resolve().then(function () { + chdirWorkspaces('pip-app-deps-not-installed'); + t.teardown(testUtils.activateVirtualenv('pip-app')); + }) .then(function () { - return plugin.inspect('.', 'requirements.txt') - .then(function () { - t.fail('should have failed'); - }) - .catch(function (error) { - t.equal(error.message, 'Please run `pip install -r requirements.txt`'); - }); - }) + return plugin.inspect('.', 'requirements.txt') + }) + .then(function () { + t.fail('should have failed'); + }) + .catch(function (error) { + t.equal(error.message, 'Please run `pip install -r requirements.txt`'); + t.end(); + }); }); -test('transitive dep not installed, but with allowMissing option', function (t) { - chdirWorkspaces('pip-app'); - - return pipInstall() - .then(function () { - return pipUninstall('MarkupSafe') - }).then(function () { - return plugin.inspect('.', 'requirements.txt', {allowMissing: true}) - .then(function (result) { - var plugin = result.plugin; - var pkg = result.package; - - t.test('plugin', function (t) { - t.ok(plugin, 'plugin'); - t.equal(plugin.name, 'snyk-python-plugin', 'name'); - t.match(plugin.runtime, 'Python', 'runtime'); - t.end(); - }); - - t.test('package', function (t) { - t.ok(pkg, 'package'); - t.equal(pkg.name, 'pip-app', 'name'); - t.equal(pkg.version, '0.0.0', 'version'); - // t.equal(pkg.full, 'pip-app@0.0.0', 'full'); // do we need this? 
- t.end(); - }); - - t.test('package dependencies', function (t) { - t.same(pkg.dependencies.django, { - name: 'django', - version: '1.6.1', - }, 'django looks ok'); - - t.match(pkg.dependencies.jinja2, { - name: 'jinja2', - version: '2.7.2', - dependencies: {}, - }, 'jinja2 looks ok'); - - t.match(pkg.dependencies['python-etcd'], { - name: 'python-etcd', - version: '0.4.5', - dependencies: { - dnspython: { - name: 'dnspython', - version: /.+$/, - }, - urllib3: { - name: 'urllib3', - version: /.+$/, - }, - }, - }, 'python-etcd is ok'); - - t.match(pkg.dependencies['django-select2'], { - name: 'django-select2', - version: '6.0.1', - dependencies: { - 'django-appconf': { - name: 'django-appconf', - }, - }, - }, 'django-select2 looks ok'); - - t.end(); - }); - - t.end(); - }); - - }) -}); +test('deps not installed, but with allowMissing option', function (t) { + return Promise.resolve().then(function () { + chdirWorkspaces('pip-app-deps-not-installed'); + t.teardown(testUtils.activateVirtualenv('pip-app')); + }) + .then(function () { + return plugin.inspect('.', 'requirements.txt', { allowMissing: true }) + }) + .then(function (result) { + var plugin = result.plugin; + var pkg = result.package; -test('deps not installed', function (t) { - chdirWorkspaces('pip-app-deps-not-installed'); - return plugin.inspect('.', 'requirements.txt') - .then(function () { - t.fail('should have failed'); - }) - .catch(function (error) { - t.equal(error.message, 'Please run `pip install -r requirements.txt`'); + t.test('plugin', function (t) { + t.ok(plugin, 'plugin'); + t.equal(plugin.name, 'snyk-python-plugin', 'name'); + t.match(plugin.runtime, 'Python', 'runtime'); + t.end(); }); -}); -test('deps not installed, but with allowMissing option', function (t) { - chdirWorkspaces('pip-app-deps-not-installed'); - return plugin.inspect('.', 'requirements.txt', { allowMissing: true }) - .then(function (result) { - var plugin = result.plugin; - var pkg = result.package; - - t.test('plugin', function (t) { - t.ok(plugin, 'plugin'); - t.equal(plugin.name, 'snyk-python-plugin', 'name'); - t.match(plugin.runtime, 'Python', 'runtime'); - t.end(); - }); - - t.test('package', function (t) { - t.ok(pkg, 'package'); - t.equal(pkg.name, 'pip-app-deps-not-installed', 'name'); - t.equal(pkg.version, '0.0.0', 'version'); - t.end(); - }); + t.test('package', function (t) { + t.ok(pkg, 'package'); + t.equal(pkg.name, 'pip-app-deps-not-installed', 'name'); + t.equal(pkg.version, '0.0.0', 'version'); + t.end(); }); + + t.end(); + }); }); test('uses provided exec command', function (t) { - var command = 'echo'; - var execute = sinon.stub(subProcess, 'execute'); - execute.onFirstCall().returns(Promise.resolve('abc')); - execute.onSecondCall().returns(Promise.resolve('{}')); - t.teardown(execute.restore); - - return plugin.inspect('.', 'requirements.txt', { - command: command, + return Promise.resolve().then(function () { + var execute = sinon.stub(subProcess, 'execute'); + execute.onFirstCall().returns(Promise.resolve('abc')); + execute.onSecondCall().returns(Promise.resolve('{}')); + t.teardown(execute.restore); + return execute; }) - .then(function () { + .then(function (execute) { + var command = 'echo'; + return plugin.inspect('.', 'requirements.txt', { command: command }) + .then(function (result) { t.ok(execute.calledTwice, 'execute called twice'); t.equal(execute.firstCall.args[0], command, 'uses command'); t.equal(execute.secondCall.args[0], command, 'uses command'); + t.end(); }); + }) }); test('package name differs from 
requirement', function (t) { - chdirWorkspaces('pip-app-deps-with-dashes'); - return pipInstall() - .then(function () { - return plugin.inspect('.', 'requirements.txt') - .then(function (result) { - var pkg = result.package; - t.same(pkg.dependencies['dj-database-url'], { - name: 'dj-database-url', - version: '0.4.2', - }, 'dj-database-url looks ok'); - t.same(pkg.dependencies['posix-ipc'], { - name: 'posix-ipc', - version: '1.0.0', - }, 'posix-ipc looks ok'); - t.end(); - }); - }) - .catch(function (error) { - t.fail(error); - }); + return Promise.resolve().then(function () { + chdirWorkspaces('pip-app-deps-with-dashes'); + var venvCreated = testUtils.ensureVirtualenv('pip-app-deps-with-dashes'); + t.teardown(testUtils.activateVirtualenv('pip-app-deps-with-dashes')); + if (venvCreated) { + testUtils.pipInstall(); + } + }) + .then(function () { + return plugin.inspect('.', 'requirements.txt') + }) + .then(function (result) { + var pkg = result.package; + t.same(pkg.dependencies['dj-database-url'], { + name: 'dj-database-url', + version: '0.4.2', + }, 'dj-database-url looks ok'); + t.same(pkg.dependencies['posix-ipc'], { + name: 'posix-ipc', + version: '1.0.0', + }, 'posix-ipc looks ok'); + t.end(); + }); }); test('package depends on platform', function (t) { - chdirWorkspaces('pip-app-deps-conditional'); - return pipInstall() - .then(function () { - return plugin.inspect('.', 'requirements.txt') - .then(function (result) { - var pkg = result.package; - t.notOk(pkg.dependencies.pypiwin32, 'win32 dep ignored'); - t.same(pkg.dependencies['posix-ipc'], { - name: 'posix-ipc', - version: '1.0.0', - }, 'posix-ipc looks ok'); - t.end(); - }); - }) - .catch(function (error) { - t.fail(error); - }); + return Promise.resolve().then(function () { + chdirWorkspaces('pip-app-deps-conditional'); + var venvCreated = testUtils.ensureVirtualenv('pip-app-deps-conditional'); + t.teardown(testUtils.activateVirtualenv('pip-app-deps-conditional')); + if (venvCreated) { + testUtils.pipInstall(); + } + }) + .then(function () { + return plugin.inspect('.', 'requirements.txt') + }) + .then(function (result) { + var pkg = result.package; + t.notOk(pkg.dependencies.pypiwin32, 'win32 dep ignored'); + t.same(pkg.dependencies['posix-ipc'], { + name: 'posix-ipc', + version: '1.0.0', + }, 'posix-ipc looks ok'); + t.end(); + }); }); test('editables ignored', function (t) { - chdirWorkspaces('pip-app-deps-editable'); - return pipInstall() - .then(function () { - return plugin.inspect('.', 'requirements.txt') - .then(function (result) { - var pkg = result.package; - t.notOk(pkg.dependencies['simple'], 'editable dep ignored'); - t.notOk(pkg.dependencies['sample'], 'editable subdir dep ignored'); - t.same(pkg.dependencies['posix-ipc'], { - name: 'posix-ipc', - version: '1.0.0', - }, 'posix-ipc looks ok'); - t.end(); - }); - }) - .catch(function (error) { - t.fail(error); - }); + return Promise.resolve().then(function () { + chdirWorkspaces('pip-app-deps-editable'); + var venvCreated = testUtils.ensureVirtualenv('pip-app-deps-editable'); + t.teardown(testUtils.activateVirtualenv('pip-app-deps-editable')); + if (venvCreated) { + testUtils.pipInstall(); + } + }) + .then(function () { + return plugin.inspect('.', 'requirements.txt') + }) + .then(function (result) { + var pkg = result.package; + t.notOk(pkg.dependencies['simple'], 'editable dep ignored'); + t.notOk(pkg.dependencies['sample'], 'editable subdir dep ignored'); + t.same(pkg.dependencies['posix-ipc'], { + name: 'posix-ipc', + version: '1.0.0', + }, 'posix-ipc looks ok'); 
+ t.end(); + }); }); -test('deps withs options', function (t) { - chdirWorkspaces('pip-app-with-options'); +test('deps with options', function (t) { + return Promise.resolve().then(function () { + chdirWorkspaces('pip-app-with-options'); + var venvCreated = testUtils.ensureVirtualenv('pip-app-with-options'); + t.teardown(testUtils.activateVirtualenv('pip-app-with-options')); + if (venvCreated) { + testUtils.pipInstall(); + } + }) + .then(function () { + return plugin.inspect('.', 'requirements.txt') + }) + .then(function (result) { + var plugin = result.plugin; + var pkg = result.package; - return pipInstall() - .then(function () { - return plugin.inspect('.', 'requirements.txt') - .then(function (result) { - var plugin = result.plugin; - var pkg = result.package; - - t.test('plugin', function (t) { - t.ok(plugin, 'plugin'); - t.equal(plugin.name, 'snyk-python-plugin', 'name'); - t.match(plugin.runtime, 'Python', 'runtime'); - t.end(); - }); - - t.test('package', function (t) { - t.ok(pkg, 'package'); - t.equal(pkg.name, 'pip-app-with-options', 'name'); - t.equal(pkg.version, '0.0.0', 'version'); - t.end(); - }); - - t.test('package dependencies', function (t) { - t.match(pkg.dependencies.markupsafe, { - name: 'markupsafe', - version: '1.0', - }, 'MarkupSafe looks ok'); - - t.match(pkg.dependencies.dnspython, { - name: 'dnspython', - version: '1.13.0', - }, 'dnspython looks ok'); - - t.end(); - }); - - t.end(); - }); + t.test('plugin', function (t) { + t.ok(plugin, 'plugin'); + t.equal(plugin.name, 'snyk-python-plugin', 'name'); + t.match(plugin.runtime, 'Python', 'runtime'); + t.end(); }); + + t.test('package', function (t) { + t.ok(pkg, 'package'); + t.equal(pkg.name, 'pip-app-with-options', 'name'); + t.equal(pkg.version, '0.0.0', 'version'); + t.end(); + }); + + t.test('package dependencies', function (t) { + t.match(pkg.dependencies.markupsafe, { + name: 'markupsafe', + version: '1.0', + }, 'MarkupSafe looks ok'); + + t.match(pkg.dependencies.dnspython, { + name: 'dnspython', + version: '1.13.0', + }, 'dnspython looks ok'); + + t.end(); + }); + + t.end(); + }); }); test('trusted host ignored', function (t) { - chdirWorkspaces('pip-app-trusted-host'); - return pipInstall() - .then(function () { - return plugin.inspect('.', 'requirements.txt') - .then(function (result) { - t.ok(result.package.dependencies, 'does not error'); - t.end(); - }); - }) - .catch(function (error) { - t.fail(error); + return Promise.resolve().then(function () { + chdirWorkspaces('pip-app-trusted-host'); + var venvCreated = testUtils.ensureVirtualenv('pip-app-trusted-host'); + t.teardown(testUtils.activateVirtualenv('pip-app-trusted-host')); + if (venvCreated) { + testUtils.pipInstall(); + } + }) + .then(function () { + return plugin.inspect('.', 'requirements.txt') + }) + .then(function (result) { + t.ok(result.package.dependencies, 'does not error'); + t.end(); + }); +}); + +test('inspect Pipfile', function (t) { + return Promise.resolve().then(function () { + chdirWorkspaces('pipfile-pipapp'); + t.teardown(testUtils.activateVirtualenv('pip-app')); + }) + .then(function () { + return plugin.inspect('.', 'Pipfile') + }) + .then(function (result) { + var pkg = result.package; + + t.test('package dependencies', function (t) { + t.notOk(pkg.dependencies['django'], 'django skipped (editable)'); + + t.match(pkg.dependencies['django-select2'], { + name: 'django-select2', + version: '6.0.1', + dependencies: { + 'django-appconf': { + name: 'django-appconf', + }, + }, + }, 'django-select2 looks ok'); + + 
t.match(pkg.dependencies['python-etcd'], { + name: 'python-etcd', + version: /^0\.4.*$/, + }, 'python-etcd looks ok'); + + t.notOk(pkg.dependencies['e1839a8'], + 'dummy local package skipped (editable)'); + + t.ok(pkg.dependencies['jinja2'] !== undefined, 'jinja2 found'); + t.ok(pkg.dependencies['testtools'] !== undefined, 'testtools found'); + + t.end(); }); + + t.end(); + }); }); -function pipInstall() { - return subProcess.execute('pip', - ['install', '-r', 'requirements.txt', '--disable-pip-version-check']); -} +test('inspect Pipfile with pinned versions', function (t) { + return Promise.resolve().then(function () { + chdirWorkspaces('pipfile-pipapp-pinned'); + t.teardown(testUtils.activateVirtualenv('pip-app')); + }) + .then(function () { + return plugin.inspect('.', 'Pipfile') + }) + .then(function (result) { + var pkg = result.package; + + t.test('package dependencies', function (t) { + Object.keys(pipAppExpectedDependencies).forEach(function (depName) { + t.match( + pkg.dependencies[depName], + pipAppExpectedDependencies[depName].data, + pipAppExpectedDependencies[depName].msg + ); + }); + + t.end(); + }); + + t.end(); + }); +}); + +var pipenvAppExpectedDependencies = { + 'python-etcd': { + data: { + name: 'python-etcd', + version: /^0\.4/, + }, + msg: 'python-etcd1 found with version >=0.4,<0.5', + }, + jinja2: { + data: { + name: 'jinja2', + version: /^0|1|2\.[0-6]/, + }, + msg: 'jinja2 found with version <2.7', + }, + testtools: { + data: { + name: 'testtools', + }, + msg: 'testtools found', + }, +}; + +test('inspect pipenv app with user-created virtualenv', function (t) { + return Promise.resolve().then(function () { + chdirWorkspaces('pipenv-app'); + + var venvCreated = testUtils.ensureVirtualenv('pipenv-app'); + t.teardown(testUtils.activateVirtualenv('pipenv-app')); + if (venvCreated) { + return testUtils.pipenvInstall(); + } + }) + .then(function () { + return plugin.inspect('.', 'Pipfile') + }) + .then(function (result) { + var pkg = result.package; + + t.test('package dependencies', function (t) { + Object.keys(pipenvAppExpectedDependencies).forEach(function (depName) { + t.match( + pkg.dependencies[depName], + pipenvAppExpectedDependencies[depName].data, + pipenvAppExpectedDependencies[depName].msg + ); + }); + + t.end(); + }); + + t.end(); + }); +}); + +test('inspect pipenv app with auto-created virtualenv', function (t) { + return Promise.resolve().then(function () { + chdirWorkspaces('pipenv-app'); + + // Use several teardown callbacks, called in reverse order. + var teardowns = []; + t.teardown(function () { + while (teardowns.length > 0) { + teardowns.pop()(); + } + }); + + if (testUtils.getActiveVenvName() !== null) { + teardowns.push(testUtils.deactivateVirtualenv()); + } + + // Set the WORKON_HOME env var to make pipenv put its auto-created + // virtualenv where we want it. + teardowns.push(testUtils.setWorkonHome()); + + // Have pipenv create and update a virtualenv if it doesn't exist. 
+ var proc = subProcess.executeSync('pipenv', ['--venv']); + if (proc.status !== 0) { + teardowns.push(function () { + fs.unlinkSync('Pipfile.lock', function (err) {}); + }); + var updateProc = subProcess.executeSync('pipenv', ['update']); + if (updateProc.status !== 0) { + t.bailout('Failed to install dependencies using `pipenv update`'); + } + } + }) + .then(function () { + return plugin.inspect('.', 'Pipfile') + }) + .then(function (result) { + var pkg = result.package; + + t.test('package dependencies', function (t) { + Object.keys(pipenvAppExpectedDependencies).forEach(function (depName) { + t.match( + pkg.dependencies[depName], + pipenvAppExpectedDependencies[depName].data, + pipenvAppExpectedDependencies[depName].msg + ); + }); + + t.end(); + }); + + t.end(); + }); +}); -function pipUninstall(pkgName) { - return subProcess.execute('pip', - ['uninstall', '-y', pkgName]); -} function chdirWorkspaces(dir) { process.chdir(path.resolve(__dirname, 'workspaces', dir)); diff --git a/test/test-utils.js b/test/test-utils.js new file mode 100644 index 00000000..5dd0a65f --- /dev/null +++ b/test/test-utils.js @@ -0,0 +1,163 @@ +var fs = require('fs'); +var path = require('path'); +var process = require('process'); + +var subProcess = require('../lib/sub-process'); + + +module.exports = { + getActiveVenvName, + activateVirtualenv, + deactivateVirtualenv, + ensureVirtualenv, + pipInstall, + pipUninstall, + pipenvInstall, + setWorkonHome, +}; + +function getActiveVenvName() { + return process.env.VIRTUAL_ENV + ? path.basename(process.env.VIRTUAL_ENV) + : null; +} + +function activateVirtualenv(venvName) { + var venvDir = path.join(path.resolve(__dirname), '.venvs', venvName); + + var binDirName = process.platform === 'win32' ? 'Scripts' : 'bin'; + var binDir = path.resolve(venvDir, binDirName); + + var origProcessEnv = Object.assign({}, process.env); + + if (process.env.VIRTUAL_ENV) { + var pathElements = process.env.PATH.split(path.delimiter); + var index = pathElements.indexOf(process.env.VIRTUAL_ENV); + if (index > -1) { + pathElements.splice(index, 1); + } + process.env.PATH = pathElements.join(path.delimiter); + } + + // simulate the "activate" virtualenv script + process.env.PATH = binDir + path.delimiter + process.env.PATH; + process.env.VIRTUAL_ENV = venvDir; + delete process.env.PYTHONHOME; + + return function revert() { + process.env.VIRTUAL_ENV = origProcessEnv.VIRTUAL_ENV; + process.env.PATH = origProcessEnv.PATH; + process.env.PYTHONHOME = origProcessEnv.PYTHONHOME; + }; +} + +function deactivateVirtualenv() { + if (getActiveVenvName() === null) { + console.warn( + 'Attempted to deactivate a virtualenv when none was active.'); + return; + } + + var origProcessEnv = Object.assign({}, process.env); + + // simulate the "deactivate" virtualenv script + var pathElements = process.env.PATH.split(path.delimiter); + var binDirName = process.platform === 'win32' ? 
'Scripts' : 'bin'; + var venvBinDir = path.join(process.env.VIRTUAL_ENV, binDirName); + var index = pathElements.indexOf(venvBinDir); + if (index > -1) { + pathElements.splice(index, 1); + } + process.env.PATH = pathElements.join(path.delimiter); + delete process.env.VIRTUAL_ENV; + delete process.env.PYTHONHOME; + + return function revert() { + process.env.VIRTUAL_ENV = origProcessEnv.VIRTUAL_ENV; + process.env.PATH = origProcessEnv.PATH; + process.env.PYTHONHOME = origProcessEnv.PYTHONHOME; + }; +} + +function ensureVirtualenv(venvName) { + var venvsBaseDir = path.join(path.resolve(__dirname), '.venvs'); + try { + fs.accessSync(venvsBaseDir, fs.R_OK); + } + catch (e) { + fs.mkdirSync(venvsBaseDir); + } + + var venvDir = path.join(venvsBaseDir, venvName); + try { + fs.accessSync(venvDir, fs.R_OK); + } + catch (e) { + var revert = function () {}; + if (process.env.VIRTUAL_ENV) { + revert = deactivateVirtualenv(); + } + try { + var proc = subProcess.executeSync('virtualenv', [venvDir]); + if (proc.status !== 0) { + console.error(proc.stdout.toString() + '\n' + proc.stderr.toString()); + throw new Error('Failed to create virtualenv in ' + venvDir); + } + } + finally { + revert(); + } + return true; + } + + return false; +} + +function pipInstall() { + var proc = subProcess.executeSync('pip', + ['install', '-r', 'requirements.txt', '--disable-pip-version-check']); + if (proc.status !== 0) { + throw new Error( + 'Failed to install requirements with pip.' + + ' venv = ' + JSON.stringify(getActiveVenvName()) + ); + } +} + +function pipUninstall(pkgName) { + var proc = subProcess.executeSync('pip', + ['uninstall', '-y', pkgName]); + if (proc.status !== 0) { + throw new Error( + 'Failed to uninstall "' + pkgName + '" with pip.' + + ' venv = ' + JSON.stringify(getActiveVenvName()) + ); + } +} + +function pipenvInstall() { + subProcess.executeSync('pip', ['install', 'pipenv']); + try { + subProcess.executeSync('pipenv', ['update']); + } + finally { + fs.unlinkSync('Pipfile.lock', function (err) {}); + } +} + +function setWorkonHome() { + var venvsBaseDir = path.join(path.resolve(__dirname), '.venvs'); + try { + fs.accessSync(venvsBaseDir, fs.R_OK); + } + catch (e) { + fs.mkdirSync(venvsBaseDir); + } + + var origWorkonHome = process.env.WORKON_HOME; + process.env.WORKON_HOME = venvsBaseDir; + + return function revert() { + process.env.WORKON_HOME = origWorkonHome; + }; +} diff --git a/test/workspaces/pipenv-app/Pipfile b/test/workspaces/pipenv-app/Pipfile new file mode 100644 index 00000000..99f115e0 --- /dev/null +++ b/test/workspaces/pipenv-app/Pipfile @@ -0,0 +1,14 @@ +[[source]] +url = "https://pypi.org/simple" +verify_ssl = true +name = "pypi" + +[packages] +python-etcd = ">=0.4,<0.5" +testtools = "*" +"Jinja2" = { version = "<2.7" } + +[dev-packages] +virtualenv = "*" + +[requires] diff --git a/test/workspaces/pipenv-app/README b/test/workspaces/pipenv-app/README new file mode 100644 index 00000000..8f8f215f --- /dev/null +++ b/test/workspaces/pipenv-app/README @@ -0,0 +1,2 @@ +This is a small pipenv-based config with a variety of types of dependency +defintions, based on pip-app and the pipenv example files. 
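For context, the Pipfile-based workspaces above are resolved by the same `plug/pip_resolve.py` script as the `requirements.txt` projects; for a Pipfile, `lib/index.js` simply runs it through `pipenv run python`. The sketch below shows the roughly equivalent invocation by hand. It is illustrative only: the resolver path and workspace path are placeholders, and in practice the plugin copies `plug/` into a temporary directory and points Python at that copy.

```python
# Illustrative sketch of what lib/index.js effectively runs for a Pipfile project.
# Paths are placeholders; the real plugin uses a temp copy of plug/pip_resolve.py.
from __future__ import print_function

import json
import subprocess

cmd = [
    'pipenv', 'run', 'python',   # command + baseargs chosen when targetFile is a Pipfile
    'plug/pip_resolve.py',       # placeholder path to the resolver script
    'Pipfile',                   # the dependencies file (positional argument)
    '--allow-missing',           # optional: tolerate packages that are not installed
]
output = subprocess.check_output(cmd, cwd='test/workspaces/pipenv-app')

# The script prints a JSON dependency tree; the plugin returns it as result.package,
# with top-level name, version and dependencies fields (as the tests above expect).
tree = json.loads(output.decode('utf-8'))
print(tree['name'], tree['version'], sorted(tree['dependencies'].keys()))
```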
diff --git a/test/workspaces/pipfile-pipapp-pinned/Pipfile b/test/workspaces/pipfile-pipapp-pinned/Pipfile new file mode 100644 index 00000000..eb087de7 --- /dev/null +++ b/test/workspaces/pipfile-pipapp-pinned/Pipfile @@ -0,0 +1,16 @@ +[[source]] +url = "https://pypi.org/simple" +verify_ssl = true +name = "pypi" + +[packages] +python-etcd = "==0.4.5" +irc = "==16.2" +testtools = "==2.3.0" +"Jinja2" = "==2.7.2" +Django = "==1.6.1" +"Django-Select2" = "==6.0.1" + +[dev-packages] + +[requires] diff --git a/test/workspaces/pipfile-pipapp-pinned/Pipfile.lock b/test/workspaces/pipfile-pipapp-pinned/Pipfile.lock new file mode 100644 index 00000000..cbe0120a --- /dev/null +++ b/test/workspaces/pipfile-pipapp-pinned/Pipfile.lock @@ -0,0 +1,232 @@ +{ + "_meta": { + "hash": { + "sha256": "bd1e9c623aaaa8657487c300cc7af3ef666c70fbd375aeeaf0f918db1428ab6b" + }, + "pipfile-spec": 6, + "requires": {}, + "sources": [ + { + "name": "pypi", + "url": "https://pypi.org/simple", + "verify_ssl": true + } + ] + }, + "default": { + "argparse": { + "hashes": [ + "sha256:62b089a55be1d8949cd2bc7e0df0bddb9e028faefc8c32038cc84862aefdd6e4", + "sha256:c31647edb69fd3d465a847ea3157d37bed1f95f19760b11a47aa91c04b666314" + ], + "version": "==1.4.0" + }, + "django": { + "hashes": [ + "sha256:989d42289663ac88169ac2abe8d50b82b29b2fe135307badf588a3d2235c1eef", + "sha256:cf011874f54a16e7452e0fe1e7f4ec144b95b47ecf31766c9f1f8cf438f09c06" + ], + "index": "pypi", + "version": "==1.6.1" + }, + "django-appconf": { + "hashes": [ + "sha256:6a4d9aea683b4c224d97ab8ee11ad2d29a37072c0c6c509896dd9857466fb261", + "sha256:ddab987d14b26731352c01ee69c090a4ebfc9141ed223bef039d79587f22acd9" + ], + "version": "==1.0.2" + }, + "django-select2": { + "hashes": [ + "sha256:a07ed1d3bf51a4f512c3d5880fd6dfb6539fa73ed79522185b4ff1233125c8e5" + ], + "index": "pypi", + "version": "==6.0.1" + }, + "dnspython": { + "hashes": [ + "sha256:40f563e1f7a7b80dc5a4e76ad75c23da53d62f1e15e6e517293b04e1f84ead7c", + "sha256:861e6e58faa730f9845aaaa9c6c832851fbf89382ac52915a51f89c71accdd31" + ], + "version": "==1.15.0" + }, + "extras": { + "hashes": [ + "sha256:132e36de10b9c91d5d4cc620160a476e0468a88f16c9431817a6729611a81b4e", + "sha256:f689f08df47e2decf76aa6208c081306e7bd472630eb1ec8a875c67de2366e87" + ], + "version": "==1.0.0" + }, + "fixtures": { + "hashes": [ + "sha256:2a551b0421101de112d9497fb5f6fd25e5019391c0fbec9bad591ecae981420d", + "sha256:fcf0d60234f1544da717a9738325812de1f42c2fa085e2d9252d8fff5712b2ef" + ], + "version": "==3.0.0" + }, + "inflect": { + "hashes": [ + "sha256:51d3d0fe0db77fa9315c45ce5933a64d9043a36d42e8b1a082d3379dc39754cf", + "sha256:7a71eed8a666c0c2b0463bb850a9a5c51603699836bf251521374ceffeb9c322" + ], + "version": "==0.3.1" + }, + "irc": { + "hashes": [ + "sha256:7d55f587c0c801b756653056242cb3735b3e5221c010815f3228081a0169f187", + "sha256:cce90b6ed5e9bdbe9dfe1fbe6ff40752924a234656cad73744f4780086018e8b" + ], + "index": "pypi", + "version": "==16.2" + }, + "jaraco.classes": { + "hashes": [ + "sha256:2c646c10deb2058dcb0e2b022a1124f5a1cbf8643fd09f5065fb5715982b4d9a", + "sha256:d101c45efd518a3ed76409a23ad2319bafeda13c3252395fe4d8ec195dd45f00" + ], + "version": "==1.5" + }, + "jaraco.collections": { + "hashes": [ + "sha256:1e6904f662a31dce5ba5702eae342d7a66d178dd1e4195824385ce5988e361d5", + "sha256:d9cb10e7bca89b680381adb38476f6d782ec45751d802ce7387da5310b669515" + ], + "version": "==1.5.3" + }, + "jaraco.functools": { + "hashes": [ + "sha256:1d653159ae32e00f30390a6db355fdb091ea16ae27fea2dee8ed8f03e4eaa62d", + 
"sha256:60835b5fba2205d1cab2fd40884a0bb538693bc2f602d54c45ec3bab74c425fc" + ], + "version": "==1.19" + }, + "jaraco.itertools": { + "hashes": [ + "sha256:9ca316fbfffef258f80405547c1e08462c879108ce918b226ccd330b4f2de2fa", + "sha256:9f70940b6dd63ac12c1f60c3fac6c01163804a21a046848b0e58070788cacf75" + ], + "version": "==2.3" + }, + "jaraco.logging": { + "hashes": [ + "sha256:23b10eb0d65afe042fad16eabdf5956bdc863e110c5cf3fa1b4bc59f3a7f37a5", + "sha256:625977c56644d040d2023d8a3639d6b9ad0f490bc26e17be7bb5754a255178fb" + ], + "version": "==1.5.2" + }, + "jaraco.stream": { + "hashes": [ + "sha256:2b56514708224864d93b8a6e916c02ae7ae4ce822f05946704719bb294971a1b", + "sha256:7e0829a81c373515f1c1ca16e39aa44b2f209fa8ce078daaa9532dabbfbd608d" + ], + "version": "==1.2" + }, + "jaraco.text": { + "hashes": [ + "sha256:d1b29e833f317629a7c8aff3c03521e61e1df6da188d7fa2f698b0281e1a5a2b", + "sha256:e5868da943d272af894a147680b1ac5902b82e59a2407e5c95d3f22ce8e049e2" + ], + "version": "==1.10.1" + }, + "jinja2": { + "hashes": [ + "sha256:310a35fbccac3af13ebf927297f871ac656b9da1d248b1fe6765affa71b53235" + ], + "index": "pypi", + "version": "==2.7.2" + }, + "linecache2": { + "hashes": [ + "sha256:4b26ff4e7110db76eeb6f5a7b64a82623839d595c2038eeda662f2a2db78e97c", + "sha256:e78be9c0a0dfcbac712fe04fbf92b96cddae80b1b842f24248214c8496f006ef" + ], + "version": "==1.0.0" + }, + "markupsafe": { + "hashes": [ + "sha256:a6be69091dac236ea9c6bc7d012beab42010fa914c459791d627dad4910eb665" + ], + "version": "==1.0" + }, + "more-itertools": { + "hashes": [ + "sha256:2b6b9893337bfd9166bee6a62c2b0c9fe7735dcf85948b387ec8cba30e85d8e8", + "sha256:6703844a52d3588f951883005efcf555e49566a48afd4db4e965d69b883980d3", + "sha256:a18d870ef2ffca2b8463c0070ad17b5978056f403fb64e3f15fe62a52db21cc0" + ], + "version": "==4.2.0" + }, + "pbr": { + "hashes": [ + "sha256:3747c6f017f2dc099986c325239661948f9f5176f6880d9fdef164cb664cd665", + "sha256:a9c27eb8f0e24e786e544b2dbaedb729c9d8546342b5a6818d8eda098ad4340d" + ], + "version": "==4.0.4" + }, + "python-etcd": { + "hashes": [ + "sha256:f1b5ebb825a3e8190494f5ce1509fde9069f2754838ed90402a8c11e1f52b8cb" + ], + "index": "pypi", + "version": "==0.4.5" + }, + "python-mimeparse": { + "hashes": [ + "sha256:76e4b03d700a641fd7761d3cd4fdbbdcd787eade1ebfac43f877016328334f78", + "sha256:a295f03ff20341491bfe4717a39cd0a8cc9afad619ba44b77e86b0ab8a2b8282" + ], + "version": "==1.6.0" + }, + "pytz": { + "hashes": [ + "sha256:65ae0c8101309c45772196b21b74c46b2e5d11b6275c45d251b150d5da334555", + "sha256:c06425302f2cf668f1bba7a0a03f3c1d34d4ebeef2c72003da308b3947c7f749" + ], + "version": "==2018.4" + }, + "six": { + "hashes": [ + "sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9", + "sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb" + ], + "version": "==1.11.0" + }, + "tempora": { + "hashes": [ + "sha256:4be862dbdc6fcbcc57f4b355258daa41fc0932af99680142bfa20836379f57a5", + "sha256:e7496537422e72bb331e460b5d1c864dd95d42474ca454a61ab8760be37a2f26" + ], + "version": "==1.11" + }, + "testtools": { + "hashes": [ + "sha256:5827ec6cf8233e0f29f51025addd713ca010061204fdea77484a2934690a0559", + "sha256:a2be448869171b6e0f26d9544088b8b98439ec180ce272040236d570a40bcbed" + ], + "index": "pypi", + "version": "==2.3.0" + }, + "traceback2": { + "hashes": [ + "sha256:05acc67a09980c2ecfedd3423f7ae0104839eccb55fc645773e1caa0951c3030", + "sha256:8253cebec4b19094d67cc5ed5af99bf1dba1285292226e98a31929f87a5d6b23" + ], + "version": "==1.4.0" + }, + "unittest2": { + "hashes": [ + 
"sha256:13f77d0875db6d9b435e1d4f41e74ad4cc2eb6e1d5c824996092b3430f088bb8", + "sha256:22882a0e418c284e1f718a822b3b022944d53d2d908e1690b319a9d3eb2c0579" + ], + "version": "==1.1.0" + }, + "urllib3": { + "hashes": [ + "sha256:a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf", + "sha256:b5725a0bd4ba422ab0e66e89e030c806576753ea3ee08554382c14e685d117b5" + ], + "version": "==1.23" + } + }, + "develop": {} +} diff --git a/test/workspaces/pipfile-pipapp-pinned/README b/test/workspaces/pipfile-pipapp-pinned/README new file mode 100644 index 00000000..d425257e --- /dev/null +++ b/test/workspaces/pipfile-pipapp-pinned/README @@ -0,0 +1,10 @@ +This is a small pipenv-based config with all dependency versions pinned +in the Pipfile, based on pip-app. + +This was created by running the following commands in a clean python3.6 +virtualenv: + +pip install pipenv +pipenv install -r test/workspaces/pip-app/requirements.txt +# manually empty the "requires" section of the Pipfile +pipenv lock diff --git a/test/workspaces/pipfile-pipapp/Pipfile b/test/workspaces/pipfile-pipapp/Pipfile new file mode 100644 index 00000000..19823db0 --- /dev/null +++ b/test/workspaces/pipfile-pipapp/Pipfile @@ -0,0 +1,16 @@ +[[source]] +url = "https://pypi.org/simple" +verify_ssl = true +name = "pypi" + +[packages] +python-etcd = ">=0.4,<0.5" +testtools = "*" +"Jinja2" = { version = "*" } +django = { git = 'https://github.com/django/django.git', ref = '1.6.1', editable = true } +"Django-Select2" = { version = "==6.0.1" } +"e1839a8" = {path = ".", editable = true} + +[dev-packages] + +[requires] diff --git a/test/workspaces/pipfile-pipapp/README b/test/workspaces/pipfile-pipapp/README new file mode 100644 index 00000000..8f8f215f --- /dev/null +++ b/test/workspaces/pipfile-pipapp/README @@ -0,0 +1,2 @@ +This is a small pipenv-based config with a variety of types of dependency +defintions, based on pip-app and the pipenv example files.