Skip to content

Commit

Permalink
feat: support projects using pipenv and Pipfile
Browse files Browse the repository at this point in the history
  • Loading branch information
taleinat authored and michael-go committed Jul 19, 2018
1 parent f01462e commit 52e2b57
Show file tree
Hide file tree
Showing 25 changed files with 1,708 additions and 328 deletions.
6 changes: 2 additions & 4 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,10 +15,8 @@ cache:
- node_modules
before_script:
- export PATH=$HOME/.local/bin:$PATH
- pip install virtualenv --user `whoami`
- virtualenv ./env
- source ./env/bin/activate
- pip install pip==$PIP_VER
- pip install --user pip==$PIP_VER
- pip install --user -r dev-requirements.txt --disable-pip-version-check
script: npm test
jobs:
include:
Expand Down
11 changes: 10 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,10 @@ Snyk helps you find, fix and monitor for known vulnerabilities in your dependenc

## Snyk Python CLI Plugin

This plugin provides dependency metadata for Python projects that use `pip` and have a `requirements.txt` file.
This plugin provides dependency metadata for Python projects that use one of the following dependency management methods:

* `pip` with a `requirements.txt` file
* `pipenv` with a `Pipfile` file

## Contributing

Expand All @@ -17,6 +20,12 @@ This plugin provides dependency metadata for Python projects that use `pip` and
Prerequisites:
- Node 6 (or 8)
- Python (preferably 2.7)
- Installed outside of any virtualenv:
- [pip](https://pip.pypa.io/en/stable/installing/)
- the contents of `dev-requirements.txt`:
```
pip install --user -r dev-requirements.txt
```

Linting and testing:
```
Expand Down
2 changes: 2 additions & 0 deletions dev-requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
pipenv
virtualenv
55 changes: 41 additions & 14 deletions lib/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -14,10 +14,22 @@ module.exports.__tests = {
function inspect(root, targetFile, options) {
if (!options) { options = {}; }
var command = options.command || 'python';
var baseargs = [];

if (path.basename(targetFile) === 'Pipfile') {
// Check that pipenv is available by running it.
var pipenvCheckProc = subProcess.executeSync('pipenv', ['--version']);
if (pipenvCheckProc.status !== 0) {
throw new Error('Failed to run `pipenv`; please make sure it is installed.');
}
command = 'pipenv';
baseargs = ['run', 'python'];
}

return Promise.all([
getMetaData(command, root),
getMetaData(command, baseargs, root),
getDependencies(
command, root, targetFile, options.allowMissing, options.args),
command, baseargs, root, targetFile, options.allowMissing, options.args),
])
.then(function (result) {
return {
Expand All @@ -27,8 +39,12 @@ function inspect(root, targetFile, options) {
});
}

function getMetaData(command, root) {
return subProcess.execute(command, ['--version'], { cwd: root })
function getMetaData(command, baseargs, root) {
return subProcess.execute(
command,
[].concat(baseargs, ['--version']),
{ cwd: root }
)
.then(function (output) {
return {
name: 'snyk-python-plugin',
Expand All @@ -38,17 +54,18 @@ function getMetaData(command, root) {
}

// Hack:
// We're using Zeit assets feature in order to support Python and Go testing
// We're using Zeit assets feature in order to support Python and Go testing
// within a binary release. By doing "path.join(__dirname, 'PATH'), Zeit adds
// PATH file auto to the assets. Sadly, Zeit doesn't support (as far as I
// understand) adding a full folder as an asset, and this is why we're adding
// the required files this way. In addition, Zeit doesn't support
// the required files this way. In addition, Zeit doesn't support
// path.resolve(), and this is why I'm using path.join()
function createAssets(){
assets = [];
assets.push(path.join(__dirname, '../plug/pip_resolve.py'));
assets.push(path.join(__dirname, '../plug/distPackage.py'));
assets.push(path.join(__dirname, '../plug/package.py'));
assets.push(path.join(__dirname, '../plug/pipfile.py'));
assets.push(path.join(__dirname, '../plug/reqPackage.py'));
assets.push(path.join(__dirname, '../plug/utils.py'));

Expand All @@ -58,6 +75,11 @@ function createAssets(){
assets.push(path.join(__dirname, '../plug/requirements/vcs.py'));
assets.push(path.join(__dirname, '../plug/requirements/__init__.py'));

assets.push(path.join(__dirname, '../plug/pytoml/__init__.py'));
assets.push(path.join(__dirname, '../plug/pytoml/core.py'));
assets.push(path.join(__dirname, '../plug/pytoml/parser.py'));
assets.push(path.join(__dirname, '../plug/pytoml/writer.py'));

return assets;
}

Expand All @@ -81,7 +103,7 @@ function getFilePathRelativeToDumpDir(filePath) {

// Unix
pathParts = filePath.split('/plug/');
return pathParts[1];
return pathParts[1];
}

function dumpAllFilesInTempDir(tempDirName) {
Expand All @@ -90,18 +112,19 @@ function dumpAllFilesInTempDir(tempDirName) {
{
throw new Error('The file `' + currentReadFilePath + '` is missing');
}
var relFilePathToDumpDir =

var relFilePathToDumpDir =
getFilePathRelativeToDumpDir(currentReadFilePath);

var writeFilePath = path.join(tempDirName, relFilePathToDumpDir);

var contents = fs.readFileSync(currentReadFilePath);
writeFile(writeFilePath, contents);
});
}

function getDependencies(command, root, targetFile, allowMissing, args) {
function getDependencies(command, baseargs, root, targetFile,
allowMissing, args) {
var tempDirObj = tmp.dirSync({
unsafeCleanup: true
});
Expand All @@ -110,7 +133,8 @@ function getDependencies(command, root, targetFile, allowMissing, args) {

return subProcess.execute(
command,
buildArgs(targetFile, allowMissing, tempDirObj.name, args),
[].concat(baseargs,
buildArgs(targetFile, allowMissing, tempDirObj.name, args)),
{ cwd: root }
)
.then(function (output) {
Expand All @@ -121,8 +145,11 @@ function getDependencies(command, root, targetFile, allowMissing, args) {
tempDirObj.removeCallback();
if (typeof error === 'string') {
if (error.indexOf('Required package missing') !== -1) {
// TODO: this should be checked in the CLI, not here
throw new Error('Please run `pip install -r ' + targetFile + '`');
var errMsg = 'Please run `pip install -r ' + targetFile + '`';
if (path.basename(targetFile) === 'Pipfile') {
errMsg = 'Please run `pipenv update`';
}
throw new Error(errMsg);
}
throw new Error(error);
}
Expand Down
20 changes: 18 additions & 2 deletions lib/sub-process.js
Original file line number Diff line number Diff line change
@@ -1,10 +1,20 @@
var childProcess = require('child_process');
var path = require('path');
var process = require('process');

module.exports.execute = function (command, args, options) {
var spawnOptions = { shell: true };
/**
 * Build the options object passed to child_process.spawn / spawnSync.
 *
 * Only `cwd` and `env` are forwarded from the caller's options; `shell`
 * is always enabled so the command is resolved via the shell's PATH.
 *
 * @param {{cwd?: string, env?: Object}} [options] - caller-supplied options.
 * @returns {{shell: boolean, cwd?: string, env?: Object}} spawn options.
 */
var _makeSpawnOptions = function (options) {
  var spawnOptions = { shell: true };
  if (options && options.cwd) {
    spawnOptions.cwd = options.cwd;
  }
  if (options && options.env) {
    spawnOptions.env = options.env;
  }
  return spawnOptions;
}; // <- semicolon added: this is an assignment statement, not a declaration

module.exports.execute = function (command, args, options) {
var spawnOptions = _makeSpawnOptions(options);

return new Promise(function (resolve, reject) {
var stdout = '';
Expand All @@ -22,3 +32,9 @@ module.exports.execute = function (command, args, options) {
});
});
};

// Synchronously run `command` with `args`, returning the complete
// child_process.spawnSync result object (status, stdout, stderr, ...).
// Callers are expected to inspect `result.status` themselves.
module.exports.executeSync = function (command, args, options) {
  return childProcess.spawnSync(command, args, _makeSpawnOptions(options));
};
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
},
"main": "lib/index.js",
"scripts": {
"test": "tap `find ./test -name '*.test.js'` --timeout=300",
"test": "tap `find ./test -name '*.test.js'` --timeout=900",
"lint": "jscs `find ./lib -name '*.js'` -v && jscs `find ./test -name '*.js'` -v",
"semantic-release": "semantic-release pre && npm publish && semantic-release post"
},
Expand Down
68 changes: 45 additions & 23 deletions plug/pip_resolve.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,12 @@
import io
import sys
import os
import json
import re
import argparse
import utils
import requirements
import pipfile

# pip >= 10.0.0 moved all APIs to the _internal package reflecting the fact
# that pip does not currently have any public APIs. This is a temporary fix.
Expand All @@ -14,11 +16,13 @@
except ImportError:
from pip._internal import get_installed_distributions


def create_tree_of_packages_dependencies(dist_tree, packages_names, req_file_path, allow_missing=False):
"""Create packages dependencies tree
:param dict tree: the package tree
:param set packages_names: set of select packages to be shown in the output.
:param req_file_path: the path to requirements.txt file
:param req_file_path: the path to the dependencies file
(e.g. requirements.txt)
:rtype: dict
"""
DEPENDENCIES = 'dependencies'
Expand Down Expand Up @@ -99,17 +103,33 @@ def matches_environment(requirement):
This should be expanded to include other environment markers.
See: https://www.python.org/dev/peps/pep-0508/#environment-markers
"""
if 'sys_platform' in requirement.line:
match = sys_platform_re.findall(requirement.line)
# TODO: refactor this out into the Requirement classes
if isinstance(requirement, pipfile.PipfileRequirement):
markers_text = requirement.markers
else:
markers_text = requirement.line
if markers_text is not None and 'sys_platform' in markers_text:
match = sys_platform_re.findall(markers_text)
if len(match) > 0:
return match[0].lower() == sys_platform
return True

def is_testable(requirement):
    """Return True if the requirement can be resolved against installed dists.

    Editable ("-e") and VCS requirements have no pinned, installable release
    name/version pair, so they are excluded from the dependency tree.
    """
    # `is False` / `is None` instead of `==` comparisons (PEP 8, E712/E711);
    # both attributes are a bool and an Optional[str] respectively.
    return requirement.editable is False and requirement.vcs is None

def get_requirements_list(requirements_file_path):
# TODO: refactor recognizing the dependency manager to a single place
if os.path.basename(requirements_file_path) == 'Pipfile':
with io.open(requirements_file_path, 'r', encoding='utf-8') as f:
requirements_data = f.read()
req_list = list(pipfile.parse(requirements_data).get('packages', []))
else:
# assume this is a requirements.txt formatted file
# Note: requirements.txt files are unicode and can be in any encoding.
with open(requirements_file_path, 'r') as f:
requirements_data = f.read()
req_list = list(requirements.parse(requirements_data))

def get_requirements_list(requirements_file):
req_list = list(requirements.parse(requirements_file))
req_list = filter(matches_environment, req_list)
req_list = filter(is_testable, req_list)
required = [req.name.replace('_', '-') for req in req_list]
Expand All @@ -125,31 +145,33 @@ def create_dependencies_tree_by_req_file_path(requirements_file_path, allow_miss
# get all installed distributions tree
dist_tree = utils.construct_tree(dist_index)

# open the requirements.txt file and create dependencies tree out of it
with open(requirements_file_path, 'r') as requirements_file:
required = get_requirements_list(requirements_file)
installed = [p for p in dist_index]
packages = []
for r in required:
if r.lower() not in installed:
msg = 'Required package missing: ' + r.lower()
if allow_missing:
sys.stderr.write(msg + "\n")
else:
sys.exit(msg)
# create a list of dependencies from the dependencies file
required = get_requirements_list(requirements_file_path)
installed = [p for p in dist_index]
packages = []
for r in required:
if r.lower() not in installed:
msg = 'Required package missing: ' + r.lower()
if allow_missing:
sys.stderr.write(msg + "\n")
else:
packages.append(r);
sys.exit(msg)
else:
packages.append(r)

package_tree = create_tree_of_packages_dependencies(
dist_tree, packages, requirements_file_path, allow_missing)
# build a tree of dependencies
package_tree = create_tree_of_packages_dependencies(
dist_tree, packages, requirements_file_path, allow_missing)
print(json.dumps(package_tree))

def main():
parser = argparse.ArgumentParser()
parser.add_argument("requirements", help="requirements.txt path")
parser.add_argument("requirements",
help="dependencies file path (requirements.txt or Pipfile)")
parser.add_argument("--allow-missing",
action="store_true",
help="don't fail if some packages listed in requirements.txt are missing")
help="don't fail if some packages listed in the dependencies file " +
"are not installed")
args = parser.parse_args()

create_dependencies_tree_by_req_file_path(
Expand Down
56 changes: 56 additions & 0 deletions plug/pipfile.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
"""Simplistic parsing of Pipfile dependency files
This only extracts a small subset of the information present in a Pipfile,
as needed for the purposes of this library.
"""
from utils import is_string

import pytoml


class PipfileRequirement(object):
    """A single dependency entry parsed out of a Pipfile section."""

    def __init__(self, name):
        self.name = name

        # Defaults; from_dict() overrides these from the Pipfile entry.
        self.editable = False
        self.vcs = None
        self.vcs_uri = None
        self.version = None
        self.markers = None

    @classmethod
    def from_dict(cls, name, requirement_dict):
        """Build a requirement from one `name -> spec dict` Pipfile entry."""
        req = cls(name)

        req.version = requirement_dict.get('version')
        req.editable = requirement_dict.get('editable', False)
        req.markers = requirement_dict.get('markers')
        # Only the first matching VCS key is recorded.
        for vcs_name in ('git', 'hg', 'svn', 'bzr'):
            if vcs_name in requirement_dict:
                req.vcs = vcs_name
                req.vcs_uri = requirement_dict[vcs_name]
                break

        return req


def parse(file_contents):
    """Parse Pipfile text into its dependency sections.

    :param file_contents: the full text of a Pipfile (TOML).
    :rtype: dict mapping 'packages' and 'dev-packages' to either None
        (section absent) or a list of PipfileRequirement sorted by name.
    """
    data = pytoml.loads(file_contents)

    result = dict.fromkeys(['packages', 'dev-packages'])
    for section in result:
        section_data = data.get(section)
        if section_data is None:
            continue

        requirements = []
        for name, value in sorted(section_data.items()):
            # A bare version string is shorthand for {'version': value}.
            spec = {'version': value} if is_string(value) else value
            requirements.append(PipfileRequirement.from_dict(name, spec))
        result[section] = requirements

    return result
Loading

0 comments on commit 52e2b57

Please sign in to comment.