From 9bb547a3b8b68964d3dc19c3c6f5dad777d7cec5 Mon Sep 17 00:00:00 2001 From: Dylan Pulver Date: Tue, 9 Jul 2024 10:40:06 -0400 Subject: [PATCH 1/4] feat: add support for pyproject.toml and env.yml/env.yaml files --- safety/scan/finder/file_finder.py | 26 +++++++++++++++++--------- 1 file changed, 17 insertions(+), 9 deletions(-) diff --git a/safety/scan/finder/file_finder.py b/safety/scan/finder/file_finder.py index aaacd7e..778b016 100644 --- a/safety/scan/finder/file_finder.py +++ b/safety/scan/finder/file_finder.py @@ -27,7 +27,7 @@ def should_exclude(excludes: Set[Path], to_analyze: Path) -> bool: return True except ValueError: pass - + return False @@ -37,9 +37,9 @@ class FileFinder(): find depending on the language type. """ - def __init__(self, max_level: int, ecosystems: List[Ecosystem], target: Path, + def __init__(self, max_level: int, ecosystems: List[Ecosystem], target: Path, console, live_status=None, - exclude: Optional[List[str]] = None, + exclude: Optional[List[str]] = None, include_files: Optional[Dict[FileType, List[Path]]] = None, handlers: Optional[Set[FileHandler]] = None) -> None: self.max_level = max_level @@ -47,9 +47,9 @@ def __init__(self, max_level: int, ecosystems: List[Ecosystem], target: Path, self.include_files = include_files if not handlers: - handlers = set(ECOSYSTEM_HANDLER_MAPPING[ecosystem]() + handlers = set(ECOSYSTEM_HANDLER_MAPPING[ecosystem]() for ecosystem in ecosystems) - + self.handlers = handlers self.file_count = 0 self.exclude_dirs: Set[Path] = set() @@ -65,7 +65,7 @@ def __init__(self, max_level: int, ecosystems: List[Ecosystem], target: Path, self.console = console self.live_status = live_status - + def process_directory(self, dir_path, max_deep: Optional[int]=None) -> Tuple[str, Dict[str, Set[Path]]]: files: Dict[str, Set[Path]] = {} level : int = 0 @@ -79,10 +79,10 @@ def process_directory(self, dir_path, max_deep: Optional[int]=None) -> Tuple[str dirs[:] = [d for d in dirs if not should_exclude(excludes=self.exclude_dirs, to_analyze=(root_path / Path(d)))] - + if dirs: LOG.info(f"Directories to inspect -> {', '.join(dirs)}") - + LOG.info(f"Current -> {root}") if self.live_status: self.live_status.update(f":mag: Scanning {root}") @@ -92,7 +92,7 @@ def process_directory(self, dir_path, max_deep: Optional[int]=None) -> Tuple[str del dirs[:] filenames[:] = [f for f in filenames if not should_exclude( - excludes=self.exclude_files, + excludes=self.exclude_files, to_analyze=Path(f))] self.file_count += len(filenames) @@ -106,6 +106,14 @@ def process_directory(self, dir_path, max_deep: Optional[int]=None) -> Tuple[str files[file_type.value] = set() files[file_type.value].add(inspectable_file) break + + special_files = {'pyproject.toml', 'env.yml', 'env.yaml'} + if file_name in special_files: + file_type = FileType(file_name) + inspectable_file = Path(root, file_name) + if file_type.value not in files or not files[file_type.value]: + files[file_type.value] = set() + files[file_type.value].add(inspectable_file) level += 1 return dir_path, files From 4c46b28734442e0be5b6c9300f974110d4fdb8e0 Mon Sep 17 00:00:00 2001 From: Dylan Pulver Date: Wed, 17 Jul 2024 13:43:55 -0400 Subject: [PATCH 2/4] WIP: pyproject.toml support --- pyproject.toml | 5 +- safety/scan/command.py | 123 +++++++++--------- safety/scan/ecosystems/python/dependencies.py | 55 ++++++-- safety/scan/finder/file_finder.py | 4 +- safety/scan/finder/handlers.py | 82 +++++++++--- 5 files changed, 184 insertions(+), 85 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 
b0f0765..64e4add 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,6 @@ [build-system] -requires = ["setuptools>=42"] +requires = ["setuptools>=42", "insecure-package" + + ] build-backend = "setuptools.build_meta" + diff --git a/safety/scan/command.py b/safety/scan/command.py index 433e874..ab87199 100644 --- a/safety/scan/command.py +++ b/safety/scan/command.py @@ -46,15 +46,16 @@ class ScannableEcosystems(Enum): PYTHON = Ecosystem.PYTHON.value + PYPROJECT_TOML = Ecosystem.PYPROJECT_TOML.value -def process_report(obj: Any, console: Console, report: ReportModel, output: str, +def process_report(obj: Any, console: Console, report: ReportModel, output: str, save_as: Optional[Tuple[str, Path]], **kwargs): wait_msg = "Processing report" with console.status(wait_msg, spinner=DEFAULT_SPINNER) as status: json_format = report.as_v30().json() - + export_type, export_path = None, None if save_as: @@ -74,12 +75,12 @@ def process_report(obj: Any, console: Console, report: ReportModel, output: str, spdx_version = None if export_type: spdx_version = export_type.version if export_type.version and ScanExport.is_format(export_type, ScanExport.SPDX) else None - + if not spdx_version and output: spdx_version = output.version if output.version and ScanOutput.is_format(output, ScanOutput.SPDX) else None spdx_format = render_scan_spdx(report, obj, spdx_version=spdx_version) - + if export_type is ScanExport.HTML or output is ScanOutput.HTML: html_format = render_scan_html(report, obj) @@ -89,7 +90,7 @@ def process_report(obj: Any, console: Console, report: ReportModel, output: str, ScanExport.SPDX: spdx_format, ScanExport.SPDX_2_3: spdx_format, ScanExport.SPDX_2_2: spdx_format, - } + } output_format_mapping = { ScanOutput.JSON: json_format, @@ -106,7 +107,7 @@ def process_report(obj: Any, console: Console, report: ReportModel, output: str, msg = f"Saving {export_type} report at: {export_path}" status.update(msg) LOG.debug(msg) - save_report_as(report.metadata.scan_type, export_type, Path(export_path), + save_report_as(report.metadata.scan_type, export_type, Path(export_path), report_to_export) report_url = None @@ -131,7 +132,7 @@ def process_report(obj: Any, console: Console, report: ReportModel, output: str, f"[link]{project_url}[/link]") elif report.metadata.scan_type is ScanType.system_scan: lines.append(f"System scan report: [link]{report_url}[/link]") - + for line in lines: console.print(line, emoji=True) @@ -142,14 +143,14 @@ def process_report(obj: Any, console: Console, report: ReportModel, output: str, if output is ScanOutput.JSON: kwargs = {"json": report_to_output} else: - kwargs = {"data": report_to_output} + kwargs = {"data": report_to_output} console.print_json(**kwargs) else: console.print(report_to_output) console.quiet = True - + return report_url @@ -157,10 +158,10 @@ def generate_updates_arguments() -> list: """Generates a list of file types and update limits for apply fixes.""" fixes = [] limit_type = SecurityUpdates.UpdateLevel.PATCH - DEFAULT_FILE_TYPES = [FileType.REQUIREMENTS_TXT, FileType.PIPENV_LOCK, + DEFAULT_FILE_TYPES = [FileType.REQUIREMENTS_TXT, FileType.PIPENV_LOCK, FileType.POETRY_LOCK, FileType.VIRTUAL_ENVIRONMENT] fixes.extend([(default_file_type, limit_type) for default_file_type in DEFAULT_FILE_TYPES]) - + return fixes @@ -197,7 +198,7 @@ def scan(ctx: typer.Context, ] = ScanOutput.SCREEN, detailed_output: Annotated[bool, typer.Option("--detailed-output", - help=SCAN_DETAILED_OUTPUT, + help=SCAN_DETAILED_OUTPUT, show_default=False) ] = False, save_as: 
Annotated[Optional[Tuple[ScanExport, Path]], @@ -221,7 +222,7 @@ def scan(ctx: typer.Context, )] = None, apply_updates: Annotated[bool, typer.Option("--apply-fixes", - help=SCAN_APPLY_FIXES, + help=SCAN_APPLY_FIXES, show_default=False) ] = False ): @@ -240,9 +241,9 @@ def scan(ctx: typer.Context, ecosystems = [Ecosystem(member.value) for member in list(ScannableEcosystems)] to_include = {file_type: paths for file_type, paths in ctx.obj.config.scan.include_files.items() if file_type.ecosystem in ecosystems} - file_finder = FileFinder(target=target, ecosystems=ecosystems, + file_finder = FileFinder(target=target, ecosystems=ecosystems, max_level=ctx.obj.config.scan.max_depth, - exclude=ctx.obj.config.scan.ignore, + exclude=ctx.obj.config.scan.ignore, include_files=to_include, console=console) @@ -260,7 +261,7 @@ def scan(ctx: typer.Context, with console.status(wait_msg, spinner=DEFAULT_SPINNER): path, file_paths = file_finder.search() - print_detected_ecosystems_section(console, file_paths, + print_detected_ecosystems_section(console, file_paths, include_safety_prjs=True) target_ecosystems = ", ".join([member.value for member in ecosystems]) @@ -274,7 +275,7 @@ def scan(ctx: typer.Context, count = 0 ignored = set() - + affected_count = 0 dependency_vuln_detected = False @@ -288,8 +289,10 @@ def scan(ctx: typer.Context, display_apply_fix_suggestion = False with console.status(wait_msg, spinner=DEFAULT_SPINNER) as status: - for path, analyzed_file in process_files(paths=file_paths, + for path, analyzed_file in process_files(paths=file_paths, config=config): + print("now here", analyzed_file.dependency_results.dependencies) + print("now here", analyzed_file.file_type) count += len(analyzed_file.dependency_results.dependencies) if exit_code == 0 and analyzed_file.dependency_results.failed: @@ -298,7 +301,7 @@ def scan(ctx: typer.Context, if detailed_output: vulns_ignored = analyzed_file.dependency_results.ignored_vulns_data \ .values() - ignored_vulns_data = itertools.chain(vulns_ignored, + ignored_vulns_data = itertools.chain(vulns_ignored, ignored_vulns_data) ignored.update(analyzed_file.dependency_results.ignored_vulns.keys()) @@ -309,7 +312,7 @@ def scan(ctx: typer.Context, def sort_vulns_by_score(vuln: Vulnerability) -> int: if vuln.severity and vuln.severity.cvssv3: return vuln.severity.cvssv3.get("base_score", 0) - + return 0 to_fix_spec = [] @@ -327,10 +330,10 @@ def sort_vulns_by_score(vuln: Vulnerability) -> int: for spec in affected_specifications: if file_matched_for_fix: to_fix_spec.append(spec) - + console.print() vulns_to_report = sorted( - [vuln for vuln in spec.vulnerabilities if not vuln.ignored], + [vuln for vuln in spec.vulnerabilities if not vuln.ignored], key=sort_vulns_by_score, reverse=True) @@ -346,14 +349,14 @@ def sort_vulns_by_score(vuln: Vulnerability) -> int: console.print(Padding(f"{msg}]", (0, 0, 0, 1)), emoji=True, overflow="crop") - + if detailed_output or vulns_found < 3: for vuln in vulns_to_report: - render_to_console(vuln, console, - rich_kwargs={"emoji": True, + render_to_console(vuln, console, + rich_kwargs={"emoji": True, "overflow": "crop"}, detailed_output=detailed_output) - + lines = [] # Put remediation here @@ -381,16 +384,16 @@ def sort_vulns_by_score(vuln: Vulnerability) -> int: console.print(Padding(line, (0, 0, 0, 1)), emoji=True) console.print( - Padding(f"Learn more: [link]{spec.remediation.more_info_url}[/link]", - (0, 0, 0, 1)), emoji=True) + Padding(f"Learn more: [link]{spec.remediation.more_info_url}[/link]", + (0, 0, 0, 1)), emoji=True) 
else: console.print() console.print(f":white_check_mark: [file_title]{path.relative_to(target)}: No issues found.[/file_title]", emoji=True) if(ctx.obj.auth.stage == Stage.development - and analyzed_file.ecosystem == Ecosystem.PYTHON - and analyzed_file.file_type == FileType.REQUIREMENTS_TXT + and analyzed_file.ecosystem == Ecosystem.PYTHON + and analyzed_file.file_type == FileType.REQUIREMENTS_TXT and any(affected_specifications) and not apply_updates): display_apply_fix_suggestion = True @@ -405,12 +408,12 @@ def sort_vulns_by_score(vuln: Vulnerability) -> int: if file_matched_for_fix: to_fix_files.append((file, to_fix_spec)) - files.append(file) + files.append(file) if display_apply_fix_suggestion: console.print() print_fixes_section(console, requirements_txt_found, detailed_output) - + console.print() print_brief(console, ctx.obj.project, count, affected_count, fixes_count) @@ -418,18 +421,18 @@ def sort_vulns_by_score(vuln: Vulnerability) -> int: is_detailed_output=detailed_output, ignored_vulns_data=ignored_vulns_data) - + version = ctx.obj.schema metadata = ctx.obj.metadata telemetry = ctx.obj.telemetry ctx.obj.project.files = files report = ReportModel(version=version, - metadata=metadata, + metadata=metadata, telemetry=telemetry, files=[], projects=[ctx.obj.project]) - + report_url = process_report(ctx.obj, console, report, **{**ctx.params}) project_url = f"{SAFETY_PLATFORM_URL}{ctx.obj.project.url_path}" @@ -440,7 +443,7 @@ def sort_vulns_by_score(vuln: Vulnerability) -> int: no_output = output is not ScanOutput.SCREEN prompt = output is ScanOutput.SCREEN - + # TODO: rename that 'no_output' confusing name if not no_output: console.print() @@ -462,11 +465,11 @@ def sort_vulns_by_score(vuln: Vulnerability) -> int: if any(policy_limits): update_limits = [policy_limit.value for policy_limit in policy_limits] - - fixes = process_fixes_scan(file_to_fix, + + fixes = process_fixes_scan(file_to_fix, specs_to_fix, update_limits, output, no_output=no_output, prompt=prompt) - + if not no_output: console.print("-" * console.size.width) @@ -484,7 +487,7 @@ def sort_vulns_by_score(vuln: Vulnerability) -> int: @scan_system_app.command( cls=SafetyCLICommand, help=CLI_SYSTEM_SCAN_COMMAND_HELP, - options_metavar="[COMMAND-OPTIONS]", + options_metavar="[COMMAND-OPTIONS]", name=CMD_SYSTEM_NAME, epilog=DEFAULT_EPILOG) @handle_cmd_exception @inject_metadata @@ -521,7 +524,7 @@ def system_scan(ctx: typer.Context, typer.Option( help=SYSTEM_SCAN_OUTPUT_HELP, show_default=False) - ] = SystemScanOutput.SCREEN, + ] = SystemScanOutput.SCREEN, save_as: Annotated[Optional[Tuple[SystemScanExport, Path]], typer.Option( help=SYSTEM_SCAN_SAVE_AS_HELP, @@ -575,9 +578,9 @@ def system_scan(ctx: typer.Context, for file_type, paths in target_paths.items(): current = file_paths.get(file_type, set()) current.update(paths) - file_paths[file_type] = current + file_paths[file_type] = current - scan_project_command = get_command_for(name=CMD_PROJECT_NAME, + scan_project_command = get_command_for(name=CMD_PROJECT_NAME, typer_instance=scan_project_app) projects_dirs = set() @@ -587,12 +590,12 @@ def system_scan(ctx: typer.Context, with console.status(":mag:", spinner=DEFAULT_SPINNER) as status: # Handle projects first if FileType.SAFETY_PROJECT.value in file_paths.keys(): - projects_file_paths = file_paths[FileType.SAFETY_PROJECT.value] + projects_file_paths = file_paths[FileType.SAFETY_PROJECT.value] basic_params = ctx.params.copy() basic_params.pop("targets", None) prjs_console = Console(quiet=True) - + for project_path in 
projects_file_paths: projects_dirs.add(project_path.parent) project_dir = str(project_path.parent) @@ -607,7 +610,7 @@ def system_scan(ctx: typer.Context, if not project or not project.id: LOG.warn(f"{project_path} parsed but project id is not defined or valid.") continue - + if not ctx.obj.platform_enabled: msg = f"project found and skipped, navigate to `{project.project_path}` and scan this project with ‘safety scan’" console.print(f"{project.id}: {msg}") @@ -615,8 +618,8 @@ def system_scan(ctx: typer.Context, msg = f"Existing project found at {project_dir}" console.print(f"{project.id}: {msg}") - project_data[project.id] = {"path": project_dir, - "report_url": None, + project_data[project.id] = {"path": project_dir, + "report_url": None, "project_url": None, "failed_exception": None} @@ -642,7 +645,7 @@ def system_scan(ctx: typer.Context, "save_as": (None, None), "upload_request_id": upload_request_id, "local_policy": local_policy_file, "console": prjs_console} try: - # TODO: Refactor to avoid calling invoke, also, launch + # TODO: Refactor to avoid calling invoke, also, launch # this on background. console.print( Padding(f"Running safety scan for {project.id} project", @@ -660,7 +663,7 @@ def system_scan(ctx: typer.Context, (0, 0, 0, 1)), emoji=True) LOG.exception(f"Failed to run scan on project {project.id}, " \ f"Upload request ID: {upload_request_id}. Reason {e}") - + console.print() file_paths.pop(FileType.SAFETY_PROJECT.value, None) @@ -670,18 +673,18 @@ def system_scan(ctx: typer.Context, status.update(":mag: Finishing projects processing.") for k, f_paths in file_paths.items(): - file_paths[k] = {fp for fp in f_paths - if not should_exclude(excludes=projects_dirs, + file_paths[k] = {fp for fp in f_paths + if not should_exclude(excludes=projects_dirs, to_analyze=fp)} - + pkgs_count = 0 file_count = 0 venv_count = 0 for path, analyzed_file in process_files(paths=file_paths, config=config): status.update(f":mag: {path}") - files.append(FileModel(location=path, - file_type=analyzed_file.file_type, + files.append(FileModel(location=path, + file_type=analyzed_file.file_type, results=analyzed_file.dependency_results)) file_pkg_count = len(analyzed_file.dependency_results.dependencies) @@ -718,7 +721,7 @@ def system_scan(ctx: typer.Context, pkgs_count += file_pkg_count console.print(f":package: {file_pkg_count} {msg} in {path}", emoji=True) - + if affected_pkgs_count <= 0: msg = "No vulnerabilities found" else: @@ -738,7 +741,7 @@ def system_scan(ctx: typer.Context, telemetry=telemetry, files=files, projects=projects) - + console.print() total_count = sum([finder.file_count for finder in file_finders], 0) console.print(f"Searched {total_count:,} files for dependency security issues") @@ -749,16 +752,16 @@ def system_scan(ctx: typer.Context, console.print() proccessed = dict(filter( - lambda item: item[1]["report_url"] and item[1]["project_url"], + lambda item: item[1]["report_url"] and item[1]["project_url"], project_data.items())) - + if proccessed: run_word = "runs" if len(proccessed) == 1 else "run" console.print(f"Project {pluralize('scan', len(proccessed))} {run_word} on {len(proccessed)} existing {pluralize('project', len(proccessed))}:") for prj, data in proccessed.items(): console.print(f"[bold]{prj}[/bold] at {data['path']}") - for detail in [f"{prj} dashboard: {data['project_url']}"]: + for detail in [f"{prj} dashboard: {data['project_url']}"]: console.print(Padding(detail, (0, 0, 0, 1)), emoji=True, overflow="crop") process_report(ctx.obj, console, report, **{**ctx.params}) 
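
A note on the wiring in the command.py hunks above: scan expands ScannableEcosystems into Ecosystem members and hands them to FileFinder, which instantiates one handler per member from ECOSYSTEM_HANDLER_MAPPING (extended in the handlers.py hunks later in this patch). A minimal sketch of that resolution follows; it assumes the Ecosystem members referenced by this patch exist in safety_schemas, and the snippet itself is illustrative rather than part of the series.

# Sketch: how the scan command's ecosystem list becomes a set of file handlers.
# Assumes safety_schemas defines the Ecosystem members this patch references
# (the PYPROJECT_TOML member is dropped again in the later "temp fixes" patch).
from safety_schemas.models import Ecosystem
from safety.scan.finder.handlers import ECOSYSTEM_HANDLER_MAPPING

# What ScannableEcosystems expands to at this point in the series.
ecosystems = [Ecosystem.PYTHON, Ecosystem.PYPROJECT_TOML]

# FileFinder.__init__ does effectively this when no handlers are passed in.
handlers = {ECOSYSTEM_HANDLER_MAPPING[eco]() for eco in ecosystems}
for handler in handlers:
    print(type(handler).__name__, "->", handler.ecosystem)
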
diff --git a/safety/scan/ecosystems/python/dependencies.py b/safety/scan/ecosystems/python/dependencies.py index 51dfccf..f3aec09 100644 --- a/safety/scan/ecosystems/python/dependencies.py +++ b/safety/scan/ecosystems/python/dependencies.py @@ -2,7 +2,7 @@ from pathlib import Path import sys from typing import Generator, List, Optional - +import toml from safety_schemas.models import FileType, PythonDependency from safety_schemas.models.package import PythonSpecification from ..base import InspectableFile @@ -181,7 +181,7 @@ def read_virtual_environment_dependencies(f: InspectableFile) \ if not site_pkgs_path.resolve().exists(): # Unable to find packages for foo env return - + dep_paths = site_pkgs_path.glob("*/METADATA") for path in dep_paths: @@ -193,23 +193,62 @@ def read_virtual_environment_dependencies(f: InspectableFile) \ yield PythonDependency(name=dep_name, version=dep_version, specifications=[ - PythonSpecification(f"{dep_name}=={dep_version}", - found=site_pkgs_path)], + PythonSpecification(f"{dep_name}=={dep_version}", + found=site_pkgs_path)], found=site_pkgs_path, insecure_versions=[], - secure_versions=[], latest_version=None, + secure_versions=[], latest_version=None, latest_version_without_known_vulnerabilities=None, more_info_url=None) +def read_pyproject_toml_dependencies(file: Path) -> Generator[PythonDependency, None, None]: + with open(file, 'r') as f: + data = toml.load(f) + dependencies = [] + + # Handle 'build-system.requires' + if 'build-system' in data and 'requires' in data['build-system']: + dependencies.extend(data['build-system']['requires']) + + # Handle 'project.dependencies' + if 'project' in data and 'dependencies' in data['project']: + dependencies.extend(data['project']['dependencies']) + + # Handle 'tool.poetry.dependencies' + if 'tool' in data and 'poetry' in data['tool'] and 'dependencies' in data['tool']['poetry']: + for dep, version in data['tool']['poetry']['dependencies'].items(): + if isinstance(version, str): + dependencies.append(f"{dep}=={version}") + else: + dependencies.append(dep) + + for dep in dependencies: + dep_name, dep_version = (dep.split("==") + [None])[:2] + yield PythonDependency( + name=dep_name, + version=dep_version, + specifications=[ + PythonSpecification(f"{dep_name}=={dep_version}" if dep_version else dep_name, found=file) + ], + found=file, + insecure_versions=[], + secure_versions=[], + latest_version=None, + latest_version_without_known_vulnerabilities=None, + more_info_url=None + ) def get_dependencies(f: InspectableFile) -> List[PythonDependency]: if not f.file_type: return [] - - if f.file_type in [FileType.REQUIREMENTS_TXT, FileType.POETRY_LOCK, + + if f.file_type in [FileType.REQUIREMENTS_TXT, FileType.POETRY_LOCK, FileType.PIPENV_LOCK]: return list(read_dependencies(f.file, resolve=True)) - + if f.file_type == FileType.VIRTUAL_ENVIRONMENT: return list(read_virtual_environment_dependencies(f)) + if f.file_type == FileType.PYPROJECT_TOML: + return list(read_pyproject_toml_dependencies(f.file)) + return [] \ No newline at end of file diff --git a/safety/scan/finder/file_finder.py b/safety/scan/finder/file_finder.py index 778b016..26f4cda 100644 --- a/safety/scan/finder/file_finder.py +++ b/safety/scan/finder/file_finder.py @@ -8,7 +8,7 @@ from safety.errors import SafetyException -from .handlers import FileHandler, ECOSYSTEM_HANDLER_MAPPING +from .handlers import FileHandler, ECOSYSTEM_HANDLER_MAPPING, PyProjectTomlHandler LOG = logging.getLogger(__name__) @@ -46,6 +46,7 @@ def __init__(self, max_level: int, 
ecosystems: List[Ecosystem], target: Path, self.target = target self.include_files = include_files + print("ecosystems", ecosystems) if not handlers: handlers = set(ECOSYSTEM_HANDLER_MAPPING[ecosystem]() for ecosystem in ecosystems) @@ -116,6 +117,7 @@ def process_directory(self, dir_path, max_deep: Optional[int]=None) -> Tuple[str files[file_type.value].add(inspectable_file) level += 1 + return dir_path, files def search(self) -> Tuple[str, Dict[str, Set[Path]]]: diff --git a/safety/scan/finder/handlers.py b/safety/scan/finder/handlers.py index 4e2f696..fcc2e6c 100644 --- a/safety/scan/finder/handlers.py +++ b/safety/scan/finder/handlers.py @@ -2,15 +2,15 @@ import os from pathlib import Path from types import MappingProxyType -from typing import Dict, List, Optional, Tuple - +from typing import Dict, List, Optional, Tuple, Set +import toml from safety_schemas.models import Ecosystem, FileType NOT_IMPLEMENTED = "You should implement this." class FileHandler(ABC): - + def __init__(self) -> None: self.ecosystem: Optional[Ecosystem] = None @@ -28,14 +28,14 @@ def can_handle(self, root: str, file_name: str, include_files: Dict[FileType, Li return f_type # Let's compare by name only for now - # We can put heavier logic here, but for speed reasons, + # We can put heavier logic here, but for speed reasons, # right now is very basic, we will improve this later. # Custom matching per File Type if file_name.lower().endswith(f_type.value.lower()): return f_type - + return None - + @abstractmethod def download_required_assets(self, session) -> Dict[str, str]: return NotImplementedError(NOT_IMPLEMENTED) @@ -43,40 +43,92 @@ def download_required_assets(self, session) -> Dict[str, str]: class PythonFileHandler(FileHandler): # Example of a Python File Handler - + def __init__(self) -> None: super().__init__() self.ecosystem = Ecosystem.PYTHON - + def download_required_assets(self, session): from safety.safety import fetch_database - + SAFETY_DB_DIR = os.getenv("SAFETY_DB_DIR") db = False if SAFETY_DB_DIR is None else SAFETY_DB_DIR fetch_database(session=session, full=False, db=db, cached=True, - telemetry=True, ecosystem=Ecosystem.PYTHON, + telemetry=True, ecosystem=Ecosystem.PYTHON, from_cache=False) - + fetch_database(session=session, full=True, db=db, cached=True, - telemetry=True, ecosystem=Ecosystem.PYTHON, + telemetry=True, ecosystem=Ecosystem.PYTHON, from_cache=False) class SafetyProjectFileHandler(FileHandler): # Example of a Python File Handler - + def __init__(self) -> None: super().__init__() self.ecosystem = Ecosystem.SAFETY_PROJECT - + def download_required_assets(self, session): pass - + + +class PyProjectTomlHandler(FileHandler): + def __init__(self) -> None: + super().__init__() + self.ecosystem = Ecosystem.PYPROJECT_TOML + + def download_required_assets(self, session): + from safety.safety import fetch_database + + SAFETY_DB_DIR = os.getenv("SAFETY_DB_DIR") + + db = False if SAFETY_DB_DIR is None else SAFETY_DB_DIR + + + fetch_database(session=session, full=False, db=db, cached=True, + telemetry=True, ecosystem=Ecosystem.PYTHON, + from_cache=False) + + fetch_database(session=session, full=True, db=db, cached=True, + telemetry=True, ecosystem=Ecosystem.PYTHON, + from_cache=False) + + def can_handle(self, root: str, file_name: str, include_files: Dict[FileType, List[Path]]) -> Optional[FileType]: + if file_name == 'pyproject.toml': + print("recognized") + return FileType.PYPROJECT_TOML + return None + + def handle(self, file_path: Path) -> Set[str]: + with open(file_path, 'r') as file: + 
data = toml.load(file) + print("printing data", data) + dependencies = set() + + # Handle 'build-system.requires' + if 'build-system' in data and 'requires' in data['build-system']: + dependencies.update(data['build-system']['requires']) + + # Handle 'project.dependencies' + if 'project' in data and 'dependencies' in data['project']: + dependencies.update(data['project']['dependencies']) + + # Handle 'tool.poetry.dependencies' + if 'tool' in data and 'poetry' in data['tool'] and 'dependencies' in data['tool']['poetry']: + for dep, version in data['tool']['poetry']['dependencies'].items(): + dependencies.add(f"{dep}=={version}" if isinstance(version, str) else dep) + + return dependencies + + + ECOSYSTEM_HANDLER_MAPPING = MappingProxyType({ Ecosystem.PYTHON: PythonFileHandler, Ecosystem.SAFETY_PROJECT: SafetyProjectFileHandler, + Ecosystem.PYPROJECT_TOML: PyProjectTomlHandler, }) From 958a39e2e8093421a84da358bef2f403f42a9e7e Mon Sep 17 00:00:00 2001 From: Dylan Pulver Date: Mon, 16 Sep 2024 15:16:50 -0400 Subject: [PATCH 3/4] temp fixes --- safety/scan/command.py | 2 -- safety/scan/finder/handlers.py | 6 +++--- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/safety/scan/command.py b/safety/scan/command.py index b33eeee..230b9c6 100644 --- a/safety/scan/command.py +++ b/safety/scan/command.py @@ -45,8 +45,6 @@ class ScannableEcosystems(Enum): """Enum representing scannable ecosystems.""" PYTHON = Ecosystem.PYTHON.value - PYPROJECT_TOML = Ecosystem.PYPROJECT_TOML.value - def process_report( obj: Any, console: Console, report: ReportModel, output: str, diff --git a/safety/scan/finder/handlers.py b/safety/scan/finder/handlers.py index 56f70fa..f9ee158 100644 --- a/safety/scan/finder/handlers.py +++ b/safety/scan/finder/handlers.py @@ -52,7 +52,7 @@ def can_handle(self, root: str, file_name: str, include_files: Dict[FileType, Li return None @abstractmethod - def download_required_assets(self, session) -> Dict[str, str]: + def download_required_assets(self, session): """ Abstract method to download required assets for handling files. Should be implemented by subclasses. 
@@ -116,7 +116,7 @@ def download_required_assets(self, session): class PyProjectTomlHandler(FileHandler): def __init__(self) -> None: super().__init__() - self.ecosystem = Ecosystem.PYPROJECT_TOML + self.ecosystem = Ecosystem.PYTHON def download_required_assets(self, session): from safety.safety import fetch_database @@ -174,5 +174,5 @@ def download_required_assets(self, session) -> None: ECOSYSTEM_HANDLER_MAPPING = MappingProxyType({ Ecosystem.PYTHON: PythonFileHandler, Ecosystem.SAFETY_PROJECT: SafetyProjectFileHandler, - Ecosystem.PYPROJECT_TOML: PyProjectTomlHandler, + # Ecosystem.PYPROJECT_TOML: PyProjectTomlHandler, }) From 42790a2952a0ed2be4171c55f437fc5245c83932 Mon Sep 17 00:00:00 2001 From: Dylan Pulver Date: Mon, 16 Sep 2024 15:20:35 -0400 Subject: [PATCH 4/4] fixes --- safety/scan/finder/handlers.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/safety/scan/finder/handlers.py b/safety/scan/finder/handlers.py index f9ee158..fca4146 100644 --- a/safety/scan/finder/handlers.py +++ b/safety/scan/finder/handlers.py @@ -2,7 +2,7 @@ import os from pathlib import Path from types import MappingProxyType -from typing import Dict, List, Optional, Tuple, Set +from typing import Dict, List, Optional, Set import toml from safety_schemas.models import Ecosystem, FileType @@ -162,14 +162,6 @@ def handle(self, file_path: Path) -> Set[str]: return dependencies - - - def download_required_assets(self, session) -> None: - """ - No required assets to download for Safety project files. - """ - pass - # Mapping of ecosystems to their corresponding file handlers ECOSYSTEM_HANDLER_MAPPING = MappingProxyType({ Ecosystem.PYTHON: PythonFileHandler,
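
For reference, the extraction logic the series settles on can be exercised in isolation. The sketch below mirrors read_pyproject_toml_dependencies and PyProjectTomlHandler.handle: it collects requirement strings from build-system.requires, project.dependencies, and tool.poetry.dependencies using the same third-party toml package the patches import. The extract_requirements name and the inline SAMPLE document are illustrative only, not part of the Safety codebase.

# Standalone sketch of the pyproject.toml dependency extraction this series adds.
# Mirrors read_pyproject_toml_dependencies / PyProjectTomlHandler.handle;
# extract_requirements and SAMPLE are illustrative, not Safety API.
from typing import Set

import toml  # same parser the patches import


def extract_requirements(data: dict) -> Set[str]:
    """Collect requirement strings from the three tables the handler reads."""
    requirements: Set[str] = set()

    # PEP 518 build requirements, e.g. ["setuptools>=42"]
    requirements.update(data.get("build-system", {}).get("requires", []))

    # PEP 621 runtime dependencies, e.g. ["requests>=2.31"]
    requirements.update(data.get("project", {}).get("dependencies", []))

    # Poetry table: values are version strings or nested tables (markers, extras)
    poetry_deps = data.get("tool", {}).get("poetry", {}).get("dependencies", {})
    for name, version in poetry_deps.items():
        requirements.add(f"{name}=={version}" if isinstance(version, str) else name)

    return requirements


SAMPLE = """
[build-system]
requires = ["setuptools>=42"]

[project]
dependencies = ["requests>=2.31", "rich"]

[tool.poetry.dependencies]
python = "^3.8"
typer = "^0.9.0"
"""

if __name__ == "__main__":
    for req in sorted(extract_requirements(toml.loads(SAMPLE))):
        print(req)
    # python==^3.8, requests>=2.31, rich, setuptools>=42, typer==^0.9.0

Note that, as in the patched code, Poetry constraints are concatenated verbatim (typer==^0.9.0 is not a valid PEP 440 specifier) and the python entry is not filtered out; both would need extra handling before these strings are fed to a requirements parser.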