diff --git a/Dockerfile b/Dockerfile index b39bcc8..21341dd 100644 --- a/Dockerfile +++ b/Dockerfile @@ -78,7 +78,7 @@ RUN python3 -m pip install --no-cache-dir \ # YAML (Checkov, cfn-nag) # RUN echo "gem: --no-document" >> /etc/gemrc && \ - python3 -m pip install checkov && \ + python3 -m pip install checkov pathspec && \ gem install cfn-nag # @@ -120,7 +120,8 @@ RUN mkdir -p /src && \ # Install CDK Nag stub dependencies # # Update NPM to latest -COPY ./utils /ash/utils/ +RUN mkdir -p /ash/utils +COPY ./utils/cdk-nag-scan /ash/utils/cdk-nag-scan/ RUN npm install -g npm && \ cd /ash/utils/cdk-nag-scan && \ npm install --quiet @@ -128,6 +129,8 @@ RUN npm install -g npm && \ # # COPY ASH source to /ash instead of / to isolate # +COPY ./utils/cfn-to-cdk /ash/utils/cfn-to-cdk/ +COPY ./utils/*.* /ash/utils/ COPY ./appsec_cfn_rules /ash/appsec_cfn_rules/ COPY ./ash-multi /ash/ash diff --git a/ash b/ash index f140b93..a642bae 100755 --- a/ash +++ b/ash @@ -8,7 +8,6 @@ export ASH_IMAGE_NAME=${ASH_IMAGE_NAME:-"automated-security-helper:local"} # Set local variables SOURCE_DIR="" OUTPUT_DIR="" -OCI_RUNNER="" DOCKER_EXTRA_ARGS="" ASH_ARGS="" NO_BUILD="NO" @@ -93,7 +92,7 @@ if [[ "${RESOLVED_OCI_RUNNER}" == "" ]]; then exit 1 # else, build and run the image else - if [ "${DEBUG}" = "YES" ]; then + if [[ "${DEBUG}" = "YES" ]]; then set -x fi echo "Resolved OCI_RUNNER to: ${RESOLVED_OCI_RUNNER}" @@ -116,15 +115,16 @@ else --rm \ -e ACTUAL_SOURCE_DIR=${SOURCE_DIR} \ -e ACTUAL_OUTPUT_DIR=${OUTPUT_DIR} \ - --mount type=bind,source="${SOURCE_DIR}",destination=/src,readonly \ - --mount type=bind,source="${OUTPUT_DIR}",destination=/out \ + -e ASH_DEBUG=${DEBUG} \ + --mount type=bind,source="${SOURCE_DIR}",destination=/src,readonly,bind-propagation=shared \ + --mount type=bind,source="${OUTPUT_DIR}",destination=/out,bind-propagation=shared \ --tmpfs /run/scan/src:rw,noexec,nosuid ${ASH_IMAGE_NAME} \ ash \ --source-dir /src \ --output-dir /out \ $ASH_ARGS fi - if [ "${DEBUG}" = "YES" ]; 
then + if [[ "${DEBUG}" = "YES" ]]; then set +x fi fi diff --git a/ash-multi b/ash-multi index 8b4cfb9..aa55e09 100755 --- a/ash-multi +++ b/ash-multi @@ -62,10 +62,7 @@ print_usage() { echo -e "\t-q | --quiet Don't print verbose text about the build process." echo -e "\t-c | --no-color Don't print colorized output." echo -e "\t-s | --single-process Run ash scanners serially rather than as separate, parallel sub-processes." - echo -e "\t-o | --oci-runner Use the specified OCI runner instead of docker to run the containerized tools." - echo -e "\t-f | --finch Use finch instead of docker to run the containerized tools." - echo -e "\t WARNING: The '--finch|-f' option is deprecated and will be removed in a future" - echo -e "\t release. Please switch to using '--oci-runner finch' in scripts instead.\n" + echo -e "\t-o | --oci-runner Use the specified OCI runner instead of docker to run the containerized tools.\n" echo -e "For more information please visit https://github.com/awslabs/automated-security-helper" } @@ -78,18 +75,19 @@ get_all_files() { pushd . >/dev/null 2>&1 # cd to the source directory as a starting point cd ${_ASH_SOURCE_DIR} - # Check if the source directory is a git repository and clone it to the run directory - if [[ "$(git rev-parse --is-inside-work-tree 2>/dev/null)" == "true" ]]; then - echo "Source is a git repository. Using git ls-files to exclude files from scanning." - src_files=$(git ls-files) - else - echo "Source is not a git repository. Using find to list all files instead." - src_files=$(find "${_ASH_SOURCE_DIR}" \( -path '*/node_modules*' -prune -o -path '*/cdk.out*' -prune -o -path '*/.venv*' -prune -o -path '*/venv*' -prune \) -o -type f -name '*') - fi; + src_files=$(python "${_ASH_UTILS_LOCATION}/get-scan-set.py" $(pwd)) + # # Check if the source directory is a git repository and clone it to the run directory + # if [[ "$(git rev-parse --is-inside-work-tree 2>/dev/null)" == "true" ]]; then + # echo "Source is a git repository. 
Using git ls-files to exclude files from scanning." + # src_files=$($(python "${_ASH_UTILS_LOCATION}/get-scan-set.py" ${_ASH_SOURCE_DIR}) + # # src_files=$(git ls-files) + # else + # echo "Source is not a git repository. Using find to list all files instead." + # src_files=$(find "${_ASH_SOURCE_DIR}" \( -path '*/node_modules*' -prune -o -path '*/cdk.out*' -prune -o -path '*/.venv*' -prune -o -path '*/venv*' -prune \) -o -type f -name '*') + # fi; popd >/dev/null 2>&1 all_files+=( "$src_files" ) - } # shellcheck disable=SC2120 @@ -166,19 +164,12 @@ validate_input() { if [[ -z ${SOURCE_DIR} ]]; then SOURCE_DIR="$(pwd)"; else SOURCE_DIR=$(cd "${SOURCE_DIR}"; pwd); fi # Transform any relative path to absolute if [[ -z ${OUTPUT_DIR} ]]; then OUTPUT_DIR="$(pwd)" - # Create the OUTPUT_DIR/work recursively if it doesn't already exist. - # -p flag is included will create missing parent dirs and skip if - # the dir already exists. - mkdir -p "${OUTPUT_DIR}/work" - else - # Create the OUTPUT_DIR/work recursively if it doesn't already exist. - # -p flag is included will create missing parent dirs and skip if - # the dir already exists. - mkdir -p "${OUTPUT_DIR}/work" - # The mkdir call needs to be done before absolute path resolution in case - # OUTPUT_DIR itself doesn't exist yet. 
- OUTPUT_DIR=$(cd "${OUTPUT_DIR}"; pwd) # Transform any relative path to absolute fi + if [ -d "${OUTPUT_DIR}/work" ]; then + rm -rf "${OUTPUT_DIR}/work" + fi + mkdir -p "${OUTPUT_DIR}/work" + OUTPUT_DIR=$(cd "${OUTPUT_DIR}"; pwd) # Transform any relative path to absolute CFNRULES_LOCATION=$(cd "${CFNRULES_LOCATION}"; pwd) # Transform any relative path to absolute UTILS_LOCATION=$(cd "${UTILS_LOCATION}"; pwd) # Transform any relative path to absolute } @@ -292,7 +283,7 @@ run_security_check() { set -e START_TIME=$(date +%s) -VERSION=("1.2.0-e-06Mar2024") +VERSION=("1.2.3-e-15Mar2024") OCI_RUNNER="docker" # Overrides default OCI Runner used by ASH @@ -303,8 +294,8 @@ GIT_EXTENSIONS=("git") PY_EXTENSIONS=("py" "pyc" "ipynb") INFRA_EXTENSIONS=("yaml" "yml" "tf" "json" "dockerfile") CFN_EXTENSIONS=("yaml" "yml" "json" "template") -JS_EXTENSIONS=("js") -GRYPE_EXTENSIONS=("js" "py" "java" "go" "cs" "sh") +JS_EXTENSIONS=("js" "jsx" "ts" "tsx") +GRYPE_EXTENSIONS=("js" "jsx" "ts" "tsx" "py" "java" "go" "cs" "sh") DOCKERFILE_LOCATION="$(dirname "${BASH_SOURCE[0]}")"/"helper_dockerfiles" UTILS_LOCATION="$(dirname "${BASH_SOURCE[0]}")"/"utils" @@ -446,12 +437,16 @@ echo -e "\n${LPURPLE}ASH version ${GREEN}$VERSION${NC}\n" # nosemgrep IFS=$'\n' # Support directories with spaces, make the loop iterate over newline instead of space -# Extract all zip files to temp dir *within $OUTPUT_DIR* before scanning -for zipfile in $(find "${SOURCE_DIR}" -iname "*.zip"); +pushd . >/dev/null 2>&1 +cd "${SOURCE_DIR}" +# for zipfile in $(find "${SOURCE_DIR}" -iname "*.zip"); +for zipfile in $(python "${_ASH_UTILS_LOCATION}/get-scan-set.py" . 
| grep '\.zip$'); do - unzip ${QUIET_OUTPUT} -d "${OUTPUT_DIR}"/work/$(basename "${zipfile%.*}") $zipfile + tgt_dir=$(dirname "${OUTPUT_DIR}"/work/"${zipfile:2}")/"$(basename "${zipfile%.*}")" + mkdir -p "${tgt_dir}" + unzip ${QUIET_OUTPUT} -d "${tgt_dir}" $zipfile done - +popd >/dev/null 2>&1 unset IFS declare -a all_files='' # Variable will be populated inside 'map_extensions_and_files' block @@ -585,7 +580,9 @@ for pid in "${JOBS[@]}"; do done # Cleanup any previous file -rm -f "${OUTPUT_DIR}"/"${AGGREGATED_RESULTS_REPORT_FILENAME}" +if [[ -n "${AGGREGATED_RESULTS_REPORT_FILENAME}" && -n "${OUTPUT_DIR}" && -f "${OUTPUT_DIR}"/"${AGGREGATED_RESULTS_REPORT_FILENAME}" ]]; then + rm -f "${OUTPUT_DIR}"/"${AGGREGATED_RESULTS_REPORT_FILENAME}" +fi # if an extension was not found, no report file will be in place, so skip the final report if [[ $(find "${OUTPUT_DIR}/work" -iname "*_report_result.txt" | wc -l | awk '{print $1}') -gt 0 ]]; @@ -603,7 +600,9 @@ then done # Cleanup work directory containing all temp files - rm -rf "${OUTPUT_DIR}"/work + if [[ -n "${OUTPUT_DIR}" && -d "${OUTPUT_DIR}"/work ]]; then + rm -rf "${OUTPUT_DIR}"/work + fi RESOLVED_OUTPUT_DIR=${ACTUAL_OUTPUT_DIR:-${OUTPUT_DIR}} echo -e "${GREEN}\nYour final report can be found here:${NC} ${RESOLVED_OUTPUT_DIR}/${AGGREGATED_RESULTS_REPORT_FILENAME}" diff --git a/utils/cdk-docker-execute.sh b/utils/cdk-docker-execute.sh index ecc268b..6101ec7 100644 --- a/utils/cdk-docker-execute.sh +++ b/utils/cdk-docker-execute.sh @@ -1,4 +1,5 @@ #!/bin/bash + abs() { # compute the absolute value of the input parameter input=$1 if [[ $input -lt 0 ]]; then @@ -30,6 +31,8 @@ _ASH_UTILS_LOCATION=${_ASH_UTILS_LOCATION:-/utils} _ASH_CFNRULES_LOCATION=${_ASH_CFNRULES_LOCATION:-/cfnrules} _ASH_RUN_DIR=${_ASH_RUN_DIR:-/run/scan/src} +source ${_ASH_UTILS_LOCATION}/common.sh + # # Allow the container to run Git commands against a repo in ${_ASH_SOURCE_DIR} # @@ -41,7 +44,8 @@ cd ${_ASH_SOURCE_DIR} # Check if the source directory 
is a git repository and clone it to the run directory if [[ "$(git rev-parse --is-inside-work-tree 2>/dev/null)" == "true" ]]; then if [[ "$_ASH_EXEC_MODE" != "local" ]]; then - git clone ${_ASH_SOURCE_DIR} ${_ASH_RUN_DIR} >/dev/null 2>&1 + debug_echo "Shallow cloning git repo to ${_ASH_RUN_DIR} to remove ignored files from being scanned" + git clone --depth=1 --single-branch ${_ASH_SOURCE_DIR} ${_ASH_RUN_DIR} >/dev/null 2>&1 fi _ASH_SOURCE_DIR=${_ASH_RUN_DIR} cd ${_ASH_RUN_DIR} @@ -79,8 +83,7 @@ cd ${_ASH_OUTPUT_DIR} # DIRECTORY="ash_cf2cdk_output" # Check if this directory already exist from previous ASH run -if [ -d "${_ASH_OUTPUT_DIR}/$DIRECTORY" ]; then - # Delete this directory and its files and recreate it. +if [[ -n "${_ASH_OUTPUT_DIR}" && -d "${_ASH_OUTPUT_DIR}/$DIRECTORY" ]]; then rm -rf "${_ASH_OUTPUT_DIR}/$DIRECTORY" fi mkdir -p "${_ASH_OUTPUT_DIR}/$DIRECTORY" 2> /dev/null @@ -103,6 +106,7 @@ RC=0 # cdk --version >> ${REPORT_PATH} # echo "----------------------" >> ${REPORT_PATH} +debug_echo "Starting all scanners within the CDK scanner tool set" echo -e "\nstarting to investigate ..." >> ${REPORT_PATH} cfn_files=($(readlink -f $(grep -lri 'AWSTemplateFormatVersion' ${_ASH_SOURCE_DIR} --exclude-dir={cdk.out,utils,.aws-sam,ash_cf2cdk_output} --exclude=ash) 2>/dev/null)) @@ -123,6 +127,7 @@ npm install --silent # Now, for each file, run a cdk synth to subject the file to CDK-NAG scanning # if [ "${#cfn_files[@]}" -gt 0 ]; then + debug_echo "Found CloudFormation files to scan, starting scan" echo "found ${#cfn_files[@]} files to scan. Starting scans ..." >> ${REPORT_PATH} for file in "${cfn_files[@]}"; do @@ -137,6 +142,7 @@ if [ "${#cfn_files[@]}" -gt 0 ]; then # Use CDK to synthesize the CDK application, # running CDK-NAG on the inserted CloudFormation template # + debug_echo "Importing CloudFormation template file ${file} to apply CDK Nag rules against it" npx cdk synth --context fileName="${file}" --quiet 2>> ${REPORT_PATH} CRC=$? 
echo "<<<<<< end cdk-nag result for ${cfn_filename} <<<<<<" >> ${REPORT_PATH} @@ -161,9 +167,12 @@ unset IFS # # Clean up the CDK application temporary working folder # -rm -rf ${CDK_WORK_DIR} +if [[ -n "${CDK_WORK_DIR}" && -d "${CDK_WORK_DIR}" ]]; then + rm -rf ${CDK_WORK_DIR} +fi # cd back to the original folder in case path changed during scan cd ${_CURRENT_DIR} +debug_echo "Finished all scanners within the CDK scanner tool set" exit $RC diff --git a/utils/common.sh b/utils/common.sh new file mode 100644 index 0000000..30a45eb --- /dev/null +++ b/utils/common.sh @@ -0,0 +1,14 @@ +export ASH_ROOT_DIR="$(cd $(dirname "$(dirname "$0")"); pwd)" +export ASH_UTILS_DIR="${ASH_ROOT_DIR}/utils" + +# LPURPLE='\033[1;35m' +# LGRAY='\033[0;37m' +# GREEN='\033[0;32m' +# RED='\033[0;31m' +# YELLOW='\033[0;33m' +# CYAN='\033[0;36m' +# NC='\033[0m' # No Color + +debug_echo() { + [[ "${ASH_DEBUG:-"NO"}" != "NO" ]] && echo -e "\033[0;33m[$(date '+%Y-%m-%d %H:%M:%S')] DEBUG:\033[0m ${1}" || true +} diff --git a/utils/get-scan-set.py b/utils/get-scan-set.py new file mode 100755 index 0000000..61d7a3d --- /dev/null +++ b/utils/get-scan-set.py @@ -0,0 +1,101 @@ +#!/usr/bin/env python +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 + +import sys +from typing import List +from pathspec import PathSpec +import argparse +import os +from glob import glob + +ASH_INCLUSIONS=[ + "**/cdk.out/asset.*", + "!**/*.template.json", # CDK output template default path pattern +] + +def get_ash_ignorespec_lines( + path, + ignorefiles: List[str] = [] +) -> List[str]: + ashignores = [ + f"{path}/.ashignore", + *[ + item + for item in glob(f"{path}/**/.ashignore", recursive=True) + ] + ] + semgrepignores = [ + f"{path}/.semgrepignore", + *[ + item + for item in glob(f"{path}/**/.semgrepignore", recursive=True) + ] + ] + gitignores = [ + f"{path}/.gitignore", + *[ + item + for item in glob(f"{path}/**/.gitignore", recursive=True) + ] + ] + all_ignores = list(set([ + *gitignores, + *semgrepignores, + *ashignores, + *[ + f"{path}/{file}" + for file in ignorefiles + ] + ])) + lines = ['.git'] + for ignorefile in all_ignores: + if os.path.isfile(ignorefile): + # print(f"Reading: {ignorefile}", file=sys.stderr) + with open(ignorefile) as f: + lines.extend(f.readlines()) + lines = [ line.strip() for line in lines ] + lines.extend(ASH_INCLUSIONS) + return lines + +def get_ash_ignorespec( + lines: List[str], +) -> PathSpec: + spec = PathSpec.from_lines('gitwildmatch', lines) + return spec + +def get_files_not_matching_spec( + path, + spec, +): + full = [] + included = [] + for item in os.walk(path): + for file in item[2]: + full.append(os.path.join(item[0], file)) + if not spec.match_file(os.path.join(item[0], file)): + inc_full = os.path.join(item[0], file) + # print(f"Including: {inc_full}", file=sys.stderr) + included.append(inc_full) + included = sorted(set(included)) + return included + +if __name__ == "__main__": + # set up argparse + parser = argparse.ArgumentParser(description="Get list of files not matching .gitignore underneath SourceDir arg path") + parser.add_argument("path", help="path to scan", default=os.getcwd(), type=str, nargs='?') + parser.add_argument("--ignorefile", help="ignore file to use in addition to the 
standard gitignore", default=[], type=str, nargs='*') + args = parser.parse_args() + + lines = get_ash_ignorespec_lines(args.path, args.ignorefile) + # semgrepignore_path = os.path.join(args.path, ".semgrepignore") + # if not os.path.exists(semgrepignore_path): + # with open(semgrepignore_path, "w") as f: + # f.write("\n".join(lines)) + + spec = get_ash_ignorespec(lines) + + files = get_files_not_matching_spec(args.path, spec) + for file in files: + # print(f"Returning: {file}", file=sys.stderr) + print(file, file=sys.stdout) diff --git a/utils/git-docker-execute.sh b/utils/git-docker-execute.sh index 057a27e..1f08121 100644 --- a/utils/git-docker-execute.sh +++ b/utils/git-docker-execute.sh @@ -31,6 +31,9 @@ _ASH_UTILS_LOCATION=${_ASH_UTILS_LOCATION:-/utils} _ASH_CFNRULES_LOCATION=${_ASH_CFNRULES_LOCATION:-/cfnrules} _ASH_RUN_DIR=${_ASH_RUN_DIR:-/run/scan/src} _ASH_IS_GIT_REPOSITORY=0 + +source ${_ASH_UTILS_LOCATION}/common.sh + # # Allow the container to run Git commands against a repo in ${_ASH_SOURCE_DIR} # @@ -44,6 +47,7 @@ cd ${_ASH_SOURCE_DIR} if [[ "$(git rev-parse --is-inside-work-tree 2>/dev/null)" == "true" ]]; then _ASH_IS_GIT_REPOSITORY=1 if [[ "$_ASH_EXEC_MODE" != "local" ]]; then + debug_echo "Normal cloning git repo to ${_ASH_RUN_DIR} to remove ignored files from being scanned but retain commit history for Git secrets inspection" git clone ${_ASH_SOURCE_DIR} ${_ASH_RUN_DIR} >/dev/null 2>&1 fi _ASH_SOURCE_DIR=${_ASH_RUN_DIR} diff --git a/utils/grype-docker-execute.sh b/utils/grype-docker-execute.sh index 62c9d22..2fb87b1 100644 --- a/utils/grype-docker-execute.sh +++ b/utils/grype-docker-execute.sh @@ -31,6 +31,8 @@ _ASH_UTILS_LOCATION=${_ASH_UTILS_LOCATION:-/utils} _ASH_CFNRULES_LOCATION=${_ASH_CFNRULES_LOCATION:-/cfnrules} _ASH_RUN_DIR=${_ASH_RUN_DIR:-/run/scan/src} +source ${_ASH_UTILS_LOCATION}/common.sh + # # Allow the container to run Git commands against a repo in ${_ASH_SOURCE_DIR} # @@ -42,7 +44,8 @@ cd ${_ASH_SOURCE_DIR} # Check if the 
source directory is a git repository and clone it to the run directory if [[ "$(git rev-parse --is-inside-work-tree 2>/dev/null)" == "true" ]]; then if [[ "$_ASH_EXEC_MODE" != "local" ]]; then - git clone ${_ASH_SOURCE_DIR} ${_ASH_RUN_DIR} >/dev/null 2>&1 + debug_echo "Shallow cloning git repo to ${_ASH_RUN_DIR} to remove ignored files from being scanned" + git clone --depth=1 --single-branch ${_ASH_SOURCE_DIR} ${_ASH_RUN_DIR} >/dev/null 2>&1 fi _ASH_SOURCE_DIR=${_ASH_RUN_DIR} cd ${_ASH_RUN_DIR} @@ -58,10 +61,12 @@ scan_paths=("${_ASH_SOURCE_DIR}" "${_ASH_OUTPUT_DIR}/work") # # Run Grype # +debug_echo "Starting all scanners within the Grype scanner tool set" for i in "${!scan_paths[@]}"; do scan_path=${scan_paths[$i]} cd ${scan_path} + debug_echo "Starting Grype scan of ${scan_path}" echo -e "\n>>>>>> Begin Grype output for ${scan_path} >>>>>>\n" >> ${REPORT_PATH} grype -f medium dir:${scan_path} --exclude="**/*-converted.py" --exclude="**/*_report_result.txt" >> ${REPORT_PATH} 2>&1 @@ -69,6 +74,7 @@ do RC=$(bumprc $RC $SRC) echo -e "\n<<<<<< End Grype output for ${scan_path} <<<<<<\n" >> ${REPORT_PATH} + debug_echo "Finished Grype scan of ${scan_path}" done # @@ -78,6 +84,7 @@ for i in "${!scan_paths[@]}"; do scan_path=${scan_paths[$i]} cd ${scan_path} + debug_echo "Starting Syft scan of ${scan_path}" echo -e "\n>>>>>> Begin Syft output for ${scan_path} >>>>>>\n" >> ${REPORT_PATH} syft ${scan_path} --exclude="**/*-converted.py" --exclude="**/*_report_result.txt" >> ${REPORT_PATH} 2>&1 @@ -85,6 +92,7 @@ do RC=$(bumprc $RC $SRC) echo -e "\n<<<<<< End Syft output for ${scan_path} <<<<<<\n" >> ${REPORT_PATH} + debug_echo "Finished Syft scan of ${scan_path}" done # @@ -94,6 +102,7 @@ for i in "${!scan_paths[@]}"; do scan_path=${scan_paths[$i]} cd ${scan_path} + debug_echo "Starting Semgrep scan of ${scan_path}" echo -e "\n>>>>>> Begin Semgrep output for ${scan_path} >>>>>>\n" >> ${REPORT_PATH} semgrep --legacy --error --config=auto $scan_path 
--exclude="*-converted.py,*_report_result.txt" >> ${REPORT_PATH} 2>&1 @@ -101,9 +110,11 @@ do RC=$(bumprc $RC $SRC) echo -e "\n<<<<<< End Semgrep output for ${scan_path} <<<<<<\n" >> ${REPORT_PATH} + debug_echo "Finished Semgrep scan of ${scan_path}" done # cd back to the original SOURCE_DIR in case path changed during scan cd ${_ASH_SOURCE_DIR} +debug_echo "Finished all scanners within the Grype scanner tool set" exit $RC diff --git a/utils/js-docker-execute.sh b/utils/js-docker-execute.sh index 5ba521e..5da5282 100755 --- a/utils/js-docker-execute.sh +++ b/utils/js-docker-execute.sh @@ -1,5 +1,9 @@ #!/bin/bash +debug_echo() { + [[ "${ASH_DEBUG:-"NO"}" != "NO" ]] && echo "DEBUG: ${1}" || true +} + abs() { # compute the absolute value of the input parameter input=$1 if [[ $input -lt 0 ]]; then @@ -42,7 +46,7 @@ cd ${_ASH_SOURCE_DIR} # Check if the source directory is a git repository and clone it to the run directory if [[ "$(git rev-parse --is-inside-work-tree 2>/dev/null)" == "true" ]]; then if [[ "$_ASH_EXEC_MODE" != "local" ]]; then - git clone ${_ASH_SOURCE_DIR} ${_ASH_RUN_DIR} >/dev/null 2>&1 + git clone --depth=1 --single-branch ${_ASH_SOURCE_DIR} ${_ASH_RUN_DIR} >/dev/null 2>&1 fi _ASH_SOURCE_DIR=${_ASH_RUN_DIR} cd ${_ASH_RUN_DIR} diff --git a/utils/py-docker-execute.sh b/utils/py-docker-execute.sh index 43d1dab..e69dbe9 100644 --- a/utils/py-docker-execute.sh +++ b/utils/py-docker-execute.sh @@ -42,7 +42,7 @@ cd ${_ASH_SOURCE_DIR} # Check if the source directory is a git repository and clone it to the run directory if [[ "$(git rev-parse --is-inside-work-tree 2>/dev/null)" == "true" ]]; then if [[ "$_ASH_EXEC_MODE" != "local" ]]; then - git clone ${_ASH_SOURCE_DIR} ${_ASH_RUN_DIR} >/dev/null 2>&1 + git clone --depth=1 --single-branch ${_ASH_SOURCE_DIR} ${_ASH_RUN_DIR} >/dev/null 2>&1 fi _ASH_SOURCE_DIR=${_ASH_RUN_DIR} cd ${_ASH_RUN_DIR} diff --git a/utils/yaml-docker-execute.sh b/utils/yaml-docker-execute.sh index 7cb722a..16e6ae0 100644 --- 
a/utils/yaml-docker-execute.sh +++ b/utils/yaml-docker-execute.sh @@ -42,7 +42,7 @@ cd ${_ASH_SOURCE_DIR} # Check if the source directory is a git repository and clone it to the run directory if [[ "$(git rev-parse --is-inside-work-tree 2>/dev/null)" == "true" ]]; then if [[ "$_ASH_EXEC_MODE" != "local" ]]; then - git clone ${_ASH_SOURCE_DIR} ${_ASH_RUN_DIR} >/dev/null 2>&1 + git clone --depth=1 --single-branch ${_ASH_SOURCE_DIR} ${_ASH_RUN_DIR} >/dev/null 2>&1 fi _ASH_SOURCE_DIR=${_ASH_RUN_DIR} cd ${_ASH_RUN_DIR}