From 66aec8e11161a4a44f154d10d6136ded8098405e Mon Sep 17 00:00:00 2001 From: Stewart Wallace Date: Thu, 2 Feb 2023 13:43:53 +0000 Subject: [PATCH 1/8] Sanitizing account names before using them in SFn Invocation (#598) * Sanatizing account names before using them in SFn Invocation * Addressing code review comments --- .../process_account_files.py | 58 ++++++++++--------- .../tests/test_account_file_processing.py | 51 ++++++++++++++-- 2 files changed, 77 insertions(+), 32 deletions(-) diff --git a/src/lambda_codebase/account_processing/process_account_files.py b/src/lambda_codebase/account_processing/process_account_files.py index 28b409845..8336ca969 100644 --- a/src/lambda_codebase/account_processing/process_account_files.py +++ b/src/lambda_codebase/account_processing/process_account_files.py @@ -12,6 +12,7 @@ import tempfile import logging from typing import Any, TypedDict +import re import yaml from yaml.error import YAMLError @@ -42,6 +43,7 @@ class AccountFileData(TypedDict): Class used to return YAML account file data and its related metadata like the execution_id of the CodePipeline that uploaded it. """ + content: Any execution_id: str @@ -65,8 +67,8 @@ def get_file_from_s3( try: LOGGER.debug( "Reading YAML from S3: %s from %s", - s3_object_location.get('object_key'), - s3_object_location.get('bucket_name'), + s3_object_location.get("object_key"), + s3_object_location.get("bucket_name"), ) s3_object = s3_resource.Object(**s3_object_location) object_adf_version = s3_object.metadata.get( @@ -80,12 +82,9 @@ def get_file_from_s3( s3_object_location, object_adf_version, ) - return { - "content": {}, - "execution_id": "" - } + return {"content": {}, "execution_id": ""} - with tempfile.TemporaryFile(mode='w+b') as file_pointer: + with tempfile.TemporaryFile(mode="w+b") as file_pointer: s3_object.download_fileobj(file_pointer) # Move pointer to the start of the file @@ -98,16 +97,16 @@ def get_file_from_s3( except ClientError as error: LOGGER.error( "Failed to download %s from %s, due to %s", - s3_object_location.get('object_key'), - s3_object_location.get('bucket_name'), + s3_object_location.get("object_key"), + s3_object_location.get("bucket_name"), error, ) raise except YAMLError as yaml_error: LOGGER.error( "Failed to parse YAML file: %s from %s, due to %s", - s3_object_location.get('object_key'), - s3_object_location.get('bucket_name'), + s3_object_location.get("object_key"), + s3_object_location.get("bucket_name"), yaml_error, ) raise @@ -129,19 +128,23 @@ def process_account(account_lookup, account): def process_account_list(all_accounts, accounts_in_file): - account_lookup = { - account["Name"]: account["Id"] for account in all_accounts - } - processed_accounts = list(map( - lambda account: process_account( - account_lookup=account_lookup, - account=account, - ), - accounts_in_file - )) + account_lookup = {account["Name"]: account["Id"] for account in all_accounts} + processed_accounts = list( + map( + lambda account: process_account( + account_lookup=account_lookup, + account=account, + ), + accounts_in_file, + ) + ) return processed_accounts +def sanitize_account_name_for_snf(account_name): + return re.sub("[^a-zA-Z0-9_]", "_", account_name[:30]) + + def start_executions( sfn_client, processed_account_list, @@ -158,14 +161,14 @@ def start_executions( run_id, ) for account in processed_account_list: - full_account_name = account.get('account_full_name', 'no-account-name') + full_account_name = account.get("account_full_name", "no-account-name") # AWS Step Functions supports max 
80 characters. # Since the run_id equals 49 characters plus the dash, we have 30 # characters available. To ensure we don't run over, lets use a # truncated version instead: - truncated_account_name = full_account_name[:30] - sfn_execution_name = f"{truncated_account_name}-{run_id}" - + sfn_execution_name = ( + f"{sanitize_account_name_for_snf(full_account_name)}-{run_id}" + ) LOGGER.debug( "Payload for %s: %s", sfn_execution_name, @@ -182,8 +185,9 @@ def lambda_handler(event, context): """Main Lambda Entry point""" LOGGER.debug( "Processing event: %s", - json.dumps(event, indent=2) if LOGGER.isEnabledFor(logging.DEBUG) - else "--data-hidden--" + json.dumps(event, indent=2) + if LOGGER.isEnabledFor(logging.DEBUG) + else "--data-hidden--", ) sfn_client = boto3.client("stepfunctions") s3_resource = boto3.resource("s3") diff --git a/src/lambda_codebase/account_processing/tests/test_account_file_processing.py b/src/lambda_codebase/account_processing/tests/test_account_file_processing.py index fadaa52a7..7e7e9ced8 100644 --- a/src/lambda_codebase/account_processing/tests/test_account_file_processing.py +++ b/src/lambda_codebase/account_processing/tests/test_account_file_processing.py @@ -2,7 +2,12 @@ Tests the account file processing lambda """ import unittest -from ..process_account_files import process_account, process_account_list, get_details_from_event +from ..process_account_files import ( + process_account, + process_account_list, + get_details_from_event, + sanitize_account_name_for_snf, +) class SuccessTestCase(unittest.TestCase): @@ -20,7 +25,7 @@ def test_process_account_when_account_exists(self): "account_full_name": "myTestAccountName", "account_id": 123456789012, "needs_created": False, - } + }, ) def test_process_account_when_account_does_not_exist(self): @@ -35,7 +40,7 @@ def test_process_account_when_account_does_not_exist(self): "alias": "MyCoolAlias", "account_full_name": "myTestAccountName", "needs_created": True, - } + }, ) def test_process_account_list(self): @@ -59,6 +64,43 @@ def test_process_account_list(self): ], ) + def test_get_sanitize_account_name(self): + self.assertEqual( + sanitize_account_name_for_snf("myTestAccountName"), "myTestAccountName" + ) + self.assertEqual( + sanitize_account_name_for_snf( + "thisIsALongerAccountNameForTestingTruncatedNames" + ), + "thisIsALongerAccountNameForTes", + ) + self.assertEqual( + sanitize_account_name_for_snf( + "thisIsALongerAccountName ForTestingTruncatedNames" + ), + "thisIsALongerAccountName_ForTe", + ) + self.assertEqual( + sanitize_account_name_for_snf("this accountname chars"), + "this_accountname__has_illegal_", + ) + self.assertEqual( + sanitize_account_name_for_snf("this accountname \\has illegal"), + "this_accountname__has_illegal", + ) + self.assertEqual( + sanitize_account_name_for_snf("^startswithanillegalchar"), + "_startswithanillegalchar", + ) + self.assertEqual( + len( + sanitize_account_name_for_snf( + "ReallyLongAccountNameThatShouldBeTruncatedBecauseItsTooLong" + ) + ), + 30, + ) + class FailureTestCase(unittest.TestCase): # pylint: disable=W0106 @@ -67,6 +109,5 @@ def test_event_parsing(self): with self.assertRaises(ValueError) as _error: get_details_from_event(sample_event) self.assertEqual( - str(_error.exception), - "No S3 Event details present in event trigger" + str(_error.exception), "No S3 Event details present in event trigger" ) From cb029ef5b8d00794f756b62086a5394f4e7f7879 Mon Sep 17 00:00:00 2001 From: lasv-az <122614993+lasv-az@users.noreply.github.com> Date: Mon, 13 Mar 2023 14:13:24 +0100 
Subject: [PATCH 2/8] Update tf_scan.yml (#607) * Update tf_scan.yml Fix terrascan curl call. * Update tf_scan.yml --- samples/sample-terraform/tf_scan.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/sample-terraform/tf_scan.yml b/samples/sample-terraform/tf_scan.yml index 02f75f9a9..94b69c36a 100644 --- a/samples/sample-terraform/tf_scan.yml +++ b/samples/sample-terraform/tf_scan.yml @@ -3,7 +3,7 @@ version: 0.2 phases: install: commands: - - curl -L "$(curl -s https://api.github.com/repos/accurics/terrascan/releases/latest | grep -o -E "https://.+?_Linux_x86_64.tar.gz")" > terrascan.tar.gz + - curl -L "$(curl -s https://api.github.com/repositories/103084166/releases/latest | grep -o -E "https:\/\/.+?_Linux_x86_64.tar.gz")" > terrascan.tar.gz - tar -xf terrascan.tar.gz terrascan && rm terrascan.tar.gz - install terrascan /usr/local/bin && rm terrascan build: From bc5e5d4cfd5a2cd151ad81e06b3d6410b1647a2c Mon Sep 17 00:00:00 2001 From: lasv-az <122614993+lasv-az@users.noreply.github.com> Date: Fri, 17 Mar 2023 12:10:34 +0100 Subject: [PATCH 3/8] Update user-guide.md (#605) Terraform example deployment map fixed. --- docs/user-guide.md | 61 +++++++++++++++++++++++----------------------- 1 file changed, 31 insertions(+), 30 deletions(-) diff --git a/docs/user-guide.md b/docs/user-guide.md index 6b66c1cf2..22c8989eb 100644 --- a/docs/user-guide.md +++ b/docs/user-guide.md @@ -1100,36 +1100,37 @@ pipeline for more details in the setup and integration. Example Terraform deployment map: ```yaml -- name: sample-terraform - default_providers: - source: - provider: codecommit - properties: - account_id: 111111111111 # Source account id - build: - provider: codebuild - deploy: - provider: codebuild - properties: - image: "STANDARD_5_0" - environment_variables: - TARGET_ACCOUNTS: 111111111111,222222222222 # Target accounts - TARGET_OUS: /core/infrastructure,/sandbox # Target OUs - MANAGEMENT_ACCOUNT_ID: 333333333333 # Billing account - # Target regions, as a comma separated list is supported - # For example, "eu-west-1,us-east-1". - REGIONS: eu-west-1 - targets: - - name: terraform-scan # optional - properties: - spec_filename: tf_scan.yml # Terraform scan - - name: terraform-plan - properties: - spec_filename: tf_plan.yml # Terraform plan - - approval # manual approval - - name: terraform-apply - properties: - spec_filename: tf_apply.yml # Terraform apply +pipelines: + - name: sample-terraform + default_providers: + source: + provider: codecommit + properties: + account_id: 111111111111 # Source account id + build: + provider: codebuild + deploy: + provider: codebuild + properties: + image: "STANDARD_5_0" + environment_variables: + TARGET_ACCOUNTS: 111111111111,222222222222 # Target accounts + TARGET_OUS: /core/infrastructure,/sandbox # Target OUs + MANAGEMENT_ACCOUNT_ID: 333333333333 # Billing account + # Target regions, as a comma separated list is supported + # For example, "eu-west-1,us-east-1". + REGIONS: eu-west-1 + targets: + - name: terraform-scan # optional + properties: + spec_filename: tf_scan.yml # Terraform scan + - name: terraform-plan + properties: + spec_filename: tf_plan.yml # Terraform plan + - approval # manual approval + - name: terraform-apply + properties: + spec_filename: tf_apply.yml # Terraform apply ``` 1. Add a sample-terraform pipeline in ADF `deployment-map.yml` as shown above. 
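For context on PATCH 1/8 above: the short sketch below is not part of the patch series; it only restates the execution-naming rule that patch implements, assuming the 49-character run id described in the in-line comment. `sanitize_account_name_for_snf` is copied from `process_account_files.py`; the `build_execution_name` wrapper, the constants, and the sample inputs are illustrative additions, not code from the patch.

```python
# illustrative_sfn_naming.py -- standalone sketch, not part of PATCH 1/8
import re

SFN_EXECUTION_NAME_MAX = 80  # Step Functions execution name limit cited in the patch comment
RUN_ID_LENGTH = 49           # run_id length cited in the patch comment


def sanitize_account_name_for_snf(account_name):
    # Mirrors the helper added in PATCH 1/8: truncate to 30 characters first,
    # then replace every character that is not alphanumeric or an underscore.
    return re.sub("[^a-zA-Z0-9_]", "_", account_name[:30])


def build_execution_name(account_full_name, run_id):
    # Hypothetical wrapper for illustration only:
    # 30 (sanitized name) + 1 (dash) + 49 (run_id) = 80 characters at most.
    return f"{sanitize_account_name_for_snf(account_full_name)}-{run_id}"


if __name__ == "__main__":
    fake_run_id = "e" * RUN_ID_LENGTH  # stand-in for a real pipeline execution id
    name = build_execution_name("My Test Account (eu-west-1) team/platform", fake_run_id)
    print(name)
    # The combined name stays within the limit and contains only characters
    # Step Functions accepts in execution names.
    assert len(name) <= SFN_EXECUTION_NAME_MAX
    assert re.fullmatch(r"[A-Za-z0-9_-]+", name)
```

Because the helper truncates to 30 characters before substituting, the account-name portion can never exceed 30 characters regardless of what the regex replaces, so the combined `<name>-<run_id>` string stays at or below the 80-character Step Functions limit.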
From 419ef5831234c72f49611c6bf8e4f316e82d2d4f Mon Sep 17 00:00:00 2001 From: Simon Kok Date: Fri, 17 Mar 2023 12:14:23 +0100 Subject: [PATCH 4/8] Fix CodeDeploy sample to work in gov-cloud (#609) **Why?** At the moment, the install script for CodeDeploy would install CodeDeploy using the global S3 URL to fetch the file from the S3 bucket. As such: ``` https://aws-codedeploy-${REGION}.s3.amazonaws.com/latest/install ``` However, the CodeDeploy Gov cloud buckets are not available through this global URL. So when the REGION=`us-gov-east-1` or `us-gov-west-1` it would fail. **What?** To fix this, we should use the regional URL instead: ``` https://aws-codedeploy-${REGION}.s3.${REGION}.amazonaws.com/latest/install ``` --- .../sample-ec2-with-codedeploy/scripts/install-codedeploy.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/samples/sample-ec2-with-codedeploy/scripts/install-codedeploy.sh b/samples/sample-ec2-with-codedeploy/scripts/install-codedeploy.sh index 4d89627cf..96a9d1af5 100755 --- a/samples/sample-ec2-with-codedeploy/scripts/install-codedeploy.sh +++ b/samples/sample-ec2-with-codedeploy/scripts/install-codedeploy.sh @@ -36,7 +36,7 @@ function platformize(){ function execute(){ if [ ${PLAT} = "ubuntu" ]; then cd /tmp/ - wget https://aws-codedeploy-${REGION}.s3.amazonaws.com/latest/install + wget https://aws-codedeploy-${REGION}.s3.${REGION}.amazonaws.com/latest/install chmod +x ./install if ./install auto; then @@ -56,7 +56,7 @@ function execute(){ elif [ ${PLAT} = "amz" ]; then cd /tmp/ - wget https://aws-codedeploy-${REGION}.s3.amazonaws.com/latest/install + wget https://aws-codedeploy-${REGION}.s3.${REGION}.amazonaws.com/latest/install chmod +x ./install if ./install auto; then From c26724cc9546302bac6f7d5b5f7c3642946eea05 Mon Sep 17 00:00:00 2001 From: abhi1094 <44882655+abhi1094@users.noreply.github.com> Date: Wed, 19 Apr 2023 13:27:36 +0100 Subject: [PATCH 5/8] fix documentation error for codebuild custom inage (#622) --- docs/user-guide.md | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/docs/user-guide.md b/docs/user-guide.md index 22c8989eb..822291596 100644 --- a/docs/user-guide.md +++ b/docs/user-guide.md @@ -543,9 +543,10 @@ pipelines: # ... build: provider: codebuild - image: - repository_arn: arn:aws:ecr:region:111111111111:repository/test - tag: latest # optional (defaults to latest) + properties: + image: + repository_arn: arn:aws:ecr:region:111111111111:repository/test + tag: latest # optional (defaults to latest) targets: - # ... 
``` From ed6550e62a09a3c766cf211332a6a2356899007a Mon Sep 17 00:00:00 2001 From: Alex Mackechnie <47723399+AlexMackechnie@users.noreply.github.com> Date: Wed, 19 Apr 2023 13:27:56 +0100 Subject: [PATCH 6/8] Remove unused sam build - the output of this command is overridden by the sam build for global.yml; (#613) --- src/template.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/src/template.yml b/src/template.yml index 73f8c49a7..090cc0f5b 100644 --- a/src/template.yml +++ b/src/template.yml @@ -1304,7 +1304,6 @@ Resources: - pytest -vvv build: commands: - - sam build -t adf-bootstrap/deployment/pipeline_management.yml - sam build -t adf-bootstrap/deployment/global.yml - >- sam package --output-template-file adf-bootstrap/deployment/global.yml From d04c168f108bf53c5d73d65a0ec46a1e02600b4a Mon Sep 17 00:00:00 2001 From: lydialim Date: Wed, 19 Apr 2023 14:28:23 +0200 Subject: [PATCH 7/8] [Bug] Fix custom pipeline type configuration not loaded (#612) * [Bug] Fix custom pipeline type not loaded It was getting the wrong property name so it always sets the default pipeline * Fix pipeline_type test cases --------- Co-authored-by: Lydia --- .../adf-build/shared/cdk/cdk_stacks/main.py | 2 +- .../shared/cdk/cdk_stacks/tests/test_pipeline_creation.py | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/main.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/main.py index be2867054..1fe8135b3 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/main.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/main.py @@ -40,7 +40,7 @@ def __init__( pipeline_type = ( stack_input['pipeline_input'] .get('params', {}) - .get('type', DEFAULT_PIPELINE) + .get('pipeline_type', DEFAULT_PIPELINE) .lower() ) diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py index bd99b3af4..04a6a295e 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py @@ -15,7 +15,7 @@ def test_pipeline_generation_fails_if_pipeline_type_is_not_specified(mock): stack_input = {"pipeline_input": {"params": {}}} stack_input["pipeline_input"]["name"] = "test-stack" - stack_input["pipeline_input"]["params"]["type"] = "fail" + stack_input["pipeline_input"]["params"]["pipeline_type"] = "fail" app = core.App() with pytest.raises(ValueError): pipeline_stack = PipelineStack(app, stack_input) @@ -35,7 +35,6 @@ def test_pipeline_generation_works_when_no_type_specified(mock): def test_pipeline_generation_works_when_no_type_specified(mock): stack_input = {"pipeline_input": {"params": {}}} stack_input["pipeline_input"]["name"] = "test-stack" - stack_input["pipeline_input"]["params"]["type"] = "Default" app = core.App() PipelineStack(app, stack_input) From 33ef31c5619157b0ad1c5f21c5494bb7271e3049 Mon Sep 17 00:00:00 2001 From: Javy de Koning Date: Fri, 21 Apr 2023 14:06:17 +0200 Subject: [PATCH 8/8] Fixing broken link and build badge (#625) --- docs/serverless-application-repo.md | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/docs/serverless-application-repo.md b/docs/serverless-application-repo.md index f8263d4a8..c3a784657 100644 --- a/docs/serverless-application-repo.md +++ b/docs/serverless-application-repo.md @@ -1,6 +1,6 @@ # AWS Deployment Framework -[![Build Status](https://travis-ci.org/awslabs/aws-deployment-framework.svg?branch=master)](https://travis-ci.org/awslabs/aws-deployment-framework) +[![Build Status](https://github.com/awslabs/aws-deployment-framework/workflows/ADF%20CI/badge.svg?branch=master)](https://github.com/awslabs/aws-deployment-framework/actions?query=workflow%3AADF%20CI+branch%3Amaster) The [AWS Deployment Framework](https://github.com/awslabs/aws-deployment-framework) *(ADF)* is an extensive and flexible framework to manage and deploy resources