Merge branch 'awslabs:master' into master
stemons authored May 4, 2023
2 parents 491d26e + 33ef31c commit e1c5cc4
Showing 9 changed files with 118 additions and 73 deletions.
2 changes: 1 addition & 1 deletion docs/serverless-application-repo.md
@@ -1,6 +1,6 @@
 # AWS Deployment Framework

-[![Build Status](https://travis-ci.org/awslabs/aws-deployment-framework.svg?branch=master)](https://travis-ci.org/awslabs/aws-deployment-framework)
+[![Build Status](https://github.com/awslabs/aws-deployment-framework/workflows/ADF%20CI/badge.svg?branch=master)](https://github.com/awslabs/aws-deployment-framework/actions?query=workflow%3AADF%20CI+branch%3Amaster)

 The [AWS Deployment Framework](https://github.com/awslabs/aws-deployment-framework)
 *(ADF)* is an extensive and flexible framework to manage and deploy resources
68 changes: 35 additions & 33 deletions docs/user-guide.md
@@ -543,9 +543,10 @@ pipelines:
       # ...
       build:
         provider: codebuild
-        image:
-          repository_arn: arn:aws:ecr:region:111111111111:repository/test
-          tag: latest # optional (defaults to latest)
+        properties:
+          image:
+            repository_arn: arn:aws:ecr:region:111111111111:repository/test
+            tag: latest # optional (defaults to latest)
     targets:
       - # ...
 ```
@@ -1100,36 +1101,37 @@ pipeline for more details in the setup and integration.
 Example Terraform deployment map:

 ```yaml
-- name: sample-terraform
-  default_providers:
-    source:
-      provider: codecommit
-      properties:
-        account_id: 111111111111 # Source account id
-    build:
-      provider: codebuild
-    deploy:
-      provider: codebuild
-      properties:
-        image: "STANDARD_5_0"
-        environment_variables:
-          TARGET_ACCOUNTS: 111111111111,222222222222 # Target accounts
-          TARGET_OUS: /core/infrastructure,/sandbox # Target OUs
-          MANAGEMENT_ACCOUNT_ID: 333333333333 # Billing account
-          # Target regions, as a comma separated list is supported
-          # For example, "eu-west-1,us-east-1".
-          REGIONS: eu-west-1
-  targets:
-    - name: terraform-scan # optional
-      properties:
-        spec_filename: tf_scan.yml # Terraform scan
-    - name: terraform-plan
-      properties:
-        spec_filename: tf_plan.yml # Terraform plan
-    - approval # manual approval
-    - name: terraform-apply
-      properties:
-        spec_filename: tf_apply.yml # Terraform apply
+pipelines:
+  - name: sample-terraform
+    default_providers:
+      source:
+        provider: codecommit
+        properties:
+          account_id: 111111111111 # Source account id
+      build:
+        provider: codebuild
+      deploy:
+        provider: codebuild
+        properties:
+          image: "STANDARD_5_0"
+          environment_variables:
+            TARGET_ACCOUNTS: 111111111111,222222222222 # Target accounts
+            TARGET_OUS: /core/infrastructure,/sandbox # Target OUs
+            MANAGEMENT_ACCOUNT_ID: 333333333333 # Billing account
+            # Target regions, as a comma separated list is supported
+            # For example, "eu-west-1,us-east-1".
+            REGIONS: eu-west-1
+    targets:
+      - name: terraform-scan # optional
+        properties:
+          spec_filename: tf_scan.yml # Terraform scan
+      - name: terraform-plan
+        properties:
+          spec_filename: tf_plan.yml # Terraform plan
+      - approval # manual approval
+      - name: terraform-apply
+        properties:
+          spec_filename: tf_apply.yml # Terraform apply
 ```

 1. Add a sample-terraform pipeline in ADF `deployment-map.yml` as shown above.
@@ -36,7 +36,7 @@ function platformize(){
 function execute(){
   if [ ${PLAT} = "ubuntu" ]; then
     cd /tmp/
-    wget https://aws-codedeploy-${REGION}.s3.amazonaws.com/latest/install
+    wget https://aws-codedeploy-${REGION}.s3.${REGION}.amazonaws.com/latest/install
     chmod +x ./install

     if ./install auto; then
@@ -56,7 +56,7 @@ function execute(){

   elif [ ${PLAT} = "amz" ]; then
     cd /tmp/
-    wget https://aws-codedeploy-${REGION}.s3.amazonaws.com/latest/install
+    wget https://aws-codedeploy-${REGION}.s3.${REGION}.amazonaws.com/latest/install
     chmod +x ./install

     if ./install auto; then
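Both hunks make the same fix: the CodeDeploy installer is now fetched from the bucket's regional S3 endpoint (`s3.${REGION}.amazonaws.com`) rather than the legacy global endpoint (`s3.amazonaws.com`), which can answer with a cross-region redirect and is generally unavailable in newer opt-in regions. A minimal Python sketch of the two URL forms, assuming the `aws-codedeploy-<region>` bucket naming used in the script above:

```python
# A sketch of the endpoint change, assuming CodeDeploy's per-region
# "aws-codedeploy-<region>" install buckets as used in the script above.
def installer_url(region: str, legacy: bool = False) -> str:
    bucket = f"aws-codedeploy-{region}"
    # Legacy global endpoint: S3 may answer with a cross-region redirect.
    # Regional endpoint: resolves directly in the bucket's own region.
    host = "s3.amazonaws.com" if legacy else f"s3.{region}.amazonaws.com"
    return f"https://{bucket}.{host}/latest/install"

print(installer_url("eu-west-1", legacy=True))
print(installer_url("eu-west-1"))
```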
2 changes: 1 addition & 1 deletion samples/sample-terraform/tf_scan.yml
@@ -3,7 +3,7 @@ version: 0.2
 phases:
   install:
     commands:
-      - curl -L "$(curl -s https://api.github.com/repos/accurics/terrascan/releases/latest | grep -o -E "https://.+?_Linux_x86_64.tar.gz")" > terrascan.tar.gz
+      - curl -L "$(curl -s https://api.github.com/repositories/103084166/releases/latest | grep -o -E "https:\/\/.+?_Linux_x86_64.tar.gz")" > terrascan.tar.gz
       - tar -xf terrascan.tar.gz terrascan && rm terrascan.tar.gz
       - install terrascan /usr/local/bin && rm terrascan
   build:
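The buildspec now resolves the latest Terrascan release through GitHub's numeric repository endpoint (`/repositories/103084166`) instead of the `/repos/accurics/terrascan` path. A repository's numeric ID stays stable across renames and ownership transfers, so the lookup keeps working where the old owner/name path could break — presumably the motivation here, since the project has moved on from the accurics organization. A rough Python equivalent of the lookup, assuming the public GitHub API and that 103084166 is Terrascan's repository ID:

```python
# A rough Python equivalent of the buildspec lookup, assuming the public
# GitHub API and that 103084166 is Terrascan's numeric repository ID.
import json
import re
import urllib.request

url = "https://api.github.com/repositories/103084166/releases/latest"
with urllib.request.urlopen(url) as response:
    release = json.load(response)

# Mirror the grep in the buildspec: pick out the Linux x86_64 tarball URL.
match = re.search(r"https://.+?_Linux_x86_64\.tar\.gz", json.dumps(release))
print(match.group(0) if match else "no matching asset found")
```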
58 changes: 31 additions & 27 deletions src/lambda_codebase/account_processing/process_account_files.py
@@ -12,6 +12,7 @@
 import tempfile
 import logging
 from typing import Any, TypedDict
+import re
 import yaml

 from yaml.error import YAMLError
@@ -42,6 +43,7 @@ class AccountFileData(TypedDict):
     Class used to return YAML account file data and its related
     metadata like the execution_id of the CodePipeline that uploaded it.
     """
+
     content: Any
     execution_id: str

@@ -65,8 +67,8 @@ def get_file_from_s3(
     try:
         LOGGER.debug(
             "Reading YAML from S3: %s from %s",
-            s3_object_location.get('object_key'),
-            s3_object_location.get('bucket_name'),
+            s3_object_location.get("object_key"),
+            s3_object_location.get("bucket_name"),
         )
         s3_object = s3_resource.Object(**s3_object_location)
         object_adf_version = s3_object.metadata.get(
@@ -80,12 +82,9 @@
                 s3_object_location,
                 object_adf_version,
             )
-            return {
-                "content": {},
-                "execution_id": ""
-            }
+            return {"content": {}, "execution_id": ""}

-        with tempfile.TemporaryFile(mode='w+b') as file_pointer:
+        with tempfile.TemporaryFile(mode="w+b") as file_pointer:
             s3_object.download_fileobj(file_pointer)

             # Move pointer to the start of the file
@@ -98,16 +97,16 @@
     except ClientError as error:
         LOGGER.error(
             "Failed to download %s from %s, due to %s",
-            s3_object_location.get('object_key'),
-            s3_object_location.get('bucket_name'),
+            s3_object_location.get("object_key"),
+            s3_object_location.get("bucket_name"),
             error,
         )
         raise
     except YAMLError as yaml_error:
         LOGGER.error(
             "Failed to parse YAML file: %s from %s, due to %s",
-            s3_object_location.get('object_key'),
-            s3_object_location.get('bucket_name'),
+            s3_object_location.get("object_key"),
+            s3_object_location.get("bucket_name"),
             yaml_error,
         )
         raise
@@ -129,19 +128,23 @@ def process_account(account_lookup, account):


 def process_account_list(all_accounts, accounts_in_file):
-    account_lookup = {
-        account["Name"]: account["Id"] for account in all_accounts
-    }
-    processed_accounts = list(map(
-        lambda account: process_account(
-            account_lookup=account_lookup,
-            account=account,
-        ),
-        accounts_in_file
-    ))
+    account_lookup = {account["Name"]: account["Id"] for account in all_accounts}
+    processed_accounts = list(
+        map(
+            lambda account: process_account(
+                account_lookup=account_lookup,
+                account=account,
+            ),
+            accounts_in_file,
+        )
+    )
     return processed_accounts


+def sanitize_account_name_for_snf(account_name):
+    return re.sub("[^a-zA-Z0-9_]", "_", account_name[:30])
+
+
 def start_executions(
     sfn_client,
     processed_account_list,
@@ -158,14 +161,14 @@ def start_executions(
         run_id,
     )
     for account in processed_account_list:
-        full_account_name = account.get('account_full_name', 'no-account-name')
+        full_account_name = account.get("account_full_name", "no-account-name")
         # AWS Step Functions supports max 80 characters.
         # Since the run_id equals 49 characters plus the dash, we have 30
         # characters available. To ensure we don't run over, lets use a
         # truncated version instead:
-        truncated_account_name = full_account_name[:30]
-        sfn_execution_name = f"{truncated_account_name}-{run_id}"
-
+        sfn_execution_name = (
+            f"{sanitize_account_name_for_snf(full_account_name)}-{run_id}"
+        )
         LOGGER.debug(
             "Payload for %s: %s",
             sfn_execution_name,
@@ -182,8 +185,9 @@ def lambda_handler(event, context):
     """Main Lambda Entry point"""
     LOGGER.debug(
         "Processing event: %s",
-        json.dumps(event, indent=2) if LOGGER.isEnabledFor(logging.DEBUG)
-        else "--data-hidden--"
+        json.dumps(event, indent=2)
+        if LOGGER.isEnabledFor(logging.DEBUG)
+        else "--data-hidden--",
     )
     sfn_client = boto3.client("stepfunctions")
     s3_resource = boto3.resource("s3")
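Beyond the formatting changes, the functional change in this file is that Step Functions execution names are now sanitized as well as truncated: `sanitize_account_name_for_snf` cuts the account name to 30 characters and then replaces anything outside `[a-zA-Z0-9_]` with an underscore, keeping the name valid and the full `name-run_id` string within the 80-character limit described in the comments. A minimal sketch of the resulting behavior, using a hypothetical 49-character run id as a stand-in for the CodePipeline execution id (the new unit test below exercises the same cases):

```python
# A minimal sketch of the new naming behavior. The run id here is a
# hypothetical stand-in; real ids come from the CodePipeline execution.
import re

def sanitize_account_name_for_snf(account_name):
    # Truncate to 30 chars, then replace anything outside [a-zA-Z0-9_]
    # with "_" so the Step Functions execution name stays valid.
    return re.sub("[^a-zA-Z0-9_]", "_", account_name[:30])

run_id = "0" * 49
name = sanitize_account_name_for_snf("this accountname <has illegal> chars")
print(name)                     # this_accountname__has_illegal_
print(len(f"{name}-{run_id}"))  # 80, the Step Functions maximum
```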
@@ -2,7 +2,12 @@
 Tests the account file processing lambda
 """
 import unittest
-from ..process_account_files import process_account, process_account_list, get_details_from_event
+from ..process_account_files import (
+    process_account,
+    process_account_list,
+    get_details_from_event,
+    sanitize_account_name_for_snf,
+)


 class SuccessTestCase(unittest.TestCase):
@@ -20,7 +25,7 @@ def test_process_account_when_account_exists(self):
                 "account_full_name": "myTestAccountName",
                 "account_id": 123456789012,
                 "needs_created": False,
-            }
+            },
         )

     def test_process_account_when_account_does_not_exist(self):
@@ -35,7 +40,7 @@ def test_process_account_when_account_does_not_exist(self):
                 "alias": "MyCoolAlias",
                 "account_full_name": "myTestAccountName",
                 "needs_created": True,
-            }
+            },
         )

     def test_process_account_list(self):
@@ -59,6 +64,43 @@ def test_process_account_list(self):
             ],
         )

+    def test_get_sanitize_account_name(self):
+        self.assertEqual(
+            sanitize_account_name_for_snf("myTestAccountName"), "myTestAccountName"
+        )
+        self.assertEqual(
+            sanitize_account_name_for_snf(
+                "thisIsALongerAccountNameForTestingTruncatedNames"
+            ),
+            "thisIsALongerAccountNameForTes",
+        )
+        self.assertEqual(
+            sanitize_account_name_for_snf(
+                "thisIsALongerAccountName ForTestingTruncatedNames"
+            ),
+            "thisIsALongerAccountName_ForTe",
+        )
+        self.assertEqual(
+            sanitize_account_name_for_snf("this accountname <has illegal> chars"),
+            "this_accountname__has_illegal_",
+        )
+        self.assertEqual(
+            sanitize_account_name_for_snf("this accountname \\has illegal"),
+            "this_accountname__has_illegal",
+        )
+        self.assertEqual(
+            sanitize_account_name_for_snf("^startswithanillegalchar"),
+            "_startswithanillegalchar",
+        )
+        self.assertEqual(
+            len(
+                sanitize_account_name_for_snf(
+                    "ReallyLongAccountNameThatShouldBeTruncatedBecauseItsTooLong"
+                )
+            ),
+            30,
+        )
+

 class FailureTestCase(unittest.TestCase):
     # pylint: disable=W0106
@@ -67,6 +109,5 @@ def test_event_parsing(self):
         with self.assertRaises(ValueError) as _error:
             get_details_from_event(sample_event)
         self.assertEqual(
-            str(_error.exception),
-            "No S3 Event details present in event trigger"
+            str(_error.exception), "No S3 Event details present in event trigger"
         )
@@ -40,7 +40,7 @@ def __init__(
         pipeline_type = (
             stack_input['pipeline_input']
             .get('params', {})
-            .get('type', DEFAULT_PIPELINE)
+            .get('pipeline_type', DEFAULT_PIPELINE)
             .lower()
         )

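The params key that selects a pipeline template is renamed from `type` to `pipeline_type`; the updated tests below follow suit. A small sketch of the lookup semantics, with `DEFAULT_PIPELINE` as a hypothetical stand-in for the real constant:

```python
# Sketch of the renamed lookup; DEFAULT_PIPELINE is a hypothetical
# stand-in for the real constant used by the CDK stack.
DEFAULT_PIPELINE = "default"

def resolve_pipeline_type(stack_input: dict) -> str:
    # Missing params or a missing pipeline_type key both fall back
    # to the default; the value is lower-cased before use.
    return (
        stack_input["pipeline_input"]
        .get("params", {})
        .get("pipeline_type", DEFAULT_PIPELINE)
        .lower()
    )

print(resolve_pipeline_type({"pipeline_input": {"params": {}}}))  # "default"
print(resolve_pipeline_type(
    {"pipeline_input": {"params": {"pipeline_type": "Default"}}}
))  # "default"
```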
@@ -15,7 +15,7 @@
 def test_pipeline_generation_fails_if_pipeline_type_is_not_specified(mock):
     stack_input = {"pipeline_input": {"params": {}}}
     stack_input["pipeline_input"]["name"] = "test-stack"
-    stack_input["pipeline_input"]["params"]["type"] = "fail"
+    stack_input["pipeline_input"]["params"]["pipeline_type"] = "fail"
     app = core.App()
     with pytest.raises(ValueError):
         pipeline_stack = PipelineStack(app, stack_input)
@@ -35,7 +35,6 @@ def test_pipeline_generation_works_when_no_type_specified(mock):
 def test_pipeline_generation_works_when_no_type_specified(mock):
     stack_input = {"pipeline_input": {"params": {}}}
     stack_input["pipeline_input"]["name"] = "test-stack"
-    stack_input["pipeline_input"]["params"]["type"] = "Default"

     app = core.App()
     PipelineStack(app, stack_input)
1 change: 0 additions & 1 deletion src/template.yml
@@ -1304,7 +1304,6 @@ Resources:
               - pytest -vvv
           build:
             commands:
-              - sam build -t adf-bootstrap/deployment/pipeline_management.yml
               - sam build -t adf-bootstrap/deployment/global.yml
               - >-
                 sam package --output-template-file adf-bootstrap/deployment/global.yml
