From 70c3b1b9858465e35a8205d2b789d263f492545c Mon Sep 17 00:00:00 2001 From: Nikita Pivkin Date: Thu, 13 Jun 2024 19:38:19 +0600 Subject: [PATCH] refactor(checks): migrate AWS accessanalyzer, athena, cloudfront to Rego Signed-off-by: Nikita Pivkin --- .../accessanalyzer/enable_access_analyzer.go | 3 +- .../enable_access_analyzer.rego | 45 +++++++++ .../enable_access_analyzer_test.go | 75 --------------- .../enable_access_analyzer_test.rego | 26 +++++ .../aws/athena/enable_at_rest_encryption.go | 3 +- .../aws/athena/enable_at_rest_encryption.rego | 53 +++++++++++ .../athena/enable_at_rest_encryption_test.go | 95 ------------------- .../enable_at_rest_encryption_test.rego | 26 +++++ .../aws/athena/no_encryption_override.go | 3 +- .../aws/athena/no_encryption_override.rego | 40 ++++++++ .../aws/athena/no_encryption_override_test.go | 65 ------------- .../athena/no_encryption_override_test.rego | 16 ++++ checks/cloud/aws/cloudfront/enable_waf.go | 3 +- .../aws/cloudtrail/enable_all_regions.go | 3 +- .../aws/cloudtrail/enable_all_regions.rego | 43 +++++++++ .../aws/cloudtrail/enable_all_regions_test.go | 65 ------------- .../cloudtrail/enable_all_regions_test.rego | 16 ++++ .../aws/cloudtrail/enable_log_validation.go | 3 +- .../aws/cloudtrail/enable_log_validation.rego | 40 ++++++++ .../cloudtrail/enable_log_validation_test.go | 65 ------------- .../enable_log_validation_test.rego | 16 ++++ .../cloudtrail/encryption_customer_key.rego | 43 +++++++++ .../encryption_customer_key_test.go | 63 ------------ .../encryption_customer_key_test.rego | 16 ++++ .../ensure_cloudwatch_integration.go | 5 +- .../ensure_cloudwatch_integration.rego | 47 +++++++++ .../ensure_cloudwatch_integration_test.go | 64 ------------- .../ensure_cloudwatch_integration_test.rego | 16 ++++ .../aws/cloudtrail/no_public_log_access.go | 5 +- .../aws/cloudtrail/no_public_log_access.rego | 52 ++++++++++ .../cloudtrail/no_public_log_access_test.go | 86 ----------------- .../cloudtrail/no_public_log_access_test.rego | 24 +++++ .../require_bucket_access_logging.go | 5 +- .../require_bucket_access_logging.rego | 52 ++++++++++ .../require_bucket_access_logging_test.go | 92 ------------------ .../require_bucket_access_logging_test.rego | 33 +++++++ lib/s3.rego | 11 +++ lib/test.rego | 37 ++++++++ 38 files changed, 673 insertions(+), 682 deletions(-) create mode 100644 checks/cloud/aws/accessanalyzer/enable_access_analyzer.rego delete mode 100644 checks/cloud/aws/accessanalyzer/enable_access_analyzer_test.go create mode 100644 checks/cloud/aws/accessanalyzer/enable_access_analyzer_test.rego create mode 100644 checks/cloud/aws/athena/enable_at_rest_encryption.rego delete mode 100644 checks/cloud/aws/athena/enable_at_rest_encryption_test.go create mode 100644 checks/cloud/aws/athena/enable_at_rest_encryption_test.rego create mode 100644 checks/cloud/aws/athena/no_encryption_override.rego delete mode 100644 checks/cloud/aws/athena/no_encryption_override_test.go create mode 100644 checks/cloud/aws/athena/no_encryption_override_test.rego create mode 100644 checks/cloud/aws/cloudtrail/enable_all_regions.rego delete mode 100644 checks/cloud/aws/cloudtrail/enable_all_regions_test.go create mode 100644 checks/cloud/aws/cloudtrail/enable_all_regions_test.rego create mode 100644 checks/cloud/aws/cloudtrail/enable_log_validation.rego delete mode 100644 checks/cloud/aws/cloudtrail/enable_log_validation_test.go create mode 100644 checks/cloud/aws/cloudtrail/enable_log_validation_test.rego create mode 100644 
checks/cloud/aws/cloudtrail/encryption_customer_key.rego delete mode 100644 checks/cloud/aws/cloudtrail/encryption_customer_key_test.go create mode 100644 checks/cloud/aws/cloudtrail/encryption_customer_key_test.rego create mode 100644 checks/cloud/aws/cloudtrail/ensure_cloudwatch_integration.rego delete mode 100644 checks/cloud/aws/cloudtrail/ensure_cloudwatch_integration_test.go create mode 100644 checks/cloud/aws/cloudtrail/ensure_cloudwatch_integration_test.rego create mode 100644 checks/cloud/aws/cloudtrail/no_public_log_access.rego delete mode 100644 checks/cloud/aws/cloudtrail/no_public_log_access_test.go create mode 100644 checks/cloud/aws/cloudtrail/no_public_log_access_test.rego create mode 100644 checks/cloud/aws/cloudtrail/require_bucket_access_logging.rego delete mode 100644 checks/cloud/aws/cloudtrail/require_bucket_access_logging_test.go create mode 100644 checks/cloud/aws/cloudtrail/require_bucket_access_logging_test.rego create mode 100644 lib/s3.rego create mode 100644 lib/test.rego diff --git a/checks/cloud/aws/accessanalyzer/enable_access_analyzer.go b/checks/cloud/aws/accessanalyzer/enable_access_analyzer.go index 77f5afdf..4453db45 100755 --- a/checks/cloud/aws/accessanalyzer/enable_access_analyzer.go +++ b/checks/cloud/aws/accessanalyzer/enable_access_analyzer.go @@ -34,7 +34,8 @@ keys, AWS Lambda functions, and Amazon SQS(Simple Queue Service) queues. Links: []string{ "https://docs.aws.amazon.com/IAM/latest/UserGuide/what-is-access-analyzer.html", }, - Severity: severity.Low, + Severity: severity.Low, + Deprecated: true, }, func(s *state.State) (results scan.Results) { var enabled bool diff --git a/checks/cloud/aws/accessanalyzer/enable_access_analyzer.rego b/checks/cloud/aws/accessanalyzer/enable_access_analyzer.rego new file mode 100644 index 00000000..eb467998 --- /dev/null +++ b/checks/cloud/aws/accessanalyzer/enable_access_analyzer.rego @@ -0,0 +1,45 @@ +# METADATA +# title: Enable IAM Access analyzer for IAM policies about all resources in each region. +# description: | +# AWS IAM Access Analyzer helps you identify the resources in your organization and +# accounts, such as Amazon S3 buckets or IAM roles, that are shared with an external entity. +# This lets you identify unintended access to your resources and data. Access Analyzer +# identifies resources that are shared with external principals by using logic-based reasoning +# to analyze the resource-based policies in your AWS environment. IAM Access Analyzer +# continuously monitors all policies for S3 bucket, IAM roles, KMS(Key Management Service) +# keys, AWS Lambda functions, and Amazon SQS(Simple Queue Service) queues. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/IAM/latest/UserGuide/what-is-access-analyzer.html +# custom: +# id: AVD-AWS-0175 +# avd_id: AVD-AWS-0175 +# provider: aws +# service: accessanalyzer +# severity: LOW +# short_code: enable-access-analyzer +# recommended_action: Enable IAM Access analyzer across all regions. 
+# frameworks: +# cis-aws-1.4: +# - "1.20" +# input: +# selector: +# - type: cloud +# subtypes: +# - service: accessanalyzer +# provider: aws +package builtin.aws.accessanalyzer.aws0175 + +import rego.v1 + +deny contains res if { + not has_active_analyzer + res := result.new("Access Analyzer is not enabled.", {}) +} + +has_active_analyzer if { + some analyzer in input.aws.accessanalyzer.analyzers + analyzer.active.value +} diff --git a/checks/cloud/aws/accessanalyzer/enable_access_analyzer_test.go b/checks/cloud/aws/accessanalyzer/enable_access_analyzer_test.go deleted file mode 100644 index ecfedd49..00000000 --- a/checks/cloud/aws/accessanalyzer/enable_access_analyzer_test.go +++ /dev/null @@ -1,75 +0,0 @@ -package accessanalyzer - -import ( - "testing" - - trivyTypes "github.com/aquasecurity/trivy/pkg/iac/types" - - "github.com/aquasecurity/trivy/pkg/iac/providers/aws/accessanalyzer" - - "github.com/aquasecurity/trivy/pkg/iac/state" - - "github.com/aquasecurity/trivy/pkg/iac/scan" - - "github.com/stretchr/testify/assert" -) - -func TestASCheckNoSecretsInUserData(t *testing.T) { - tests := []struct { - name string - input accessanalyzer.AccessAnalyzer - expected bool - }{ - { - name: "No analyzers enabled", - input: accessanalyzer.AccessAnalyzer{}, - expected: true, - }, - { - name: "Analyzer disabled", - input: accessanalyzer.AccessAnalyzer{ - Analyzers: []accessanalyzer.Analyzer{ - { - Metadata: trivyTypes.NewTestMetadata(), - ARN: trivyTypes.String("arn:aws:accessanalyzer:us-east-1:123456789012:analyzer/test", trivyTypes.NewTestMetadata()), - Name: trivyTypes.String("test", trivyTypes.NewTestMetadata()), - Active: trivyTypes.Bool(false, trivyTypes.NewTestMetadata()), - }, - }, - }, - expected: true, - }, - { - name: "Analyzer enabled", - input: accessanalyzer.AccessAnalyzer{ - Analyzers: []accessanalyzer.Analyzer{ - { - Metadata: trivyTypes.NewTestMetadata(), - ARN: trivyTypes.String("arn:aws:accessanalyzer:us-east-1:123456789012:analyzer/test", trivyTypes.NewTestMetadata()), - Name: trivyTypes.String("test", trivyTypes.NewTestMetadata()), - Active: trivyTypes.Bool(true, trivyTypes.NewTestMetadata()), - }, - }, - }, - expected: false, - }, - } - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - var testState state.State - testState.AWS.AccessAnalyzer = test.input - results := CheckEnableAccessAnalyzer.Evaluate(&testState) - var found bool - for _, result := range results { - if result.Status() == scan.StatusFailed && result.Rule().LongID() == CheckEnableAccessAnalyzer.LongID() { - found = true - } - } - if test.expected { - assert.True(t, found, "Rule should have been found") - } else { - assert.False(t, found, "Rule should not have been found") - } - }) - } -} diff --git a/checks/cloud/aws/accessanalyzer/enable_access_analyzer_test.rego b/checks/cloud/aws/accessanalyzer/enable_access_analyzer_test.rego new file mode 100644 index 00000000..1e37b2b2 --- /dev/null +++ b/checks/cloud/aws/accessanalyzer/enable_access_analyzer_test.rego @@ -0,0 +1,26 @@ +package builtin.aws.accessanalyzer.aws0175_test + +import rego.v1 + +import data.builtin.aws.accessanalyzer.aws0175 as check +import data.lib.test + +test_disallow_no_analyzers if { + r := check.deny with input as {"aws": {"accessanalyzer": {"analyzers": []}}} + test.assert_equal_message("Access Analyzer is not enabled.", r) +} + +test_disallow_analyzer_disabled if { + r := check.deny with input as {"aws": {"accessanalyzer": {"analyzers": [{"active": {"value": false}}]}}} + test.assert_equal_message("Access Analyzer is 
not enabled.", r) +} + +test_allow_one_of_analyzer_disabled if { + r := check.deny with input as {"aws": {"accessanalyzer": {"analyzers": [{"active": {"value": false}}, {"active": {"value": true}}]}}} + test.assert_empty(r) +} + +test_allow_analyzer_enabled if { + r := check.deny with input as {"aws": {"accessanalyzer": {"analyzers": [{"active": {"value": true}}]}}} + test.assert_empty(r) +} diff --git a/checks/cloud/aws/athena/enable_at_rest_encryption.go b/checks/cloud/aws/athena/enable_at_rest_encryption.go index 940db308..32d5d367 100755 --- a/checks/cloud/aws/athena/enable_at_rest_encryption.go +++ b/checks/cloud/aws/athena/enable_at_rest_encryption.go @@ -34,7 +34,8 @@ var CheckEnableAtRestEncryption = rules.Register( Links: cloudFormationEnableAtRestEncryptionLinks, RemediationMarkdown: cloudFormationEnableAtRestEncryptionRemediationMarkdown, }, - Severity: severity.High, + Severity: severity.High, + Deprecated: true, }, func(s *state.State) (results scan.Results) { for _, workgroup := range s.AWS.Athena.Workgroups { diff --git a/checks/cloud/aws/athena/enable_at_rest_encryption.rego b/checks/cloud/aws/athena/enable_at_rest_encryption.rego new file mode 100644 index 00000000..a2af976c --- /dev/null +++ b/checks/cloud/aws/athena/enable_at_rest_encryption.rego @@ -0,0 +1,53 @@ +# METADATA +# title: Athena databases and workgroup configurations are created unencrypted at rest by default, they should be encrypted +# description: | +# Athena databases and workspace result sets should be encrypted at rests. These databases and query sets are generally derived from data in S3 buckets and should have the same level of at rest protection. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/athena/latest/ug/encryption.html +# custom: +# id: AVD-AWS-0006 +# avd_id: AVD-AWS-0006 +# provider: aws +# service: athena +# severity: HIGH +# short_code: enable-at-rest-encryption +# recommended_action: Enable encryption at rest for Athena databases and workgroup configurations +# input: +# selector: +# - type: cloud +# subtypes: +# - service: athena +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/athena_workgroup#encryption_configuration +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/athena_database#encryption_configuration +# good_examples: checks/cloud/aws/athena/enable_at_rest_encryption.tf.go +# bad_examples: checks/cloud/aws/athena/enable_at_rest_encryption.tf.go +# cloudformation: +# good_examples: checks/cloud/aws/athena/enable_at_rest_encryption.cf.go +# bad_examples: checks/cloud/aws/athena/enable_at_rest_encryption.cf.go +package builtin.aws.athena.aws0006 + +import rego.v1 + +encryption_type_none := "" + +deny contains res if { + some workgroup in input.aws.athena.workgroups + is_encryption_type_none(workgroup.encryption) + res := result.new("Workgroup does not have encryption configured.", workgroup) +} + +deny contains res if { + some database in input.aws.athena.databases + is_encryption_type_none(database.encryption) + res := result.new("Database does not have encryption configured.", database) +} + +is_encryption_type_none(encryption) if { + encryption.type.value == encryption_type_none +} diff --git a/checks/cloud/aws/athena/enable_at_rest_encryption_test.go b/checks/cloud/aws/athena/enable_at_rest_encryption_test.go deleted file mode 100644 index 02127836..00000000 --- 
a/checks/cloud/aws/athena/enable_at_rest_encryption_test.go +++ /dev/null @@ -1,95 +0,0 @@ -package athena - -import ( - "testing" - - trivyTypes "github.com/aquasecurity/trivy/pkg/iac/types" - - "github.com/aquasecurity/trivy/pkg/iac/state" - - "github.com/aquasecurity/trivy/pkg/iac/providers/aws/athena" - "github.com/aquasecurity/trivy/pkg/iac/scan" - - "github.com/stretchr/testify/assert" -) - -func TestCheckEnableAtRestEncryption(t *testing.T) { - tests := []struct { - name string - input athena.Athena - expected bool - }{ - { - name: "AWS Athena database unencrypted", - input: athena.Athena{ - Databases: []athena.Database{ - { - Metadata: trivyTypes.NewTestMetadata(), - Encryption: athena.EncryptionConfiguration{ - Metadata: trivyTypes.NewTestMetadata(), - Type: trivyTypes.String(athena.EncryptionTypeNone, trivyTypes.NewTestMetadata()), - }, - }, - }, - }, - expected: true, - }, - { - name: "AWS Athena workgroup unencrypted", - input: athena.Athena{ - Workgroups: []athena.Workgroup{ - { - Metadata: trivyTypes.NewTestMetadata(), - Encryption: athena.EncryptionConfiguration{ - Metadata: trivyTypes.NewTestMetadata(), - Type: trivyTypes.String(athena.EncryptionTypeNone, trivyTypes.NewTestMetadata()), - }, - }, - }, - }, - expected: true, - }, - { - name: "AWS Athena database and workgroup encrypted", - input: athena.Athena{ - Databases: []athena.Database{ - { - Metadata: trivyTypes.NewTestMetadata(), - Encryption: athena.EncryptionConfiguration{ - Metadata: trivyTypes.NewTestMetadata(), - Type: trivyTypes.String(athena.EncryptionTypeSSEKMS, trivyTypes.NewTestMetadata()), - }, - }, - }, - Workgroups: []athena.Workgroup{ - { - Metadata: trivyTypes.NewTestMetadata(), - Encryption: athena.EncryptionConfiguration{ - Metadata: trivyTypes.NewTestMetadata(), - Type: trivyTypes.String(athena.EncryptionTypeSSEKMS, trivyTypes.NewTestMetadata()), - }, - }, - }, - }, - expected: false, - }, - } - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - var testState state.State - testState.AWS.Athena = test.input - results := CheckEnableAtRestEncryption.Evaluate(&testState) - var found bool - for _, result := range results { - if result.Status() == scan.StatusFailed && result.Rule().LongID() == CheckEnableAtRestEncryption.LongID() { - found = true - } - } - if test.expected { - assert.True(t, found, "Rule should have been found") - } else { - assert.False(t, found, "Rule should not have been found") - } - }) - } -} diff --git a/checks/cloud/aws/athena/enable_at_rest_encryption_test.rego b/checks/cloud/aws/athena/enable_at_rest_encryption_test.rego new file mode 100644 index 00000000..4272ac39 --- /dev/null +++ b/checks/cloud/aws/athena/enable_at_rest_encryption_test.rego @@ -0,0 +1,26 @@ +package builtin.aws.athena.aws0006_test + +import rego.v1 + +import data.builtin.aws.athena.aws0006 as check +import data.lib.test + +test_disallow_database_unencrypted if { + inp := {"aws": {"athena": {"databases": [{"encryption": {"type": {"value": ""}}}]}}} + test.assert_equal_message("Database does not have encryption configured.", check.deny) with input as inp +} + +test_disallow_workgroup_unencrypted if { + inp := {"aws": {"athena": {"workgroups": [{"encryption": {"type": {"value": ""}}}]}}} + test.assert_equal_message("Workgroup does not have encryption configured.", check.deny) with input as inp +} + +test_allow_database_encrypted if { + inp := {"aws": {"athena": {"databases": [{"encryption": {"type": {"value": "SSE_S3"}}}]}}} + test.assert_empty(check.deny) with input as inp +} + 
+test_allow_workgroup_encrypted if { + inp := {"aws": {"athena": {"workgroups": [{"encryption": {"type": {"value": "SSE_S3"}}}]}}} + test.assert_empty(check.deny) with input as inp +} diff --git a/checks/cloud/aws/athena/no_encryption_override.go b/checks/cloud/aws/athena/no_encryption_override.go index 54d94d01..ba40c161 100755 --- a/checks/cloud/aws/athena/no_encryption_override.go +++ b/checks/cloud/aws/athena/no_encryption_override.go @@ -33,7 +33,8 @@ var CheckNoEncryptionOverride = rules.Register( Links: cloudFormationNoEncryptionOverrideLinks, RemediationMarkdown: cloudFormationNoEncryptionOverrideRemediationMarkdown, }, - Severity: severity.High, + Severity: severity.High, + Deprecated: true, }, func(s *state.State) (results scan.Results) { for _, workgroup := range s.AWS.Athena.Workgroups { diff --git a/checks/cloud/aws/athena/no_encryption_override.rego b/checks/cloud/aws/athena/no_encryption_override.rego new file mode 100644 index 00000000..8e43723c --- /dev/null +++ b/checks/cloud/aws/athena/no_encryption_override.rego @@ -0,0 +1,40 @@ +# METADATA +# title: Athena workgroups should enforce configuration to prevent client disabling encryption +# description: | +# Athena workgroup configuration should be enforced to prevent client side changes to disable encryption settings. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/athena/latest/ug/manage-queries-control-costs-with-workgroups.html +# custom: +# id: AVD-AWS-0007 +# avd_id: AVD-AWS-0007 +# provider: aws +# service: athena +# severity: HIGH +# short_code: no-encryption-override +# recommended_action: Enforce the configuration to prevent client overrides +# input: +# selector: +# - type: cloud +# subtypes: +# - service: athena +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/athena_workgroup#configuration +# good_examples: checks/cloud/aws/athena/no_encryption_override.tf.go +# bad_examples: checks/cloud/aws/athena/no_encryption_override.tf.go +# cloudformation: +# good_examples: checks/cloud/aws/athena/no_encryption_override.cf.go +# bad_examples: checks/cloud/aws/athena/no_encryption_override.cf.go +package builtin.aws.athena.aws0007 + +import rego.v1 + +deny contains res if { + some workgroup in input.aws.athena.workgroups + not workgroup.enforceconfiguration.value + res := result.new("The workgroup configuration is not enforced.", workgroup.enforceconfiguration) +} diff --git a/checks/cloud/aws/athena/no_encryption_override_test.go b/checks/cloud/aws/athena/no_encryption_override_test.go deleted file mode 100644 index 55ec5241..00000000 --- a/checks/cloud/aws/athena/no_encryption_override_test.go +++ /dev/null @@ -1,65 +0,0 @@ -package athena - -import ( - "testing" - - trivyTypes "github.com/aquasecurity/trivy/pkg/iac/types" - - "github.com/aquasecurity/trivy/pkg/iac/state" - - "github.com/aquasecurity/trivy/pkg/iac/providers/aws/athena" - "github.com/aquasecurity/trivy/pkg/iac/scan" - - "github.com/stretchr/testify/assert" -) - -func TestCheckNoEncryptionOverride(t *testing.T) { - tests := []struct { - name string - input athena.Athena - expected bool - }{ - { - name: "AWS Athena workgroup doesn't enforce configuration", - input: athena.Athena{ - Workgroups: []athena.Workgroup{ - { - Metadata: trivyTypes.NewTestMetadata(), - EnforceConfiguration: trivyTypes.Bool(false, trivyTypes.NewTestMetadata()), - }, - }, - }, - expected: true, - }, - { - name: "AWS Athena workgroup enforces 
configuration", - input: athena.Athena{ - Workgroups: []athena.Workgroup{ - { - Metadata: trivyTypes.NewTestMetadata(), - EnforceConfiguration: trivyTypes.Bool(true, trivyTypes.NewTestMetadata()), - }, - }, - }, - expected: false, - }, - } - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - var testState state.State - testState.AWS.Athena = test.input - results := CheckNoEncryptionOverride.Evaluate(&testState) - var found bool - for _, result := range results { - if result.Status() == scan.StatusFailed && result.Rule().LongID() == CheckNoEncryptionOverride.LongID() { - found = true - } - } - if test.expected { - assert.True(t, found, "Rule should have been found") - } else { - assert.False(t, found, "Rule should not have been found") - } - }) - } -} diff --git a/checks/cloud/aws/athena/no_encryption_override_test.rego b/checks/cloud/aws/athena/no_encryption_override_test.rego new file mode 100644 index 00000000..55c8140d --- /dev/null +++ b/checks/cloud/aws/athena/no_encryption_override_test.rego @@ -0,0 +1,16 @@ +package builtin.aws.athena.aws0007_test + +import rego.v1 + +import data.builtin.aws.athena.aws0007 as check +import data.lib.test + +test_allow_workgroup_enforce_configuration if { + inp := {"aws": {"athena": {"workgroups": [{"enforceconfiguration": {"value": true}}]}}} + test.assert_empty(check.deny) with input as inp +} + +test_disallow_workgroup_no_enforce_configuration if { + inp := {"aws": {"athena": {"workgroups": [{"enforceconfiguration": {"value": false}}]}}} + test.assert_equal_message("The workgroup configuration is not enforced.", check.deny) with input as inp +} diff --git a/checks/cloud/aws/cloudfront/enable_waf.go b/checks/cloud/aws/cloudfront/enable_waf.go index 38e94b0e..e28ec9e0 100755 --- a/checks/cloud/aws/cloudfront/enable_waf.go +++ b/checks/cloud/aws/cloudfront/enable_waf.go @@ -33,7 +33,8 @@ var CheckEnableWaf = rules.Register( Links: cloudFormationEnableWafLinks, RemediationMarkdown: cloudFormationEnableWafRemediationMarkdown, }, - Severity: severity.High, + Severity: severity.High, + Deprecated: true, }, func(s *state.State) (results scan.Results) { for _, dist := range s.AWS.Cloudfront.Distributions { diff --git a/checks/cloud/aws/cloudtrail/enable_all_regions.go b/checks/cloud/aws/cloudtrail/enable_all_regions.go index 35cf183b..153ca0cf 100755 --- a/checks/cloud/aws/cloudtrail/enable_all_regions.go +++ b/checks/cloud/aws/cloudtrail/enable_all_regions.go @@ -38,7 +38,8 @@ var CheckEnableAllRegions = rules.Register( Links: cloudFormationEnableAllRegionsLinks, RemediationMarkdown: cloudFormationEnableAllRegionsRemediationMarkdown, }, - Severity: severity.Medium, + Severity: severity.Medium, + Deprecated: true, }, func(s *state.State) (results scan.Results) { for _, trail := range s.AWS.CloudTrail.Trails { diff --git a/checks/cloud/aws/cloudtrail/enable_all_regions.rego b/checks/cloud/aws/cloudtrail/enable_all_regions.rego new file mode 100644 index 00000000..6c3dfd95 --- /dev/null +++ b/checks/cloud/aws/cloudtrail/enable_all_regions.rego @@ -0,0 +1,43 @@ +# METADATA +# title: Cloudtrail should be enabled in all regions regardless of where your AWS resources are generally homed +# description: | +# When creating Cloudtrail in the AWS Management Console the trail is configured by default to be multi-region, this isn't the case with the Terraform resource. Cloudtrail should cover the full AWS account to ensure you can track changes in regions you are not actively operting in. 
+# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/awscloudtrail/latest/userguide/receive-cloudtrail-log-files-from-multiple-regions.html +# custom: +# id: AVD-AWS-0014 +# avd_id: AVD-AWS-0014 +# provider: aws +# service: cloudtrail +# severity: MEDIUM +# short_code: enable-all-regions +# recommended_action: Enable Cloudtrail in all regions +# frameworks: +# cis-aws-1.2: +# - "2.5" +# input: +# selector: +# - type: cloud +# subtypes: +# - service: cloudtrail +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudtrail#is_multi_region_trail +# good_examples: checks/cloud/aws/cloudtrail/enable_all_regions.tf.go +# bad_examples: checks/cloud/aws/cloudtrail/enable_all_regions.tf.go +# cloudformation: +# good_examples: checks/cloud/aws/cloudtrail/enable_all_regions.cf.go +# bad_examples: checks/cloud/aws/cloudtrail/enable_all_regions.cf.go +package builtin.aws.cloudtrail.aws0014 + +import rego.v1 + +deny contains res if { + some trail in input.aws.cloudtrail.trails + not trail.ismultiregion.value + res := result.new("Trail is not enabled across all regions.", trail.ismultiregion) +} diff --git a/checks/cloud/aws/cloudtrail/enable_all_regions_test.go b/checks/cloud/aws/cloudtrail/enable_all_regions_test.go deleted file mode 100644 index 4ca1c625..00000000 --- a/checks/cloud/aws/cloudtrail/enable_all_regions_test.go +++ /dev/null @@ -1,65 +0,0 @@ -package cloudtrail - -import ( - "testing" - - trivyTypes "github.com/aquasecurity/trivy/pkg/iac/types" - - "github.com/aquasecurity/trivy/pkg/iac/state" - - "github.com/aquasecurity/trivy/pkg/iac/providers/aws/cloudtrail" - "github.com/aquasecurity/trivy/pkg/iac/scan" - - "github.com/stretchr/testify/assert" -) - -func TestCheckEnableAllRegions(t *testing.T) { - tests := []struct { - name string - input cloudtrail.CloudTrail - expected bool - }{ - { - name: "AWS CloudTrail not enabled across all regions", - input: cloudtrail.CloudTrail{ - Trails: []cloudtrail.Trail{ - { - Metadata: trivyTypes.NewTestMetadata(), - IsMultiRegion: trivyTypes.Bool(false, trivyTypes.NewTestMetadata()), - }, - }, - }, - expected: true, - }, - { - name: "AWS CloudTrail enabled across all regions", - input: cloudtrail.CloudTrail{ - Trails: []cloudtrail.Trail{ - { - Metadata: trivyTypes.NewTestMetadata(), - IsMultiRegion: trivyTypes.Bool(true, trivyTypes.NewTestMetadata()), - }, - }, - }, - expected: false, - }, - } - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - var testState state.State - testState.AWS.CloudTrail = test.input - results := CheckEnableAllRegions.Evaluate(&testState) - var found bool - for _, result := range results { - if result.Status() == scan.StatusFailed && result.Rule().LongID() == CheckEnableAllRegions.LongID() { - found = true - } - } - if test.expected { - assert.True(t, found, "Rule should have been found") - } else { - assert.False(t, found, "Rule should not have been found") - } - }) - } -} diff --git a/checks/cloud/aws/cloudtrail/enable_all_regions_test.rego b/checks/cloud/aws/cloudtrail/enable_all_regions_test.rego new file mode 100644 index 00000000..c004db30 --- /dev/null +++ b/checks/cloud/aws/cloudtrail/enable_all_regions_test.rego @@ -0,0 +1,16 @@ +package builtin.aws.cloudtrail.aws0014_test + +import rego.v1 + +import data.builtin.aws.cloudtrail.aws0014 as check +import data.lib.test + +test_disallow_cloudtrail_without_all_regions if { + r := check.deny with input as {"aws": 
{"cloudtrail": {"trails": [{"ismultiregion": {"value": false}}]}}} + test.assert_equal_message("CloudTrail is not enabled across all regions.", r) +} + +test_allow_cloudtrail_with_all_regions if { + r := check.deny with input as {"aws": {"cloudtrail": {"trails": [{"ismultiregion": {"value": true}}]}}} + test.assert_empty(r) +} diff --git a/checks/cloud/aws/cloudtrail/enable_log_validation.go b/checks/cloud/aws/cloudtrail/enable_log_validation.go index 39ae7313..1afa5ecb 100755 --- a/checks/cloud/aws/cloudtrail/enable_log_validation.go +++ b/checks/cloud/aws/cloudtrail/enable_log_validation.go @@ -33,7 +33,8 @@ var CheckEnableLogValidation = rules.Register( Links: cloudFormationEnableLogValidationLinks, RemediationMarkdown: cloudFormationEnableLogValidationRemediationMarkdown, }, - Severity: severity.High, + Severity: severity.High, + Deprecated: true, }, func(s *state.State) (results scan.Results) { for _, trail := range s.AWS.CloudTrail.Trails { diff --git a/checks/cloud/aws/cloudtrail/enable_log_validation.rego b/checks/cloud/aws/cloudtrail/enable_log_validation.rego new file mode 100644 index 00000000..a4e65997 --- /dev/null +++ b/checks/cloud/aws/cloudtrail/enable_log_validation.rego @@ -0,0 +1,40 @@ +# METADATA +# title: Cloudtrail log validation should be enabled to prevent tampering of log data +# description: | +# Log validation should be activated on Cloudtrail logs to prevent the tampering of the underlying data in the S3 bucket. It is feasible that a rogue actor compromising an AWS account might want to modify the log data to remove trace of their actions. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrail-log-file-validation-intro.html +# custom: +# id: AVD-AWS-0016 +# avd_id: AVD-AWS-0016 +# provider: aws +# service: cloudtrail +# severity: HIGH +# short_code: enable-log-validation +# recommended_action: Turn on log validation for Cloudtrail +# input: +# selector: +# - type: cloud +# subtypes: +# - service: cloudtrail +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudtrail#enable_log_file_validation +# good_examples: checks/cloud/aws/cloudtrail/enable_log_validation.tf.go +# bad_examples: checks/cloud/aws/cloudtrail/enable_log_validation.tf.go +# cloudformation: +# good_examples: checks/cloud/aws/cloudtrail/enable_log_validation.cf.go +# bad_examples: checks/cloud/aws/cloudtrail/enable_log_validation.cf.go +package builtin.aws.cloudtrail.aws0016 + +import rego.v1 + +deny contains res if { + some trail in input.aws.cloudtrail.trails + not trail.enablelogfilevalidation.value + res := result.new("Trail does not have log validation enabled.", trail.enablelogfilevalidation) +} diff --git a/checks/cloud/aws/cloudtrail/enable_log_validation_test.go b/checks/cloud/aws/cloudtrail/enable_log_validation_test.go deleted file mode 100644 index bfe1d465..00000000 --- a/checks/cloud/aws/cloudtrail/enable_log_validation_test.go +++ /dev/null @@ -1,65 +0,0 @@ -package cloudtrail - -import ( - "testing" - - trivyTypes "github.com/aquasecurity/trivy/pkg/iac/types" - - "github.com/aquasecurity/trivy/pkg/iac/state" - - "github.com/aquasecurity/trivy/pkg/iac/providers/aws/cloudtrail" - "github.com/aquasecurity/trivy/pkg/iac/scan" - - "github.com/stretchr/testify/assert" -) - -func TestCheckEnableLogValidation(t *testing.T) { - tests := []struct { - name string - input cloudtrail.CloudTrail - expected bool - }{ - { - 
name: "AWS CloudTrail without logfile validation", - input: cloudtrail.CloudTrail{ - Trails: []cloudtrail.Trail{ - { - Metadata: trivyTypes.NewTestMetadata(), - EnableLogFileValidation: trivyTypes.Bool(false, trivyTypes.NewTestMetadata()), - }, - }, - }, - expected: true, - }, - { - name: "AWS CloudTrail with logfile validation enabled", - input: cloudtrail.CloudTrail{ - Trails: []cloudtrail.Trail{ - { - Metadata: trivyTypes.NewTestMetadata(), - EnableLogFileValidation: trivyTypes.Bool(true, trivyTypes.NewTestMetadata()), - }, - }, - }, - expected: false, - }, - } - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - var testState state.State - testState.AWS.CloudTrail = test.input - results := CheckEnableLogValidation.Evaluate(&testState) - var found bool - for _, result := range results { - if result.Status() == scan.StatusFailed && result.Rule().LongID() == CheckEnableLogValidation.LongID() { - found = true - } - } - if test.expected { - assert.True(t, found, "Rule should have been found") - } else { - assert.False(t, found, "Rule should not have been found") - } - }) - } -} diff --git a/checks/cloud/aws/cloudtrail/enable_log_validation_test.rego b/checks/cloud/aws/cloudtrail/enable_log_validation_test.rego new file mode 100644 index 00000000..7436046e --- /dev/null +++ b/checks/cloud/aws/cloudtrail/enable_log_validation_test.rego @@ -0,0 +1,16 @@ +package builtin.aws.cloudtrail.aws0016_test + +import rego.v1 + +import data.builtin.aws.cloudtrail.aws0016 as check +import data.lib.test + +test_allow_trail_with_log_validation if { + inp := {"aws": {"cloudtrail": {"trails": [{"enablelogfilevalidation": {"value": true}}]}}} + test.assert_empty(check.deny) with input as inp +} + +test_disallow_trail_without_log_validation if { + inp := {"aws": {"cloudtrail": {"trails": [{"enablelogfilevalidation": {"value": false}}]}}} + test.assert_equal_message("Trail does not have log validation enabled.", check.deny) with input as inp +} diff --git a/checks/cloud/aws/cloudtrail/encryption_customer_key.rego b/checks/cloud/aws/cloudtrail/encryption_customer_key.rego new file mode 100644 index 00000000..e38ef37a --- /dev/null +++ b/checks/cloud/aws/cloudtrail/encryption_customer_key.rego @@ -0,0 +1,43 @@ +# METADATA +# title: CloudTrail should use Customer managed keys to encrypt the logs +# description: | +# Using Customer managed keys provides comprehensive control over cryptographic keys, enabling management of policies, permissions, and rotation, thus enhancing security and compliance measures for sensitive data and systems. 
+# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/awscloudtrail/latest/userguide/encrypting-cloudtrail-log-files-with-aws-kms.html +# - https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#key-mgmt +# custom: +# id: AVD-AWS-0015 +# avd_id: AVD-AWS-0015 +# provider: aws +# service: cloudtrail +# severity: HIGH +# short_code: encryption-customer-managed-key +# recommended_action: Use Customer managed key +# input: +# selector: +# - type: cloud +# subtypes: +# - service: cloudtrail +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudtrail#kms_key_id +# good_examples: checks/cloud/aws/cloudtrail/encryption_customer_key.tf.go +# bad_examples: checks/cloud/aws/cloudtrail/encryption_customer_key.tf.go +# cloudformation: +# links: +# - https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudtrail-trail.html#cfn-cloudtrail-trail-kmskeyid +# good_examples: checks/cloud/aws/cloudtrail/encryption_customer_key.cf.go +# bad_examples: checks/cloud/aws/cloudtrail/encryption_customer_key.cf.go +package builtin.aws.cloudtrail.aws0015 + +import rego.v1 + +deny contains res if { + some trail in input.aws.cloudtrail.trails + trail.kmskeyid.value == "" + res := result.new("CloudTrail does not use a customer managed key to encrypt the logs.", trail.kmskeyid) +} diff --git a/checks/cloud/aws/cloudtrail/encryption_customer_key_test.go b/checks/cloud/aws/cloudtrail/encryption_customer_key_test.go deleted file mode 100644 index b0d3f61b..00000000 --- a/checks/cloud/aws/cloudtrail/encryption_customer_key_test.go +++ /dev/null @@ -1,63 +0,0 @@ -package cloudtrail - -import ( - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/aquasecurity/trivy/pkg/iac/providers/aws/cloudtrail" - "github.com/aquasecurity/trivy/pkg/iac/scan" - "github.com/aquasecurity/trivy/pkg/iac/state" - trivyTypes "github.com/aquasecurity/trivy/pkg/iac/types" -) - -func TestEncryptionCustomerManagedKey(t *testing.T) { - tests := []struct { - name string - input cloudtrail.CloudTrail - expected bool - }{ - { - name: "AWS CloudTrail without CMK", - input: cloudtrail.CloudTrail{ - Trails: []cloudtrail.Trail{ - { - Metadata: trivyTypes.NewTestMetadata(), - KMSKeyID: trivyTypes.String("", trivyTypes.NewTestMetadata()), - }, - }, - }, - expected: true, - }, - { - name: "AWS CloudTrail with CMK", - input: cloudtrail.CloudTrail{ - Trails: []cloudtrail.Trail{ - { - Metadata: trivyTypes.NewTestMetadata(), - KMSKeyID: trivyTypes.String("some-kms-key", trivyTypes.NewTestMetadata()), - }, - }, - }, - expected: false, - }, - } - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - var testState state.State - testState.AWS.CloudTrail = test.input - results := EncryptionCustomerManagedKey.Evaluate(&testState) - var found bool - for _, result := range results { - if result.Status() == scan.StatusFailed && result.Rule().LongID() == EncryptionCustomerManagedKey.LongID() { - found = true - } - } - if test.expected { - assert.True(t, found, "Rule should have been found") - } else { - assert.False(t, found, "Rule should not have been found") - } - }) - } -} diff --git a/checks/cloud/aws/cloudtrail/encryption_customer_key_test.rego b/checks/cloud/aws/cloudtrail/encryption_customer_key_test.rego new file mode 100644 index 00000000..3005c9ba --- /dev/null +++ b/checks/cloud/aws/cloudtrail/encryption_customer_key_test.rego @@ -0,0 +1,16 @@ +package 
builtin.aws.cloudtrail.aws0015_test + +import rego.v1 + +import data.builtin.aws.cloudtrail.aws0015 as check +import data.lib.test + +test_allow_trail_with_cmk if { + inp := {"aws": {"cloudtrail": {"trails": [{"kmskeyid": {"value": "key-id"}}]}}} + test.assert_empty(check.deny) with input as inp +} + +test_disallow_trail_without_cmk if { + inp := {"aws": {"cloudtrail": {"trails": [{"kmskeyid": {"value": ""}}]}}} + test.assert_equal_message("CloudTrail does not use a customer managed key to encrypt the logs.", check.deny) with input as inp +} diff --git a/checks/cloud/aws/cloudtrail/ensure_cloudwatch_integration.go b/checks/cloud/aws/cloudtrail/ensure_cloudwatch_integration.go index 4f151796..969baa67 100755 --- a/checks/cloud/aws/cloudtrail/ensure_cloudwatch_integration.go +++ b/checks/cloud/aws/cloudtrail/ensure_cloudwatch_integration.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/state" ) -var checkEnsureCloudwatchIntegration = rules.Register( +var CheckEnsureCloudwatchIntegration = rules.Register( scan.Rule{ AVDID: "AVD-AWS-0162", Provider: providers.AWSProvider, @@ -45,7 +45,8 @@ For a trail that is enabled in all Regions in an account, CloudTrail sends log f Links: cloudFormationEnsureCloudwatchIntegrationLinks, RemediationMarkdown: cloudFormationEnsureCloudwatchIntegrationRemediationMarkdown, }, - Severity: severity.Low, + Severity: severity.Low, + Deprecated: true, }, func(s *state.State) (results scan.Results) { for _, trail := range s.AWS.CloudTrail.Trails { diff --git a/checks/cloud/aws/cloudtrail/ensure_cloudwatch_integration.rego b/checks/cloud/aws/cloudtrail/ensure_cloudwatch_integration.rego new file mode 100644 index 00000000..e6aadf7a --- /dev/null +++ b/checks/cloud/aws/cloudtrail/ensure_cloudwatch_integration.rego @@ -0,0 +1,47 @@ +# METADATA +# title: CloudTrail logs should be stored in S3 and also sent to CloudWatch Logs +# description: | +# CloudTrail is a web service that records AWS API calls made in a given account. The recorded information includes the identity of the API caller, the time of the API call, the source IP address of the API caller, the request parameters, and the response elements returned by the AWS service. +# CloudTrail uses Amazon S3 for log file storage and delivery, so log files are stored durably. In addition to capturing CloudTrail logs in a specified Amazon S3 bucket for long-term analysis, you can perform real-time analysis by configuring CloudTrail to send logs to CloudWatch Logs. +# For a trail that is enabled in all Regions in an account, CloudTrail sends log files from all those Regions to a CloudWatch Logs log group. 
+# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/awscloudtrail/latest/userguide/send-cloudtrail-events-to-cloudwatch-logs.html#send-cloudtrail-events-to-cloudwatch-logs-console +# custom: +# id: AVD-AWS-0162 +# avd_id: AVD-AWS-0162 +# provider: aws +# service: cloudtrail +# severity: LOW +# short_code: ensure-cloudwatch-integration +# recommended_action: Enable logging to CloudWatch +# frameworks: +# cis-aws-1.2: +# - "2.4" +# cis-aws-1.4: +# - "3.4" +# input: +# selector: +# - type: cloud +# subtypes: +# - service: cloudtrail +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudtrail +# good_examples: checks/cloud/aws/cloudtrail/ensure_cloudwatch_integration.tf.go +# bad_examples: checks/cloud/aws/cloudtrail/ensure_cloudwatch_integration.tf.go +# cloudformation: +# good_examples: checks/cloud/aws/cloudtrail/ensure_cloudwatch_integration.cf.go +# bad_examples: checks/cloud/aws/cloudtrail/ensure_cloudwatch_integration.cf.go +package builtin.aws.cloudtrail.aws0162 + +import rego.v1 + +deny contains res if { + some trail in input.aws.cloudtrail.trails + trail.cloudwatchlogsloggrouparn.value == "" + res := result.new("Trail does not have CloudWatch logging configured", trail) +} diff --git a/checks/cloud/aws/cloudtrail/ensure_cloudwatch_integration_test.go b/checks/cloud/aws/cloudtrail/ensure_cloudwatch_integration_test.go deleted file mode 100644 index 3700afcb..00000000 --- a/checks/cloud/aws/cloudtrail/ensure_cloudwatch_integration_test.go +++ /dev/null @@ -1,64 +0,0 @@ -package cloudtrail - -import ( - "testing" - - trivyTypes "github.com/aquasecurity/trivy/pkg/iac/types" - - "github.com/aquasecurity/trivy/pkg/iac/scan" - "github.com/aquasecurity/trivy/pkg/iac/state" - - "github.com/aquasecurity/trivy/pkg/iac/providers/aws/cloudtrail" - "github.com/stretchr/testify/assert" -) - -func TestCheckEnsureCloudwatchIntegration(t *testing.T) { - tests := []struct { - name string - input cloudtrail.CloudTrail - expected bool - }{ - { - name: "Trail has cloudwatch configured", - input: cloudtrail.CloudTrail{ - Trails: []cloudtrail.Trail{ - { - Metadata: trivyTypes.NewTestMetadata(), - CloudWatchLogsLogGroupArn: trivyTypes.String("arn:aws:logs:us-east-1:123456789012:log-group:my-log-group", trivyTypes.NewTestMetadata()), - }, - }, - }, - expected: false, - }, - { - name: "Trail does not have cloudwatch configured", - input: cloudtrail.CloudTrail{ - Trails: []cloudtrail.Trail{ - { - Metadata: trivyTypes.NewTestMetadata(), - CloudWatchLogsLogGroupArn: trivyTypes.String("", trivyTypes.NewTestMetadata()), - }, - }, - }, - expected: true, - }, - } - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - var testState state.State - testState.AWS.CloudTrail = test.input - results := checkEnsureCloudwatchIntegration.Evaluate(&testState) - var found bool - for _, result := range results { - if result.Status() == scan.StatusFailed && result.Rule().LongID() == checkEnsureCloudwatchIntegration.LongID() { - found = true - } - } - if test.expected { - assert.True(t, found, "Rule should have been found") - } else { - assert.False(t, found, "Rule should not have been found") - } - }) - } -} diff --git a/checks/cloud/aws/cloudtrail/ensure_cloudwatch_integration_test.rego b/checks/cloud/aws/cloudtrail/ensure_cloudwatch_integration_test.rego new file mode 100644 index 00000000..c04d79ed --- /dev/null +++ 
b/checks/cloud/aws/cloudtrail/ensure_cloudwatch_integration_test.rego @@ -0,0 +1,16 @@ +package builtin.aws.cloudtrail.aws0162_test + +import rego.v1 + +import data.builtin.aws.cloudtrail.aws0162 as check +import data.lib.test + +test_allow_cloudwatch_integration if { + inp := {"aws": {"cloudtrail": {"trails": [{"cloudwatchlogsloggrouparn": {"value": "log-group-arn"}}]}}} + test.assert_empty(check.deny) with input as inp +} + +test_disallow_without_cloudwatch_integration if { + inp := {"aws": {"cloudtrail": {"trails": [{"cloudwatchlogsloggrouparn": {"value": ""}}]}}} + test.assert_equal_message("Trail does not have CloudWatch logging configured", check.deny) with input as inp +} diff --git a/checks/cloud/aws/cloudtrail/no_public_log_access.go b/checks/cloud/aws/cloudtrail/no_public_log_access.go index 0b19e1f4..ac6cedd9 100755 --- a/checks/cloud/aws/cloudtrail/no_public_log_access.go +++ b/checks/cloud/aws/cloudtrail/no_public_log_access.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/state" ) -var checkNoPublicLogAccess = rules.Register( +var CheckNoPublicLogAccess = rules.Register( scan.Rule{ AVDID: "AVD-AWS-0161", Provider: providers.AWSProvider, @@ -41,7 +41,8 @@ CloudTrail logs a record of every API call made in your account. These log files Links: cloudFormationNoPublicLogAccessLinks, RemediationMarkdown: cloudFormationNoPublicLogAccessRemediationMarkdown, }, - Severity: severity.Critical, + Severity: severity.Critical, + Deprecated: true, }, func(s *state.State) (results scan.Results) { for _, trail := range s.AWS.CloudTrail.Trails { diff --git a/checks/cloud/aws/cloudtrail/no_public_log_access.rego b/checks/cloud/aws/cloudtrail/no_public_log_access.rego new file mode 100644 index 00000000..16fffa0f --- /dev/null +++ b/checks/cloud/aws/cloudtrail/no_public_log_access.rego @@ -0,0 +1,52 @@ +# METADATA +# title: The S3 Bucket backing Cloudtrail should be private +# description: | +# CloudTrail logs a record of every API call made in your account. These log files are stored in an S3 bucket. CIS recommends that the S3 bucket policy, or access control list (ACL), applied to the S3 bucket that CloudTrail logs to prevents public access to the CloudTrail logs. Allowing public access to CloudTrail log content might aid an adversary in identifying weaknesses in the affected account's use or configuration. 
+# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/AmazonS3/latest/userguide/configuring-block-public-access-bucket.html +# custom: +# id: AVD-AWS-0161 +# avd_id: AVD-AWS-0161 +# provider: aws +# service: cloudtrail +# severity: CRITICAL +# short_code: no-public-log-access +# recommended_action: Restrict public access to the S3 bucket +# frameworks: +# cis-aws-1.4: +# - "3.3" +# cis-aws-1.2: +# - "2.3" +# input: +# selector: +# - type: cloud +# subtypes: +# - service: cloudtrail +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudtrail#is_multi_region_trail +# good_examples: checks/cloud/aws/cloudtrail/no_public_log_access.tf.go +# bad_examples: checks/cloud/aws/cloudtrail/no_public_log_access.tf.go +# cloudformation: +# good_examples: checks/cloud/aws/cloudtrail/no_public_log_access.cf.go +# bad_examples: checks/cloud/aws/cloudtrail/no_public_log_access.cf.go +package builtin.aws.cloudtrail.aws0161 + +import rego.v1 + +import data.lib.s3 + +deny contains res if { + some trail in input.aws.cloudtrail.trails + trail.bucketname.value != "" + + some bucket in input.aws.s3.buckets + bucket.name.value == trail.bucketname.value + + s3.bucket_has_public_access(bucket) + res := result.new("Trail S3 bucket is publicly exposed", bucket) +} diff --git a/checks/cloud/aws/cloudtrail/no_public_log_access_test.go b/checks/cloud/aws/cloudtrail/no_public_log_access_test.go deleted file mode 100644 index f5db0160..00000000 --- a/checks/cloud/aws/cloudtrail/no_public_log_access_test.go +++ /dev/null @@ -1,86 +0,0 @@ -package cloudtrail - -import ( - "testing" - - trivyTypes "github.com/aquasecurity/trivy/pkg/iac/types" - - "github.com/aquasecurity/trivy/pkg/iac/state" - - "github.com/aquasecurity/trivy/pkg/iac/providers/aws/cloudtrail" - "github.com/aquasecurity/trivy/pkg/iac/providers/aws/s3" - "github.com/aquasecurity/trivy/pkg/iac/scan" - - "github.com/stretchr/testify/assert" -) - -func TestCheckNoPublicLogAccess(t *testing.T) { - tests := []struct { - name string - inputCT cloudtrail.CloudTrail - inputS3 s3.S3 - expected bool - }{ - { - name: "Trail has bucket with no public access", - inputCT: cloudtrail.CloudTrail{ - Trails: []cloudtrail.Trail{ - { - Metadata: trivyTypes.NewTestMetadata(), - BucketName: trivyTypes.String("my-bucket", trivyTypes.NewTestMetadata()), - }, - }, - }, - inputS3: s3.S3{ - Buckets: []s3.Bucket{ - { - Metadata: trivyTypes.NewTestMetadata(), - Name: trivyTypes.String("my-bucket", trivyTypes.NewTestMetadata()), - ACL: trivyTypes.String("private", trivyTypes.NewTestMetadata()), - }, - }, - }, - expected: false, - }, - { - name: "Trail has bucket with public access", - inputCT: cloudtrail.CloudTrail{ - Trails: []cloudtrail.Trail{ - { - Metadata: trivyTypes.NewTestMetadata(), - BucketName: trivyTypes.String("my-bucket", trivyTypes.NewTestMetadata()), - }, - }, - }, - inputS3: s3.S3{ - Buckets: []s3.Bucket{ - { - Metadata: trivyTypes.NewTestMetadata(), - Name: trivyTypes.String("my-bucket", trivyTypes.NewTestMetadata()), - ACL: trivyTypes.String("public-read", trivyTypes.NewTestMetadata()), - }, - }, - }, - expected: true, - }, - } - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - var testState state.State - testState.AWS.CloudTrail = test.inputCT - testState.AWS.S3 = test.inputS3 - results := checkNoPublicLogAccess.Evaluate(&testState) - var found bool - for _, result := range results { - if result.Status() == 
scan.StatusFailed && result.Rule().LongID() == checkNoPublicLogAccess.LongID() { - found = true - } - } - if test.expected { - assert.True(t, found, "Rule should have been found") - } else { - assert.False(t, found, "Rule should not have been found") - } - }) - } -} diff --git a/checks/cloud/aws/cloudtrail/no_public_log_access_test.rego b/checks/cloud/aws/cloudtrail/no_public_log_access_test.rego new file mode 100644 index 00000000..4e76f6d3 --- /dev/null +++ b/checks/cloud/aws/cloudtrail/no_public_log_access_test.rego @@ -0,0 +1,24 @@ +package builtin.aws.cloudtrail.aws0161_test + +import rego.v1 + +import data.builtin.aws.cloudtrail.aws0161 as check +import data.lib.test + +test_allow_bucket_without_public_access if { + inp := {"aws": { + "cloudtrail": {"trails": [{"bucketname": {"value": "bucket_name"}}]}, + "s3": {"buckets": [{"name": {"value": "bucket_name"}, "acl": {"value": "private"}}]}, + }} + test.assert_empty(check.deny) with input as inp +} + +# TODO: count should be 2 +test_disallow_bucket_with_public_access if { + inp := {"aws": { + "cloudtrail": {"trails": [{"bucketname": {"value": "bucket_name"}}]}, + "s3": {"buckets": [{"name": {"value": "bucket_name"}, "acl": {"value": "public-read"}}]}, + }} + + test.assert_equal_message("Bucket has public access", check.deny) with input as inp +} diff --git a/checks/cloud/aws/cloudtrail/require_bucket_access_logging.go b/checks/cloud/aws/cloudtrail/require_bucket_access_logging.go index be4e6b04..e181f7b7 100755 --- a/checks/cloud/aws/cloudtrail/require_bucket_access_logging.go +++ b/checks/cloud/aws/cloudtrail/require_bucket_access_logging.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/state" ) -var checkBucketAccessLoggingRequired = rules.Register( +var CheckBucketAccessLoggingRequired = rules.Register( scan.Rule{ AVDID: "AVD-AWS-0163", Provider: providers.AWSProvider, @@ -44,7 +44,8 @@ By enabling S3 bucket logging on target S3 buckets, you can capture all events t Links: cloudFormationBucketAccessLoggingRequiredLinks, RemediationMarkdown: cloudFormationBucketAccessLoggingRequiredRemediationMarkdown, }, - Severity: severity.Low, + Severity: severity.Low, + Deprecated: true, }, func(s *state.State) (results scan.Results) { for _, trail := range s.AWS.CloudTrail.Trails { diff --git a/checks/cloud/aws/cloudtrail/require_bucket_access_logging.rego b/checks/cloud/aws/cloudtrail/require_bucket_access_logging.rego new file mode 100644 index 00000000..a37a477c --- /dev/null +++ b/checks/cloud/aws/cloudtrail/require_bucket_access_logging.rego @@ -0,0 +1,52 @@ +# METADATA +# title: You should enable bucket access logging on the CloudTrail S3 bucket. +# description: | +# Amazon S3 bucket access logging generates a log that contains access records for each request made to your S3 bucket. An access log record contains details about the request, such as the request type, the resources specified in the request worked, and the time and date the request was processed. +# CIS recommends that you enable bucket access logging on the CloudTrail S3 bucket. +# By enabling S3 bucket logging on target S3 buckets, you can capture all events that might affect objects in a target bucket. Configuring logs to be placed in a separate bucket enables access to log information, which can be useful in security and incident response workflows. 
+# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/AmazonS3/latest/userguide/ServerLogs.html +# custom: +# id: AVD-AWS-0163 +# avd_id: AVD-AWS-0163 +# provider: aws +# service: cloudtrail +# severity: LOW +# short_code: require-bucket-access-logging +# recommended_action: Enable access logging on the bucket +# frameworks: +# cis-aws-1.4: +# - "3.6" +# cis-aws-1.2: +# - "2.6" +# input: +# selector: +# - type: cloud +# subtypes: +# - service: cloudtrail +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudtrail#is_multi_region_trail +# good_examples: checks/cloud/aws/cloudtrail/require_bucket_access_logging.tf.go +# bad_examples: checks/cloud/aws/cloudtrail/require_bucket_access_logging.tf.go +# cloudformation: +# good_examples: checks/cloud/aws/cloudtrail/require_bucket_access_logging.cf.go +# bad_examples: checks/cloud/aws/cloudtrail/require_bucket_access_logging.cf.go +package builtin.aws.cloudtrail.aws0163 + +import rego.v1 + +deny contains res if { + some trail in input.aws.cloudtrail.trails + trail.bucketname.value != "" + + some bucket in input.aws.s3.buckets + bucket.name.value == trail.bucketname.value + not bucket.logging.enabled.value + + res := result.new("Trail S3 bucket does not have logging enabled", bucket) +} diff --git a/checks/cloud/aws/cloudtrail/require_bucket_access_logging_test.go b/checks/cloud/aws/cloudtrail/require_bucket_access_logging_test.go deleted file mode 100644 index 60b89080..00000000 --- a/checks/cloud/aws/cloudtrail/require_bucket_access_logging_test.go +++ /dev/null @@ -1,92 +0,0 @@ -package cloudtrail - -import ( - "testing" - - trivyTypes "github.com/aquasecurity/trivy/pkg/iac/types" - - "github.com/aquasecurity/trivy/pkg/iac/state" - - "github.com/aquasecurity/trivy/pkg/iac/providers/aws/cloudtrail" - "github.com/aquasecurity/trivy/pkg/iac/providers/aws/s3" - "github.com/aquasecurity/trivy/pkg/iac/scan" - - "github.com/stretchr/testify/assert" -) - -func TestCheckBucketAccessLoggingRequired(t *testing.T) { - tests := []struct { - name string - inputCT cloudtrail.CloudTrail - inputS3 s3.S3 - expected bool - }{ - { - name: "Trail has bucket with logging enabled", - inputCT: cloudtrail.CloudTrail{ - Trails: []cloudtrail.Trail{ - { - Metadata: trivyTypes.NewTestMetadata(), - BucketName: trivyTypes.String("my-bucket", trivyTypes.NewTestMetadata()), - }, - }, - }, - inputS3: s3.S3{ - Buckets: []s3.Bucket{ - { - Metadata: trivyTypes.NewTestMetadata(), - Name: trivyTypes.String("my-bucket", trivyTypes.NewTestMetadata()), - Logging: s3.Logging{ - Metadata: trivyTypes.NewTestMetadata(), - Enabled: trivyTypes.Bool(true, trivyTypes.NewTestMetadata()), - }, - }, - }, - }, - expected: false, - }, - { - name: "Trail has bucket without logging enabled", - inputCT: cloudtrail.CloudTrail{ - Trails: []cloudtrail.Trail{ - { - Metadata: trivyTypes.NewTestMetadata(), - BucketName: trivyTypes.String("my-bucket", trivyTypes.NewTestMetadata()), - }, - }, - }, - inputS3: s3.S3{ - Buckets: []s3.Bucket{ - { - Metadata: trivyTypes.NewTestMetadata(), - Name: trivyTypes.String("my-bucket", trivyTypes.NewTestMetadata()), - Logging: s3.Logging{ - Metadata: trivyTypes.NewTestMetadata(), - Enabled: trivyTypes.Bool(false, trivyTypes.NewTestMetadata()), - }, - }, - }, - }, - expected: true, - }, - } - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - var testState state.State - testState.AWS.CloudTrail = test.inputCT - 
testState.AWS.S3 = test.inputS3 - results := checkBucketAccessLoggingRequired.Evaluate(&testState) - var found bool - for _, result := range results { - if result.Status() == scan.StatusFailed && result.Rule().LongID() == checkBucketAccessLoggingRequired.LongID() { - found = true - } - } - if test.expected { - assert.True(t, found, "Rule should have been found") - } else { - assert.False(t, found, "Rule should not have been found") - } - }) - } -} diff --git a/checks/cloud/aws/cloudtrail/require_bucket_access_logging_test.rego b/checks/cloud/aws/cloudtrail/require_bucket_access_logging_test.rego new file mode 100644 index 00000000..5b09f1ef --- /dev/null +++ b/checks/cloud/aws/cloudtrail/require_bucket_access_logging_test.rego @@ -0,0 +1,33 @@ +package builtin.aws.cloudtrail.aws0163_test + +import rego.v1 + +import data.builtin.aws.cloudtrail.aws0163 as check +import data.lib.test + +test_allow_bucket_with_logging_enabled if { + inp := {"aws": { + "cloudtrail": {"trails": [{"bucketname": {"value": "bucket1"}}]}, + "s3": {"buckets": [{ + "name": {"value": "bucket1"}, + "logging": {"enabled": {"value": true}}, + }]}, + }} + + test.assert_empty(check.deny) with input as inp +} + +test_disallow_bucket_with_logging_disabled if { + inp := {"aws": { + "cloudtrail": {"trails": [{"bucketname": {"value": "bucket1"}}]}, + "s3": {"buckets": [{ + "name": {"value": "bucket1"}, + "logging": {"enabled": {"value": false}}, + }]}, + }} + + test.assert_equal_message( + "Trail S3 bucket does not have logging enabled", + check.deny, + ) with input as inp +} diff --git a/lib/s3.rego b/lib/s3.rego new file mode 100644 index 00000000..20497051 --- /dev/null +++ b/lib/s3.rego @@ -0,0 +1,11 @@ +package lib.s3 + +import rego.v1 + +public_acls := {"public-read", "public-read-write", "website", "authenticated-read"} + +bucket_has_public_access(bucket) if { + bucket.acl.value in public_acls + not bucket.publicaccessblock.ignorepublicacls.value + not bucket.publicaccessblock.blockpublicacls.value +} diff --git a/lib/test.rego b/lib/test.rego new file mode 100644 index 00000000..8e9ceb59 --- /dev/null +++ b/lib/test.rego @@ -0,0 +1,37 @@ +package lib.test + +import rego.v1 + +assert_empty(v) if { + not _assert_not_empty(v) +} + +_assert_not_empty(v) if { + count(v) > 0 + trace_and_print(sprintf("assert_not_empty:\n %v", [v])) +} + +assert_equal_message(expected, results) if { + assert_count(results, 1) + not _assert_not_equal_message(results, expected) +} + +_assert_not_equal_message(results, expected) if { + msg := [res.msg | some res in results][0] + msg != expected + trace_and_print(sprintf("assert_equal_message:\n Got %q\n Expected %q", [msg, expected])) +} + +assert_count(results, expected) if { + not _assert_not_count(results, expected) +} + +_assert_not_count(results, expected) if { + count(results) != expected + trace_and_print(sprintf("assert_count:\n Got %v\n Expected %v", [count(results), expected])) +} + +trace_and_print(v) if { + trace(v) + print(v) +}
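
Note on input shape: the Go state fields being migrated (IsMultiRegion, EnableLogFileValidation, KMSKeyID, and so on) surface to Rego as lower-cased keys wrapped in {"value": ...} objects, which is why the new tests build their inputs by hand. A minimal sketch of that mapping for AVD-AWS-0014, reusing the assert_count helper; the package name is illustrative only:

package builtin.aws.cloudtrail.aws0014_shape_example

import rego.v1

import data.builtin.aws.cloudtrail.aws0014 as check
import data.lib.test

# Go: IsMultiRegion: trivyTypes.Bool(false, ...)  ->  Rego: trail.ismultiregion.value == false
trail_not_multi_region := {"ismultiregion": {"value": false}}

test_value_wrapper_shape_triggers_check if {
    inp := {"aws": {"cloudtrail": {"trails": [trail_not_multi_region]}}}
    test.assert_count(check.deny, 1) with input as inp
}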
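
The helpers in lib/test.rego treat the deny set as the unit under test: assert_equal_message expects exactly one result whose msg field equals the expected string, and assert_empty expects no results at all. A self-contained sketch of that contract against a hand-built results value (the package name and toy data are illustrative, not part of the checks):

package lib.test_contract_example

import rego.v1

import data.lib.test

# a toy results value shaped like the objects the deny rules produce
toy_results := [{"msg": "Trail is not enabled across all regions."}]

test_single_result_message_matches if {
    test.assert_equal_message("Trail is not enabled across all regions.", toy_results)
}

test_no_results_is_empty if {
    test.assert_empty([])
}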
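
AVD-AWS-0006 defines separate deny rules for databases and workgroups, so an input that violates both should produce two findings. A sketch using assert_count, assuming the two results stay distinct because their messages differ:

package builtin.aws.athena.aws0006_count_example

import rego.v1

import data.builtin.aws.athena.aws0006 as check
import data.lib.test

test_unencrypted_database_and_workgroup_both_flagged if {
    inp := {"aws": {"athena": {
        "databases": [{"encryption": {"type": {"value": ""}}}],
        "workgroups": [{"encryption": {"type": {"value": ""}}}]
    }}}

    # one finding from each deny rule
    test.assert_count(check.deny, 2) with input as inp
}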
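
lib/s3.rego classifies a bucket as public only when its ACL is one of the public grants and neither IgnorePublicAcls nor BlockPublicAcls is set on its public access block. A short sketch of that behaviour, with buckets shaped as in the tests above:

package lib.s3_behaviour_example

import rego.v1

import data.lib.s3

# a public ACL with no public access block is treated as public
test_public_acl_without_block if {
    s3.bucket_has_public_access({"acl": {"value": "public-read"}})
}

# the same ACL is no longer public once BlockPublicAcls is set
test_public_acl_with_block_public_acls if {
    not s3.bucket_has_public_access({
        "acl": {"value": "public-read"},
        "publicaccessblock": {"blockpublicacls": {"value": true}}
    })
}

# a private ACL is never treated as public
test_private_acl if {
    not s3.bucket_has_public_access({"acl": {"value": "private"}})
}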
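
AVD-AWS-0161 and AVD-AWS-0163 join trails to buckets by name, so a trail whose bucket is absent from the S3 state yields no finding rather than an error. A small sketch for AVD-AWS-0161; the bucket names are made up:

package builtin.aws.cloudtrail.aws0161_join_example

import rego.v1

import data.builtin.aws.cloudtrail.aws0161 as check
import data.lib.test

# the trail references a bucket that is not present in the S3 state,
# so the name join inside the check cannot be satisfied
test_trail_bucket_missing_from_state if {
    inp := {"aws": {
        "cloudtrail": {"trails": [{"bucketname": {"value": "trail-logs"}}]},
        "s3": {"buckets": [{"name": {"value": "unrelated-bucket"}, "acl": {"value": "public-read"}}]}
    }}

    test.assert_empty(check.deny) with input as inp
}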