Commit

Merge branch 'main' of github.com:hashicorp/terraform into liamcervante/tests/flags
liamcervante committed Jul 19, 2023
2 parents 446ef9e + 837716a commit 5e33bf6
Showing 6 changed files with 157 additions and 24 deletions.
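
For orientation: this commit re-enables several previously skipped expect_failures test cases by teaching ValidateExpectedFailures to recognize diagnostics that originate from input variable validations, and by filtering check block warnings out of the plan diagnostics during terraform test runs. A rough sketch of the kind of configuration this enables is shown below; the variable name, validation rule, and file layout are hypothetical and not taken from this commit:

# main.tf
variable "input" {
  type = string

  validation {
    condition     = length(var.input) > 5
    error_message = "The input must be longer than 5 characters."
  }
}

# main.tftest.hcl
variables {
  input = "abc"
}

run "test" {
  expect_failures = [
    var.input
  ]
}

With the changes below, the validation error on var.input is treated as an expected failure and swallowed, so the run passes, matching the "1 passed, 0 failed." expectation re-enabled in internal/command/test_test.go.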
17 changes: 17 additions & 0 deletions internal/command/test.go
@@ -7,6 +7,9 @@ import (
"sort"
"strings"

"github.com/hashicorp/hcl/v2"

"github.com/hashicorp/terraform/internal/addrs"
"github.com/hashicorp/terraform/internal/backend"
"github.com/hashicorp/terraform/internal/command/arguments"
"github.com/hashicorp/terraform/internal/command/views"
@@ -389,6 +392,7 @@ func (runner *TestRunner) ExecuteTestRun(run *moduletest.Run, file *moduletest.F
SkipRefresh: !run.Config.Options.Refresh,
ExternalReferences: references,
}, run.Config.Command, globals)
diags = run.ValidateExpectedFailures(diags)
run.Diagnostics = run.Diagnostics.Append(diags)

if runner.Cancelled {
@@ -531,6 +535,19 @@ func (runner *TestRunner) execute(run *moduletest.Run, file *moduletest.File, co
return tfCtx, plan, state, diags
}

// We're also going to strip out any warnings from check blocks, as we do
// for normal executions. Since we're going to go ahead and execute the
// plan immediately, any warnings from the check block are just not relevant
// any more.
var filteredDiags tfdiags.Diagnostics
for _, diag := range diags {
if rule, ok := addrs.DiagnosticOriginatesFromCheckRule(diag); ok && rule.Container.CheckableKind() == addrs.CheckableCheck {
continue
}
filteredDiags = filteredDiags.Append(diag)
}
diags = filteredDiags

// Fourth, execute apply stage.
tfCtx, ctxDiags = terraform.NewContext(tfCtxOpts)
diags = diags.Append(ctxDiags)
16 changes: 0 additions & 16 deletions internal/command/test_test.go
@@ -56,30 +56,18 @@ func TestTest(t *testing.T) {
"expect_failures_checks": {
expected: "1 passed, 0 failed.",
code: 0,
// TODO(liamcervante): Enable this when support for expect_failures
// has been added.
skip: true,
},
"expect_failures_inputs": {
expected: "1 passed, 0 failed.",
code: 0,
// TODO(liamcervante): Enable this when support for expect_failures
// has been added.
skip: true,
},
"expect_failures_outputs": {
expected: "1 passed, 0 failed.",
code: 0,
// TODO(liamcervante): Enable this when support for expect_failures
// has been added.
skip: true,
},
"expect_failures_resources": {
expected: "1 passed, 0 failed.",
code: 0,
// TODO(liamcervante): Enable this when support for expect_failures
// has been added.
skip: true,
},
"multiple_files": {
expected: "2 passed, 0 failed",
@@ -108,10 +96,6 @@ func TestTest(t *testing.T) {
"custom_condition_checks": {
expected: "0 passed, 1 failed.",
code: 1,
// TODO(liamcervante): Enable this, at the moment checks aren't
// causing the tests to fail when they should. Also, it's not
// skipping warnings during the plan when it should.
skip: true,
},
"custom_condition_inputs": {
expected: "0 passed, 1 failed.",
6 changes: 0 additions & 6 deletions
@@ -3,12 +3,6 @@ variables {
}

run "test" {

assert {
condition = test_resource.resource.value == "some value"
error_message = "since we used a postcondition, it should still have actually created the resource"
}

expect_failures = [
test_resource.resource
]
26 changes: 26 additions & 0 deletions internal/moduletest/run.go
@@ -173,6 +173,32 @@ func (run *Run) ValidateExpectedFailures(originals tfdiags.Diagnostics) tfdiags.
// Otherwise, this isn't an expected failure so just fall out
// and add it into the returned set of diagnostics below.

case addrs.CheckableInputVariable:
addr := rule.Container.(addrs.AbsInputVariableInstance)
if !addr.Module.IsRoot() {
// failures can only be expected against checkable objects
// in the root module. This diagnostic will be added into
// the returned set below.
break
}

if diag.Severity() == tfdiags.Warning {
// Warnings don't count as errors. This diagnostic will be
// added into the returned set below.
break
}
if expectedFailures.Has(addr.Variable) {
// Then this failure is expected! Mark the original map as
// having found a failure and swallow this error by
// continuing and not adding it into the returned set of
// diagnostics.
expectedFailures.Put(addr.Variable, true)
continue
}

// Otherwise, this isn't an expected failure so just fall out
// and add it into the returned set of diagnostics below.

case addrs.CheckableResource:
addr := rule.Container.(addrs.AbsResourceInstance)
if !addr.Module.IsRoot() {
106 changes: 106 additions & 0 deletions internal/moduletest/run_test.go
@@ -184,6 +184,112 @@ func TestRun_ValidateExpectedFailures(t *testing.T) {
},
},
},
"variables": {
ExpectedFailures: []string{
"var.expected_one",
"var.expected_two",
},
Input: createDiagnostics(func(diags tfdiags.Diagnostics) tfdiags.Diagnostics {

// First, let's create an input that failed that isn't
// expected. This should be unaffected by our function.
diags = diags.Append(
&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "unexpected failure",
Detail: "this should not be removed",
Extra: &addrs.CheckRuleDiagnosticExtra{
CheckRule: addrs.NewCheckRule(addrs.AbsInputVariableInstance{
Module: addrs.RootModuleInstance,
Variable: addrs.InputVariable{Name: "unexpected"},
}, addrs.InputValidation, 0),
},
})

// Second, let's create an input that failed but is expected.
// Our function should remove this from the set of diags.
diags = diags.Append(
&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "expected failure",
Detail: "this should be removed",
Extra: &addrs.CheckRuleDiagnosticExtra{
CheckRule: addrs.NewCheckRule(addrs.AbsInputVariableInstance{
Module: addrs.RootModuleInstance,
Variable: addrs.InputVariable{Name: "expected_one"},
}, addrs.InputValidation, 0),
},
})

diags = diags.Append(
&hcl.Diagnostic{
Severity: hcl.DiagWarning,
Summary: "expected warning",
Detail: "this should not be removed",
Extra: &addrs.CheckRuleDiagnosticExtra{
CheckRule: addrs.NewCheckRule(addrs.AbsInputVariableInstance{
Module: addrs.RootModuleInstance,
Variable: addrs.InputVariable{Name: "expected_one"},
}, addrs.InputValidation, 0),
},
})

// The error we are adding here is for expected_two but in a
// child module. We expect that this diagnostic shouldn't
// trigger our expected failure, and that an extra diagnostic
// should be created complaining that the expected failure wasn't actually
// triggered.

diags = diags.Append(
&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "error in child module",
Detail: "this should not be removed",
Extra: &addrs.CheckRuleDiagnosticExtra{
CheckRule: addrs.NewCheckRule(addrs.AbsInputVariableInstance{
Module: []addrs.ModuleInstanceStep{
{
Name: "child_module",
},
},
Variable: addrs.InputVariable{Name: "expected_two"},
}, addrs.InputValidation, 0),
},
})

return diags
}),
Output: []output{
{
Description: tfdiags.Description{
Summary: "unexpected failure",
Detail: "this should not be removed",
},
Severity: tfdiags.Error,
},
{
Description: tfdiags.Description{
Summary: "expected warning",
Detail: "this should not be removed",
},
Severity: tfdiags.Warning,
},
{
Description: tfdiags.Description{
Summary: "error in child module",
Detail: "this should not be removed",
},
Severity: tfdiags.Error,
},
{
Description: tfdiags.Description{
Summary: "Missing expected failure",
Detail: "The checkable object, var.expected_two, was expected to report an error but did not.",
},
Severity: tfdiags.Error,
},
},
},
"resources": {
ExpectedFailures: []string{
"test_instance.single",
10 changes: 8 additions & 2 deletions internal/terraform/eval_variable.go
@@ -242,7 +242,7 @@ func evalVariableValidations(addr addrs.AbsInputVariableInstance, config *config
}

for ix, validation := range config.Validations {
result, ruleDiags := evalVariableValidation(validation, hclCtx, addr, config, expr)
result, ruleDiags := evalVariableValidation(validation, hclCtx, addr, config, expr, ix)
diags = diags.Append(ruleDiags)

log.Printf("[TRACE] evalVariableValidations: %s status is now %s", addr, result.Status)
@@ -256,7 +256,7 @@ func evalVariableValidations(addr addrs.AbsInputVariableInstance, config *config
return diags
}

func evalVariableValidation(validation *configs.CheckRule, hclCtx *hcl.EvalContext, addr addrs.AbsInputVariableInstance, config *configs.Variable, expr hcl.Expression) (checkResult, tfdiags.Diagnostics) {
func evalVariableValidation(validation *configs.CheckRule, hclCtx *hcl.EvalContext, addr addrs.AbsInputVariableInstance, config *configs.Variable, expr hcl.Expression, ix int) (checkResult, tfdiags.Diagnostics) {
const errInvalidCondition = "Invalid variable validation result"
const errInvalidValue = "Invalid value for variable"
var diags tfdiags.Diagnostics
@@ -404,6 +404,9 @@ You can correct this by removing references to sensitive values, or by carefully
Subject: expr.Range().Ptr(),
Expression: validation.Condition,
EvalContext: hclCtx,
Extra: &addrs.CheckRuleDiagnosticExtra{
CheckRule: addr.CheckRule(addrs.InputValidation, ix),
},
})
} else {
// Since we don't have a source expression for a root module
@@ -416,6 +419,9 @@ You can correct this by removing references to sensitive values, or by carefully
Subject: config.DeclRange.Ptr(),
Expression: validation.Condition,
EvalContext: hclCtx,
Extra: &addrs.CheckRuleDiagnosticExtra{
CheckRule: addr.CheckRule(addrs.InputValidation, ix),
},
})
}

