Add support for providing additional experiments to Dataflow job #6196
Changes from 2 commits
@@ -182,6 +182,30 @@ func TestAccDataflowJob_withIpConfig(t *testing.T) {
	})
}

func TestAccDataflowJobWithAdditionalExperiments(t *testing.T) {
	t.Parallel()

	randStr := randString(t, 10)
	bucket := "tf-test-dataflow-gcs-" + randStr
	job := "tf-test-dataflow-job-" + randStr
	additionalExperiments := []string{"enable_stackdriver_agent_metrics"}

Reviewer: Want to add a few more values to this list just for extra confidence?
Author: I've added

	vcrTest(t, resource.TestCase{
		PreCheck:     func() { testAccPreCheck(t) },
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckDataflowJobDestroyProducer(t),
		Steps: []resource.TestStep{
			{
				Config: testAccDataflowJob_additionalExperiments(bucket, job, additionalExperiments),
				Check: resource.ComposeTestCheckFunc(
					testAccDataflowJobExists(t, "google_dataflow_job.with_additional_experiments"),
					testAccDataflowJobHasExperiments(t, "google_dataflow_job.with_additional_experiments", additionalExperiments),
				),
			},
		},
	})
}

func testAccCheckDataflowJobDestroyProducer(t *testing.T) func(s *terraform.State) error {
	return func(s *terraform.State) error {
		for _, rs := range s.RootModule().Resources {
@@ -384,6 +408,39 @@ func testAccDataflowJobHasLabels(t *testing.T, res, key string) resource.TestCheckFunc {
	}
}

func testAccDataflowJobHasExperiments(t *testing.T, res string, experiments []string) resource.TestCheckFunc {
	return func(s *terraform.State) error {
		rs, ok := s.RootModule().Resources[res]
		if !ok {
			return fmt.Errorf("resource %q not found in state", res)
		}

		if rs.Primary.ID == "" {
			return fmt.Errorf("No ID is set")
		}
		config := googleProviderConfig(t)

		job, err := config.clientDataflow.Projects.Jobs.Get(config.Project, rs.Primary.ID).View("JOB_VIEW_ALL").Do()
		if err != nil {
			return fmt.Errorf("dataflow job does not exist")
		}

		for _, expectedExperiment := range experiments {
			var contains = false
			for _, actualExperiment := range job.Environment.Experiments {
				if actualExperiment == expectedExperiment {
					contains = true
				}
			}
			if contains != true {
				return fmt.Errorf("Expected experiment '%s' not found in experiments", expectedExperiment)
			}
		}

		return nil
	}
}

func testAccDataflowJob_zone(bucket, job, zone string) string {
	return fmt.Sprintf(`
resource "google_storage_bucket" "temp" {
@@ -583,3 +640,27 @@ resource "google_dataflow_job" "with_labels" {
`, bucket, job, labelKey, labelVal, testDataflowJobTemplateWordCountUrl, testDataflowJobSampleFileUrl)

}

func testAccDataflowJob_additionalExperiments(bucket string, job string, experiments []string) string {
	return fmt.Sprintf(`
resource "google_storage_bucket" "temp" {
  name          = "%s"
  force_destroy = true
}

resource "google_dataflow_job" "with_additional_experiments" {
  name = "%s"

  additional_experiments = ["%s"]

  template_gcs_path = "%s"
  temp_gcs_location = google_storage_bucket.temp.url
  parameters = {
    inputFile = "%s"
    output    = "${google_storage_bucket.temp.url}/output"
  }
  on_delete = "cancel"
}
`, bucket, job, strings.Join(experiments, ","), testDataflowJobTemplateWordCountUrl, testDataflowJobSampleFileUrl)

}
Reviewer: Is the order here meaningful? If not, I'd recommend using a TypeSet instead.
Author: Good idea! I've updated it to TypeSet. Thanks!
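To illustrate the reviewer's suggestion, here is a minimal sketch of how an additional_experiments attribute could be declared as a schema.TypeSet using the Terraform plugin SDK's helper/schema package. The attribute name matches the test configuration above, but the function name and the Optional/ForceNew choices are assumptions for illustration; the actual schema change is not part of the diff shown in this PR page.

package dataflow

import "github.com/hashicorp/terraform-plugin-sdk/helper/schema"

// additionalExperimentsSchema is a hypothetical sketch: it declares the
// attribute as an unordered set of strings, so two configurations that list
// the same experiments in a different order produce no diff.
func additionalExperimentsSchema() *schema.Schema {
	return &schema.Schema{
		Type:     schema.TypeSet,
		Optional: true,
		ForceNew: true, // assumption; Dataflow jobs generally recreate on such changes
		Elem:     &schema.Schema{Type: schema.TypeString},
	}
}

With TypeSet, reading the value back via d.Get yields a *schema.Set that is typically converted to []string before being passed to the Dataflow API; the ordering a TypeList would preserve is not needed here, since experiments are independent flags.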