From ccdc2728167323ca5932b8a96bf6662838363590 Mon Sep 17 00:00:00 2001
From: Modular Magician
Date: Tue, 16 Aug 2022 18:28:46 +0000
Subject: [PATCH] promote cfn2 to ga (#6388)

Signed-off-by: Modular Magician
---
 .changelog/6388.txt                           |    3 +
 google/cloudfunctions2_operation.go           |   75 +
 google/config.go                              |    4 +
 google/iam_cloudfunctions2_function.go        |  223 +++
 ...cloudfunctions2_function_generated_test.go |  422 ++++
 google/provider.go                            |   19 +-
 google/resource_cloudfunctions2_function.go   | 1706 +++++++++++++++++
 ...cloudfunctions2_function_generated_test.go |  511 +++++
 ...e_cloudfunctions2_function_sweeper_test.go |  124 ++
 .../r/cloudfunctions2_function.html.markdown  |   46 +-
 ...cloudfunctions2_function_iam.html.markdown |   21 +-
 11 files changed, 3098 insertions(+), 56 deletions(-)
 create mode 100644 .changelog/6388.txt
 create mode 100644 google/cloudfunctions2_operation.go
 create mode 100644 google/iam_cloudfunctions2_function.go
 create mode 100644 google/iam_cloudfunctions2_function_generated_test.go
 create mode 100644 google/resource_cloudfunctions2_function.go
 create mode 100644 google/resource_cloudfunctions2_function_generated_test.go
 create mode 100644 google/resource_cloudfunctions2_function_sweeper_test.go

diff --git a/.changelog/6388.txt b/.changelog/6388.txt
new file mode 100644
index 00000000000..334db8fa8b1
--- /dev/null
+++ b/.changelog/6388.txt
@@ -0,0 +1,3 @@
+```release-note:enhancement
+cloudfunctions2: promoted `google_cloudfunctions2_function` to GA
+```
diff --git a/google/cloudfunctions2_operation.go b/google/cloudfunctions2_operation.go
new file mode 100644
index 00000000000..e50b32bd6b0
--- /dev/null
+++ b/google/cloudfunctions2_operation.go
@@ -0,0 +1,75 @@
+// ----------------------------------------------------------------------------
+//
+//     *** AUTO GENERATED CODE *** Type: MMv1 ***
+//
+// ----------------------------------------------------------------------------
+//
+//     This file is automatically generated by Magic Modules and manual
+//     changes will be clobbered when the file is regenerated.
+//
+//     Please read more about how to change this file in
+//     .github/CONTRIBUTING.md.
+//
+// ----------------------------------------------------------------------------
+
+package google
+
+import (
+	"encoding/json"
+	"fmt"
+	"time"
+)
+
+type Cloudfunctions2OperationWaiter struct {
+	Config    *Config
+	UserAgent string
+	Project   string
+	CommonOperationWaiter
+}
+
+func (w *Cloudfunctions2OperationWaiter) QueryOp() (interface{}, error) {
+	if w == nil {
+		return nil, fmt.Errorf("Cannot query operation, it's unset or nil.")
+	}
+	// Returns the proper get.
+ url := fmt.Sprintf("%s%s", w.Config.Cloudfunctions2BasePath, w.CommonOperationWaiter.Op.Name) + + return sendRequest(w.Config, "GET", w.Project, url, w.UserAgent, nil) +} + +func createCloudfunctions2Waiter(config *Config, op map[string]interface{}, project, activity, userAgent string) (*Cloudfunctions2OperationWaiter, error) { + w := &Cloudfunctions2OperationWaiter{ + Config: config, + UserAgent: userAgent, + Project: project, + } + if err := w.CommonOperationWaiter.SetOp(op); err != nil { + return nil, err + } + return w, nil +} + +// nolint: deadcode,unused +func cloudfunctions2OperationWaitTimeWithResponse(config *Config, op map[string]interface{}, response *map[string]interface{}, project, activity, userAgent string, timeout time.Duration) error { + w, err := createCloudfunctions2Waiter(config, op, project, activity, userAgent) + if err != nil { + return err + } + if err := OperationWait(w, activity, timeout, config.PollInterval); err != nil { + return err + } + return json.Unmarshal([]byte(w.CommonOperationWaiter.Op.Response), response) +} + +func cloudfunctions2OperationWaitTime(config *Config, op map[string]interface{}, project, activity, userAgent string, timeout time.Duration) error { + if val, ok := op["name"]; !ok || val == "" { + // This was a synchronous call - there is no operation to wait for. + return nil + } + w, err := createCloudfunctions2Waiter(config, op, project, activity, userAgent) + if err != nil { + // If w is nil, the op was synchronous. + return err + } + return OperationWait(w, activity, timeout, config.PollInterval) +} diff --git a/google/config.go b/google/config.go index efb4aba449c..46b3cd7ba1a 100644 --- a/google/config.go +++ b/google/config.go @@ -178,6 +178,7 @@ type Config struct { CloudAssetBasePath string CloudBuildBasePath string CloudFunctionsBasePath string + Cloudfunctions2BasePath string CloudIdentityBasePath string CloudIotBasePath string CloudRunBasePath string @@ -271,6 +272,7 @@ const CertificateManagerBasePathKey = "CertificateManager" const CloudAssetBasePathKey = "CloudAsset" const CloudBuildBasePathKey = "CloudBuild" const CloudFunctionsBasePathKey = "CloudFunctions" +const Cloudfunctions2BasePathKey = "Cloudfunctions2" const CloudIdentityBasePathKey = "CloudIdentity" const CloudIotBasePathKey = "CloudIot" const CloudRunBasePathKey = "CloudRun" @@ -358,6 +360,7 @@ var DefaultBasePaths = map[string]string{ CloudAssetBasePathKey: "https://cloudasset.googleapis.com/v1/", CloudBuildBasePathKey: "https://cloudbuild.googleapis.com/v1/", CloudFunctionsBasePathKey: "https://cloudfunctions.googleapis.com/v1/", + Cloudfunctions2BasePathKey: "https://cloudfunctions.googleapis.com/v2/", CloudIdentityBasePathKey: "https://cloudidentity.googleapis.com/v1/", CloudIotBasePathKey: "https://cloudiot.googleapis.com/v1/", CloudRunBasePathKey: "https://{{location}}-run.googleapis.com/", @@ -1207,6 +1210,7 @@ func ConfigureBasePaths(c *Config) { c.CloudAssetBasePath = DefaultBasePaths[CloudAssetBasePathKey] c.CloudBuildBasePath = DefaultBasePaths[CloudBuildBasePathKey] c.CloudFunctionsBasePath = DefaultBasePaths[CloudFunctionsBasePathKey] + c.Cloudfunctions2BasePath = DefaultBasePaths[Cloudfunctions2BasePathKey] c.CloudIdentityBasePath = DefaultBasePaths[CloudIdentityBasePathKey] c.CloudIotBasePath = DefaultBasePaths[CloudIotBasePathKey] c.CloudRunBasePath = DefaultBasePaths[CloudRunBasePathKey] diff --git a/google/iam_cloudfunctions2_function.go b/google/iam_cloudfunctions2_function.go new file mode 100644 index 00000000000..652c57cc523 --- /dev/null +++ 
b/google/iam_cloudfunctions2_function.go @@ -0,0 +1,223 @@ +// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** Type: MMv1 *** +// +// ---------------------------------------------------------------------------- +// +// This file is automatically generated by Magic Modules and manual +// changes will be clobbered when the file is regenerated. +// +// Please read more about how to change this file in +// .github/CONTRIBUTING.md. +// +// ---------------------------------------------------------------------------- + +package google + +import ( + "fmt" + + "github.com/hashicorp/errwrap" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "google.golang.org/api/cloudresourcemanager/v1" +) + +var Cloudfunctions2functionIamSchema = map[string]*schema.Schema{ + "project": { + Type: schema.TypeString, + Computed: true, + Optional: true, + ForceNew: true, + }, + "location": { + Type: schema.TypeString, + Computed: true, + Optional: true, + ForceNew: true, + }, + "cloud_function": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + DiffSuppressFunc: compareSelfLinkOrResourceName, + }, +} + +type Cloudfunctions2functionIamUpdater struct { + project string + location string + cloudFunction string + d TerraformResourceData + Config *Config +} + +func Cloudfunctions2functionIamUpdaterProducer(d TerraformResourceData, config *Config) (ResourceIamUpdater, error) { + values := make(map[string]string) + + project, _ := getProject(d, config) + if project != "" { + if err := d.Set("project", project); err != nil { + return nil, fmt.Errorf("Error setting project: %s", err) + } + } + values["project"] = project + location, _ := getLocation(d, config) + if location != "" { + if err := d.Set("location", location); err != nil { + return nil, fmt.Errorf("Error setting location: %s", err) + } + } + values["location"] = location + if v, ok := d.GetOk("cloud_function"); ok { + values["cloud_function"] = v.(string) + } + + // We may have gotten either a long or short name, so attempt to parse long name if possible + m, err := getImportIdQualifiers([]string{"projects/(?P[^/]+)/locations/(?P[^/]+)/functions/(?P[^/]+)", "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", "(?P[^/]+)/(?P[^/]+)", "(?P[^/]+)"}, d, config, d.Get("cloud_function").(string)) + if err != nil { + return nil, err + } + + for k, v := range m { + values[k] = v + } + + u := &Cloudfunctions2functionIamUpdater{ + project: values["project"], + location: values["location"], + cloudFunction: values["cloud_function"], + d: d, + Config: config, + } + + if err := d.Set("project", u.project); err != nil { + return nil, fmt.Errorf("Error setting project: %s", err) + } + if err := d.Set("location", u.location); err != nil { + return nil, fmt.Errorf("Error setting location: %s", err) + } + if err := d.Set("cloud_function", u.GetResourceId()); err != nil { + return nil, fmt.Errorf("Error setting cloud_function: %s", err) + } + + return u, nil +} + +func Cloudfunctions2functionIdParseFunc(d *schema.ResourceData, config *Config) error { + values := make(map[string]string) + + project, _ := getProject(d, config) + if project != "" { + values["project"] = project + } + + location, _ := getLocation(d, config) + if location != "" { + values["location"] = location + } + + m, err := getImportIdQualifiers([]string{"projects/(?P[^/]+)/locations/(?P[^/]+)/functions/(?P[^/]+)", "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", "(?P[^/]+)/(?P[^/]+)", "(?P[^/]+)"}, d, config, d.Id()) + if err != nil { + return err + } + + 
for k, v := range m { + values[k] = v + } + + u := &Cloudfunctions2functionIamUpdater{ + project: values["project"], + location: values["location"], + cloudFunction: values["cloud_function"], + d: d, + Config: config, + } + if err := d.Set("cloud_function", u.GetResourceId()); err != nil { + return fmt.Errorf("Error setting cloud_function: %s", err) + } + d.SetId(u.GetResourceId()) + return nil +} + +func (u *Cloudfunctions2functionIamUpdater) GetResourceIamPolicy() (*cloudresourcemanager.Policy, error) { + url, err := u.qualifyfunctionUrl("getIamPolicy") + if err != nil { + return nil, err + } + + project, err := getProject(u.d, u.Config) + if err != nil { + return nil, err + } + var obj map[string]interface{} + + userAgent, err := generateUserAgentString(u.d, u.Config.userAgent) + if err != nil { + return nil, err + } + + policy, err := sendRequest(u.Config, "GET", project, url, userAgent, obj) + if err != nil { + return nil, errwrap.Wrapf(fmt.Sprintf("Error retrieving IAM policy for %s: {{err}}", u.DescribeResource()), err) + } + + out := &cloudresourcemanager.Policy{} + err = Convert(policy, out) + if err != nil { + return nil, errwrap.Wrapf("Cannot convert a policy to a resource manager policy: {{err}}", err) + } + + return out, nil +} + +func (u *Cloudfunctions2functionIamUpdater) SetResourceIamPolicy(policy *cloudresourcemanager.Policy) error { + json, err := ConvertToMap(policy) + if err != nil { + return err + } + + obj := make(map[string]interface{}) + obj["policy"] = json + + url, err := u.qualifyfunctionUrl("setIamPolicy") + if err != nil { + return err + } + project, err := getProject(u.d, u.Config) + if err != nil { + return err + } + + userAgent, err := generateUserAgentString(u.d, u.Config.userAgent) + if err != nil { + return err + } + + _, err = sendRequestWithTimeout(u.Config, "POST", project, url, userAgent, obj, u.d.Timeout(schema.TimeoutCreate)) + if err != nil { + return errwrap.Wrapf(fmt.Sprintf("Error setting IAM policy for %s: {{err}}", u.DescribeResource()), err) + } + + return nil +} + +func (u *Cloudfunctions2functionIamUpdater) qualifyfunctionUrl(methodIdentifier string) (string, error) { + urlTemplate := fmt.Sprintf("{{Cloudfunctions2BasePath}}%s:%s", fmt.Sprintf("projects/%s/locations/%s/functions/%s", u.project, u.location, u.cloudFunction), methodIdentifier) + url, err := replaceVars(u.d, u.Config, urlTemplate) + if err != nil { + return "", err + } + return url, nil +} + +func (u *Cloudfunctions2functionIamUpdater) GetResourceId() string { + return fmt.Sprintf("projects/%s/locations/%s/functions/%s", u.project, u.location, u.cloudFunction) +} + +func (u *Cloudfunctions2functionIamUpdater) GetMutexKey() string { + return fmt.Sprintf("iam-cloudfunctions2-function-%s", u.GetResourceId()) +} + +func (u *Cloudfunctions2functionIamUpdater) DescribeResource() string { + return fmt.Sprintf("cloudfunctions2 function %q", u.GetResourceId()) +} diff --git a/google/iam_cloudfunctions2_function_generated_test.go b/google/iam_cloudfunctions2_function_generated_test.go new file mode 100644 index 00000000000..b2753df9db5 --- /dev/null +++ b/google/iam_cloudfunctions2_function_generated_test.go @@ -0,0 +1,422 @@ +// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** Type: MMv1 *** +// +// ---------------------------------------------------------------------------- +// +// This file is automatically generated by Magic Modules and manual +// changes will be clobbered when the file is regenerated. 
+// +// Please read more about how to change this file in +// .github/CONTRIBUTING.md. +// +// ---------------------------------------------------------------------------- + +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccCloudfunctions2functionIamBindingGenerated(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": randString(t, 10), + "role": "roles/viewer", + "project": getTestProjectFromEnv(), + + "zip_path": "./test-fixtures/cloudfunctions2/function-source.zip", + "location": "us-central1", + } + + vcrTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccCloudfunctions2functionIamBinding_basicGenerated(context), + }, + { + ResourceName: "google_cloudfunctions2_function_iam_binding.foo", + ImportStateId: fmt.Sprintf("projects/%s/locations/%s/functions/%s roles/viewer", getTestProjectFromEnv(), getTestRegionFromEnv(), fmt.Sprintf("tf-test-function-v2%s", context["random_suffix"])), + ImportState: true, + ImportStateVerify: true, + }, + { + // Test Iam Binding update + Config: testAccCloudfunctions2functionIamBinding_updateGenerated(context), + }, + { + ResourceName: "google_cloudfunctions2_function_iam_binding.foo", + ImportStateId: fmt.Sprintf("projects/%s/locations/%s/functions/%s roles/viewer", getTestProjectFromEnv(), getTestRegionFromEnv(), fmt.Sprintf("tf-test-function-v2%s", context["random_suffix"])), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccCloudfunctions2functionIamMemberGenerated(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": randString(t, 10), + "role": "roles/viewer", + "project": getTestProjectFromEnv(), + + "zip_path": "./test-fixtures/cloudfunctions2/function-source.zip", + "location": "us-central1", + } + + vcrTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + // Test Iam Member creation (no update for member, no need to test) + Config: testAccCloudfunctions2functionIamMember_basicGenerated(context), + }, + { + ResourceName: "google_cloudfunctions2_function_iam_member.foo", + ImportStateId: fmt.Sprintf("projects/%s/locations/%s/functions/%s roles/viewer user:admin@hashicorptest.com", getTestProjectFromEnv(), getTestRegionFromEnv(), fmt.Sprintf("tf-test-function-v2%s", context["random_suffix"])), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccCloudfunctions2functionIamPolicyGenerated(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": randString(t, 10), + "role": "roles/viewer", + "project": getTestProjectFromEnv(), + + "zip_path": "./test-fixtures/cloudfunctions2/function-source.zip", + "location": "us-central1", + } + + vcrTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccCloudfunctions2functionIamPolicy_basicGenerated(context), + }, + { + ResourceName: "google_cloudfunctions2_function_iam_policy.foo", + ImportStateId: fmt.Sprintf("projects/%s/locations/%s/functions/%s", getTestProjectFromEnv(), getTestRegionFromEnv(), fmt.Sprintf("tf-test-function-v2%s", context["random_suffix"])), + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccCloudfunctions2functionIamPolicy_emptyBinding(context), + }, + { 
+ ResourceName: "google_cloudfunctions2_function_iam_policy.foo", + ImportStateId: fmt.Sprintf("projects/%s/locations/%s/functions/%s", getTestProjectFromEnv(), getTestRegionFromEnv(), fmt.Sprintf("tf-test-function-v2%s", context["random_suffix"])), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccCloudfunctions2functionIamMember_basicGenerated(context map[string]interface{}) string { + return Nprintf(` +# [START functions_v2_basic] +locals { + project = "%{project}" # Google Cloud Platform Project ID +} + +resource "google_storage_bucket" "bucket" { + name = "${local.project}-tf-test-gcf-source%{random_suffix}" # Every bucket name must be globally unique + location = "US" + uniform_bucket_level_access = true +} + +resource "google_storage_bucket_object" "object" { + name = "function-source.zip" + bucket = google_storage_bucket.bucket.name + source = "%{zip_path}" # Add path to the zipped function source code +} + +resource "google_cloudfunctions2_function" "function" { + name = "tf-test-function-v2%{random_suffix}" + location = "us-central1" + description = "a new function" + + build_config { + runtime = "nodejs16" + entry_point = "helloHttp" # Set the entry point + source { + storage_source { + bucket = google_storage_bucket.bucket.name + object = google_storage_bucket_object.object.name + } + } + } + + service_config { + max_instance_count = 1 + available_memory = "256M" + timeout_seconds = 60 + } +} + +output "function_uri" { + value = google_cloudfunctions2_function.function.service_config[0].uri +} +# [END functions_v2_basic] + +resource "google_cloudfunctions2_function_iam_member" "foo" { + project = google_cloudfunctions2_function.function.project + location = google_cloudfunctions2_function.function.location + cloud_function = google_cloudfunctions2_function.function.name + role = "%{role}" + member = "user:admin@hashicorptest.com" +} +`, context) +} + +func testAccCloudfunctions2functionIamPolicy_basicGenerated(context map[string]interface{}) string { + return Nprintf(` +# [START functions_v2_basic] +locals { + project = "%{project}" # Google Cloud Platform Project ID +} + +resource "google_storage_bucket" "bucket" { + name = "${local.project}-tf-test-gcf-source%{random_suffix}" # Every bucket name must be globally unique + location = "US" + uniform_bucket_level_access = true +} + +resource "google_storage_bucket_object" "object" { + name = "function-source.zip" + bucket = google_storage_bucket.bucket.name + source = "%{zip_path}" # Add path to the zipped function source code +} + +resource "google_cloudfunctions2_function" "function" { + name = "tf-test-function-v2%{random_suffix}" + location = "us-central1" + description = "a new function" + + build_config { + runtime = "nodejs16" + entry_point = "helloHttp" # Set the entry point + source { + storage_source { + bucket = google_storage_bucket.bucket.name + object = google_storage_bucket_object.object.name + } + } + } + + service_config { + max_instance_count = 1 + available_memory = "256M" + timeout_seconds = 60 + } +} + +output "function_uri" { + value = google_cloudfunctions2_function.function.service_config[0].uri +} +# [END functions_v2_basic] + +data "google_iam_policy" "foo" { + binding { + role = "%{role}" + members = ["user:admin@hashicorptest.com"] + } +} + +resource "google_cloudfunctions2_function_iam_policy" "foo" { + project = google_cloudfunctions2_function.function.project + location = google_cloudfunctions2_function.function.location + cloud_function = 
google_cloudfunctions2_function.function.name + policy_data = data.google_iam_policy.foo.policy_data +} +`, context) +} + +func testAccCloudfunctions2functionIamPolicy_emptyBinding(context map[string]interface{}) string { + return Nprintf(` +# [START functions_v2_basic] +locals { + project = "%{project}" # Google Cloud Platform Project ID +} + +resource "google_storage_bucket" "bucket" { + name = "${local.project}-tf-test-gcf-source%{random_suffix}" # Every bucket name must be globally unique + location = "US" + uniform_bucket_level_access = true +} + +resource "google_storage_bucket_object" "object" { + name = "function-source.zip" + bucket = google_storage_bucket.bucket.name + source = "%{zip_path}" # Add path to the zipped function source code +} + +resource "google_cloudfunctions2_function" "function" { + name = "tf-test-function-v2%{random_suffix}" + location = "us-central1" + description = "a new function" + + build_config { + runtime = "nodejs16" + entry_point = "helloHttp" # Set the entry point + source { + storage_source { + bucket = google_storage_bucket.bucket.name + object = google_storage_bucket_object.object.name + } + } + } + + service_config { + max_instance_count = 1 + available_memory = "256M" + timeout_seconds = 60 + } +} + +output "function_uri" { + value = google_cloudfunctions2_function.function.service_config[0].uri +} +# [END functions_v2_basic] + +data "google_iam_policy" "foo" { +} + +resource "google_cloudfunctions2_function_iam_policy" "foo" { + project = google_cloudfunctions2_function.function.project + location = google_cloudfunctions2_function.function.location + cloud_function = google_cloudfunctions2_function.function.name + policy_data = data.google_iam_policy.foo.policy_data +} +`, context) +} + +func testAccCloudfunctions2functionIamBinding_basicGenerated(context map[string]interface{}) string { + return Nprintf(` +# [START functions_v2_basic] +locals { + project = "%{project}" # Google Cloud Platform Project ID +} + +resource "google_storage_bucket" "bucket" { + name = "${local.project}-tf-test-gcf-source%{random_suffix}" # Every bucket name must be globally unique + location = "US" + uniform_bucket_level_access = true +} + +resource "google_storage_bucket_object" "object" { + name = "function-source.zip" + bucket = google_storage_bucket.bucket.name + source = "%{zip_path}" # Add path to the zipped function source code +} + +resource "google_cloudfunctions2_function" "function" { + name = "tf-test-function-v2%{random_suffix}" + location = "us-central1" + description = "a new function" + + build_config { + runtime = "nodejs16" + entry_point = "helloHttp" # Set the entry point + source { + storage_source { + bucket = google_storage_bucket.bucket.name + object = google_storage_bucket_object.object.name + } + } + } + + service_config { + max_instance_count = 1 + available_memory = "256M" + timeout_seconds = 60 + } +} + +output "function_uri" { + value = google_cloudfunctions2_function.function.service_config[0].uri +} +# [END functions_v2_basic] + +resource "google_cloudfunctions2_function_iam_binding" "foo" { + project = google_cloudfunctions2_function.function.project + location = google_cloudfunctions2_function.function.location + cloud_function = google_cloudfunctions2_function.function.name + role = "%{role}" + members = ["user:admin@hashicorptest.com"] +} +`, context) +} + +func testAccCloudfunctions2functionIamBinding_updateGenerated(context map[string]interface{}) string { + return Nprintf(` +# [START functions_v2_basic] +locals { + project = 
"%{project}" # Google Cloud Platform Project ID +} + +resource "google_storage_bucket" "bucket" { + name = "${local.project}-tf-test-gcf-source%{random_suffix}" # Every bucket name must be globally unique + location = "US" + uniform_bucket_level_access = true +} + +resource "google_storage_bucket_object" "object" { + name = "function-source.zip" + bucket = google_storage_bucket.bucket.name + source = "%{zip_path}" # Add path to the zipped function source code +} + +resource "google_cloudfunctions2_function" "function" { + name = "tf-test-function-v2%{random_suffix}" + location = "us-central1" + description = "a new function" + + build_config { + runtime = "nodejs16" + entry_point = "helloHttp" # Set the entry point + source { + storage_source { + bucket = google_storage_bucket.bucket.name + object = google_storage_bucket_object.object.name + } + } + } + + service_config { + max_instance_count = 1 + available_memory = "256M" + timeout_seconds = 60 + } +} + +output "function_uri" { + value = google_cloudfunctions2_function.function.service_config[0].uri +} +# [END functions_v2_basic] + +resource "google_cloudfunctions2_function_iam_binding" "foo" { + project = google_cloudfunctions2_function.function.project + location = google_cloudfunctions2_function.function.location + cloud_function = google_cloudfunctions2_function.function.name + role = "%{role}" + members = ["user:admin@hashicorptest.com", "user:gterraformtest1@gmail.com"] +} +`, context) +} diff --git a/google/provider.go b/google/provider.go index 7f0d358fd63..0d0edbd0c0b 100644 --- a/google/provider.go +++ b/google/provider.go @@ -285,6 +285,14 @@ func Provider() *schema.Provider { "GOOGLE_CLOUD_FUNCTIONS_CUSTOM_ENDPOINT", }, DefaultBasePaths[CloudFunctionsBasePathKey]), }, + "cloudfunctions2_custom_endpoint": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validateCustomEndpoint, + DefaultFunc: schema.MultiEnvDefaultFunc([]string{ + "GOOGLE_CLOUDFUNCTIONS2_CUSTOM_ENDPOINT", + }, DefaultBasePaths[Cloudfunctions2BasePathKey]), + }, "cloud_identity_custom_endpoint": { Type: schema.TypeString, Optional: true, @@ -872,9 +880,9 @@ func Provider() *schema.Provider { return provider } -// Generated resources: 228 -// Generated IAM resources: 135 -// Total generated resources: 363 +// Generated resources: 229 +// Generated IAM resources: 138 +// Total generated resources: 367 func ResourceMap() map[string]*schema.Resource { resourceMap, _ := ResourceMapWithErrors() return resourceMap @@ -951,6 +959,10 @@ func ResourceMapWithErrors() (map[string]*schema.Resource, error) { "google_cloudfunctions_function_iam_binding": ResourceIamBinding(CloudFunctionsCloudFunctionIamSchema, CloudFunctionsCloudFunctionIamUpdaterProducer, CloudFunctionsCloudFunctionIdParseFunc), "google_cloudfunctions_function_iam_member": ResourceIamMember(CloudFunctionsCloudFunctionIamSchema, CloudFunctionsCloudFunctionIamUpdaterProducer, CloudFunctionsCloudFunctionIdParseFunc), "google_cloudfunctions_function_iam_policy": ResourceIamPolicy(CloudFunctionsCloudFunctionIamSchema, CloudFunctionsCloudFunctionIamUpdaterProducer, CloudFunctionsCloudFunctionIdParseFunc), + "google_cloudfunctions2_function": resourceCloudfunctions2function(), + "google_cloudfunctions2_function_iam_binding": ResourceIamBinding(Cloudfunctions2functionIamSchema, Cloudfunctions2functionIamUpdaterProducer, Cloudfunctions2functionIdParseFunc), + "google_cloudfunctions2_function_iam_member": ResourceIamMember(Cloudfunctions2functionIamSchema, Cloudfunctions2functionIamUpdaterProducer, 
Cloudfunctions2functionIdParseFunc), + "google_cloudfunctions2_function_iam_policy": ResourceIamPolicy(Cloudfunctions2functionIamSchema, Cloudfunctions2functionIamUpdaterProducer, Cloudfunctions2functionIdParseFunc), "google_cloud_identity_group": resourceCloudIdentityGroup(), "google_cloud_identity_group_membership": resourceCloudIdentityGroupMembership(), "google_cloudiot_registry": resourceCloudIotDeviceRegistry(), @@ -1477,6 +1489,7 @@ func providerConfigure(ctx context.Context, d *schema.ResourceData, p *schema.Pr config.CloudAssetBasePath = d.Get("cloud_asset_custom_endpoint").(string) config.CloudBuildBasePath = d.Get("cloud_build_custom_endpoint").(string) config.CloudFunctionsBasePath = d.Get("cloud_functions_custom_endpoint").(string) + config.Cloudfunctions2BasePath = d.Get("cloudfunctions2_custom_endpoint").(string) config.CloudIdentityBasePath = d.Get("cloud_identity_custom_endpoint").(string) config.CloudIotBasePath = d.Get("cloud_iot_custom_endpoint").(string) config.CloudRunBasePath = d.Get("cloud_run_custom_endpoint").(string) diff --git a/google/resource_cloudfunctions2_function.go b/google/resource_cloudfunctions2_function.go new file mode 100644 index 00000000000..e632e631b10 --- /dev/null +++ b/google/resource_cloudfunctions2_function.go @@ -0,0 +1,1706 @@ +// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** Type: MMv1 *** +// +// ---------------------------------------------------------------------------- +// +// This file is automatically generated by Magic Modules and manual +// changes will be clobbered when the file is regenerated. +// +// Please read more about how to change this file in +// .github/CONTRIBUTING.md. +// +// ---------------------------------------------------------------------------- + +package google + +import ( + "fmt" + "log" + "reflect" + "strings" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +func resourceCloudfunctions2function() *schema.Resource { + return &schema.Resource{ + Create: resourceCloudfunctions2functionCreate, + Read: resourceCloudfunctions2functionRead, + Update: resourceCloudfunctions2functionUpdate, + Delete: resourceCloudfunctions2functionDelete, + + Importer: &schema.ResourceImporter{ + State: resourceCloudfunctions2functionImport, + }, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(60 * time.Minute), + Update: schema.DefaultTimeout(60 * time.Minute), + Delete: schema.DefaultTimeout(60 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: `A user-defined name of the function. Function names must +be unique globally and match pattern 'projects/*/locations/*/functions/*'.`, + }, + "build_config": { + Type: schema.TypeList, + Optional: true, + Description: `Describes the Build step of the function that builds a container +from the given source.`, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "docker_repository": { + Type: schema.TypeString, + Optional: true, + Description: `User managed repository created in Artifact Registry optionally with a customer managed encryption key.`, + }, + "entry_point": { + Type: schema.TypeString, + Optional: true, + Description: `The name of the function (as defined in source code) that will be executed. +Defaults to the resource name suffix, if not specified. 
For backward +compatibility, if function with given name is not found, then the system +will try to use function named "function". For Node.js this is name of a +function exported by the module specified in source_location.`, + }, + "environment_variables": { + Type: schema.TypeMap, + Computed: true, + Optional: true, + Description: `User-provided build-time environment variables for the function.`, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + "runtime": { + Type: schema.TypeString, + Optional: true, + Description: `The runtime in which to run the function. Required when deploying a new +function, optional when updating an existing function.`, + }, + "source": { + Type: schema.TypeList, + Optional: true, + Description: `The location of the function source code.`, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "repo_source": { + Type: schema.TypeList, + Optional: true, + Description: `If provided, get the source from this location in a Cloud Source Repository.`, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "branch_name": { + Type: schema.TypeString, + Optional: true, + Description: `Regex matching branches to build.`, + ExactlyOneOf: []string{}, + }, + "commit_sha": { + Type: schema.TypeString, + Optional: true, + Description: `Regex matching tags to build.`, + ExactlyOneOf: []string{}, + }, + "dir": { + Type: schema.TypeString, + Optional: true, + Description: `Directory, relative to the source root, in which to run the build.`, + }, + "invert_regex": { + Type: schema.TypeBool, + Optional: true, + Description: `Only trigger a build if the revision regex does +NOT match the revision regex.`, + }, + "project_id": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: `ID of the project that owns the Cloud Source Repository. If omitted, the +project ID requesting the build is assumed.`, + }, + "repo_name": { + Type: schema.TypeString, + Optional: true, + Description: `Name of the Cloud Source Repository.`, + }, + "tag_name": { + Type: schema.TypeString, + Optional: true, + Description: `Regex matching tags to build.`, + ExactlyOneOf: []string{}, + }, + }, + }, + ExactlyOneOf: []string{}, + }, + "storage_source": { + Type: schema.TypeList, + Optional: true, + Description: `If provided, get the source from this location in Google Cloud Storage.`, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "bucket": { + Type: schema.TypeString, + Optional: true, + Description: `Google Cloud Storage bucket containing the source`, + }, + "generation": { + Type: schema.TypeInt, + Optional: true, + Description: `Google Cloud Storage generation for the object. 
If the generation +is omitted, the latest generation will be used.`, + }, + "object": { + Type: schema.TypeString, + Optional: true, + Description: `Google Cloud Storage object containing the source.`, + }, + }, + }, + ExactlyOneOf: []string{}, + }, + }, + }, + }, + "worker_pool": { + Type: schema.TypeString, + Optional: true, + Description: `Name of the Cloud Build Custom Worker Pool that should be used to build the function.`, + }, + "build": { + Type: schema.TypeString, + Computed: true, + Description: `The Cloud Build name of the latest successful +deployment of the function.`, + }, + }, + }, + }, + "description": { + Type: schema.TypeString, + Optional: true, + Description: `User-provided description of a function.`, + }, + "event_trigger": { + Type: schema.TypeList, + Optional: true, + Description: `An Eventarc trigger managed by Google Cloud Functions that fires events in +response to a condition in another service.`, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "event_filters": { + Type: schema.TypeSet, + Optional: true, + Description: `Criteria used to filter events.`, + Elem: cloudfunctions2functionEventTriggerEventFiltersSchema(), + // Default schema.HashSchema is used. + }, + "event_type": { + Type: schema.TypeString, + Optional: true, + Description: `Required. The type of event to observe.`, + }, + "pubsub_topic": { + Type: schema.TypeString, + Computed: true, + Optional: true, + Description: `The name of a Pub/Sub topic in the same project that will be used +as the transport topic for the event delivery.`, + }, + "retry_policy": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validateEnum([]string{"RETRY_POLICY_UNSPECIFIED", "RETRY_POLICY_DO_NOT_RETRY", "RETRY_POLICY_RETRY", ""}), + Description: `Describes the retry policy in case of function's execution failure. +Retried execution is charged as any other execution. Possible values: ["RETRY_POLICY_UNSPECIFIED", "RETRY_POLICY_DO_NOT_RETRY", "RETRY_POLICY_RETRY"]`, + }, + "service_account_email": { + Type: schema.TypeString, + Computed: true, + Optional: true, + Description: `The email of the service account for this function.`, + }, + "trigger_region": { + Type: schema.TypeString, + Optional: true, + Description: `The region that the trigger will be in. The trigger will only receive +events originating in this region. It can be the same +region as the function, a different region or multi-region, or the global +region. If not provided, defaults to the same region as the function.`, + }, + "trigger": { + Type: schema.TypeString, + Computed: true, + Description: `The resource name of the Eventarc trigger.`, + }, + }, + }, + }, + "labels": { + Type: schema.TypeMap, + Optional: true, + Description: `A set of key/value label pairs associated with this Cloud Function.`, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + "location": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: `The location of this cloud function.`, + }, + "service_config": { + Type: schema.TypeList, + Optional: true, + Description: `Describes the Service being deployed.`, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "all_traffic_on_latest_revision": { + Type: schema.TypeBool, + Optional: true, + Description: `Whether 100% of traffic is routed to the latest revision. Defaults to true.`, + Default: true, + }, + "available_memory": { + Type: schema.TypeString, + Optional: true, + Description: `The amount of memory available for a function. 
+Defaults to 256M. Supported units are k, M, G, Mi, Gi. If no unit is +supplied the value is interpreted as bytes.`, + }, + "environment_variables": { + Type: schema.TypeMap, + Optional: true, + Description: `Environment variables that shall be available during function execution.`, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + "ingress_settings": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validateEnum([]string{"ALLOW_ALL", "ALLOW_INTERNAL_ONLY", "ALLOW_INTERNAL_AND_GCLB", ""}), + Description: `Available ingress settings. Defaults to "ALLOW_ALL" if unspecified. Default value: "ALLOW_ALL" Possible values: ["ALLOW_ALL", "ALLOW_INTERNAL_ONLY", "ALLOW_INTERNAL_AND_GCLB"]`, + Default: "ALLOW_ALL", + }, + "max_instance_count": { + Type: schema.TypeInt, + Optional: true, + Description: `The limit on the maximum number of function instances that may coexist at a +given time.`, + }, + "min_instance_count": { + Type: schema.TypeInt, + Optional: true, + Description: `The limit on the minimum number of function instances that may coexist at a +given time.`, + }, + "service": { + Type: schema.TypeString, + Computed: true, + Optional: true, + Description: `Name of the service associated with a Function.`, + }, + "service_account_email": { + Type: schema.TypeString, + Computed: true, + Optional: true, + Description: `The email of the service account for this function.`, + }, + "timeout_seconds": { + Type: schema.TypeInt, + Optional: true, + Description: `The function execution timeout. Execution is considered failed and +can be terminated if the function is not completed at the end of the +timeout period. Defaults to 60 seconds.`, + }, + "vpc_connector": { + Type: schema.TypeString, + Optional: true, + Description: `The Serverless VPC Access connector that this cloud function can connect to.`, + }, + "vpc_connector_egress_settings": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validateEnum([]string{"VPC_CONNECTOR_EGRESS_SETTINGS_UNSPECIFIED", "PRIVATE_RANGES_ONLY", "ALL_TRAFFIC", ""}), + Description: `Available egress settings. Possible values: ["VPC_CONNECTOR_EGRESS_SETTINGS_UNSPECIFIED", "PRIVATE_RANGES_ONLY", "ALL_TRAFFIC"]`, + }, + "gcf_uri": { + Type: schema.TypeString, + Computed: true, + Description: `URIs of the Service deployed`, + }, + "uri": { + Type: schema.TypeString, + Computed: true, + Description: `URI of the Service deployed.`, + }, + }, + }, + }, + "environment": { + Type: schema.TypeString, + Computed: true, + Description: `The environment the function is hosted on.`, + }, + "state": { + Type: schema.TypeString, + Computed: true, + Description: `Describes the current state of the function.`, + }, + "update_time": { + Type: schema.TypeString, + Computed: true, + Description: `The last update timestamp of a Cloud Function.`, + }, + "project": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + }, + }, + UseJSONNumber: true, + } +} + +func cloudfunctions2functionEventTriggerEventFiltersSchema() *schema.Resource { + return &schema.Resource{ + Schema: map[string]*schema.Schema{ + "attribute": { + Type: schema.TypeString, + Required: true, + Description: `'Required. The name of a CloudEvents attribute. +Currently, only a subset of attributes are supported for filtering. Use the 'gcloud eventarc providers describe' command to learn more about events and their attributes. 
+Do not filter for the 'type' attribute here, as this is already achieved by the resource's 'event_type' attribute.`, + }, + "value": { + Type: schema.TypeString, + Required: true, + Description: `Required. The value for the attribute. +If the operator field is set as 'match-path-pattern', this value can be a path pattern instead of an exact value.`, + }, + "operator": { + Type: schema.TypeString, + Optional: true, + Description: `Optional. The operator used for matching the events with the value of +the filter. If not specified, only events that have an exact key-value +pair specified in the filter are matched. +The only allowed value is 'match-path-pattern'. +[See documentation on path patterns here](https://cloud.google.com/eventarc/docs/path-patterns)'`, + }, + }, + } +} + +func resourceCloudfunctions2functionCreate(d *schema.ResourceData, meta interface{}) error { + config := meta.(*Config) + userAgent, err := generateUserAgentString(d, config.userAgent) + if err != nil { + return err + } + + obj := make(map[string]interface{}) + nameProp, err := expandCloudfunctions2functionName(d.Get("name"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("name"); !isEmptyValue(reflect.ValueOf(nameProp)) && (ok || !reflect.DeepEqual(v, nameProp)) { + obj["name"] = nameProp + } + descriptionProp, err := expandCloudfunctions2functionDescription(d.Get("description"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("description"); !isEmptyValue(reflect.ValueOf(descriptionProp)) && (ok || !reflect.DeepEqual(v, descriptionProp)) { + obj["description"] = descriptionProp + } + buildConfigProp, err := expandCloudfunctions2functionBuildConfig(d.Get("build_config"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("build_config"); !isEmptyValue(reflect.ValueOf(buildConfigProp)) && (ok || !reflect.DeepEqual(v, buildConfigProp)) { + obj["buildConfig"] = buildConfigProp + } + serviceConfigProp, err := expandCloudfunctions2functionServiceConfig(d.Get("service_config"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("service_config"); !isEmptyValue(reflect.ValueOf(serviceConfigProp)) && (ok || !reflect.DeepEqual(v, serviceConfigProp)) { + obj["serviceConfig"] = serviceConfigProp + } + eventTriggerProp, err := expandCloudfunctions2functionEventTrigger(d.Get("event_trigger"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("event_trigger"); !isEmptyValue(reflect.ValueOf(eventTriggerProp)) && (ok || !reflect.DeepEqual(v, eventTriggerProp)) { + obj["eventTrigger"] = eventTriggerProp + } + labelsProp, err := expandCloudfunctions2functionLabels(d.Get("labels"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("labels"); !isEmptyValue(reflect.ValueOf(labelsProp)) && (ok || !reflect.DeepEqual(v, labelsProp)) { + obj["labels"] = labelsProp + } + + url, err := replaceVars(d, config, "{{Cloudfunctions2BasePath}}projects/{{project}}/locations/{{location}}/functions?functionId={{name}}") + if err != nil { + return err + } + + log.Printf("[DEBUG] Creating new function: %#v", obj) + billingProject := "" + + project, err := getProject(d, config) + if err != nil { + return fmt.Errorf("Error fetching project for function: %s", err) + } + billingProject = project + + // err == nil indicates that the billing_project value was found + if bp, err := getBillingProject(d, config); err == nil { + billingProject = bp + } + + res, err := sendRequestWithTimeout(config, 
"POST", billingProject, url, userAgent, obj, d.Timeout(schema.TimeoutCreate)) + if err != nil { + return fmt.Errorf("Error creating function: %s", err) + } + + // Store the ID now + id, err := replaceVars(d, config, "projects/{{project}}/locations/{{location}}/functions/{{name}}") + if err != nil { + return fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + + // Use the resource in the operation response to populate + // identity fields and d.Id() before read + var opRes map[string]interface{} + err = cloudfunctions2OperationWaitTimeWithResponse( + config, res, &opRes, project, "Creating function", userAgent, + d.Timeout(schema.TimeoutCreate)) + if err != nil { + // The resource didn't actually create + d.SetId("") + return fmt.Errorf("Error waiting to create function: %s", err) + } + + if err := d.Set("name", flattenCloudfunctions2functionName(opRes["name"], d, config)); err != nil { + return err + } + + // This may have caused the ID to update - update it if so. + id, err = replaceVars(d, config, "projects/{{project}}/locations/{{location}}/functions/{{name}}") + if err != nil { + return fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + + log.Printf("[DEBUG] Finished creating function %q: %#v", d.Id(), res) + + return resourceCloudfunctions2functionRead(d, meta) +} + +func resourceCloudfunctions2functionRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*Config) + userAgent, err := generateUserAgentString(d, config.userAgent) + if err != nil { + return err + } + + url, err := replaceVars(d, config, "{{Cloudfunctions2BasePath}}projects/{{project}}/locations/{{location}}/functions/{{name}}") + if err != nil { + return err + } + + billingProject := "" + + project, err := getProject(d, config) + if err != nil { + return fmt.Errorf("Error fetching project for function: %s", err) + } + billingProject = project + + // err == nil indicates that the billing_project value was found + if bp, err := getBillingProject(d, config); err == nil { + billingProject = bp + } + + res, err := sendRequest(config, "GET", billingProject, url, userAgent, nil) + if err != nil { + return handleNotFoundError(err, d, fmt.Sprintf("Cloudfunctions2function %q", d.Id())) + } + + if err := d.Set("project", project); err != nil { + return fmt.Errorf("Error reading function: %s", err) + } + + if err := d.Set("name", flattenCloudfunctions2functionName(res["name"], d, config)); err != nil { + return fmt.Errorf("Error reading function: %s", err) + } + if err := d.Set("description", flattenCloudfunctions2functionDescription(res["description"], d, config)); err != nil { + return fmt.Errorf("Error reading function: %s", err) + } + if err := d.Set("environment", flattenCloudfunctions2functionEnvironment(res["environment"], d, config)); err != nil { + return fmt.Errorf("Error reading function: %s", err) + } + if err := d.Set("state", flattenCloudfunctions2functionState(res["state"], d, config)); err != nil { + return fmt.Errorf("Error reading function: %s", err) + } + if err := d.Set("build_config", flattenCloudfunctions2functionBuildConfig(res["buildConfig"], d, config)); err != nil { + return fmt.Errorf("Error reading function: %s", err) + } + if err := d.Set("service_config", flattenCloudfunctions2functionServiceConfig(res["serviceConfig"], d, config)); err != nil { + return fmt.Errorf("Error reading function: %s", err) + } + if err := d.Set("event_trigger", flattenCloudfunctions2functionEventTrigger(res["eventTrigger"], d, config)); err != nil { + return fmt.Errorf("Error 
reading function: %s", err) + } + if err := d.Set("update_time", flattenCloudfunctions2functionUpdateTime(res["updateTime"], d, config)); err != nil { + return fmt.Errorf("Error reading function: %s", err) + } + if err := d.Set("labels", flattenCloudfunctions2functionLabels(res["labels"], d, config)); err != nil { + return fmt.Errorf("Error reading function: %s", err) + } + + return nil +} + +func resourceCloudfunctions2functionUpdate(d *schema.ResourceData, meta interface{}) error { + config := meta.(*Config) + userAgent, err := generateUserAgentString(d, config.userAgent) + if err != nil { + return err + } + + billingProject := "" + + project, err := getProject(d, config) + if err != nil { + return fmt.Errorf("Error fetching project for function: %s", err) + } + billingProject = project + + obj := make(map[string]interface{}) + descriptionProp, err := expandCloudfunctions2functionDescription(d.Get("description"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("description"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, descriptionProp)) { + obj["description"] = descriptionProp + } + buildConfigProp, err := expandCloudfunctions2functionBuildConfig(d.Get("build_config"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("build_config"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, buildConfigProp)) { + obj["buildConfig"] = buildConfigProp + } + serviceConfigProp, err := expandCloudfunctions2functionServiceConfig(d.Get("service_config"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("service_config"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, serviceConfigProp)) { + obj["serviceConfig"] = serviceConfigProp + } + eventTriggerProp, err := expandCloudfunctions2functionEventTrigger(d.Get("event_trigger"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("event_trigger"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, eventTriggerProp)) { + obj["eventTrigger"] = eventTriggerProp + } + labelsProp, err := expandCloudfunctions2functionLabels(d.Get("labels"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("labels"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, labelsProp)) { + obj["labels"] = labelsProp + } + + url, err := replaceVars(d, config, "{{Cloudfunctions2BasePath}}projects/{{project}}/locations/{{location}}/functions/{{name}}") + if err != nil { + return err + } + + log.Printf("[DEBUG] Updating function %q: %#v", d.Id(), obj) + updateMask := []string{} + + if d.HasChange("description") { + updateMask = append(updateMask, "description") + } + + if d.HasChange("build_config") { + updateMask = append(updateMask, "buildConfig") + } + + if d.HasChange("service_config") { + updateMask = append(updateMask, "serviceConfig") + } + + if d.HasChange("event_trigger") { + updateMask = append(updateMask, "eventTrigger") + } + + if d.HasChange("labels") { + updateMask = append(updateMask, "labels") + } + // updateMask is a URL parameter but not present in the schema, so replaceVars + // won't set it + url, err = addQueryParams(url, map[string]string{"updateMask": strings.Join(updateMask, ",")}) + if err != nil { + return err + } + + // err == nil indicates that the billing_project value was found + if bp, err := getBillingProject(d, config); err == nil { + billingProject = bp + } + + res, err := sendRequestWithTimeout(config, "PATCH", billingProject, url, 
userAgent, obj, d.Timeout(schema.TimeoutUpdate)) + + if err != nil { + return fmt.Errorf("Error updating function %q: %s", d.Id(), err) + } else { + log.Printf("[DEBUG] Finished updating function %q: %#v", d.Id(), res) + } + + err = cloudfunctions2OperationWaitTime( + config, res, project, "Updating function", userAgent, + d.Timeout(schema.TimeoutUpdate)) + + if err != nil { + return err + } + + return resourceCloudfunctions2functionRead(d, meta) +} + +func resourceCloudfunctions2functionDelete(d *schema.ResourceData, meta interface{}) error { + config := meta.(*Config) + userAgent, err := generateUserAgentString(d, config.userAgent) + if err != nil { + return err + } + + billingProject := "" + + project, err := getProject(d, config) + if err != nil { + return fmt.Errorf("Error fetching project for function: %s", err) + } + billingProject = project + + url, err := replaceVars(d, config, "{{Cloudfunctions2BasePath}}projects/{{project}}/locations/{{location}}/functions/{{name}}") + if err != nil { + return err + } + + var obj map[string]interface{} + log.Printf("[DEBUG] Deleting function %q", d.Id()) + + // err == nil indicates that the billing_project value was found + if bp, err := getBillingProject(d, config); err == nil { + billingProject = bp + } + + res, err := sendRequestWithTimeout(config, "DELETE", billingProject, url, userAgent, obj, d.Timeout(schema.TimeoutDelete)) + if err != nil { + return handleNotFoundError(err, d, "function") + } + + err = cloudfunctions2OperationWaitTime( + config, res, project, "Deleting function", userAgent, + d.Timeout(schema.TimeoutDelete)) + + if err != nil { + return err + } + + log.Printf("[DEBUG] Finished deleting function %q: %#v", d.Id(), res) + return nil +} + +func resourceCloudfunctions2functionImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { + config := meta.(*Config) + if err := parseImportId([]string{ + "projects/(?P[^/]+)/locations/(?P[^/]+)/functions/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)", + }, d, config); err != nil { + return nil, err + } + + // Replace import id for the resource id + id, err := replaceVars(d, config, "projects/{{project}}/locations/{{location}}/functions/{{name}}") + if err != nil { + return nil, fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + + return []*schema.ResourceData{d}, nil +} + +func flattenCloudfunctions2functionName(v interface{}, d *schema.ResourceData, config *Config) interface{} { + if v == nil { + return v + } + return NameFromSelfLinkStateFunc(v) +} + +func flattenCloudfunctions2functionDescription(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionEnvironment(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionState(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionBuildConfig(v interface{}, d *schema.ResourceData, config *Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["build"] = + flattenCloudfunctions2functionBuildConfigBuild(original["build"], d, config) + transformed["runtime"] = + flattenCloudfunctions2functionBuildConfigRuntime(original["runtime"], d, config) + transformed["entry_point"] = + 
flattenCloudfunctions2functionBuildConfigEntryPoint(original["entryPoint"], d, config) + transformed["source"] = + flattenCloudfunctions2functionBuildConfigSource(original["source"], d, config) + transformed["worker_pool"] = + flattenCloudfunctions2functionBuildConfigWorkerPool(original["workerPool"], d, config) + transformed["environment_variables"] = + flattenCloudfunctions2functionBuildConfigEnvironmentVariables(original["environmentVariables"], d, config) + transformed["docker_repository"] = + flattenCloudfunctions2functionBuildConfigDockerRepository(original["dockerRepository"], d, config) + return []interface{}{transformed} +} +func flattenCloudfunctions2functionBuildConfigBuild(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionBuildConfigRuntime(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionBuildConfigEntryPoint(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionBuildConfigSource(v interface{}, d *schema.ResourceData, config *Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["storage_source"] = + flattenCloudfunctions2functionBuildConfigSourceStorageSource(original["storageSource"], d, config) + transformed["repo_source"] = + flattenCloudfunctions2functionBuildConfigSourceRepoSource(original["repoSource"], d, config) + return []interface{}{transformed} +} +func flattenCloudfunctions2functionBuildConfigSourceStorageSource(v interface{}, d *schema.ResourceData, config *Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["bucket"] = + flattenCloudfunctions2functionBuildConfigSourceStorageSourceBucket(original["bucket"], d, config) + transformed["object"] = + flattenCloudfunctions2functionBuildConfigSourceStorageSourceObject(original["object"], d, config) + transformed["generation"] = + flattenCloudfunctions2functionBuildConfigSourceStorageSourceGeneration(original["generation"], d, config) + return []interface{}{transformed} +} + +func flattenCloudfunctions2functionBuildConfigSourceStorageSourceBucket(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return d.Get("build_config.0.source.0.storage_source.0.bucket") +} + +func flattenCloudfunctions2functionBuildConfigSourceStorageSourceObject(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return d.Get("build_config.0.source.0.storage_source.0.object") +} + +func flattenCloudfunctions2functionBuildConfigSourceStorageSourceGeneration(v interface{}, d *schema.ResourceData, config *Config) interface{} { + // Handles the string fixed64 format + if strVal, ok := v.(string); ok { + if intVal, err := stringToFixed64(strVal); err == nil { + return intVal + } + } + + // number values are represented as float64 + if floatVal, ok := v.(float64); ok { + intVal := int(floatVal) + return intVal + } + + return v // let terraform core handle it otherwise +} + +func flattenCloudfunctions2functionBuildConfigSourceRepoSource(v interface{}, d *schema.ResourceData, config *Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + 
transformed := make(map[string]interface{}) + transformed["project_id"] = + flattenCloudfunctions2functionBuildConfigSourceRepoSourceProjectId(original["projectId"], d, config) + transformed["repo_name"] = + flattenCloudfunctions2functionBuildConfigSourceRepoSourceRepoName(original["repoName"], d, config) + transformed["branch_name"] = + flattenCloudfunctions2functionBuildConfigSourceRepoSourceBranchName(original["branchName"], d, config) + transformed["tag_name"] = + flattenCloudfunctions2functionBuildConfigSourceRepoSourceTagName(original["tagName"], d, config) + transformed["commit_sha"] = + flattenCloudfunctions2functionBuildConfigSourceRepoSourceCommitSha(original["commitSha"], d, config) + transformed["dir"] = + flattenCloudfunctions2functionBuildConfigSourceRepoSourceDir(original["dir"], d, config) + transformed["invert_regex"] = + flattenCloudfunctions2functionBuildConfigSourceRepoSourceInvertRegex(original["invertRegex"], d, config) + return []interface{}{transformed} +} +func flattenCloudfunctions2functionBuildConfigSourceRepoSourceProjectId(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionBuildConfigSourceRepoSourceRepoName(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionBuildConfigSourceRepoSourceBranchName(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionBuildConfigSourceRepoSourceTagName(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionBuildConfigSourceRepoSourceCommitSha(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionBuildConfigSourceRepoSourceDir(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionBuildConfigSourceRepoSourceInvertRegex(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionBuildConfigWorkerPool(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionBuildConfigEnvironmentVariables(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionBuildConfigDockerRepository(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionServiceConfig(v interface{}, d *schema.ResourceData, config *Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["service"] = + flattenCloudfunctions2functionServiceConfigService(original["service"], d, config) + transformed["timeout_seconds"] = + flattenCloudfunctions2functionServiceConfigTimeoutSeconds(original["timeoutSeconds"], d, config) + transformed["available_memory"] = + flattenCloudfunctions2functionServiceConfigAvailableMemory(original["availableMemory"], d, config) + transformed["environment_variables"] = + flattenCloudfunctions2functionServiceConfigEnvironmentVariables(original["environmentVariables"], d, config) + transformed["max_instance_count"] = + flattenCloudfunctions2functionServiceConfigMaxInstanceCount(original["maxInstanceCount"], d, config) + transformed["min_instance_count"] = + 
flattenCloudfunctions2functionServiceConfigMinInstanceCount(original["minInstanceCount"], d, config) + transformed["vpc_connector"] = + flattenCloudfunctions2functionServiceConfigVPCConnector(original["vpcConnector"], d, config) + transformed["vpc_connector_egress_settings"] = + flattenCloudfunctions2functionServiceConfigVPCConnectorEgressSettings(original["vpcConnectorEgressSettings"], d, config) + transformed["ingress_settings"] = + flattenCloudfunctions2functionServiceConfigIngressSettings(original["ingressSettings"], d, config) + transformed["uri"] = + flattenCloudfunctions2functionServiceConfigUri(original["uri"], d, config) + transformed["gcf_uri"] = + flattenCloudfunctions2functionServiceConfigGcfUri(original["gcfUri"], d, config) + transformed["service_account_email"] = + flattenCloudfunctions2functionServiceConfigServiceAccountEmail(original["serviceAccountEmail"], d, config) + transformed["all_traffic_on_latest_revision"] = + flattenCloudfunctions2functionServiceConfigAllTrafficOnLatestRevision(original["allTrafficOnLatestRevision"], d, config) + return []interface{}{transformed} +} +func flattenCloudfunctions2functionServiceConfigService(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionServiceConfigTimeoutSeconds(v interface{}, d *schema.ResourceData, config *Config) interface{} { + // Handles the string fixed64 format + if strVal, ok := v.(string); ok { + if intVal, err := stringToFixed64(strVal); err == nil { + return intVal + } + } + + // number values are represented as float64 + if floatVal, ok := v.(float64); ok { + intVal := int(floatVal) + return intVal + } + + return v // let terraform core handle it otherwise +} + +func flattenCloudfunctions2functionServiceConfigAvailableMemory(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionServiceConfigEnvironmentVariables(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionServiceConfigMaxInstanceCount(v interface{}, d *schema.ResourceData, config *Config) interface{} { + // Handles the string fixed64 format + if strVal, ok := v.(string); ok { + if intVal, err := stringToFixed64(strVal); err == nil { + return intVal + } + } + + // number values are represented as float64 + if floatVal, ok := v.(float64); ok { + intVal := int(floatVal) + return intVal + } + + return v // let terraform core handle it otherwise +} + +func flattenCloudfunctions2functionServiceConfigMinInstanceCount(v interface{}, d *schema.ResourceData, config *Config) interface{} { + // Handles the string fixed64 format + if strVal, ok := v.(string); ok { + if intVal, err := stringToFixed64(strVal); err == nil { + return intVal + } + } + + // number values are represented as float64 + if floatVal, ok := v.(float64); ok { + intVal := int(floatVal) + return intVal + } + + return v // let terraform core handle it otherwise +} + +func flattenCloudfunctions2functionServiceConfigVPCConnector(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionServiceConfigVPCConnectorEgressSettings(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionServiceConfigIngressSettings(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionServiceConfigUri(v interface{}, d 
*schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionServiceConfigGcfUri(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionServiceConfigServiceAccountEmail(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionServiceConfigAllTrafficOnLatestRevision(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionEventTrigger(v interface{}, d *schema.ResourceData, config *Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["trigger"] = + flattenCloudfunctions2functionEventTriggerTrigger(original["trigger"], d, config) + transformed["trigger_region"] = + flattenCloudfunctions2functionEventTriggerTriggerRegion(original["triggerRegion"], d, config) + transformed["event_type"] = + flattenCloudfunctions2functionEventTriggerEventType(original["eventType"], d, config) + transformed["event_filters"] = + flattenCloudfunctions2functionEventTriggerEventFilters(original["eventFilters"], d, config) + transformed["pubsub_topic"] = + flattenCloudfunctions2functionEventTriggerPubsubTopic(original["pubsubTopic"], d, config) + transformed["service_account_email"] = + flattenCloudfunctions2functionEventTriggerServiceAccountEmail(original["serviceAccountEmail"], d, config) + transformed["retry_policy"] = + flattenCloudfunctions2functionEventTriggerRetryPolicy(original["retryPolicy"], d, config) + return []interface{}{transformed} +} +func flattenCloudfunctions2functionEventTriggerTrigger(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionEventTriggerTriggerRegion(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionEventTriggerEventType(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionEventTriggerEventFilters(v interface{}, d *schema.ResourceData, config *Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := schema.NewSet(schema.HashResource(cloudfunctions2functionEventTriggerEventFiltersSchema()), []interface{}{}) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed.Add(map[string]interface{}{ + "attribute": flattenCloudfunctions2functionEventTriggerEventFiltersAttribute(original["attribute"], d, config), + "value": flattenCloudfunctions2functionEventTriggerEventFiltersValue(original["value"], d, config), + "operator": flattenCloudfunctions2functionEventTriggerEventFiltersOperator(original["operator"], d, config), + }) + } + return transformed +} +func flattenCloudfunctions2functionEventTriggerEventFiltersAttribute(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionEventTriggerEventFiltersValue(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionEventTriggerEventFiltersOperator(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func 
flattenCloudfunctions2functionEventTriggerPubsubTopic(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionEventTriggerServiceAccountEmail(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionEventTriggerRetryPolicy(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionUpdateTime(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenCloudfunctions2functionLabels(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func expandCloudfunctions2functionName(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return replaceVars(d, config, "projects/{{project}}/locations/{{location}}/functions/{{name}}") +} + +func expandCloudfunctions2functionDescription(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionBuildConfig(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + l := v.([]interface{}) + if len(l) == 0 || l[0] == nil { + return nil, nil + } + raw := l[0] + original := raw.(map[string]interface{}) + transformed := make(map[string]interface{}) + + transformedBuild, err := expandCloudfunctions2functionBuildConfigBuild(original["build"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedBuild); val.IsValid() && !isEmptyValue(val) { + transformed["build"] = transformedBuild + } + + transformedRuntime, err := expandCloudfunctions2functionBuildConfigRuntime(original["runtime"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedRuntime); val.IsValid() && !isEmptyValue(val) { + transformed["runtime"] = transformedRuntime + } + + transformedEntryPoint, err := expandCloudfunctions2functionBuildConfigEntryPoint(original["entry_point"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedEntryPoint); val.IsValid() && !isEmptyValue(val) { + transformed["entryPoint"] = transformedEntryPoint + } + + transformedSource, err := expandCloudfunctions2functionBuildConfigSource(original["source"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedSource); val.IsValid() && !isEmptyValue(val) { + transformed["source"] = transformedSource + } + + transformedWorkerPool, err := expandCloudfunctions2functionBuildConfigWorkerPool(original["worker_pool"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedWorkerPool); val.IsValid() && !isEmptyValue(val) { + transformed["workerPool"] = transformedWorkerPool + } + + transformedEnvironmentVariables, err := expandCloudfunctions2functionBuildConfigEnvironmentVariables(original["environment_variables"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedEnvironmentVariables); val.IsValid() && !isEmptyValue(val) { + transformed["environmentVariables"] = transformedEnvironmentVariables + } + + transformedDockerRepository, err := expandCloudfunctions2functionBuildConfigDockerRepository(original["docker_repository"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedDockerRepository); val.IsValid() && !isEmptyValue(val) { + transformed["dockerRepository"] = 
transformedDockerRepository + } + + return transformed, nil +} + +func expandCloudfunctions2functionBuildConfigBuild(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionBuildConfigRuntime(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionBuildConfigEntryPoint(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionBuildConfigSource(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + l := v.([]interface{}) + if len(l) == 0 || l[0] == nil { + return nil, nil + } + raw := l[0] + original := raw.(map[string]interface{}) + transformed := make(map[string]interface{}) + + transformedStorageSource, err := expandCloudfunctions2functionBuildConfigSourceStorageSource(original["storage_source"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedStorageSource); val.IsValid() && !isEmptyValue(val) { + transformed["storageSource"] = transformedStorageSource + } + + transformedRepoSource, err := expandCloudfunctions2functionBuildConfigSourceRepoSource(original["repo_source"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedRepoSource); val.IsValid() && !isEmptyValue(val) { + transformed["repoSource"] = transformedRepoSource + } + + return transformed, nil +} + +func expandCloudfunctions2functionBuildConfigSourceStorageSource(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + l := v.([]interface{}) + if len(l) == 0 || l[0] == nil { + return nil, nil + } + raw := l[0] + original := raw.(map[string]interface{}) + transformed := make(map[string]interface{}) + + transformedBucket, err := expandCloudfunctions2functionBuildConfigSourceStorageSourceBucket(original["bucket"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedBucket); val.IsValid() && !isEmptyValue(val) { + transformed["bucket"] = transformedBucket + } + + transformedObject, err := expandCloudfunctions2functionBuildConfigSourceStorageSourceObject(original["object"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedObject); val.IsValid() && !isEmptyValue(val) { + transformed["object"] = transformedObject + } + + transformedGeneration, err := expandCloudfunctions2functionBuildConfigSourceStorageSourceGeneration(original["generation"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedGeneration); val.IsValid() && !isEmptyValue(val) { + transformed["generation"] = transformedGeneration + } + + return transformed, nil +} + +func expandCloudfunctions2functionBuildConfigSourceStorageSourceBucket(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionBuildConfigSourceStorageSourceObject(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionBuildConfigSourceStorageSourceGeneration(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionBuildConfigSourceRepoSource(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + l := v.([]interface{}) + if len(l) == 0 || l[0] == nil { + return 
nil, nil + } + raw := l[0] + original := raw.(map[string]interface{}) + transformed := make(map[string]interface{}) + + transformedProjectId, err := expandCloudfunctions2functionBuildConfigSourceRepoSourceProjectId(original["project_id"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedProjectId); val.IsValid() && !isEmptyValue(val) { + transformed["projectId"] = transformedProjectId + } + + transformedRepoName, err := expandCloudfunctions2functionBuildConfigSourceRepoSourceRepoName(original["repo_name"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedRepoName); val.IsValid() && !isEmptyValue(val) { + transformed["repoName"] = transformedRepoName + } + + transformedBranchName, err := expandCloudfunctions2functionBuildConfigSourceRepoSourceBranchName(original["branch_name"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedBranchName); val.IsValid() && !isEmptyValue(val) { + transformed["branchName"] = transformedBranchName + } + + transformedTagName, err := expandCloudfunctions2functionBuildConfigSourceRepoSourceTagName(original["tag_name"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedTagName); val.IsValid() && !isEmptyValue(val) { + transformed["tagName"] = transformedTagName + } + + transformedCommitSha, err := expandCloudfunctions2functionBuildConfigSourceRepoSourceCommitSha(original["commit_sha"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedCommitSha); val.IsValid() && !isEmptyValue(val) { + transformed["commitSha"] = transformedCommitSha + } + + transformedDir, err := expandCloudfunctions2functionBuildConfigSourceRepoSourceDir(original["dir"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedDir); val.IsValid() && !isEmptyValue(val) { + transformed["dir"] = transformedDir + } + + transformedInvertRegex, err := expandCloudfunctions2functionBuildConfigSourceRepoSourceInvertRegex(original["invert_regex"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedInvertRegex); val.IsValid() && !isEmptyValue(val) { + transformed["invertRegex"] = transformedInvertRegex + } + + return transformed, nil +} + +func expandCloudfunctions2functionBuildConfigSourceRepoSourceProjectId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionBuildConfigSourceRepoSourceRepoName(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionBuildConfigSourceRepoSourceBranchName(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionBuildConfigSourceRepoSourceTagName(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionBuildConfigSourceRepoSourceCommitSha(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionBuildConfigSourceRepoSourceDir(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionBuildConfigSourceRepoSourceInvertRegex(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + 
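// Editorial sketch, not part of the generated patch: the expand helpers above all
// repeat one MMv1 shape -- a nested block arrives from Terraform as a single-element
// list, so the expander unwraps l[0], renames each snake_case field to its camelCase
// API name, and skips empty values (isEmptyValue plays that role in the real code)
// so they are not sent in the request body. The helper below is a hypothetical,
// stripped-down illustration of that shape using only plain maps; the function name
// and the keyMap parameter are inventions for this sketch, not provider APIs.
func expandNestedBlockSketch(v interface{}, keyMap map[string]string) map[string]interface{} {
	l, ok := v.([]interface{})
	if !ok || len(l) == 0 || l[0] == nil {
		return nil // block not set in the configuration
	}
	original, ok := l[0].(map[string]interface{})
	if !ok {
		return nil
	}
	transformed := make(map[string]interface{})
	for tfKey, apiKey := range keyMap {
		// Skip nil and empty-string values so the API request only carries real values.
		if val, exists := original[tfKey]; exists && val != nil && val != "" {
			transformed[apiKey] = val
		}
	}
	return transformed
}
// For example, expandCloudfunctions2functionBuildConfigSourceRepoSource above is roughly
// expandNestedBlockSketch(d.Get("build_config.0.source.0.repo_source"),
// map[string]string{"repo_name": "repoName", "branch_name": "branchName", ...})
// plus per-field error handling.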
+func expandCloudfunctions2functionBuildConfigWorkerPool(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionBuildConfigEnvironmentVariables(v interface{}, d TerraformResourceData, config *Config) (map[string]string, error) { + if v == nil { + return map[string]string{}, nil + } + m := make(map[string]string) + for k, val := range v.(map[string]interface{}) { + m[k] = val.(string) + } + return m, nil +} + +func expandCloudfunctions2functionBuildConfigDockerRepository(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionServiceConfig(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + l := v.([]interface{}) + if len(l) == 0 || l[0] == nil { + return nil, nil + } + raw := l[0] + original := raw.(map[string]interface{}) + transformed := make(map[string]interface{}) + + transformedService, err := expandCloudfunctions2functionServiceConfigService(original["service"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedService); val.IsValid() && !isEmptyValue(val) { + transformed["service"] = transformedService + } + + transformedTimeoutSeconds, err := expandCloudfunctions2functionServiceConfigTimeoutSeconds(original["timeout_seconds"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedTimeoutSeconds); val.IsValid() && !isEmptyValue(val) { + transformed["timeoutSeconds"] = transformedTimeoutSeconds + } + + transformedAvailableMemory, err := expandCloudfunctions2functionServiceConfigAvailableMemory(original["available_memory"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedAvailableMemory); val.IsValid() && !isEmptyValue(val) { + transformed["availableMemory"] = transformedAvailableMemory + } + + transformedEnvironmentVariables, err := expandCloudfunctions2functionServiceConfigEnvironmentVariables(original["environment_variables"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedEnvironmentVariables); val.IsValid() && !isEmptyValue(val) { + transformed["environmentVariables"] = transformedEnvironmentVariables + } + + transformedMaxInstanceCount, err := expandCloudfunctions2functionServiceConfigMaxInstanceCount(original["max_instance_count"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedMaxInstanceCount); val.IsValid() && !isEmptyValue(val) { + transformed["maxInstanceCount"] = transformedMaxInstanceCount + } + + transformedMinInstanceCount, err := expandCloudfunctions2functionServiceConfigMinInstanceCount(original["min_instance_count"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedMinInstanceCount); val.IsValid() && !isEmptyValue(val) { + transformed["minInstanceCount"] = transformedMinInstanceCount + } + + transformedVPCConnector, err := expandCloudfunctions2functionServiceConfigVPCConnector(original["vpc_connector"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedVPCConnector); val.IsValid() && !isEmptyValue(val) { + transformed["vpcConnector"] = transformedVPCConnector + } + + transformedVPCConnectorEgressSettings, err := expandCloudfunctions2functionServiceConfigVPCConnectorEgressSettings(original["vpc_connector_egress_settings"], d, config) + if err != nil { + return nil, err + } else if 
val := reflect.ValueOf(transformedVPCConnectorEgressSettings); val.IsValid() && !isEmptyValue(val) { + transformed["vpcConnectorEgressSettings"] = transformedVPCConnectorEgressSettings + } + + transformedIngressSettings, err := expandCloudfunctions2functionServiceConfigIngressSettings(original["ingress_settings"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedIngressSettings); val.IsValid() && !isEmptyValue(val) { + transformed["ingressSettings"] = transformedIngressSettings + } + + transformedUri, err := expandCloudfunctions2functionServiceConfigUri(original["uri"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedUri); val.IsValid() && !isEmptyValue(val) { + transformed["uri"] = transformedUri + } + + transformedGcfUri, err := expandCloudfunctions2functionServiceConfigGcfUri(original["gcf_uri"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedGcfUri); val.IsValid() && !isEmptyValue(val) { + transformed["gcfUri"] = transformedGcfUri + } + + transformedServiceAccountEmail, err := expandCloudfunctions2functionServiceConfigServiceAccountEmail(original["service_account_email"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedServiceAccountEmail); val.IsValid() && !isEmptyValue(val) { + transformed["serviceAccountEmail"] = transformedServiceAccountEmail + } + + transformedAllTrafficOnLatestRevision, err := expandCloudfunctions2functionServiceConfigAllTrafficOnLatestRevision(original["all_traffic_on_latest_revision"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedAllTrafficOnLatestRevision); val.IsValid() && !isEmptyValue(val) { + transformed["allTrafficOnLatestRevision"] = transformedAllTrafficOnLatestRevision + } + + return transformed, nil +} + +func expandCloudfunctions2functionServiceConfigService(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionServiceConfigTimeoutSeconds(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionServiceConfigAvailableMemory(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionServiceConfigEnvironmentVariables(v interface{}, d TerraformResourceData, config *Config) (map[string]string, error) { + if v == nil { + return map[string]string{}, nil + } + m := make(map[string]string) + for k, val := range v.(map[string]interface{}) { + m[k] = val.(string) + } + return m, nil +} + +func expandCloudfunctions2functionServiceConfigMaxInstanceCount(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionServiceConfigMinInstanceCount(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionServiceConfigVPCConnector(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionServiceConfigVPCConnectorEgressSettings(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionServiceConfigIngressSettings(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func 
expandCloudfunctions2functionServiceConfigUri(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionServiceConfigGcfUri(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionServiceConfigServiceAccountEmail(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionServiceConfigAllTrafficOnLatestRevision(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionEventTrigger(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + l := v.([]interface{}) + if len(l) == 0 || l[0] == nil { + return nil, nil + } + raw := l[0] + original := raw.(map[string]interface{}) + transformed := make(map[string]interface{}) + + transformedTrigger, err := expandCloudfunctions2functionEventTriggerTrigger(original["trigger"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedTrigger); val.IsValid() && !isEmptyValue(val) { + transformed["trigger"] = transformedTrigger + } + + transformedTriggerRegion, err := expandCloudfunctions2functionEventTriggerTriggerRegion(original["trigger_region"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedTriggerRegion); val.IsValid() && !isEmptyValue(val) { + transformed["triggerRegion"] = transformedTriggerRegion + } + + transformedEventType, err := expandCloudfunctions2functionEventTriggerEventType(original["event_type"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedEventType); val.IsValid() && !isEmptyValue(val) { + transformed["eventType"] = transformedEventType + } + + transformedEventFilters, err := expandCloudfunctions2functionEventTriggerEventFilters(original["event_filters"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedEventFilters); val.IsValid() && !isEmptyValue(val) { + transformed["eventFilters"] = transformedEventFilters + } + + transformedPubsubTopic, err := expandCloudfunctions2functionEventTriggerPubsubTopic(original["pubsub_topic"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedPubsubTopic); val.IsValid() && !isEmptyValue(val) { + transformed["pubsubTopic"] = transformedPubsubTopic + } + + transformedServiceAccountEmail, err := expandCloudfunctions2functionEventTriggerServiceAccountEmail(original["service_account_email"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedServiceAccountEmail); val.IsValid() && !isEmptyValue(val) { + transformed["serviceAccountEmail"] = transformedServiceAccountEmail + } + + transformedRetryPolicy, err := expandCloudfunctions2functionEventTriggerRetryPolicy(original["retry_policy"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedRetryPolicy); val.IsValid() && !isEmptyValue(val) { + transformed["retryPolicy"] = transformedRetryPolicy + } + + return transformed, nil +} + +func expandCloudfunctions2functionEventTriggerTrigger(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionEventTriggerTriggerRegion(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + 
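// Editorial sketch, not part of the generated patch: event_filters (handled just
// below) is modeled as a schema.Set on the Terraform side, so the generated expander
// first materializes the set with (*schema.Set).List() and then converts each block
// to the API's shape, dropping empty fields. A hypothetical, self-contained version
// of that per-item conversion (the function name is an invention for this sketch):
func expandEventFilterItemSketch(original map[string]interface{}) map[string]interface{} {
	transformed := make(map[string]interface{})
	// attribute and value carry the filter itself; operator is only sent when
	// non-empty (e.g. "match-path-pattern" in the audit-log example config).
	for _, key := range []string{"attribute", "value", "operator"} {
		if s, ok := original[key].(string); ok && s != "" {
			transformed[key] = s
		}
	}
	return transformed
}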
+func expandCloudfunctions2functionEventTriggerEventType(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionEventTriggerEventFilters(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + v = v.(*schema.Set).List() + l := v.([]interface{}) + req := make([]interface{}, 0, len(l)) + for _, raw := range l { + if raw == nil { + continue + } + original := raw.(map[string]interface{}) + transformed := make(map[string]interface{}) + + transformedAttribute, err := expandCloudfunctions2functionEventTriggerEventFiltersAttribute(original["attribute"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedAttribute); val.IsValid() && !isEmptyValue(val) { + transformed["attribute"] = transformedAttribute + } + + transformedValue, err := expandCloudfunctions2functionEventTriggerEventFiltersValue(original["value"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedValue); val.IsValid() && !isEmptyValue(val) { + transformed["value"] = transformedValue + } + + transformedOperator, err := expandCloudfunctions2functionEventTriggerEventFiltersOperator(original["operator"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedOperator); val.IsValid() && !isEmptyValue(val) { + transformed["operator"] = transformedOperator + } + + req = append(req, transformed) + } + return req, nil +} + +func expandCloudfunctions2functionEventTriggerEventFiltersAttribute(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionEventTriggerEventFiltersValue(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionEventTriggerEventFiltersOperator(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionEventTriggerPubsubTopic(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionEventTriggerServiceAccountEmail(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionEventTriggerRetryPolicy(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandCloudfunctions2functionLabels(v interface{}, d TerraformResourceData, config *Config) (map[string]string, error) { + if v == nil { + return map[string]string{}, nil + } + m := make(map[string]string) + for k, val := range v.(map[string]interface{}) { + m[k] = val.(string) + } + return m, nil +} diff --git a/google/resource_cloudfunctions2_function_generated_test.go b/google/resource_cloudfunctions2_function_generated_test.go new file mode 100644 index 00000000000..dbbf0d303b0 --- /dev/null +++ b/google/resource_cloudfunctions2_function_generated_test.go @@ -0,0 +1,511 @@ +// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** Type: MMv1 *** +// +// ---------------------------------------------------------------------------- +// +// This file is automatically generated by Magic Modules and manual +// changes will be clobbered when the file is regenerated. +// +// Please read more about how to change this file in +// .github/CONTRIBUTING.md. 
+// +// ---------------------------------------------------------------------------- + +package google + +import ( + "fmt" + "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" +) + +func TestAccCloudfunctions2function_cloudfunctions2BasicExample(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project": getTestProjectFromEnv(), + "zip_path": "./test-fixtures/cloudfunctions2/function-source.zip", + "location": "us-central1", + "random_suffix": randString(t, 10), + } + + vcrTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckCloudfunctions2functionDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccCloudfunctions2function_cloudfunctions2BasicExample(context), + }, + { + ResourceName: "google_cloudfunctions2_function.function", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "build_config.0.source.0.storage_source.0.object", "build_config.0.source.0.storage_source.0.bucket"}, + }, + }, + }) +} + +func testAccCloudfunctions2function_cloudfunctions2BasicExample(context map[string]interface{}) string { + return Nprintf(` +# [START functions_v2_basic] +locals { + project = "%{project}" # Google Cloud Platform Project ID +} + +resource "google_storage_bucket" "bucket" { + name = "${local.project}-tf-test-gcf-source%{random_suffix}" # Every bucket name must be globally unique + location = "US" + uniform_bucket_level_access = true +} + +resource "google_storage_bucket_object" "object" { + name = "function-source.zip" + bucket = google_storage_bucket.bucket.name + source = "%{zip_path}" # Add path to the zipped function source code +} + +resource "google_cloudfunctions2_function" "function" { + name = "tf-test-function-v2%{random_suffix}" + location = "us-central1" + description = "a new function" + + build_config { + runtime = "nodejs16" + entry_point = "helloHttp" # Set the entry point + source { + storage_source { + bucket = google_storage_bucket.bucket.name + object = google_storage_bucket_object.object.name + } + } + } + + service_config { + max_instance_count = 1 + available_memory = "256M" + timeout_seconds = 60 + } +} + +output "function_uri" { + value = google_cloudfunctions2_function.function.service_config[0].uri +} +# [END functions_v2_basic] +`, context) +} + +func TestAccCloudfunctions2function_cloudfunctions2FullExample(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project": getTestProjectFromEnv(), + "zip_path": "./test-fixtures/cloudfunctions2/function-source-pubsub.zip", + "primary_resource_id": "terraform-test", + "location": "us-central1", + "random_suffix": randString(t, 10), + } + + vcrTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckCloudfunctions2functionDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccCloudfunctions2function_cloudfunctions2FullExample(context), + }, + { + ResourceName: "google_cloudfunctions2_function.function", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "build_config.0.source.0.storage_source.0.object", "build_config.0.source.0.storage_source.0.bucket"}, + }, + }, + }) +} + +func testAccCloudfunctions2function_cloudfunctions2FullExample(context map[string]interface{}) string { + return Nprintf(` +# [START 
functions_v2_full] +locals { + project = "%{project}" # Google Cloud Platform Project ID +} + +resource "google_service_account" "account" { + account_id = "sa%{random_suffix}" + display_name = "Test Service Account" +} + +resource "google_pubsub_topic" "topic" { + name = "tf-test-functions2-topic%{random_suffix}" +} + +resource "google_storage_bucket" "bucket" { + name = "${local.project}-tf-test-gcf-source%{random_suffix}" # Every bucket name must be globally unique + location = "US" + uniform_bucket_level_access = true +} + +resource "google_storage_bucket_object" "object" { + name = "function-source.zip" + bucket = google_storage_bucket.bucket.name + source = "%{zip_path}" # Add path to the zipped function source code +} + +resource "google_cloudfunctions2_function" "function" { + name = "function%{random_suffix}" + location = "us-central1" + description = "a new function" + + build_config { + runtime = "nodejs16" + entry_point = "helloPubSub" # Set the entry point + environment_variables = { + BUILD_CONFIG_TEST = "build_test" + } + source { + storage_source { + bucket = google_storage_bucket.bucket.name + object = google_storage_bucket_object.object.name + } + } + } + + service_config { + max_instance_count = 3 + min_instance_count = 1 + available_memory = "256M" + timeout_seconds = 60 + environment_variables = { + SERVICE_CONFIG_TEST = "config_test" + } + ingress_settings = "ALLOW_INTERNAL_ONLY" + all_traffic_on_latest_revision = true + service_account_email = google_service_account.account.email + } + + event_trigger { + trigger_region = "us-central1" + event_type = "google.cloud.pubsub.topic.v1.messagePublished" + pubsub_topic = google_pubsub_topic.topic.id + retry_policy = "RETRY_POLICY_RETRY" + } +} +# [END functions_v2_full] +`, context) +} + +func TestAccCloudfunctions2function_cloudfunctions2BasicGcsExample(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project": getTestProjectFromEnv(), + "zip_path": "./test-fixtures/cloudfunctions2/function-source-eventarc-gcs.zip", + "primary_resource_id": "terraform-test", + "random_suffix": randString(t, 10), + } + + vcrTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckCloudfunctions2functionDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccCloudfunctions2function_cloudfunctions2BasicGcsExample(context), + }, + { + ResourceName: "google_cloudfunctions2_function.function", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "build_config.0.source.0.storage_source.0.object", "build_config.0.source.0.storage_source.0.bucket"}, + }, + }, + }) +} + +func testAccCloudfunctions2function_cloudfunctions2BasicGcsExample(context map[string]interface{}) string { + return Nprintf(` +# [START functions_v2_basic_gcs] + +resource "google_storage_bucket" "source-bucket" { + name = "tf-test-gcf-source-bucket%{random_suffix}" + location = "US" + uniform_bucket_level_access = true +} + +resource "google_storage_bucket_object" "object" { + name = "function-source.zip" + bucket = google_storage_bucket.source-bucket.name + source = "%{zip_path}" # Add path to the zipped function source code +} + +resource "google_storage_bucket" "trigger-bucket" { + name = "tf-test-gcf-trigger-bucket%{random_suffix}" + location = "us-central1" # The trigger must be in the same location as the bucket + uniform_bucket_level_access = true +} + +data "google_storage_project_service_account" "gcs_account" { +} + +# 
To use GCS CloudEvent triggers, the GCS service account requires the Pub/Sub Publisher(roles/pubsub.publisher) IAM role in the specified project. +# (See https://cloud.google.com/eventarc/docs/run/quickstart-storage#before-you-begin) +resource "google_project_iam_member" "gcs-pubsub-publishing" { + project = "%{project}" + role = "roles/pubsub.publisher" + member = "serviceAccount:${data.google_storage_project_service_account.gcs_account.email_address}" +} + +resource "google_service_account" "account" { + account_id = "sa%{random_suffix}" + display_name = "Test Service Account - used for both the cloud function and eventarc trigger in the test" +} + +# Permissions on the service account used by the function and Eventarc trigger +resource "google_project_iam_member" "invoking" { + project = "%{project}" + role = "roles/run.invoker" + member = "serviceAccount:${google_service_account.account.email}" +} + +resource "google_project_iam_member" "event-receiving" { + project = "%{project}" + role = "roles/eventarc.eventReceiver" + member = "serviceAccount:${google_service_account.account.email}" +} + +resource "google_project_iam_member" "artifactregistry-reader" { + project = "%{project}" + role = "roles/artifactregistry.reader" + member = "serviceAccount:${google_service_account.account.email}" +} + +resource "google_cloudfunctions2_function" "function" { + depends_on = [ + google_project_iam_member.event-receiving, + google_project_iam_member.artifactregistry-reader, + ] + name = "function%{random_suffix}" + location = "us-central1" + description = "a new function" + + build_config { + runtime = "nodejs12" + entry_point = "entryPoint" # Set the entry point in the code + environment_variables = { + BUILD_CONFIG_TEST = "build_test" + } + source { + storage_source { + bucket = google_storage_bucket.source-bucket.name + object = google_storage_bucket_object.object.name + } + } + } + + service_config { + max_instance_count = 3 + min_instance_count = 1 + available_memory = "256M" + timeout_seconds = 60 + environment_variables = { + SERVICE_CONFIG_TEST = "config_test" + } + ingress_settings = "ALLOW_INTERNAL_ONLY" + all_traffic_on_latest_revision = true + service_account_email = google_service_account.account.email + } + + event_trigger { + trigger_region = "us-central1" # The trigger must be in the same location as the bucket + event_type = "google.cloud.storage.object.v1.finalized" + retry_policy = "RETRY_POLICY_RETRY" + service_account_email = google_service_account.account.email + event_filters { + attribute = "bucket" + value = google_storage_bucket.trigger-bucket.name + } + } +} +# [END functions_v2_basic_gcs] +`, context) +} + +func TestAccCloudfunctions2function_cloudfunctions2BasicAuditlogsExample(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project": getTestProjectFromEnv(), + "zip_path": "./test-fixtures/cloudfunctions2/function-source-eventarc-gcs.zip", + "primary_resource_id": "terraform-test", + "random_suffix": randString(t, 10), + } + + vcrTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckCloudfunctions2functionDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccCloudfunctions2function_cloudfunctions2BasicAuditlogsExample(context), + }, + { + ResourceName: "google_cloudfunctions2_function.function", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "build_config.0.source.0.storage_source.0.object", 
"build_config.0.source.0.storage_source.0.bucket"}, + }, + }, + }) +} + +func testAccCloudfunctions2function_cloudfunctions2BasicAuditlogsExample(context map[string]interface{}) string { + return Nprintf(` +# [START functions_v2_basic_auditlogs] +# This example follows the examples shown in this Google Cloud Community blog post +# https://medium.com/google-cloud/applying-a-path-pattern-when-filtering-in-eventarc-f06b937b4c34 +# and the docs: +# https://cloud.google.com/eventarc/docs/path-patterns + +resource "google_storage_bucket" "source-bucket" { + name = "tf-test-gcf-source-bucket%{random_suffix}" + location = "US" + uniform_bucket_level_access = true +} + +resource "google_storage_bucket_object" "object" { + name = "function-source.zip" + bucket = google_storage_bucket.source-bucket.name + source = "%{zip_path}" # Add path to the zipped function source code +} + +resource "google_service_account" "account" { + account_id = "tf-test-gcf-sa%{random_suffix}" + display_name = "Test Service Account - used for both the cloud function and eventarc trigger in the test" +} + +# Note: The right way of listening for Cloud Storage events is to use a Cloud Storage trigger. +# Here we use Audit Logs to monitor the bucket so path patterns can be used in the example of +# google_cloudfunctions2_function below (Audit Log events have path pattern support) +resource "google_storage_bucket" "audit-log-bucket" { + name = "tf-test-gcf-auditlog-bucket%{random_suffix}" + location = "us-central1" # The trigger must be in the same location as the bucket + uniform_bucket_level_access = true +} + +# Permissions on the service account used by the function and Eventarc trigger +resource "google_project_iam_member" "invoking" { + project = "%{project}" + role = "roles/run.invoker" + member = "serviceAccount:${google_service_account.account.email}" +} + +resource "google_project_iam_member" "event-receiving" { + project = "%{project}" + role = "roles/eventarc.eventReceiver" + member = "serviceAccount:${google_service_account.account.email}" +} + +resource "google_project_iam_member" "artifactregistry-reader" { + project = "%{project}" + role = "roles/artifactregistry.reader" + member = "serviceAccount:${google_service_account.account.email}" +} + +resource "google_cloudfunctions2_function" "function" { + depends_on = [ + google_project_iam_member.event-receiving, + google_project_iam_member.artifactregistry-reader, + ] + name = "tf-test-gcf-function%{random_suffix}" + location = "us-central1" + description = "a new function" + + build_config { + runtime = "nodejs12" + entry_point = "entryPoint" # Set the entry point in the code + environment_variables = { + BUILD_CONFIG_TEST = "build_test" + } + source { + storage_source { + bucket = google_storage_bucket.source-bucket.name + object = google_storage_bucket_object.object.name + } + } + } + + service_config { + max_instance_count = 3 + min_instance_count = 1 + available_memory = "256M" + timeout_seconds = 60 + environment_variables = { + SERVICE_CONFIG_TEST = "config_test" + } + ingress_settings = "ALLOW_INTERNAL_ONLY" + all_traffic_on_latest_revision = true + service_account_email = google_service_account.account.email + } + + event_trigger { + trigger_region = "us-central1" # The trigger must be in the same location as the bucket + event_type = "google.cloud.audit.log.v1.written" + retry_policy = "RETRY_POLICY_RETRY" + service_account_email = google_service_account.account.email + event_filters { + attribute = "serviceName" + value = "storage.googleapis.com" + } + 
event_filters { + attribute = "methodName" + value = "storage.objects.create" + } + event_filters { + attribute = "resourceName" + value = "/projects/_/buckets/${google_storage_bucket.audit-log-bucket.name}/objects/*.txt" # Path pattern selects all .txt files in the bucket + operator = "match-path-pattern" # This allows path patterns to be used in the value field + } + } +} +# [END functions_v2_basic_auditlogs] +`, context) +} + +func testAccCheckCloudfunctions2functionDestroyProducer(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + for name, rs := range s.RootModule().Resources { + if rs.Type != "google_cloudfunctions2_function" { + continue + } + if strings.HasPrefix(name, "data.") { + continue + } + + config := googleProviderConfig(t) + + url, err := replaceVarsForTest(config, rs, "{{Cloudfunctions2BasePath}}projects/{{project}}/locations/{{location}}/functions/{{name}}") + if err != nil { + return err + } + + billingProject := "" + + if config.BillingProject != "" { + billingProject = config.BillingProject + } + + _, err = sendRequest(config, "GET", billingProject, url, config.userAgent, nil) + if err == nil { + return fmt.Errorf("Cloudfunctions2function still exists at %s", url) + } + } + + return nil + } +} diff --git a/google/resource_cloudfunctions2_function_sweeper_test.go b/google/resource_cloudfunctions2_function_sweeper_test.go new file mode 100644 index 00000000000..f9e6d8addff --- /dev/null +++ b/google/resource_cloudfunctions2_function_sweeper_test.go @@ -0,0 +1,124 @@ +// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** Type: MMv1 *** +// +// ---------------------------------------------------------------------------- +// +// This file is automatically generated by Magic Modules and manual +// changes will be clobbered when the file is regenerated. +// +// Please read more about how to change this file in +// .github/CONTRIBUTING.md. 
+// +// ---------------------------------------------------------------------------- + +package google + +import ( + "context" + "log" + "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func init() { + resource.AddTestSweepers("Cloudfunctions2function", &resource.Sweeper{ + Name: "Cloudfunctions2function", + F: testSweepCloudfunctions2function, + }) +} + +// At the time of writing, the CI only passes us-central1 as the region +func testSweepCloudfunctions2function(region string) error { + resourceName := "Cloudfunctions2function" + log.Printf("[INFO][SWEEPER_LOG] Starting sweeper for %s", resourceName) + + config, err := sharedConfigForRegion(region) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error getting shared config for region: %s", err) + return err + } + + err = config.LoadAndValidate(context.Background()) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error loading: %s", err) + return err + } + + t := &testing.T{} + billingId := getTestBillingAccountFromEnv(t) + + // Setup variables to replace in list template + d := &ResourceDataMock{ + FieldsInSchema: map[string]interface{}{ + "project": config.Project, + "region": region, + "location": region, + "zone": "-", + "billing_account": billingId, + }, + } + + listTemplate := strings.Split("https://cloudfunctions.googleapis.com/v2/projects/{{project}}/locations/{{location}}/functions", "?")[0] + listUrl, err := replaceVars(d, config, listTemplate) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error preparing sweeper list url: %s", err) + return nil + } + + res, err := sendRequest(config, "GET", config.Project, listUrl, config.userAgent, nil) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] Error in response from request %s: %s", listUrl, err) + return nil + } + + resourceList, ok := res["functions"] + if !ok { + log.Printf("[INFO][SWEEPER_LOG] Nothing found in response.") + return nil + } + + rl := resourceList.([]interface{}) + + log.Printf("[INFO][SWEEPER_LOG] Found %d items in %s list response.", len(rl), resourceName) + // Keep count of items that aren't sweepable for logging. 
+ nonPrefixCount := 0 + for _, ri := range rl { + obj := ri.(map[string]interface{}) + if obj["name"] == nil { + log.Printf("[INFO][SWEEPER_LOG] %s resource name was nil", resourceName) + return nil + } + + name := GetResourceNameFromSelfLink(obj["name"].(string)) + // Skip resources that shouldn't be sweeped + if !isSweepableTestResource(name) { + nonPrefixCount++ + continue + } + + deleteTemplate := "https://cloudfunctions.googleapis.com/v2/projects/{{project}}/locations/{{location}}/functions/{{name}}" + deleteUrl, err := replaceVars(d, config, deleteTemplate) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error preparing delete url: %s", err) + return nil + } + deleteUrl = deleteUrl + name + + // Don't wait on operations as we may have a lot to delete + _, err = sendRequest(config, "DELETE", config.Project, deleteUrl, config.userAgent, nil) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] Error deleting for url %s : %s", deleteUrl, err) + } else { + log.Printf("[INFO][SWEEPER_LOG] Sent delete request for %s resource: %s", resourceName, name) + } + } + + if nonPrefixCount > 0 { + log.Printf("[INFO][SWEEPER_LOG] %d items were non-sweepable and skipped.", nonPrefixCount) + } + + return nil +} diff --git a/website/docs/r/cloudfunctions2_function.html.markdown b/website/docs/r/cloudfunctions2_function.html.markdown index 6d2041dbdd6..dce370c37af 100644 --- a/website/docs/r/cloudfunctions2_function.html.markdown +++ b/website/docs/r/cloudfunctions2_function.html.markdown @@ -22,8 +22,6 @@ description: |- A Cloud Function that contains user computation executed in response to an event. -~> **Warning:** This resource is in beta, and should be used with the terraform-provider-google-beta provider. -See [Provider Versions](https://terraform.io/docs/providers/google/guides/provider_versions.html) for more details on beta resources. 
To get more information about function, see: @@ -38,27 +36,20 @@ locals { project = "my-project-name" # Google Cloud Platform Project ID } -provider "google-beta" { - project = local.project -} - resource "google_storage_bucket" "bucket" { - provider = google-beta name = "${local.project}-gcf-source" # Every bucket name must be globally unique location = "US" uniform_bucket_level_access = true } resource "google_storage_bucket_object" "object" { - provider = google-beta name = "function-source.zip" bucket = google_storage_bucket.bucket.name source = "function-source.zip" # Add path to the zipped function source code } resource "google_cloudfunctions2_function" "function" { - provider = google-beta - name = "test-function" + name = "function-v2" location = "us-central1" description = "a new function" @@ -94,38 +85,29 @@ locals { project = "my-project-name" # Google Cloud Platform Project ID } -provider "google-beta" { - project = local.project -} - resource "google_service_account" "account" { - provider = google-beta - account_id = "test-sa" + account_id = "sa" display_name = "Test Service Account" } resource "google_pubsub_topic" "topic" { - provider = google-beta name = "functions2-topic" } resource "google_storage_bucket" "bucket" { - provider = google-beta name = "${local.project}-gcf-source" # Every bucket name must be globally unique location = "US" uniform_bucket_level_access = true } resource "google_storage_bucket_object" "object" { - provider = google-beta name = "function-source.zip" bucket = google_storage_bucket.bucket.name source = "function-source.zip" # Add path to the zipped function source code } resource "google_cloudfunctions2_function" "function" { - provider = google-beta - name = "test-function" + name = "function" location = "us-central1" description = "a new function" @@ -172,74 +154,64 @@ resource "google_cloudfunctions2_function" "function" { # [START functions_v2_basic_gcs] resource "google_storage_bucket" "source-bucket" { - provider = google-beta name = "gcf-source-bucket" location = "US" uniform_bucket_level_access = true } resource "google_storage_bucket_object" "object" { - provider = google-beta name = "function-source.zip" bucket = google_storage_bucket.source-bucket.name source = "function-source.zip" # Add path to the zipped function source code } resource "google_storage_bucket" "trigger-bucket" { - provider = google-beta name = "gcf-trigger-bucket" location = "us-central1" # The trigger must be in the same location as the bucket uniform_bucket_level_access = true } data "google_storage_project_service_account" "gcs_account" { - provider = google-beta } # To use GCS CloudEvent triggers, the GCS service account requires the Pub/Sub Publisher(roles/pubsub.publisher) IAM role in the specified project. 
# (See https://cloud.google.com/eventarc/docs/run/quickstart-storage#before-you-begin) resource "google_project_iam_member" "gcs-pubsub-publishing" { - provider = google-beta project = "my-project-name" role = "roles/pubsub.publisher" member = "serviceAccount:${data.google_storage_project_service_account.gcs_account.email_address}" } resource "google_service_account" "account" { - provider = google-beta - account_id = "test-sa" + account_id = "sa" display_name = "Test Service Account - used for both the cloud function and eventarc trigger in the test" } # Permissions on the service account used by the function and Eventarc trigger resource "google_project_iam_member" "invoking" { - provider = google-beta project = "my-project-name" role = "roles/run.invoker" member = "serviceAccount:${google_service_account.account.email}" } resource "google_project_iam_member" "event-receiving" { - provider = google-beta project = "my-project-name" role = "roles/eventarc.eventReceiver" member = "serviceAccount:${google_service_account.account.email}" } resource "google_project_iam_member" "artifactregistry-reader" { - provider = google-beta project = "my-project-name" role = "roles/artifactregistry.reader" member = "serviceAccount:${google_service_account.account.email}" } resource "google_cloudfunctions2_function" "function" { - provider = google-beta depends_on = [ google_project_iam_member.event-receiving, google_project_iam_member.artifactregistry-reader, ] - name = "test-function" + name = "function" location = "us-central1" description = "a new function" @@ -294,21 +266,18 @@ resource "google_cloudfunctions2_function" "function" { # https://cloud.google.com/eventarc/docs/path-patterns resource "google_storage_bucket" "source-bucket" { - provider = google-beta name = "gcf-source-bucket" location = "US" uniform_bucket_level_access = true } resource "google_storage_bucket_object" "object" { - provider = google-beta name = "function-source.zip" bucket = google_storage_bucket.source-bucket.name source = "function-source.zip" # Add path to the zipped function source code } resource "google_service_account" "account" { - provider = google-beta account_id = "gcf-sa" display_name = "Test Service Account - used for both the cloud function and eventarc trigger in the test" } @@ -317,7 +286,6 @@ resource "google_service_account" "account" { # Here we use Audit Logs to monitor the bucket so path patterns can be used in the example of # google_cloudfunctions2_function below (Audit Log events have path pattern support) resource "google_storage_bucket" "audit-log-bucket" { - provider = google-beta name = "gcf-auditlog-bucket" location = "us-central1" # The trigger must be in the same location as the bucket uniform_bucket_level_access = true @@ -325,28 +293,24 @@ resource "google_storage_bucket" "audit-log-bucket" { # Permissions on the service account used by the function and Eventarc trigger resource "google_project_iam_member" "invoking" { - provider = google-beta project = "my-project-name" role = "roles/run.invoker" member = "serviceAccount:${google_service_account.account.email}" } resource "google_project_iam_member" "event-receiving" { - provider = google-beta project = "my-project-name" role = "roles/eventarc.eventReceiver" member = "serviceAccount:${google_service_account.account.email}" } resource "google_project_iam_member" "artifactregistry-reader" { - provider = google-beta project = "my-project-name" role = "roles/artifactregistry.reader" member = 
"serviceAccount:${google_service_account.account.email}" } resource "google_cloudfunctions2_function" "function" { - provider = google-beta depends_on = [ google_project_iam_member.event-receiving, google_project_iam_member.artifactregistry-reader, diff --git a/website/docs/r/cloudfunctions2_function_iam.html.markdown b/website/docs/r/cloudfunctions2_function_iam.html.markdown index 3e73aaadc0f..3d49d2624e0 100644 --- a/website/docs/r/cloudfunctions2_function_iam.html.markdown +++ b/website/docs/r/cloudfunctions2_function_iam.html.markdown @@ -30,15 +30,12 @@ Three different resources help you manage your IAM policy for Cloud Functions (2 ~> **Note:** `google_cloudfunctions2_function_iam_binding` resources **can be** used in conjunction with `google_cloudfunctions2_function_iam_member` resources **only if** they do not grant privilege to the same role. -~> **Warning:** This resource is in beta, and should be used with the terraform-provider-google-beta provider. -See [Provider Versions](https://terraform.io/docs/providers/google/guides/provider_versions.html) for more details on beta resources. ## google\_cloudfunctions2\_function\_iam\_policy ```hcl data "google_iam_policy" "admin" { - provider = google-beta binding { role = "roles/viewer" members = [ @@ -48,9 +45,9 @@ data "google_iam_policy" "admin" { } resource "google_cloudfunctions2_function_iam_policy" "policy" { - provider = google-beta - cloud_function = google_cloudfunctions2_function.%{primary_resource_id}.name - location = "%{location}" + project = google_cloudfunctions2_function.function.project + location = google_cloudfunctions2_function.function.location + cloud_function = google_cloudfunctions2_function.function.name policy_data = data.google_iam_policy.admin.policy_data } ``` @@ -59,9 +56,9 @@ resource "google_cloudfunctions2_function_iam_policy" "policy" { ```hcl resource "google_cloudfunctions2_function_iam_binding" "binding" { - provider = google-beta - cloud_function = google_cloudfunctions2_function.%{primary_resource_id}.name - location = "%{location}" + project = google_cloudfunctions2_function.function.project + location = google_cloudfunctions2_function.function.location + cloud_function = google_cloudfunctions2_function.function.name role = "roles/viewer" members = [ "user:jane@example.com", @@ -73,9 +70,9 @@ resource "google_cloudfunctions2_function_iam_binding" "binding" { ```hcl resource "google_cloudfunctions2_function_iam_member" "member" { - provider = google-beta - cloud_function = google_cloudfunctions2_function.%{primary_resource_id}.name - location = "%{location}" + project = google_cloudfunctions2_function.function.project + location = google_cloudfunctions2_function.function.location + cloud_function = google_cloudfunctions2_function.function.name role = "roles/viewer" member = "user:jane@example.com" }