From 82898ab79039e3ee9a52595059a013d6e9ff7310 Mon Sep 17 00:00:00 2001 From: The Magician Date: Wed, 20 Jul 2022 16:53:32 -0700 Subject: [PATCH] Add Dataplex Zone resource (#6075) (#12146) * Add Dataplex Zone resource * Fix zone name * Update DCL to 1.10.2 * run make upgrade-dcl * Update DCL to v1.15.1 * run make upgrade Signed-off-by: Modular Magician --- .changelog/6075.txt | 3 + go.mod | 2 +- go.sum | 6 +- google/provider.go | 1 + google/resource_container_azure_node_pool.go | 6 - google/resource_dataplex_zone.go | 688 ++++++++++++++++++ .../resource_dataplex_zone_generated_test.go | 184 +++++ website/docs/r/dataplex_zone.html.markdown | 216 ++++++ 8 files changed, 1097 insertions(+), 9 deletions(-) create mode 100644 .changelog/6075.txt create mode 100644 google/resource_dataplex_zone.go create mode 100644 google/resource_dataplex_zone_generated_test.go create mode 100644 website/docs/r/dataplex_zone.html.markdown diff --git a/.changelog/6075.txt b/.changelog/6075.txt new file mode 100644 index 00000000000..e9215d1e28e --- /dev/null +++ b/.changelog/6075.txt @@ -0,0 +1,3 @@ +```release-note:new-resource +`google_dataplex_zone` +``` diff --git a/go.mod b/go.mod index 9b1ba4ad665..e370b3c1004 100644 --- a/go.mod +++ b/go.mod @@ -1,7 +1,7 @@ module github.com/hashicorp/terraform-provider-google require ( cloud.google.com/go/bigtable v1.13.0 - github.com/GoogleCloudPlatform/declarative-resource-client-library v1.14.4 + github.com/GoogleCloudPlatform/declarative-resource-client-library v1.15.1 github.com/apparentlymart/go-cidr v1.1.0 github.com/client9/misspell v0.3.4 github.com/davecgh/go-spew v1.1.1 diff --git a/go.sum b/go.sum index c9899741bdc..6c19fad5cdf 100644 --- a/go.sum +++ b/go.sum @@ -1516,5 +1516,7 @@ rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8 rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= sigs.k8s.io/yaml 
v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.14.4 h1:CxVWVPoWru9OCIUYJRUgH7jNWaZZlKBy8RdG8qbn1vU= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.14.4/go.mod h1:i6Pmzp7aolLmJY86RaJ9wjqm/HFleMeN7Vl5uIWLwE8= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.15.1 h1:ex2UFZoVSbfsHK0zDsilMpXXs1vBDwi2wk1Fxd5N3D0= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.15.1/go.mod h1:i6Pmzp7aolLmJY86RaJ9wjqm/HFleMeN7Vl5uIWLwE8= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.15.1 h1:ex2UFZoVSbfsHK0zDsilMpXXs1vBDwi2wk1Fxd5N3D0= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.15.1/go.mod h1:i6Pmzp7aolLmJY86RaJ9wjqm/HFleMeN7Vl5uIWLwE8= diff --git a/google/provider.go b/google/provider.go index a2f1d67d528..b9dc159b576 100644 --- a/google/provider.go +++ b/google/provider.go @@ -1325,6 +1325,7 @@ func ResourceMapWithErrors() (map[string]*schema.Resource, error) { "google_container_azure_cluster": resourceContainerAzureCluster(), "google_container_azure_node_pool": resourceContainerAzureNodePool(), "google_dataplex_lake": resourceDataplexLake(), + "google_dataplex_zone": resourceDataplexZone(), "google_dataproc_workflow_template": resourceDataprocWorkflowTemplate(), "google_eventarc_trigger": resourceEventarcTrigger(), "google_firebaserules_release": resourceFirebaserulesRelease(), diff --git a/google/resource_container_azure_node_pool.go b/google/resource_container_azure_node_pool.go index 802314d32b5..36e7a3846ff 100644 --- a/google/resource_container_azure_node_pool.go +++ b/google/resource_container_azure_node_pool.go @@ -48,7 +48,6 @@ func resourceContainerAzureNodePool() *schema.Resource { "autoscaling": { Type: schema.TypeList, Required: true, - ForceNew: true, Description: "Autoscaler configuration for this node pool.", MaxItems: 1, Elem: 
ContainerAzureNodePoolAutoscalingSchema(), @@ -65,7 +64,6 @@ func resourceContainerAzureNodePool() *schema.Resource { "config": { Type: schema.TypeList, Required: true, - ForceNew: true, Description: "The node configuration of the node pool.", MaxItems: 1, Elem: ContainerAzureNodePoolConfigSchema(), @@ -176,14 +174,12 @@ func ContainerAzureNodePoolAutoscalingSchema() *schema.Resource { "max_node_count": { Type: schema.TypeInt, Required: true, - ForceNew: true, Description: "Maximum number of nodes in the node pool. Must be >= min_node_count.", }, "min_node_count": { Type: schema.TypeInt, Required: true, - ForceNew: true, Description: "Minimum number of nodes in the node pool. Must be >= 1 and <= max_node_count.", }, }, @@ -196,7 +192,6 @@ func ContainerAzureNodePoolConfigSchema() *schema.Resource { "ssh_config": { Type: schema.TypeList, Required: true, - ForceNew: true, Description: "SSH configuration for how to access the node pool machines.", MaxItems: 1, Elem: ContainerAzureNodePoolConfigSshConfigSchema(), @@ -246,7 +241,6 @@ func ContainerAzureNodePoolConfigSshConfigSchema() *schema.Resource { "authorized_key": { Type: schema.TypeString, Required: true, - ForceNew: true, Description: "The SSH public key data for VMs managed by Anthos. This accepts the authorized_keys file format used in OpenSSH according to the sshd(8) manual page.", }, }, diff --git a/google/resource_dataplex_zone.go b/google/resource_dataplex_zone.go new file mode 100644 index 00000000000..d4bb8d1005a --- /dev/null +++ b/google/resource_dataplex_zone.go @@ -0,0 +1,688 @@ +// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** Type: DCL *** +// +// ---------------------------------------------------------------------------- +// +// This file is managed by Magic Modules (https://github.com/GoogleCloudPlatform/magic-modules) +// and is based on the DCL (https://github.com/GoogleCloudPlatform/declarative-resource-client-library). 
+// Changes will need to be made to the DCL or Magic Modules instead of here. +// +// We are not currently able to accept contributions to this file. If changes +// are required, please file an issue at https://github.com/hashicorp/terraform-provider-google/issues/new/choose +// +// ---------------------------------------------------------------------------- + +package google + +import ( + "context" + "fmt" + "log" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + + dcl "github.com/GoogleCloudPlatform/declarative-resource-client-library/dcl" + dataplex "github.com/GoogleCloudPlatform/declarative-resource-client-library/services/google/dataplex" +) + +func resourceDataplexZone() *schema.Resource { + return &schema.Resource{ + Create: resourceDataplexZoneCreate, + Read: resourceDataplexZoneRead, + Update: resourceDataplexZoneUpdate, + Delete: resourceDataplexZoneDelete, + + Importer: &schema.ResourceImporter{ + State: resourceDataplexZoneImport, + }, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(20 * time.Minute), + Update: schema.DefaultTimeout(20 * time.Minute), + Delete: schema.DefaultTimeout(20 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "discovery_spec": { + Type: schema.TypeList, + Required: true, + Description: "Required. Specification of the discovery feature applied to data in this zone.", + MaxItems: 1, + Elem: DataplexZoneDiscoverySpecSchema(), + }, + + "lake": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "The lake for the resource", + }, + + "location": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "The location for the resource", + }, + + "name": { + Type: schema.TypeString, + Required: true, + DiffSuppressFunc: compareSelfLinkOrResourceName, + Description: "The name of the zone.", + }, + + "resource_spec": { + Type: schema.TypeList, + Required: true, + ForceNew: true, + Description: "Required. Immutable. 
Specification of the resources that are referenced by the assets within this zone.", + MaxItems: 1, + Elem: DataplexZoneResourceSpecSchema(), + }, + + "type": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Required. Immutable. The type of the zone. Possible values: TYPE_UNSPECIFIED, RAW, CURATED", + }, + + "description": { + Type: schema.TypeString, + Optional: true, + Description: "Optional. Description of the zone.", + }, + + "display_name": { + Type: schema.TypeString, + Optional: true, + Description: "Optional. User friendly display name.", + }, + + "labels": { + Type: schema.TypeMap, + Optional: true, + Description: "Optional. User defined labels for the zone.", + Elem: &schema.Schema{Type: schema.TypeString}, + }, + + "project": { + Type: schema.TypeString, + Computed: true, + Optional: true, + ForceNew: true, + DiffSuppressFunc: compareSelfLinkOrResourceName, + Description: "The project for the resource", + }, + + "asset_status": { + Type: schema.TypeList, + Computed: true, + Description: "Output only. Aggregated status of the underlying assets of the zone.", + Elem: DataplexZoneAssetStatusSchema(), + }, + + "create_time": { + Type: schema.TypeString, + Computed: true, + Description: "Output only. The time when the zone was created.", + }, + + "state": { + Type: schema.TypeString, + Computed: true, + Description: "Output only. Current state of the zone. Possible values: STATE_UNSPECIFIED, ACTIVE, CREATING, DELETING, ACTION_REQUIRED", + }, + + "uid": { + Type: schema.TypeString, + Computed: true, + Description: "Output only. System generated globally unique ID for the zone. This ID will be different if the zone is deleted and re-created with the same name.", + }, + + "update_time": { + Type: schema.TypeString, + Computed: true, + Description: "Output only. 
The time when the zone was last updated.", + }, + }, + } +} + +func DataplexZoneDiscoverySpecSchema() *schema.Resource { + return &schema.Resource{ + Schema: map[string]*schema.Schema{ + "enabled": { + Type: schema.TypeBool, + Required: true, + Description: "Required. Whether discovery is enabled.", + }, + + "csv_options": { + Type: schema.TypeList, + Optional: true, + Description: "Optional. Configuration for CSV data.", + MaxItems: 1, + Elem: DataplexZoneDiscoverySpecCsvOptionsSchema(), + }, + + "exclude_patterns": { + Type: schema.TypeList, + Optional: true, + Description: "Optional. The list of patterns to apply for selecting data to exclude during discovery. For Cloud Storage bucket assets, these are interpreted as glob patterns used to match object names. For BigQuery dataset assets, these are interpreted as patterns to match table names.", + Elem: &schema.Schema{Type: schema.TypeString}, + }, + + "include_patterns": { + Type: schema.TypeList, + Optional: true, + Description: "Optional. The list of patterns to apply for selecting data to include during discovery if only a subset of the data should considered. For Cloud Storage bucket assets, these are interpreted as glob patterns used to match object names. For BigQuery dataset assets, these are interpreted as patterns to match table names.", + Elem: &schema.Schema{Type: schema.TypeString}, + }, + + "json_options": { + Type: schema.TypeList, + Optional: true, + Description: "Optional. Configuration for Json data.", + MaxItems: 1, + Elem: DataplexZoneDiscoverySpecJsonOptionsSchema(), + }, + + "schedule": { + Type: schema.TypeString, + Computed: true, + Optional: true, + Description: "Optional. Cron schedule (https://en.wikipedia.org/wiki/Cron) for running discovery periodically. Successive discovery runs must be scheduled at least 60 minutes apart. The default value is to run discovery every 60 minutes. 
To explicitly set a timezone to the cron tab, apply a prefix in the cron tab: \"CRON_TZ=${IANA_TIME_ZONE}\" or TZ=${IANA_TIME_ZONE}\". The ${IANA_TIME_ZONE} may only be a valid string from IANA time zone database. For example, \"CRON_TZ=America/New_York 1 * * * *\", or \"TZ=America/New_York 1 * * * *\".", + }, + }, + } +} + +func DataplexZoneDiscoverySpecCsvOptionsSchema() *schema.Resource { + return &schema.Resource{ + Schema: map[string]*schema.Schema{ + "delimiter": { + Type: schema.TypeString, + Optional: true, + Description: "Optional. The delimiter being used to separate values. This defaults to ','.", + }, + + "disable_type_inference": { + Type: schema.TypeBool, + Optional: true, + Description: "Optional. Whether to disable the inference of data type for CSV data. If true, all columns will be registered as strings.", + }, + + "encoding": { + Type: schema.TypeString, + Optional: true, + Description: "Optional. The character encoding of the data. The default is UTF-8.", + }, + + "header_rows": { + Type: schema.TypeInt, + Optional: true, + Description: "Optional. The number of rows to interpret as header rows that should be skipped when reading data rows.", + }, + }, + } +} + +func DataplexZoneDiscoverySpecJsonOptionsSchema() *schema.Resource { + return &schema.Resource{ + Schema: map[string]*schema.Schema{ + "disable_type_inference": { + Type: schema.TypeBool, + Optional: true, + Description: "Optional. Whether to disable the inference of data type for Json data. If true, all columns will be registered as their primitive types (strings, number or boolean).", + }, + + "encoding": { + Type: schema.TypeString, + Optional: true, + Description: "Optional. The character encoding of the data. The default is UTF-8.", + }, + }, + } +} + +func DataplexZoneResourceSpecSchema() *schema.Resource { + return &schema.Resource{ + Schema: map[string]*schema.Schema{ + "location_type": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Required. 
Immutable. The location type of the resources that are allowed to be attached to the assets within this zone. Possible values: LOCATION_TYPE_UNSPECIFIED, SINGLE_REGION, MULTI_REGION", + }, + }, + } +} + +func DataplexZoneAssetStatusSchema() *schema.Resource { + return &schema.Resource{ + Schema: map[string]*schema.Schema{ + "active_assets": { + Type: schema.TypeInt, + Computed: true, + Description: "Number of active assets.", + }, + + "security_policy_applying_assets": { + Type: schema.TypeInt, + Computed: true, + Description: "Number of assets that are in process of updating the security policy on attached resources.", + }, + + "update_time": { + Type: schema.TypeString, + Computed: true, + Description: "Last update time of the status.", + }, + }, + } +} + +func resourceDataplexZoneCreate(d *schema.ResourceData, meta interface{}) error { + config := meta.(*Config) + project, err := getProject(d, config) + if err != nil { + return err + } + + obj := &dataplex.Zone{ + DiscoverySpec: expandDataplexZoneDiscoverySpec(d.Get("discovery_spec")), + Lake: dcl.String(d.Get("lake").(string)), + Location: dcl.String(d.Get("location").(string)), + Name: dcl.String(d.Get("name").(string)), + ResourceSpec: expandDataplexZoneResourceSpec(d.Get("resource_spec")), + Type: dataplex.ZoneTypeEnumRef(d.Get("type").(string)), + Description: dcl.String(d.Get("description").(string)), + DisplayName: dcl.String(d.Get("display_name").(string)), + Labels: checkStringMap(d.Get("labels")), + Project: dcl.String(project), + } + + id, err := obj.ID() + if err != nil { + return fmt.Errorf("error constructing id: %s", err) + } + d.SetId(id) + createDirective := CreateDirective + userAgent, err := generateUserAgentString(d, config.userAgent) + if err != nil { + return err + } + billingProject := project + // err == nil indicates that the billing_project value was found + if bp, err := getBillingProject(d, config); err == nil { + billingProject = bp + } + client := NewDCLDataplexClient(config, 
userAgent, billingProject, d.Timeout(schema.TimeoutCreate)) + if bp, err := replaceVars(d, config, client.Config.BasePath); err != nil { + d.SetId("") + return fmt.Errorf("Could not format %q: %w", client.Config.BasePath, err) + } else { + client.Config.BasePath = bp + } + res, err := client.ApplyZone(context.Background(), obj, createDirective...) + + if _, ok := err.(dcl.DiffAfterApplyError); ok { + log.Printf("[DEBUG] Diff after apply returned from the DCL: %s", err) + } else if err != nil { + // The resource didn't actually create + d.SetId("") + return fmt.Errorf("Error creating Zone: %s", err) + } + + log.Printf("[DEBUG] Finished creating Zone %q: %#v", d.Id(), res) + + return resourceDataplexZoneRead(d, meta) +} + +func resourceDataplexZoneRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*Config) + project, err := getProject(d, config) + if err != nil { + return err + } + + obj := &dataplex.Zone{ + DiscoverySpec: expandDataplexZoneDiscoverySpec(d.Get("discovery_spec")), + Lake: dcl.String(d.Get("lake").(string)), + Location: dcl.String(d.Get("location").(string)), + Name: dcl.String(d.Get("name").(string)), + ResourceSpec: expandDataplexZoneResourceSpec(d.Get("resource_spec")), + Type: dataplex.ZoneTypeEnumRef(d.Get("type").(string)), + Description: dcl.String(d.Get("description").(string)), + DisplayName: dcl.String(d.Get("display_name").(string)), + Labels: checkStringMap(d.Get("labels")), + Project: dcl.String(project), + } + + userAgent, err := generateUserAgentString(d, config.userAgent) + if err != nil { + return err + } + billingProject := project + // err == nil indicates that the billing_project value was found + if bp, err := getBillingProject(d, config); err == nil { + billingProject = bp + } + client := NewDCLDataplexClient(config, userAgent, billingProject, d.Timeout(schema.TimeoutRead)) + if bp, err := replaceVars(d, config, client.Config.BasePath); err != nil { + d.SetId("") + return fmt.Errorf("Could not format %q: %w", 
client.Config.BasePath, err) + } else { + client.Config.BasePath = bp + } + res, err := client.GetZone(context.Background(), obj) + if err != nil { + resourceName := fmt.Sprintf("DataplexZone %q", d.Id()) + return handleNotFoundDCLError(err, d, resourceName) + } + + if err = d.Set("discovery_spec", flattenDataplexZoneDiscoverySpec(res.DiscoverySpec)); err != nil { + return fmt.Errorf("error setting discovery_spec in state: %s", err) + } + if err = d.Set("lake", res.Lake); err != nil { + return fmt.Errorf("error setting lake in state: %s", err) + } + if err = d.Set("location", res.Location); err != nil { + return fmt.Errorf("error setting location in state: %s", err) + } + if err = d.Set("name", res.Name); err != nil { + return fmt.Errorf("error setting name in state: %s", err) + } + if err = d.Set("resource_spec", flattenDataplexZoneResourceSpec(res.ResourceSpec)); err != nil { + return fmt.Errorf("error setting resource_spec in state: %s", err) + } + if err = d.Set("type", res.Type); err != nil { + return fmt.Errorf("error setting type in state: %s", err) + } + if err = d.Set("description", res.Description); err != nil { + return fmt.Errorf("error setting description in state: %s", err) + } + if err = d.Set("display_name", res.DisplayName); err != nil { + return fmt.Errorf("error setting display_name in state: %s", err) + } + if err = d.Set("labels", res.Labels); err != nil { + return fmt.Errorf("error setting labels in state: %s", err) + } + if err = d.Set("project", res.Project); err != nil { + return fmt.Errorf("error setting project in state: %s", err) + } + if err = d.Set("asset_status", flattenDataplexZoneAssetStatus(res.AssetStatus)); err != nil { + return fmt.Errorf("error setting asset_status in state: %s", err) + } + if err = d.Set("create_time", res.CreateTime); err != nil { + return fmt.Errorf("error setting create_time in state: %s", err) + } + if err = d.Set("state", res.State); err != nil { + return fmt.Errorf("error setting state in state: %s", 
err) + } + if err = d.Set("uid", res.Uid); err != nil { + return fmt.Errorf("error setting uid in state: %s", err) + } + if err = d.Set("update_time", res.UpdateTime); err != nil { + return fmt.Errorf("error setting update_time in state: %s", err) + } + + return nil +} +func resourceDataplexZoneUpdate(d *schema.ResourceData, meta interface{}) error { + config := meta.(*Config) + project, err := getProject(d, config) + if err != nil { + return err + } + + obj := &dataplex.Zone{ + DiscoverySpec: expandDataplexZoneDiscoverySpec(d.Get("discovery_spec")), + Lake: dcl.String(d.Get("lake").(string)), + Location: dcl.String(d.Get("location").(string)), + Name: dcl.String(d.Get("name").(string)), + ResourceSpec: expandDataplexZoneResourceSpec(d.Get("resource_spec")), + Type: dataplex.ZoneTypeEnumRef(d.Get("type").(string)), + Description: dcl.String(d.Get("description").(string)), + DisplayName: dcl.String(d.Get("display_name").(string)), + Labels: checkStringMap(d.Get("labels")), + Project: dcl.String(project), + } + directive := UpdateDirective + userAgent, err := generateUserAgentString(d, config.userAgent) + if err != nil { + return err + } + + billingProject := "" + // err == nil indicates that the billing_project value was found + if bp, err := getBillingProject(d, config); err == nil { + billingProject = bp + } + client := NewDCLDataplexClient(config, userAgent, billingProject, d.Timeout(schema.TimeoutUpdate)) + if bp, err := replaceVars(d, config, client.Config.BasePath); err != nil { + d.SetId("") + return fmt.Errorf("Could not format %q: %w", client.Config.BasePath, err) + } else { + client.Config.BasePath = bp + } + res, err := client.ApplyZone(context.Background(), obj, directive...) 
+ + if _, ok := err.(dcl.DiffAfterApplyError); ok { + log.Printf("[DEBUG] Diff after apply returned from the DCL: %s", err) + } else if err != nil { + // The resource didn't actually create + d.SetId("") + return fmt.Errorf("Error updating Zone: %s", err) + } + + log.Printf("[DEBUG] Finished creating Zone %q: %#v", d.Id(), res) + + return resourceDataplexZoneRead(d, meta) +} + +func resourceDataplexZoneDelete(d *schema.ResourceData, meta interface{}) error { + config := meta.(*Config) + project, err := getProject(d, config) + if err != nil { + return err + } + + obj := &dataplex.Zone{ + DiscoverySpec: expandDataplexZoneDiscoverySpec(d.Get("discovery_spec")), + Lake: dcl.String(d.Get("lake").(string)), + Location: dcl.String(d.Get("location").(string)), + Name: dcl.String(d.Get("name").(string)), + ResourceSpec: expandDataplexZoneResourceSpec(d.Get("resource_spec")), + Type: dataplex.ZoneTypeEnumRef(d.Get("type").(string)), + Description: dcl.String(d.Get("description").(string)), + DisplayName: dcl.String(d.Get("display_name").(string)), + Labels: checkStringMap(d.Get("labels")), + Project: dcl.String(project), + } + + log.Printf("[DEBUG] Deleting Zone %q", d.Id()) + userAgent, err := generateUserAgentString(d, config.userAgent) + if err != nil { + return err + } + billingProject := project + // err == nil indicates that the billing_project value was found + if bp, err := getBillingProject(d, config); err == nil { + billingProject = bp + } + client := NewDCLDataplexClient(config, userAgent, billingProject, d.Timeout(schema.TimeoutDelete)) + if bp, err := replaceVars(d, config, client.Config.BasePath); err != nil { + d.SetId("") + return fmt.Errorf("Could not format %q: %w", client.Config.BasePath, err) + } else { + client.Config.BasePath = bp + } + if err := client.DeleteZone(context.Background(), obj); err != nil { + return fmt.Errorf("Error deleting Zone: %s", err) + } + + log.Printf("[DEBUG] Finished deleting Zone %q", d.Id()) + return nil +} + +func 
resourceDataplexZoneImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { + config := meta.(*Config) + + if err := parseImportId([]string{ + "projects/(?P[^/]+)/locations/(?P[^/]+)/lakes/(?P[^/]+)/zones/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + }, d, config); err != nil { + return nil, err + } + + // Replace import id for the resource id + id, err := replaceVarsForId(d, config, "projects/{{project}}/locations/{{location}}/lakes/{{lake}}/zones/{{name}}") + if err != nil { + return nil, fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + + return []*schema.ResourceData{d}, nil +} + +func expandDataplexZoneDiscoverySpec(o interface{}) *dataplex.ZoneDiscoverySpec { + if o == nil { + return dataplex.EmptyZoneDiscoverySpec + } + objArr := o.([]interface{}) + if len(objArr) == 0 || objArr[0] == nil { + return dataplex.EmptyZoneDiscoverySpec + } + obj := objArr[0].(map[string]interface{}) + return &dataplex.ZoneDiscoverySpec{ + Enabled: dcl.Bool(obj["enabled"].(bool)), + CsvOptions: expandDataplexZoneDiscoverySpecCsvOptions(obj["csv_options"]), + ExcludePatterns: expandStringArray(obj["exclude_patterns"]), + IncludePatterns: expandStringArray(obj["include_patterns"]), + JsonOptions: expandDataplexZoneDiscoverySpecJsonOptions(obj["json_options"]), + Schedule: dcl.StringOrNil(obj["schedule"].(string)), + } +} + +func flattenDataplexZoneDiscoverySpec(obj *dataplex.ZoneDiscoverySpec) interface{} { + if obj == nil || obj.Empty() { + return nil + } + transformed := map[string]interface{}{ + "enabled": obj.Enabled, + "csv_options": flattenDataplexZoneDiscoverySpecCsvOptions(obj.CsvOptions), + "exclude_patterns": obj.ExcludePatterns, + "include_patterns": obj.IncludePatterns, + "json_options": flattenDataplexZoneDiscoverySpecJsonOptions(obj.JsonOptions), + "schedule": obj.Schedule, + } + + return []interface{}{transformed} + +} + +func expandDataplexZoneDiscoverySpecCsvOptions(o 
interface{}) *dataplex.ZoneDiscoverySpecCsvOptions { + if o == nil { + return dataplex.EmptyZoneDiscoverySpecCsvOptions + } + objArr := o.([]interface{}) + if len(objArr) == 0 || objArr[0] == nil { + return dataplex.EmptyZoneDiscoverySpecCsvOptions + } + obj := objArr[0].(map[string]interface{}) + return &dataplex.ZoneDiscoverySpecCsvOptions{ + Delimiter: dcl.String(obj["delimiter"].(string)), + DisableTypeInference: dcl.Bool(obj["disable_type_inference"].(bool)), + Encoding: dcl.String(obj["encoding"].(string)), + HeaderRows: dcl.Int64(int64(obj["header_rows"].(int))), + } +} + +func flattenDataplexZoneDiscoverySpecCsvOptions(obj *dataplex.ZoneDiscoverySpecCsvOptions) interface{} { + if obj == nil || obj.Empty() { + return nil + } + transformed := map[string]interface{}{ + "delimiter": obj.Delimiter, + "disable_type_inference": obj.DisableTypeInference, + "encoding": obj.Encoding, + "header_rows": obj.HeaderRows, + } + + return []interface{}{transformed} + +} + +func expandDataplexZoneDiscoverySpecJsonOptions(o interface{}) *dataplex.ZoneDiscoverySpecJsonOptions { + if o == nil { + return dataplex.EmptyZoneDiscoverySpecJsonOptions + } + objArr := o.([]interface{}) + if len(objArr) == 0 || objArr[0] == nil { + return dataplex.EmptyZoneDiscoverySpecJsonOptions + } + obj := objArr[0].(map[string]interface{}) + return &dataplex.ZoneDiscoverySpecJsonOptions{ + DisableTypeInference: dcl.Bool(obj["disable_type_inference"].(bool)), + Encoding: dcl.String(obj["encoding"].(string)), + } +} + +func flattenDataplexZoneDiscoverySpecJsonOptions(obj *dataplex.ZoneDiscoverySpecJsonOptions) interface{} { + if obj == nil || obj.Empty() { + return nil + } + transformed := map[string]interface{}{ + "disable_type_inference": obj.DisableTypeInference, + "encoding": obj.Encoding, + } + + return []interface{}{transformed} + +} + +func expandDataplexZoneResourceSpec(o interface{}) *dataplex.ZoneResourceSpec { + if o == nil { + return dataplex.EmptyZoneResourceSpec + } + objArr := 
o.([]interface{}) + if len(objArr) == 0 || objArr[0] == nil { + return dataplex.EmptyZoneResourceSpec + } + obj := objArr[0].(map[string]interface{}) + return &dataplex.ZoneResourceSpec{ + LocationType: dataplex.ZoneResourceSpecLocationTypeEnumRef(obj["location_type"].(string)), + } +} + +func flattenDataplexZoneResourceSpec(obj *dataplex.ZoneResourceSpec) interface{} { + if obj == nil || obj.Empty() { + return nil + } + transformed := map[string]interface{}{ + "location_type": obj.LocationType, + } + + return []interface{}{transformed} + +} + +func flattenDataplexZoneAssetStatus(obj *dataplex.ZoneAssetStatus) interface{} { + if obj == nil || obj.Empty() { + return nil + } + transformed := map[string]interface{}{ + "active_assets": obj.ActiveAssets, + "security_policy_applying_assets": obj.SecurityPolicyApplyingAssets, + "update_time": obj.UpdateTime, + } + + return []interface{}{transformed} + +} diff --git a/google/resource_dataplex_zone_generated_test.go b/google/resource_dataplex_zone_generated_test.go new file mode 100644 index 00000000000..4720504141f --- /dev/null +++ b/google/resource_dataplex_zone_generated_test.go @@ -0,0 +1,184 @@ +// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** Type: DCL *** +// +// ---------------------------------------------------------------------------- +// +// This file is managed by Magic Modules (https://github.com/GoogleCloudPlatform/magic-modules) +// and is based on the DCL (https://github.com/GoogleCloudPlatform/declarative-resource-client-library). +// Changes will need to be made to the DCL or Magic Modules instead of here. +// +// We are not currently able to accept contributions to this file. 
If changes +// are required, please file an issue at https://github.com/hashicorp/terraform-provider-google/issues/new/choose +// +// ---------------------------------------------------------------------------- + +package google + +import ( + "context" + "fmt" + dcl "github.com/GoogleCloudPlatform/declarative-resource-client-library/dcl" + dataplex "github.com/GoogleCloudPlatform/declarative-resource-client-library/services/google/dataplex" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + "strings" + "testing" +) + +func TestAccDataplexZone_BasicZone(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project_name": getTestProjectFromEnv(), + "region": getTestRegionFromEnv(), + "random_suffix": randString(t, 10), + } + + vcrTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDataplexZoneDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataplexZone_BasicZone(context), + }, + { + ResourceName: "google_dataplex_zone.primary", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDataplexZone_BasicZoneUpdate0(context), + }, + { + ResourceName: "google_dataplex_zone.primary", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccDataplexZone_BasicZone(context map[string]interface{}) string { + return Nprintf(` +resource "google_dataplex_zone" "primary" { + discovery_spec { + enabled = false + } + + lake = google_dataplex_lake.basic.name + location = "%{region}" + name = "tf-test-zone%{random_suffix}" + + resource_spec { + location_type = "MULTI_REGION" + } + + type = "RAW" + description = "Zone for DCL" + display_name = "Zone for DCL" + labels = {} + project = "%{project_name}" +} + +resource "google_dataplex_lake" "basic" { + location = "%{region}" + name = "tf-test-lake%{random_suffix}" + description = "Lake for DCL" + 
display_name = "Lake for DCL" + + labels = { + my-lake = "exists" + } + + project = "%{project_name}" +} + + +`, context) +} + +func testAccDataplexZone_BasicZoneUpdate0(context map[string]interface{}) string { + return Nprintf(` +resource "google_dataplex_zone" "primary" { + discovery_spec { + enabled = false + } + + lake = google_dataplex_lake.basic.name + location = "%{region}" + name = "tf-test-zone%{random_suffix}" + + resource_spec { + location_type = "MULTI_REGION" + } + + type = "RAW" + description = "Zone for DCL Updated" + display_name = "Zone for DCL" + + labels = { + updated_label = "exists" + } + + project = "%{project_name}" +} + +resource "google_dataplex_lake" "basic" { + location = "%{region}" + name = "tf-test-lake%{random_suffix}" + description = "Lake for DCL" + display_name = "Lake for DCL" + + labels = { + my-lake = "exists" + } + + project = "%{project_name}" +} + + +`, context) +} + +func testAccCheckDataplexZoneDestroyProducer(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + for name, rs := range s.RootModule().Resources { + if rs.Type != "google_dataplex_zone" { + continue + } + if strings.HasPrefix(name, "data.") { + continue + } + + config := googleProviderConfig(t) + + billingProject := "" + if config.BillingProject != "" { + billingProject = config.BillingProject + } + + obj := &dataplex.Zone{ + Lake: dcl.String(rs.Primary.Attributes["lake"]), + Location: dcl.String(rs.Primary.Attributes["location"]), + Name: dcl.String(rs.Primary.Attributes["name"]), + Type: dataplex.ZoneTypeEnumRef(rs.Primary.Attributes["type"]), + Description: dcl.String(rs.Primary.Attributes["description"]), + DisplayName: dcl.String(rs.Primary.Attributes["display_name"]), + Project: dcl.StringOrNil(rs.Primary.Attributes["project"]), + CreateTime: dcl.StringOrNil(rs.Primary.Attributes["create_time"]), + State: dataplex.ZoneStateEnumRef(rs.Primary.Attributes["state"]), + Uid: dcl.StringOrNil(rs.Primary.Attributes["uid"]), 
+ UpdateTime: dcl.StringOrNil(rs.Primary.Attributes["update_time"]), + } + + client := NewDCLDataplexClient(config, config.userAgent, billingProject, 0) + _, err := client.GetZone(context.Background(), obj) + if err == nil { + return fmt.Errorf("google_dataplex_zone still exists %v", obj) + } + } + return nil + } +} diff --git a/website/docs/r/dataplex_zone.html.markdown b/website/docs/r/dataplex_zone.html.markdown new file mode 100644 index 00000000000..6309cd9265f --- /dev/null +++ b/website/docs/r/dataplex_zone.html.markdown @@ -0,0 +1,216 @@ +--- +# ---------------------------------------------------------------------------- +# +# *** AUTO GENERATED CODE *** Type: DCL *** +# +# ---------------------------------------------------------------------------- +# +# This file is managed by Magic Modules (https:#github.com/GoogleCloudPlatform/magic-modules) +# and is based on the DCL (https:#github.com/GoogleCloudPlatform/declarative-resource-client-library). +# Changes will need to be made to the DCL or Magic Modules instead of here. +# +# We are not currently able to accept contributions to this file. 
If changes +# are required, please file an issue at https:#github.com/hashicorp/terraform-provider-google/issues/new/choose +# +# ---------------------------------------------------------------------------- +subcategory: "Dataplex" +page_title: "Google: google_dataplex_zone" +description: |- + The Dataplex Zone resource +--- + +# google_dataplex_zone + +The Dataplex Zone resource + +## Example Usage - basic_zone +A basic example of a dataplex zone +```hcl +resource "google_dataplex_zone" "primary" { + discovery_spec { + enabled = false + } + + lake = google_dataplex_lake.basic.name + location = "us-west1" + name = "zone" + + resource_spec { + location_type = "MULTI_REGION" + } + + type = "RAW" + description = "Zone for DCL" + display_name = "Zone for DCL" + labels = {} + project = "my-project-name" +} + +resource "google_dataplex_lake" "basic" { + location = "us-west1" + name = "lake" + description = "Lake for DCL" + display_name = "Lake for DCL" + + labels = { + my-lake = "exists" + } + + project = "my-project-name" +} + + +``` + +## Argument Reference + +The following arguments are supported: + +* `discovery_spec` - + (Required) + Required. Specification of the discovery feature applied to data in this zone. + +* `lake` - + (Required) + The lake for the resource + +* `location` - + (Required) + The location for the resource + +* `name` - + (Required) + The name of the zone. + +* `resource_spec` - + (Required) + Required. Immutable. Specification of the resources that are referenced by the assets within this zone. + +* `type` - + (Required) + Required. Immutable. The type of the zone. Possible values: TYPE_UNSPECIFIED, RAW, CURATED + + + +The `discovery_spec` block supports: + +* `csv_options` - + (Optional) + Optional. Configuration for CSV data. + +* `enabled` - + (Required) + Required. Whether discovery is enabled. + +* `exclude_patterns` - + (Optional) + Optional. The list of patterns to apply for selecting data to exclude during discovery. 
For Cloud Storage bucket assets, these are interpreted as glob patterns used to match object names. For BigQuery dataset assets, these are interpreted as patterns to match table names.
+
+* `include_patterns` -
+  (Optional)
+  Optional. The list of patterns to apply for selecting data to include during discovery if only a subset of the data should be considered. For Cloud Storage bucket assets, these are interpreted as glob patterns used to match object names. For BigQuery dataset assets, these are interpreted as patterns to match table names.
+
+* `json_options` -
+  (Optional)
+  Optional. Configuration for Json data.
+
+* `schedule` -
+  (Optional)
+  Optional. Cron schedule (https://en.wikipedia.org/wiki/Cron) for running discovery periodically. Successive discovery runs must be scheduled at least 60 minutes apart. The default value is to run discovery every 60 minutes. To explicitly set a timezone to the cron tab, apply a prefix in the cron tab: "CRON_TZ=${IANA_TIME_ZONE}" or "TZ=${IANA_TIME_ZONE}". The ${IANA_TIME_ZONE} may only be a valid string from IANA time zone database. For example, "CRON_TZ=America/New_York 1 * * * *", or "TZ=America/New_York 1 * * * *".
+
+The `resource_spec` block supports:
+
+* `location_type` -
+  (Required)
+  Required. Immutable. The location type of the resources that are allowed to be attached to the assets within this zone. Possible values: LOCATION_TYPE_UNSPECIFIED, SINGLE_REGION, MULTI_REGION
+
+- - -
+
+* `description` -
+  (Optional)
+  Optional. Description of the zone.
+
+* `display_name` -
+  (Optional)
+  Optional. User friendly display name.
+
+* `labels` -
+  (Optional)
+  Optional. User defined labels for the zone.
+
+* `project` -
+  (Optional)
+  The project for the resource
+
+
+
+The `csv_options` block supports:
+
+* `delimiter` -
+  (Optional)
+  Optional. The delimiter being used to separate values. This defaults to ','.
+
+* `disable_type_inference` -
+  (Optional)
+  Optional.
Whether to disable the inference of data type for CSV data. If true, all columns will be registered as strings. + +* `encoding` - + (Optional) + Optional. The character encoding of the data. The default is UTF-8. + +* `header_rows` - + (Optional) + Optional. The number of rows to interpret as header rows that should be skipped when reading data rows. + +The `json_options` block supports: + +* `disable_type_inference` - + (Optional) + Optional. Whether to disable the inference of data type for Json data. If true, all columns will be registered as their primitive types (strings, number or boolean). + +* `encoding` - + (Optional) + Optional. The character encoding of the data. The default is UTF-8. + +## Attributes Reference + +In addition to the arguments listed above, the following computed attributes are exported: + +* `id` - an identifier for the resource with format `projects/{{project}}/locations/{{location}}/lakes/{{lake}}/zones/{{name}}` + +* `asset_status` - + Output only. Aggregated status of the underlying assets of the zone. + +* `create_time` - + Output only. The time when the zone was created. + +* `state` - + Output only. Current state of the zone. Possible values: STATE_UNSPECIFIED, ACTIVE, CREATING, DELETING, ACTION_REQUIRED + +* `uid` - + Output only. System generated globally unique ID for the zone. This ID will be different if the zone is deleted and re-created with the same name. + +* `update_time` - + Output only. The time when the zone was last updated. + +## Timeouts + +This resource provides the following +[Timeouts](/docs/configuration/resources.html#timeouts) configuration options: + +- `create` - Default is 20 minutes. +- `update` - Default is 20 minutes. +- `delete` - Default is 20 minutes. 
+ +## Import + +Zone can be imported using any of these accepted formats: + +``` +$ terraform import google_dataplex_zone.default projects/{{project}}/locations/{{location}}/lakes/{{lake}}/zones/{{name}} +$ terraform import google_dataplex_zone.default {{project}}/{{location}}/{{lake}}/{{name}} +$ terraform import google_dataplex_zone.default {{location}}/{{lake}}/{{name}} +``` + + +