diff --git a/.changelog/11989.txt b/.changelog/11989.txt new file mode 100644 index 0000000000..68d6f72fa0 --- /dev/null +++ b/.changelog/11989.txt @@ -0,0 +1,3 @@ +```release-note:new-resource +`google_dataproc_gdc_service_instance` +``` \ No newline at end of file diff --git a/google-beta/fwmodels/provider_model.go b/google-beta/fwmodels/provider_model.go index 2de998a090..605758cecf 100644 --- a/google-beta/fwmodels/provider_model.go +++ b/google-beta/fwmodels/provider_model.go @@ -80,6 +80,7 @@ type ProviderModel struct { DataPipelineCustomEndpoint types.String `tfsdk:"data_pipeline_custom_endpoint"` DataplexCustomEndpoint types.String `tfsdk:"dataplex_custom_endpoint"` DataprocCustomEndpoint types.String `tfsdk:"dataproc_custom_endpoint"` + DataprocGdcCustomEndpoint types.String `tfsdk:"dataproc_gdc_custom_endpoint"` DataprocMetastoreCustomEndpoint types.String `tfsdk:"dataproc_metastore_custom_endpoint"` DatastreamCustomEndpoint types.String `tfsdk:"datastream_custom_endpoint"` DeploymentManagerCustomEndpoint types.String `tfsdk:"deployment_manager_custom_endpoint"` diff --git a/google-beta/fwprovider/framework_provider.go b/google-beta/fwprovider/framework_provider.go index 9ac744ba24..a939072829 100644 --- a/google-beta/fwprovider/framework_provider.go +++ b/google-beta/fwprovider/framework_provider.go @@ -449,6 +449,12 @@ func (p *FrameworkProvider) Schema(_ context.Context, _ provider.SchemaRequest, transport_tpg.CustomEndpointValidator(), }, }, + "dataproc_gdc_custom_endpoint": &schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + transport_tpg.CustomEndpointValidator(), + }, + }, "dataproc_metastore_custom_endpoint": &schema.StringAttribute{ Optional: true, Validators: []validator.String{ diff --git a/google-beta/fwtransport/framework_config.go b/google-beta/fwtransport/framework_config.go index 10579a1848..7de5ebcd3a 100644 --- a/google-beta/fwtransport/framework_config.go +++ b/google-beta/fwtransport/framework_config.go @@ -114,6 +114,7 @@ type FrameworkProviderConfig struct { DataPipelineBasePath string DataplexBasePath string DataprocBasePath string + DataprocGdcBasePath string DataprocMetastoreBasePath string DatastreamBasePath string DeploymentManagerBasePath string @@ -293,6 +294,7 @@ func (p *FrameworkProviderConfig) LoadAndValidateFramework(ctx context.Context, p.DataPipelineBasePath = data.DataPipelineCustomEndpoint.ValueString() p.DataplexBasePath = data.DataplexCustomEndpoint.ValueString() p.DataprocBasePath = data.DataprocCustomEndpoint.ValueString() + p.DataprocGdcBasePath = data.DataprocGdcCustomEndpoint.ValueString() p.DataprocMetastoreBasePath = data.DataprocMetastoreCustomEndpoint.ValueString() p.DatastreamBasePath = data.DatastreamCustomEndpoint.ValueString() p.DeploymentManagerBasePath = data.DeploymentManagerCustomEndpoint.ValueString() @@ -923,6 +925,14 @@ func (p *FrameworkProviderConfig) HandleDefaults(ctx context.Context, data *fwmo data.DataprocCustomEndpoint = types.StringValue(customEndpoint.(string)) } } + if data.DataprocGdcCustomEndpoint.IsNull() { + customEndpoint := transport_tpg.MultiEnvDefault([]string{ + "GOOGLE_DATAPROC_GDC_CUSTOM_ENDPOINT", + }, transport_tpg.DefaultBasePaths[transport_tpg.DataprocGdcBasePathKey]) + if customEndpoint != nil { + data.DataprocGdcCustomEndpoint = types.StringValue(customEndpoint.(string)) + } + } if data.DataprocMetastoreCustomEndpoint.IsNull() { customEndpoint := transport_tpg.MultiEnvDefault([]string{ "GOOGLE_DATAPROC_METASTORE_CUSTOM_ENDPOINT", diff --git 
a/google-beta/provider/provider.go b/google-beta/provider/provider.go index 58c19e19b6..1af27bb1ef 100644 --- a/google-beta/provider/provider.go +++ b/google-beta/provider/provider.go @@ -395,6 +395,11 @@ func Provider() *schema.Provider { Optional: true, ValidateFunc: transport_tpg.ValidateCustomEndpoint, }, + "dataproc_gdc_custom_endpoint": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: transport_tpg.ValidateCustomEndpoint, + }, "dataproc_metastore_custom_endpoint": { Type: schema.TypeString, Optional: true, @@ -1087,6 +1092,7 @@ func ProviderConfigure(ctx context.Context, d *schema.ResourceData, p *schema.Pr config.DataPipelineBasePath = d.Get("data_pipeline_custom_endpoint").(string) config.DataplexBasePath = d.Get("dataplex_custom_endpoint").(string) config.DataprocBasePath = d.Get("dataproc_custom_endpoint").(string) + config.DataprocGdcBasePath = d.Get("dataproc_gdc_custom_endpoint").(string) config.DataprocMetastoreBasePath = d.Get("dataproc_metastore_custom_endpoint").(string) config.DatastreamBasePath = d.Get("datastream_custom_endpoint").(string) config.DeploymentManagerBasePath = d.Get("deployment_manager_custom_endpoint").(string) diff --git a/google-beta/provider/provider_mmv1_resources.go b/google-beta/provider/provider_mmv1_resources.go index 619e26eda8..e4b79b4b0c 100644 --- a/google-beta/provider/provider_mmv1_resources.go +++ b/google-beta/provider/provider_mmv1_resources.go @@ -54,6 +54,7 @@ import ( "github.com/hashicorp/terraform-provider-google-beta/google-beta/services/datapipeline" "github.com/hashicorp/terraform-provider-google-beta/google-beta/services/dataplex" "github.com/hashicorp/terraform-provider-google-beta/google-beta/services/dataproc" + "github.com/hashicorp/terraform-provider-google-beta/google-beta/services/dataprocgdc" "github.com/hashicorp/terraform-provider-google-beta/google-beta/services/dataprocmetastore" "github.com/hashicorp/terraform-provider-google-beta/google-beta/services/datastream" "github.com/hashicorp/terraform-provider-google-beta/google-beta/services/deploymentmanager" @@ -499,9 +500,9 @@ var handwrittenIAMDatasources = map[string]*schema.Resource{ } // Resources -// Generated resources: 546 +// Generated resources: 547 // Generated IAM resources: 291 -// Total generated resources: 837 +// Total generated resources: 838 var generatedResources = map[string]*schema.Resource{ "google_folder_access_approval_settings": accessapproval.ResourceAccessApprovalFolderSettings(), "google_organization_access_approval_settings": accessapproval.ResourceAccessApprovalOrganizationSettings(), @@ -885,6 +886,7 @@ var generatedResources = map[string]*schema.Resource{ "google_dataproc_autoscaling_policy_iam_member": tpgiamresource.ResourceIamMember(dataproc.DataprocAutoscalingPolicyIamSchema, dataproc.DataprocAutoscalingPolicyIamUpdaterProducer, dataproc.DataprocAutoscalingPolicyIdParseFunc), "google_dataproc_autoscaling_policy_iam_policy": tpgiamresource.ResourceIamPolicy(dataproc.DataprocAutoscalingPolicyIamSchema, dataproc.DataprocAutoscalingPolicyIamUpdaterProducer, dataproc.DataprocAutoscalingPolicyIdParseFunc), "google_dataproc_batch": dataproc.ResourceDataprocBatch(), + "google_dataproc_gdc_service_instance": dataprocgdc.ResourceDataprocGdcServiceInstance(), "google_dataproc_metastore_federation": dataprocmetastore.ResourceDataprocMetastoreFederation(), "google_dataproc_metastore_federation_iam_binding": tpgiamresource.ResourceIamBinding(dataprocmetastore.DataprocMetastoreFederationIamSchema, 
dataprocmetastore.DataprocMetastoreFederationIamUpdaterProducer, dataprocmetastore.DataprocMetastoreFederationIdParseFunc), "google_dataproc_metastore_federation_iam_member": tpgiamresource.ResourceIamMember(dataprocmetastore.DataprocMetastoreFederationIamSchema, dataprocmetastore.DataprocMetastoreFederationIamUpdaterProducer, dataprocmetastore.DataprocMetastoreFederationIdParseFunc), diff --git a/google-beta/services/dataprocgdc/dataproc_gdc_operation.go b/google-beta/services/dataprocgdc/dataproc_gdc_operation.go new file mode 100644 index 0000000000..3699894509 --- /dev/null +++ b/google-beta/services/dataprocgdc/dataproc_gdc_operation.go @@ -0,0 +1,92 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** Type: MMv1 *** +// +// ---------------------------------------------------------------------------- +// +// This file is automatically generated by Magic Modules and manual +// changes will be clobbered when the file is regenerated. +// +// Please read more about how to change this file in +// .github/CONTRIBUTING.md. +// +// ---------------------------------------------------------------------------- + +package dataprocgdc + +import ( + "encoding/json" + "errors" + "fmt" + "time" + + "github.com/hashicorp/terraform-provider-google-beta/google-beta/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google-beta/google-beta/transport" +) + +type DataprocGdcOperationWaiter struct { + Config *transport_tpg.Config + UserAgent string + Project string + tpgresource.CommonOperationWaiter +} + +func (w *DataprocGdcOperationWaiter) QueryOp() (interface{}, error) { + if w == nil { + return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") + } + // Returns the proper get. + url := fmt.Sprintf("%s%s", w.Config.DataprocGdcBasePath, w.CommonOperationWaiter.Op.Name) + + return transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: w.Config, + Method: "GET", + Project: w.Project, + RawURL: url, + UserAgent: w.UserAgent, + }) +} + +func createDataprocGdcWaiter(config *transport_tpg.Config, op map[string]interface{}, project, activity, userAgent string) (*DataprocGdcOperationWaiter, error) { + w := &DataprocGdcOperationWaiter{ + Config: config, + UserAgent: userAgent, + Project: project, + } + if err := w.CommonOperationWaiter.SetOp(op); err != nil { + return nil, err + } + return w, nil +} + +// nolint: deadcode,unused +func DataprocGdcOperationWaitTimeWithResponse(config *transport_tpg.Config, op map[string]interface{}, response *map[string]interface{}, project, activity, userAgent string, timeout time.Duration) error { + w, err := createDataprocGdcWaiter(config, op, project, activity, userAgent) + if err != nil { + return err + } + if err := tpgresource.OperationWait(w, activity, timeout, config.PollInterval); err != nil { + return err + } + rawResponse := []byte(w.CommonOperationWaiter.Op.Response) + if len(rawResponse) == 0 { + return errors.New("`resource` not set in operation response") + } + return json.Unmarshal(rawResponse, response) +} + +func DataprocGdcOperationWaitTime(config *transport_tpg.Config, op map[string]interface{}, project, activity, userAgent string, timeout time.Duration) error { + if val, ok := op["name"]; !ok || val == "" { + // This was a synchronous call - there is no operation to wait for. 
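+		// A missing or empty operation name means the API handled the request
+		// synchronously and returned the resource directly, so there is no
+		// long-running operation to poll before continuing.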
+ return nil + } + w, err := createDataprocGdcWaiter(config, op, project, activity, userAgent) + if err != nil { + // If w is nil, the op was synchronous. + return err + } + return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) +} diff --git a/google-beta/services/dataprocgdc/resource_dataproc_gdc_service_instance.go b/google-beta/services/dataprocgdc/resource_dataproc_gdc_service_instance.go new file mode 100644 index 0000000000..cecd3dbba9 --- /dev/null +++ b/google-beta/services/dataprocgdc/resource_dataproc_gdc_service_instance.go @@ -0,0 +1,626 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** Type: MMv1 *** +// +// ---------------------------------------------------------------------------- +// +// This file is automatically generated by Magic Modules and manual +// changes will be clobbered when the file is regenerated. +// +// Please read more about how to change this file in +// .github/CONTRIBUTING.md. +// +// ---------------------------------------------------------------------------- + +package dataprocgdc + +import ( + "fmt" + "log" + "net/http" + "reflect" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + + "github.com/hashicorp/terraform-provider-google-beta/google-beta/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google-beta/google-beta/transport" +) + +func ResourceDataprocGdcServiceInstance() *schema.Resource { + return &schema.Resource{ + Create: resourceDataprocGdcServiceInstanceCreate, + Read: resourceDataprocGdcServiceInstanceRead, + Update: resourceDataprocGdcServiceInstanceUpdate, + Delete: resourceDataprocGdcServiceInstanceDelete, + + Importer: &schema.ResourceImporter{ + State: resourceDataprocGdcServiceInstanceImport, + }, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(20 * time.Minute), + Update: schema.DefaultTimeout(20 * time.Minute), + Delete: schema.DefaultTimeout(20 * time.Minute), + }, + + CustomizeDiff: customdiff.All( + tpgresource.SetLabelsDiff, + tpgresource.DefaultProviderProject, + ), + + Schema: map[string]*schema.Schema{ + "location": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: `Location of the resource.`, + }, + "service_instance_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: `Id of the service instance.`, + }, + "display_name": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: `User-provided human-readable name to be used in user interfaces.`, + }, + "gdce_cluster": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + Description: `Gdce cluster information.`, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "gdce_cluster": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: `Gdce cluster resource id.`, + }, + }, + }, + }, + "labels": { + Type: schema.TypeMap, + Optional: true, + Description: `The labels to associate with this service instance. Labels may be used for filtering and billing tracking. + +**Note**: This field is non-authoritative, and will only manage the labels present in your configuration. 
+Please refer to the field 'effective_labels' for all of the labels present on the resource.`, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + "service_account": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: `Requested service account to associate with ServiceInstance.`, + }, + "spark_service_instance_config": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + Description: `Spark-specific service instance configuration.`, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{}, + }, + }, + "create_time": { + Type: schema.TypeString, + Computed: true, + Description: `The timestamp when the resource was created.`, + }, + "effective_labels": { + Type: schema.TypeMap, + Computed: true, + ForceNew: true, + Description: `All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Terraform, other clients and services.`, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + "effective_service_account": { + Type: schema.TypeString, + Computed: true, + Description: `Effective service account associated with ServiceInstance. This will be the service_account if specified. Otherwise, it will be an automatically created per-resource P4SA that also automatically has Fleet Workload. Identity bindings applied.`, + }, + "name": { + Type: schema.TypeString, + Computed: true, + Description: `Identifier. The name of the service instance.`, + }, + "reconciling": { + Type: schema.TypeBool, + Computed: true, + Description: `Whether the service instance is currently reconciling. True if the current state of the resource does not match the intended state, and the system is working to reconcile them, whether or not the change was user initiated.`, + }, + "requested_state": { + Type: schema.TypeString, + Computed: true, + Description: `The intended state to which the service instance is reconciling. Possible values: +* 'CREATING' +* 'ACTIVE' +* 'DISCONNECTED' +* 'DELETING' +* 'STOPPING' +* 'STOPPED' +* 'STARTING' +* 'UPDATING' +* 'FAILED'`, + }, + "state": { + Type: schema.TypeString, + Computed: true, + Description: `The current state. 
Possible values: +* 'CREATING' +* 'ACTIVE' +* 'DISCONNECTED' +* 'DELETING' +* 'STOPPING' +* 'STOPPED' +* 'STARTING' +* 'UPDATING' +* 'FAILED'`, + }, + "state_message": { + Type: schema.TypeString, + Computed: true, + Description: `A message explaining the current state.`, + }, + "terraform_labels": { + Type: schema.TypeMap, + Computed: true, + Description: `The combination of labels configured directly on the resource + and default labels configured on the provider.`, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + "uid": { + Type: schema.TypeString, + Computed: true, + Description: `System generated unique identifier for this service instance, formatted as UUID4.`, + }, + "update_time": { + Type: schema.TypeString, + Computed: true, + Description: `The timestamp when the resource was most recently updated.`, + }, + "project": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + }, + }, + UseJSONNumber: true, + } +} + +func resourceDataprocGdcServiceInstanceCreate(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + obj := make(map[string]interface{}) + gdceClusterProp, err := expandDataprocGdcServiceInstanceGdceCluster(d.Get("gdce_cluster"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("gdce_cluster"); !tpgresource.IsEmptyValue(reflect.ValueOf(gdceClusterProp)) && (ok || !reflect.DeepEqual(v, gdceClusterProp)) { + obj["gdceCluster"] = gdceClusterProp + } + displayNameProp, err := expandDataprocGdcServiceInstanceDisplayName(d.Get("display_name"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("display_name"); !tpgresource.IsEmptyValue(reflect.ValueOf(displayNameProp)) && (ok || !reflect.DeepEqual(v, displayNameProp)) { + obj["displayName"] = displayNameProp + } + sparkServiceInstanceConfigProp, err := expandDataprocGdcServiceInstanceSparkServiceInstanceConfig(d.Get("spark_service_instance_config"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("spark_service_instance_config"); !tpgresource.IsEmptyValue(reflect.ValueOf(sparkServiceInstanceConfigProp)) && (ok || !reflect.DeepEqual(v, sparkServiceInstanceConfigProp)) { + obj["sparkServiceInstanceConfig"] = sparkServiceInstanceConfigProp + } + serviceAccountProp, err := expandDataprocGdcServiceInstanceServiceAccount(d.Get("service_account"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("service_account"); !tpgresource.IsEmptyValue(reflect.ValueOf(serviceAccountProp)) && (ok || !reflect.DeepEqual(v, serviceAccountProp)) { + obj["serviceAccount"] = serviceAccountProp + } + labelsProp, err := expandDataprocGdcServiceInstanceEffectiveLabels(d.Get("effective_labels"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("effective_labels"); !tpgresource.IsEmptyValue(reflect.ValueOf(labelsProp)) && (ok || !reflect.DeepEqual(v, labelsProp)) { + obj["labels"] = labelsProp + } + + url, err := tpgresource.ReplaceVars(d, config, "{{DataprocGdcBasePath}}projects/{{project}}/locations/{{location}}/serviceInstances?serviceInstanceId={{service_instance_id}}") + if err != nil { + return err + } + + log.Printf("[DEBUG] Creating new ServiceInstance: %#v", obj) + billingProject := "" + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return fmt.Errorf("Error fetching project for ServiceInstance: %s", 
err) + } + billingProject = project + + // err == nil indicates that the billing_project value was found + if bp, err := tpgresource.GetBillingProject(d, config); err == nil { + billingProject = bp + } + + headers := make(http.Header) + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "POST", + Project: billingProject, + RawURL: url, + UserAgent: userAgent, + Body: obj, + Timeout: d.Timeout(schema.TimeoutCreate), + Headers: headers, + }) + if err != nil { + return fmt.Errorf("Error creating ServiceInstance: %s", err) + } + + // Store the ID now + id, err := tpgresource.ReplaceVars(d, config, "projects/{{project}}/locations/{{location}}/serviceInstances/{{service_instance_id}}") + if err != nil { + return fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + + err = DataprocGdcOperationWaitTime( + config, res, project, "Creating ServiceInstance", userAgent, + d.Timeout(schema.TimeoutCreate)) + + if err != nil { + // The resource didn't actually create + d.SetId("") + return fmt.Errorf("Error waiting to create ServiceInstance: %s", err) + } + + log.Printf("[DEBUG] Finished creating ServiceInstance %q: %#v", d.Id(), res) + + return resourceDataprocGdcServiceInstanceRead(d, meta) +} + +func resourceDataprocGdcServiceInstanceRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + url, err := tpgresource.ReplaceVars(d, config, "{{DataprocGdcBasePath}}projects/{{project}}/locations/{{location}}/serviceInstances/{{service_instance_id}}") + if err != nil { + return err + } + + billingProject := "" + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return fmt.Errorf("Error fetching project for ServiceInstance: %s", err) + } + billingProject = project + + // err == nil indicates that the billing_project value was found + if bp, err := tpgresource.GetBillingProject(d, config); err == nil { + billingProject = bp + } + + headers := make(http.Header) + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: url, + UserAgent: userAgent, + Headers: headers, + }) + if err != nil { + return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("DataprocGdcServiceInstance %q", d.Id())) + } + + if err := d.Set("project", project); err != nil { + return fmt.Errorf("Error reading ServiceInstance: %s", err) + } + + if err := d.Set("gdce_cluster", flattenDataprocGdcServiceInstanceGdceCluster(res["gdceCluster"], d, config)); err != nil { + return fmt.Errorf("Error reading ServiceInstance: %s", err) + } + if err := d.Set("name", flattenDataprocGdcServiceInstanceName(res["name"], d, config)); err != nil { + return fmt.Errorf("Error reading ServiceInstance: %s", err) + } + if err := d.Set("uid", flattenDataprocGdcServiceInstanceUid(res["uid"], d, config)); err != nil { + return fmt.Errorf("Error reading ServiceInstance: %s", err) + } + if err := d.Set("display_name", flattenDataprocGdcServiceInstanceDisplayName(res["displayName"], d, config)); err != nil { + return fmt.Errorf("Error reading ServiceInstance: %s", err) + } + if err := d.Set("create_time", flattenDataprocGdcServiceInstanceCreateTime(res["createTime"], d, config)); err != nil { + return fmt.Errorf("Error reading ServiceInstance: %s", err) + } + if err := d.Set("update_time", 
flattenDataprocGdcServiceInstanceUpdateTime(res["updateTime"], d, config)); err != nil { + return fmt.Errorf("Error reading ServiceInstance: %s", err) + } + if err := d.Set("requested_state", flattenDataprocGdcServiceInstanceRequestedState(res["requestedState"], d, config)); err != nil { + return fmt.Errorf("Error reading ServiceInstance: %s", err) + } + if err := d.Set("state", flattenDataprocGdcServiceInstanceState(res["state"], d, config)); err != nil { + return fmt.Errorf("Error reading ServiceInstance: %s", err) + } + if err := d.Set("reconciling", flattenDataprocGdcServiceInstanceReconciling(res["reconciling"], d, config)); err != nil { + return fmt.Errorf("Error reading ServiceInstance: %s", err) + } + if err := d.Set("labels", flattenDataprocGdcServiceInstanceLabels(res["labels"], d, config)); err != nil { + return fmt.Errorf("Error reading ServiceInstance: %s", err) + } + if err := d.Set("spark_service_instance_config", flattenDataprocGdcServiceInstanceSparkServiceInstanceConfig(res["sparkServiceInstanceConfig"], d, config)); err != nil { + return fmt.Errorf("Error reading ServiceInstance: %s", err) + } + if err := d.Set("state_message", flattenDataprocGdcServiceInstanceStateMessage(res["stateMessage"], d, config)); err != nil { + return fmt.Errorf("Error reading ServiceInstance: %s", err) + } + if err := d.Set("service_account", flattenDataprocGdcServiceInstanceServiceAccount(res["serviceAccount"], d, config)); err != nil { + return fmt.Errorf("Error reading ServiceInstance: %s", err) + } + if err := d.Set("effective_service_account", flattenDataprocGdcServiceInstanceEffectiveServiceAccount(res["effectiveServiceAccount"], d, config)); err != nil { + return fmt.Errorf("Error reading ServiceInstance: %s", err) + } + if err := d.Set("terraform_labels", flattenDataprocGdcServiceInstanceTerraformLabels(res["labels"], d, config)); err != nil { + return fmt.Errorf("Error reading ServiceInstance: %s", err) + } + if err := d.Set("effective_labels", flattenDataprocGdcServiceInstanceEffectiveLabels(res["labels"], d, config)); err != nil { + return fmt.Errorf("Error reading ServiceInstance: %s", err) + } + + return nil +} + +func resourceDataprocGdcServiceInstanceUpdate(d *schema.ResourceData, meta interface{}) error { + // Only the root field "labels" and "terraform_labels" are mutable + return resourceDataprocGdcServiceInstanceRead(d, meta) +} + +func resourceDataprocGdcServiceInstanceDelete(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + billingProject := "" + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return fmt.Errorf("Error fetching project for ServiceInstance: %s", err) + } + billingProject = project + + url, err := tpgresource.ReplaceVars(d, config, "{{DataprocGdcBasePath}}projects/{{project}}/locations/{{location}}/serviceInstances/{{service_instance_id}}") + if err != nil { + return err + } + + var obj map[string]interface{} + + // err == nil indicates that the billing_project value was found + if bp, err := tpgresource.GetBillingProject(d, config); err == nil { + billingProject = bp + } + + headers := make(http.Header) + + log.Printf("[DEBUG] Deleting ServiceInstance %q", d.Id()) + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "DELETE", + Project: billingProject, + RawURL: url, + UserAgent: userAgent, + Body: obj, + Timeout: 
d.Timeout(schema.TimeoutDelete), + Headers: headers, + }) + if err != nil { + return transport_tpg.HandleNotFoundError(err, d, "ServiceInstance") + } + + err = DataprocGdcOperationWaitTime( + config, res, project, "Deleting ServiceInstance", userAgent, + d.Timeout(schema.TimeoutDelete)) + + if err != nil { + return err + } + + log.Printf("[DEBUG] Finished deleting ServiceInstance %q: %#v", d.Id(), res) + return nil +} + +func resourceDataprocGdcServiceInstanceImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { + config := meta.(*transport_tpg.Config) + if err := tpgresource.ParseImportId([]string{ + "^projects/(?P[^/]+)/locations/(?P[^/]+)/serviceInstances/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", + }, d, config); err != nil { + return nil, err + } + + // Replace import id for the resource id + id, err := tpgresource.ReplaceVars(d, config, "projects/{{project}}/locations/{{location}}/serviceInstances/{{service_instance_id}}") + if err != nil { + return nil, fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + + return []*schema.ResourceData{d}, nil +} + +func flattenDataprocGdcServiceInstanceGdceCluster(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["gdce_cluster"] = + flattenDataprocGdcServiceInstanceGdceClusterGdceCluster(original["gdceCluster"], d, config) + return []interface{}{transformed} +} +func flattenDataprocGdcServiceInstanceGdceClusterGdceCluster(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcServiceInstanceName(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcServiceInstanceUid(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcServiceInstanceDisplayName(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcServiceInstanceCreateTime(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcServiceInstanceUpdateTime(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcServiceInstanceRequestedState(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcServiceInstanceState(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcServiceInstanceReconciling(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcServiceInstanceLabels(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + + transformed := make(map[string]interface{}) + if l, ok := d.GetOkExists("labels"); ok { + for k := range l.(map[string]interface{}) { + transformed[k] = v.(map[string]interface{})[k] + } + } + + return transformed +} + +func flattenDataprocGdcServiceInstanceSparkServiceInstanceConfig(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := 
v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + return []interface{}{transformed} +} + +func flattenDataprocGdcServiceInstanceStateMessage(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcServiceInstanceServiceAccount(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcServiceInstanceEffectiveServiceAccount(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcServiceInstanceTerraformLabels(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + + transformed := make(map[string]interface{}) + if l, ok := d.GetOkExists("terraform_labels"); ok { + for k := range l.(map[string]interface{}) { + transformed[k] = v.(map[string]interface{})[k] + } + } + + return transformed +} + +func flattenDataprocGdcServiceInstanceEffectiveLabels(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func expandDataprocGdcServiceInstanceGdceCluster(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + l := v.([]interface{}) + if len(l) == 0 || l[0] == nil { + return nil, nil + } + raw := l[0] + original := raw.(map[string]interface{}) + transformed := make(map[string]interface{}) + + transformedGdceCluster, err := expandDataprocGdcServiceInstanceGdceClusterGdceCluster(original["gdce_cluster"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedGdceCluster); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["gdceCluster"] = transformedGdceCluster + } + + return transformed, nil +} + +func expandDataprocGdcServiceInstanceGdceClusterGdceCluster(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandDataprocGdcServiceInstanceDisplayName(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandDataprocGdcServiceInstanceSparkServiceInstanceConfig(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandDataprocGdcServiceInstanceServiceAccount(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandDataprocGdcServiceInstanceEffectiveLabels(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (map[string]string, error) { + if v == nil { + return map[string]string{}, nil + } + m := make(map[string]string) + for k, val := range v.(map[string]interface{}) { + m[k] = val.(string) + } + return m, nil +} diff --git a/google-beta/services/dataprocgdc/resource_dataproc_gdc_service_instance_generated_test.go b/google-beta/services/dataprocgdc/resource_dataproc_gdc_service_instance_generated_test.go new file mode 100644 index 0000000000..9df4910d91 --- /dev/null +++ b/google-beta/services/dataprocgdc/resource_dataproc_gdc_service_instance_generated_test.go @@ -0,0 +1,114 @@ +// Copyright (c) HashiCorp, Inc. 
+// SPDX-License-Identifier: MPL-2.0 + +// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** Type: MMv1 *** +// +// ---------------------------------------------------------------------------- +// +// This file is automatically generated by Magic Modules and manual +// changes will be clobbered when the file is regenerated. +// +// Please read more about how to change this file in +// .github/CONTRIBUTING.md. +// +// ---------------------------------------------------------------------------- + +package dataprocgdc_test + +import ( + "fmt" + "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" + + "github.com/hashicorp/terraform-provider-google-beta/google-beta/acctest" + "github.com/hashicorp/terraform-provider-google-beta/google-beta/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google-beta/google-beta/transport" +) + +func TestAccDataprocGdcServiceInstance_dataprocgdcServiceinstanceExample(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project": "gdce-cluster-monitoring", + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDataprocGdcServiceInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataprocGdcServiceInstance_dataprocgdcServiceinstanceExample(context), + }, + { + ResourceName: "google_dataproc_gdc_service_instance.service-instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "location", "service_instance_id", "terraform_labels"}, + }, + }, + }) +} + +func testAccDataprocGdcServiceInstance_dataprocgdcServiceinstanceExample(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_dataproc_gdc_service_instance" "service-instance" { + service_instance_id = "tf-test-tf-e2e-service-instance%{random_suffix}" + project = "%{project}" + location = "us-west2" + gdce_cluster { + gdce_cluster = "projects/gdce-cluster-monitoring/locations/us-west2/clusters/gdce-prism-prober-ord106" + } + display_name = "A service instance" + labels = { + "test-label": "label-value" + } + service_account = "dataprocgdc-cep-workflows@gdce-cluster-monitoring.iam.gserviceaccount.com" +} +`, context) +} + +func testAccCheckDataprocGdcServiceInstanceDestroyProducer(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + for name, rs := range s.RootModule().Resources { + if rs.Type != "google_dataproc_gdc_service_instance" { + continue + } + if strings.HasPrefix(name, "data.") { + continue + } + + config := acctest.GoogleProviderConfig(t) + + url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{DataprocGdcBasePath}}projects/{{project}}/locations/{{location}}/serviceInstances/{{service_instance_id}}") + if err != nil { + return err + } + + billingProject := "" + + if config.BillingProject != "" { + billingProject = config.BillingProject + } + + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: url, + UserAgent: config.UserAgent, + }) + if err == nil { + return fmt.Errorf("DataprocGdcServiceInstance still exists at %s", url) + } + } + + return nil + } +} diff --git 
a/google-beta/services/dataprocgdc/resource_dataproc_gdc_service_instance_sweeper.go b/google-beta/services/dataprocgdc/resource_dataproc_gdc_service_instance_sweeper.go new file mode 100644 index 0000000000..b8497134c0 --- /dev/null +++ b/google-beta/services/dataprocgdc/resource_dataproc_gdc_service_instance_sweeper.go @@ -0,0 +1,143 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** Type: MMv1 *** +// +// ---------------------------------------------------------------------------- +// +// This file is automatically generated by Magic Modules and manual +// changes will be clobbered when the file is regenerated. +// +// Please read more about how to change this file in +// .github/CONTRIBUTING.md. +// +// ---------------------------------------------------------------------------- + +package dataprocgdc + +import ( + "context" + "log" + "strings" + "testing" + + "github.com/hashicorp/terraform-provider-google-beta/google-beta/envvar" + "github.com/hashicorp/terraform-provider-google-beta/google-beta/sweeper" + "github.com/hashicorp/terraform-provider-google-beta/google-beta/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google-beta/google-beta/transport" +) + +func init() { + sweeper.AddTestSweepers("DataprocGdcServiceInstance", testSweepDataprocGdcServiceInstance) +} + +// At the time of writing, the CI only passes us-central1 as the region +func testSweepDataprocGdcServiceInstance(region string) error { + resourceName := "DataprocGdcServiceInstance" + log.Printf("[INFO][SWEEPER_LOG] Starting sweeper for %s", resourceName) + + config, err := sweeper.SharedConfigForRegion(region) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error getting shared config for region: %s", err) + return err + } + + err = config.LoadAndValidate(context.Background()) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error loading: %s", err) + return err + } + + t := &testing.T{} + billingId := envvar.GetTestBillingAccountFromEnv(t) + + // Setup variables to replace in list template + d := &tpgresource.ResourceDataMock{ + FieldsInSchema: map[string]interface{}{ + "project": config.Project, + "region": region, + "location": region, + "zone": "-", + "billing_account": billingId, + }, + } + + listTemplate := strings.Split("https://dataprocgdc.googleapis.com/v1/projects/{{project}}/locations/{{location}}/serviceInstances", "?")[0] + listUrl, err := tpgresource.ReplaceVars(d, config, listTemplate) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error preparing sweeper list url: %s", err) + return nil + } + + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: config.Project, + RawURL: listUrl, + UserAgent: config.UserAgent, + }) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] Error in response from request %s: %s", listUrl, err) + return nil + } + + resourceList, ok := res["serviceInstances"] + if !ok { + log.Printf("[INFO][SWEEPER_LOG] Nothing found in response.") + return nil + } + + rl := resourceList.([]interface{}) + + log.Printf("[INFO][SWEEPER_LOG] Found %d items in %s list response.", len(rl), resourceName) + // Keep count of items that aren't sweepable for logging. + nonPrefixCount := 0 + for _, ri := range rl { + obj := ri.(map[string]interface{}) + var name string + // Id detected in the delete URL, attempt to use id. 
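+		// Prefer the id field when present and fall back to name otherwise;
+		// GetResourceNameFromSelfLink keeps only the final path segment, which is
+		// appended to the delete URL below. Resources that do not carry the test
+		// prefix are skipped by IsSweepableTestResource rather than deleted.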
+ if obj["id"] != nil { + name = tpgresource.GetResourceNameFromSelfLink(obj["id"].(string)) + } else if obj["name"] != nil { + name = tpgresource.GetResourceNameFromSelfLink(obj["name"].(string)) + } else { + log.Printf("[INFO][SWEEPER_LOG] %s resource name and id were nil", resourceName) + return nil + } + // Skip resources that shouldn't be sweeped + if !sweeper.IsSweepableTestResource(name) { + nonPrefixCount++ + continue + } + + deleteTemplate := "https://dataprocgdc.googleapis.com/v1/projects/{{project}}/locations/{{location}}/serviceInstances/{{service_instance_id}}" + deleteUrl, err := tpgresource.ReplaceVars(d, config, deleteTemplate) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error preparing delete url: %s", err) + return nil + } + deleteUrl = deleteUrl + name + + // Don't wait on operations as we may have a lot to delete + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "DELETE", + Project: config.Project, + RawURL: deleteUrl, + UserAgent: config.UserAgent, + }) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] Error deleting for url %s : %s", deleteUrl, err) + } else { + log.Printf("[INFO][SWEEPER_LOG] Sent delete request for %s resource: %s", resourceName, name) + } + } + + if nonPrefixCount > 0 { + log.Printf("[INFO][SWEEPER_LOG] %d items were non-sweepable and skipped.", nonPrefixCount) + } + + return nil +} diff --git a/google-beta/sweeper/gcp_sweeper_test.go b/google-beta/sweeper/gcp_sweeper_test.go index b459de9968..396acbccbd 100644 --- a/google-beta/sweeper/gcp_sweeper_test.go +++ b/google-beta/sweeper/gcp_sweeper_test.go @@ -56,6 +56,7 @@ import ( _ "github.com/hashicorp/terraform-provider-google-beta/google-beta/services/datapipeline" _ "github.com/hashicorp/terraform-provider-google-beta/google-beta/services/dataplex" _ "github.com/hashicorp/terraform-provider-google-beta/google-beta/services/dataproc" + _ "github.com/hashicorp/terraform-provider-google-beta/google-beta/services/dataprocgdc" _ "github.com/hashicorp/terraform-provider-google-beta/google-beta/services/dataprocmetastore" _ "github.com/hashicorp/terraform-provider-google-beta/google-beta/services/datastream" _ "github.com/hashicorp/terraform-provider-google-beta/google-beta/services/deploymentmanager" diff --git a/google-beta/transport/config.go b/google-beta/transport/config.go index 215f380d2f..15fbe73464 100644 --- a/google-beta/transport/config.go +++ b/google-beta/transport/config.go @@ -239,6 +239,7 @@ type Config struct { DataPipelineBasePath string DataplexBasePath string DataprocBasePath string + DataprocGdcBasePath string DataprocMetastoreBasePath string DatastreamBasePath string DeploymentManagerBasePath string @@ -398,6 +399,7 @@ const DataLossPreventionBasePathKey = "DataLossPrevention" const DataPipelineBasePathKey = "DataPipeline" const DataplexBasePathKey = "Dataplex" const DataprocBasePathKey = "Dataproc" +const DataprocGdcBasePathKey = "DataprocGdc" const DataprocMetastoreBasePathKey = "DataprocMetastore" const DatastreamBasePathKey = "Datastream" const DeploymentManagerBasePathKey = "DeploymentManager" @@ -551,6 +553,7 @@ var DefaultBasePaths = map[string]string{ DataPipelineBasePathKey: "https://datapipelines.googleapis.com/v1/", DataplexBasePathKey: "https://dataplex.googleapis.com/v1/", DataprocBasePathKey: "https://dataproc.googleapis.com/v1/", + DataprocGdcBasePathKey: "https://dataprocgdc.googleapis.com/v1/", DataprocMetastoreBasePathKey: "https://metastore.googleapis.com/v1beta/", DatastreamBasePathKey: 
"https://datastream.googleapis.com/v1/", DeploymentManagerBasePathKey: "https://www.googleapis.com/deploymentmanager/v2/", @@ -975,6 +978,11 @@ func SetEndpointDefaults(d *schema.ResourceData) error { "GOOGLE_DATAPROC_CUSTOM_ENDPOINT", }, DefaultBasePaths[DataprocBasePathKey])) } + if d.Get("dataproc_gdc_custom_endpoint") == "" { + d.Set("dataproc_gdc_custom_endpoint", MultiEnvDefault([]string{ + "GOOGLE_DATAPROC_GDC_CUSTOM_ENDPOINT", + }, DefaultBasePaths[DataprocGdcBasePathKey])) + } if d.Get("dataproc_metastore_custom_endpoint") == "" { d.Set("dataproc_metastore_custom_endpoint", MultiEnvDefault([]string{ "GOOGLE_DATAPROC_METASTORE_CUSTOM_ENDPOINT", @@ -2402,6 +2410,7 @@ func ConfigureBasePaths(c *Config) { c.DataPipelineBasePath = DefaultBasePaths[DataPipelineBasePathKey] c.DataplexBasePath = DefaultBasePaths[DataplexBasePathKey] c.DataprocBasePath = DefaultBasePaths[DataprocBasePathKey] + c.DataprocGdcBasePath = DefaultBasePaths[DataprocGdcBasePathKey] c.DataprocMetastoreBasePath = DefaultBasePaths[DataprocMetastoreBasePathKey] c.DatastreamBasePath = DefaultBasePaths[DatastreamBasePathKey] c.DeploymentManagerBasePath = DefaultBasePaths[DeploymentManagerBasePathKey] diff --git a/website/docs/r/dataproc_gdc_service_instance.html.markdown b/website/docs/r/dataproc_gdc_service_instance.html.markdown new file mode 100644 index 0000000000..24a9bb720b --- /dev/null +++ b/website/docs/r/dataproc_gdc_service_instance.html.markdown @@ -0,0 +1,202 @@ +--- +# ---------------------------------------------------------------------------- +# +# *** AUTO GENERATED CODE *** Type: MMv1 *** +# +# ---------------------------------------------------------------------------- +# +# This file is automatically generated by Magic Modules and manual +# changes will be clobbered when the file is regenerated. +# +# Please read more about how to change this file in +# .github/CONTRIBUTING.md. +# +# ---------------------------------------------------------------------------- +subcategory: "Dataproc on GDC" +description: |- + A service instance is an instance of the Dataproc operator running on a GDC cluster. +--- + +# google_dataproc_gdc_service_instance + +A service instance is an instance of the Dataproc operator running on a GDC cluster. + + +To get more information about ServiceInstance, see: + +* [API documentation](https://cloud.google.com/dataproc-gdc/docs/reference/rest/v1/projects.locations.serviceInstances) +* How-to Guides + * [Dataproc Intro](https://cloud.google.com/dataproc/) + + +## Example Usage - Dataprocgdc Serviceinstance + + +```hcl +resource "google_dataproc_gdc_service_instance" "service-instance" { + service_instance_id = "tf-e2e-service-instance" + project = "my-project" + location = "us-west2" + gdce_cluster { + gdce_cluster = "projects/gdce-cluster-monitoring/locations/us-west2/clusters/gdce-prism-prober-ord106" + } + display_name = "A service instance" + labels = { + "test-label": "label-value" + } + service_account = "dataprocgdc-cep-workflows@gdce-cluster-monitoring.iam.gserviceaccount.com" +} +``` + +## Argument Reference + +The following arguments are supported: + + +* `location` - + (Required) + Location of the resource. + +* `service_instance_id` - + (Required) + Id of the service instance. + + +- - - + + +* `gdce_cluster` - + (Optional) + Gdce cluster information. + Structure is [documented below](#nested_gdce_cluster). + +* `display_name` - + (Optional) + User-provided human-readable name to be used in user interfaces. 
+ +* `labels` - + (Optional) + The labels to associate with this service instance. Labels may be used for filtering and billing tracking. + **Note**: This field is non-authoritative, and will only manage the labels present in your configuration. + Please refer to the field `effective_labels` for all of the labels present on the resource. + +* `spark_service_instance_config` - + (Optional) + Spark-specific service instance configuration. + +* `service_account` - + (Optional) + Requested service account to associate with ServiceInstance. + +* `project` - (Optional) The ID of the project in which the resource belongs. + If it is not provided, the provider project is used. + + +The `gdce_cluster` block supports: + +* `gdce_cluster` - + (Required) + Gdce cluster resource id. + +## Attributes Reference + +In addition to the arguments listed above, the following computed attributes are exported: + +* `id` - an identifier for the resource with format `projects/{{project}}/locations/{{location}}/serviceInstances/{{service_instance_id}}` + +* `name` - + Identifier. The name of the service instance. + +* `uid` - + System generated unique identifier for this service instance, formatted as UUID4. + +* `create_time` - + The timestamp when the resource was created. + +* `update_time` - + The timestamp when the resource was most recently updated. + +* `requested_state` - + The intended state to which the service instance is reconciling. Possible values: + * `CREATING` + * `ACTIVE` + * `DISCONNECTED` + * `DELETING` + * `STOPPING` + * `STOPPED` + * `STARTING` + * `UPDATING` + * `FAILED` + +* `state` - + The current state. Possible values: + * `CREATING` + * `ACTIVE` + * `DISCONNECTED` + * `DELETING` + * `STOPPING` + * `STOPPED` + * `STARTING` + * `UPDATING` + * `FAILED` + +* `reconciling` - + Whether the service instance is currently reconciling. True if the current state of the resource does not match the intended state, and the system is working to reconcile them, whether or not the change was user initiated. + +* `state_message` - + A message explaining the current state. + +* `effective_service_account` - + Effective service account associated with ServiceInstance. This will be the service_account if specified. Otherwise, it will be an automatically created per-resource P4SA that also automatically has Fleet Workload. Identity bindings applied. + +* `terraform_labels` - + The combination of labels configured directly on the resource + and default labels configured on the provider. + +* `effective_labels` - + All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Terraform, other clients and services. + + +## Timeouts + +This resource provides the following +[Timeouts](https://developer.hashicorp.com/terraform/plugin/sdkv2/resources/retries-and-customizable-timeouts) configuration options: + +- `create` - Default is 20 minutes. +- `update` - Default is 20 minutes. +- `delete` - Default is 20 minutes. + +## Import + + +ServiceInstance can be imported using any of these accepted formats: + +* `projects/{{project}}/locations/{{location}}/serviceInstances/{{service_instance_id}}` +* `{{project}}/{{location}}/{{service_instance_id}}` +* `{{location}}/{{service_instance_id}}` + + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ServiceInstance using one of the formats above. 
For example: + +```tf +import { + id = "projects/{{project}}/locations/{{location}}/serviceInstances/{{service_instance_id}}" + to = google_dataproc_gdc_service_instance.default +} +``` + +When using the [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import), ServiceInstance can be imported using one of the formats above. For example: + +``` +$ terraform import google_dataproc_gdc_service_instance.default projects/{{project}}/locations/{{location}}/serviceInstances/{{service_instance_id}} +$ terraform import google_dataproc_gdc_service_instance.default {{project}}/{{location}}/{{service_instance_id}} +$ terraform import google_dataproc_gdc_service_instance.default {{location}}/{{service_instance_id}} +``` + +## User Project Overrides + +This resource supports [User Project Overrides](https://registry.terraform.io/providers/hashicorp/google/latest/docs/guides/provider_reference#user_project_override).
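+## Custom Endpoints
+
+This change also registers the service with the provider's custom endpoint machinery: the `dataproc_gdc_custom_endpoint` provider attribute (or the `GOOGLE_DATAPROC_GDC_CUSTOM_ENDPOINT` environment variable) overrides the default base path of `https://dataprocgdc.googleapis.com/v1/`. The snippet below is a minimal sketch of that override; the endpoint value shown is illustrative and would normally point at a test or private service endpoint.
+
+```hcl
+provider "google-beta" {
+  # Illustrative value - defaults to https://dataprocgdc.googleapis.com/v1/
+  dataproc_gdc_custom_endpoint = "https://dataprocgdc.googleapis.com/v1/"
+}
+```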