-
Notifications
You must be signed in to change notification settings - Fork 9.6k
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Google Cloud: BigQuery DataSet and Table resources #3764
Changes from all commits
4bef567
f440a28
191b7fc
00f0c1b
e6a59d8
9132344
a31f5a5
aae7410
e9b39d1
27b8418
7850f11
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,232 @@ | ||
package google | ||
|
||
import ( | ||
"fmt" | ||
"github.com/hashicorp/terraform/helper/schema" | ||
"google.golang.org/api/bigquery/v2" | ||
"google.golang.org/api/googleapi" | ||
) | ||
|
||
func resourceBigQueryDataset() *schema.Resource { | ||
return &schema.Resource{ | ||
Create: resourceBigQueryDatasetCreate, | ||
Read: resourceBigQueryDatasetRead, | ||
Update: resourceBigQueryDatasetUpdate, | ||
Delete: resourceBigQueryDatasetDelete, | ||
|
||
Schema: map[string]*schema.Schema{ | ||
"datasetId": &schema.Schema{ | ||
Type: schema.TypeString, | ||
Required: true, | ||
ForceNew: true, | ||
}, | ||
|
||
"friendlyName": &schema.Schema{ | ||
Type: schema.TypeString, | ||
Optional: true, | ||
}, | ||
|
||
"description": &schema.Schema{ | ||
Type: schema.TypeString, | ||
Optional: true, | ||
}, | ||
|
||
"location": &schema.Schema{ | ||
Type: schema.TypeString, | ||
Optional: true, | ||
}, | ||
|
||
"defaultTableExpirationMs": &schema.Schema{ | ||
Type: schema.TypeInt, | ||
Optional: true, | ||
}, | ||
|
||
"access": &schema.Schema{ | ||
Type: schema.TypeList, | ||
Optional: true, | ||
Elem: &schema.Resource{ | ||
Schema: map[string]*schema.Schema{ | ||
"role": &schema.Schema{ | ||
Type: schema.TypeString, | ||
Optional: true, | ||
}, | ||
|
||
"userByEmail": &schema.Schema{ | ||
Type: schema.TypeString, | ||
Optional: true, | ||
}, | ||
|
||
"groupByEmail": &schema.Schema{ | ||
Type: schema.TypeString, | ||
Optional: true, | ||
}, | ||
|
||
"domain": &schema.Schema{ | ||
Type: schema.TypeString, | ||
Optional: true, | ||
}, | ||
|
||
"specialGroup": &schema.Schema{ | ||
Type: schema.TypeString, | ||
Optional: true, | ||
}, | ||
|
||
"view": &schema.Schema{ | ||
Type: schema.TypeList, | ||
Optional: true, | ||
Elem: &schema.Resource{ | ||
Schema: map[string]*schema.Schema{ | ||
"projectId": &schema.Schema{ | ||
Type: schema.TypeString, | ||
Optional: true, | ||
}, | ||
|
||
"datasetId": &schema.Schema{ | ||
Type: schema.TypeString, | ||
Optional: true, | ||
}, | ||
|
||
"tableId": &schema.Schema{ | ||
Type: schema.TypeString, | ||
Optional: true, | ||
}, | ||
}, | ||
}, | ||
}, | ||
}, | ||
}, | ||
}, | ||
|
||
"self_link": &schema.Schema{ | ||
Type: schema.TypeString, | ||
Computed: true, | ||
}, | ||
|
||
"id": &schema.Schema{ | ||
Type: schema.TypeString, | ||
Computed: true, | ||
}, | ||
|
||
"lastModifiedTime": &schema.Schema{ | ||
Type: schema.TypeInt, | ||
Computed: true, | ||
}, | ||
}, | ||
} | ||
} | ||
|
||
func resourceBigQueryDatasetCreate(d *schema.ResourceData, meta interface{}) error { | ||
config := meta.(*Config) | ||
|
||
datasetRef := &bigquery.DatasetReference{DatasetId: d.Get("datasetId").(string), ProjectId: config.Project} | ||
|
||
dataset := &bigquery.Dataset{DatasetReference: datasetRef} | ||
|
||
if v, ok := d.GetOk("friendlyName"); ok { | ||
dataset.FriendlyName = v.(string) | ||
} | ||
|
||
if v, ok := d.GetOk("description"); ok { | ||
dataset.Description = v.(string) | ||
} | ||
|
||
if v, ok := d.GetOk("location"); ok { | ||
dataset.Location = v.(string) | ||
} | ||
|
||
if v, ok := d.GetOk("defaultTableExpirationMs"); ok { | ||
dataset.DefaultTableExpirationMs = v.(int64) | ||
} | ||
|
||
if v, ok := d.GetOk("access"); ok { | ||
accessList := make([]*bigquery.DatasetAccess, 0) | ||
for _, access_interface := range v.([]interface{}) { | ||
access_parsed := &bigquery.DatasetAccess{} | ||
access_raw := access_interface.(map[string]interface{}) | ||
if role, ok := access_raw["role"]; ok { | ||
access_parsed.Role = role.(string) | ||
} | ||
if userByEmail, ok := access_raw["userByEmail"]; ok { | ||
access_parsed.UserByEmail = userByEmail.(string) | ||
} | ||
if groupByEmail, ok := access_raw["groupByEmail"]; ok { | ||
access_parsed.GroupByEmail = groupByEmail.(string) | ||
} | ||
if domain, ok := access_raw["domain"]; ok { | ||
access_parsed.Domain = domain.(string) | ||
} | ||
if specialGroup, ok := access_raw["specialGroup"]; ok { | ||
access_parsed.SpecialGroup = specialGroup.(string) | ||
} | ||
if view, ok := access_raw["view"]; ok { | ||
view_raw := view.([]interface{}) | ||
if len(view_raw) > 1 { | ||
fmt.Errorf("There are more then one view records in a single access record, this is not valid.") | ||
} | ||
view_parsed := &bigquery.TableReference{} | ||
view_zero := view_raw[0].(map[string]interface{}) | ||
if projectId, ok := view_zero["projectId"]; ok { | ||
view_parsed.ProjectId = projectId.(string) | ||
} | ||
if datasetId, ok := view_zero["datasetId"]; ok { | ||
view_parsed.DatasetId = datasetId.(string) | ||
} | ||
if tableId, ok := view_zero["tableId"]; ok { | ||
view_parsed.TableId = tableId.(string) | ||
} | ||
access_parsed.View = view_parsed | ||
} | ||
|
||
accessList = append(accessList, access_parsed) | ||
} | ||
|
||
dataset.Access = accessList | ||
} | ||
|
||
call := config.clientBigQuery.Datasets.Insert(config.Project, dataset) | ||
_, err := call.Do() | ||
if err != nil { | ||
return err | ||
} | ||
|
||
return resourceBigQueryDatasetRead(d, meta) | ||
} | ||
|
||
func resourceBigQueryDatasetRead(d *schema.ResourceData, meta interface{}) error { | ||
config := meta.(*Config) | ||
|
||
call := config.clientBigQuery.Datasets.Get(config.Project, d.Get("datasetId").(string)) | ||
res, err := call.Do() | ||
if err != nil { | ||
if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == 404 { | ||
// The resource doesn't exist anymore | ||
d.SetId("") | ||
|
||
return nil | ||
} | ||
return fmt.Errorf("Failed to read bigquery dataset %s with err: %q", d.Get("datasetId").(string), err) | ||
} | ||
|
||
d.SetId(res.Id) | ||
d.Set("self_link", res.SelfLink) | ||
d.Set("lastModifiedTime", res.LastModifiedTime) | ||
d.Set("id", res.Id) | ||
return nil | ||
} | ||
|
||
func resourceBigQueryDatasetUpdate(d *schema.ResourceData, meta interface{}) error { | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. A note about the updates, the Access Controls should be managed like the Instances I talk about here. Let me know if my explanation there is unclear. Also, you mentioned patch semantics. They can get a little tricky, since There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. @lwander thanks for the link to handle Access Controls gracefully. I will endeavor to do that. I'm digging into the patch for updates now. Seems sane if a bit verbose to iterate through all optional HasChanges and then GetChange and only update the local copy of things tf has known about in the recent past, right? I think the process would be:
I can see myself going bonkers typing all of that but seems straightforward. Am I missing anything, big picture? And I'm not seeing any tests of update functions, is there any you'd like me to include? thanks for your patience with me being so slow on this. |
||
return nil | ||
} | ||
|
||
func resourceBigQueryDatasetDelete(d *schema.ResourceData, meta interface{}) error { | ||
config := meta.(*Config) | ||
|
||
call := config.clientBigQuery.Datasets.Delete(config.Project, d.Get("datasetId").(string)) | ||
err := call.Do() | ||
if err != nil { | ||
return err | ||
} | ||
|
||
d.SetId("") | ||
return nil | ||
} |
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,69 @@ | ||
package google | ||
|
||
import ( | ||
"fmt" | ||
"testing" | ||
|
||
"github.com/hashicorp/terraform/helper/resource" | ||
"github.com/hashicorp/terraform/terraform" | ||
) | ||
|
||
func TestAccBigqueryDatasetCreate(t *testing.T) { | ||
|
||
resource.Test(t, resource.TestCase{ | ||
PreCheck: func() { testAccPreCheck(t) }, | ||
Providers: testAccProviders, | ||
CheckDestroy: testAccCheckBigQueryDatasetDestroy, | ||
Steps: []resource.TestStep{ | ||
resource.TestStep{ | ||
Config: testAccBigQueryDataset, | ||
Check: resource.ComposeTestCheckFunc( | ||
testAccBigQueryDatasetExists( | ||
"google_bigquery_dataset.foobar"), | ||
), | ||
}, | ||
}, | ||
}) | ||
} | ||
|
||
func testAccCheckBigQueryDatasetDestroy(s *terraform.State) error { | ||
for _, rs := range s.RootModule().Resources { | ||
if rs.Type != "google_bigquery_dataset" { | ||
continue | ||
} | ||
|
||
config := testAccProvider.Meta().(*Config) | ||
_, err := config.clientBigQuery.Datasets.Get(config.Project, rs.Primary.Attributes["datasetId"]).Do() | ||
if err != nil { | ||
fmt.Errorf("Dataset still present") | ||
} | ||
} | ||
|
||
return nil | ||
} | ||
|
||
func testAccBigQueryDatasetExists(n string) resource.TestCheckFunc { | ||
return func(s *terraform.State) error { | ||
rs, ok := s.RootModule().Resources[n] | ||
if !ok { | ||
return fmt.Errorf("Not found: %s", n) | ||
} | ||
|
||
if rs.Primary.ID == "" { | ||
return fmt.Errorf("No ID is set") | ||
} | ||
config := testAccProvider.Meta().(*Config) | ||
_, err := config.clientBigQuery.Datasets.Get(config.Project, rs.Primary.Attributes["datasetId"]).Do() | ||
if err != nil { | ||
fmt.Errorf("BigQuery Dataset not present") | ||
} | ||
|
||
return nil | ||
} | ||
} | ||
|
||
// testAccBigQueryDataset is the minimal fixture configuration used by the
// acceptance test above.
const testAccBigQueryDataset = `
resource "google_bigquery_dataset" "foobar" {
	datasetId = "foobar"
	friendlyName = "hi"
}`
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
This gets a bit tricky, since we implemented ACLs for GCS a little differently, but that was only possible because the API is different. The idea there was that you just provide a list of role/entity pairs. The GCS API basically squashed `userByEmail`, `groupByEmail`, and `domain` all into one field, so all we had to do was have the user concatenate the contents of that field with the `role`, and that would be the access control object. Second, there was no `view`-type object attached to each GCS ACL, since the ACL is an entirely separate resource for GCS objects and buckets. Basically, to match the GCS Terraform interface, we'd really have to contort the BigQuery API, which is a shame.

I'd say what you have here is best; I'm just leaving this comment in case people have questions about this decision in the future.
@sparkprime, what do you think?