Merge pull request #3244 from terraform-providers/f-datafactory-pipeline
New Resource: `azurerm_data_factory_pipeline`
mbfrahry authored Apr 16, 2019
2 parents 8243be1 + 72f2633 commit 3f7bbac
Showing 6 changed files with 521 additions and 0 deletions.
5 changes: 5 additions & 0 deletions azurerm/config.go
@@ -227,6 +227,7 @@ type ArmClient struct {
sqlVirtualNetworkRulesClient sql.VirtualNetworkRulesClient

// Data Factory
dataFactoryPipelineClient datafactory.PipelinesClient
dataFactoryClient datafactory.FactoriesClient
dataFactoryDatasetClient datafactory.DatasetsClient
dataFactoryLinkedServiceClient datafactory.LinkedServicesClient
@@ -908,6 +909,10 @@ func (c *ArmClient) registerDataFactoryClients(endpoint, subscriptionId string,
dataFactoryLinkedServiceClient := datafactory.NewLinkedServicesClientWithBaseURI(endpoint, subscriptionId)
c.configureClient(&dataFactoryLinkedServiceClient.Client, auth)
c.dataFactoryLinkedServiceClient = dataFactoryLinkedServiceClient

dataFactoryPipelineClient := datafactory.NewPipelinesClientWithBaseURI(endpoint, subscriptionId)
c.configureClient(&dataFactoryPipelineClient.Client, auth)
c.dataFactoryPipelineClient = dataFactoryPipelineClient
}

func (c *ArmClient) registerDataLakeStoreClients(endpoint, subscriptionId string, auth autorest.Authorizer) {
31 changes: 31 additions & 0 deletions azurerm/data_factory.go
@@ -52,3 +52,34 @@ func flattenDataFactoryAnnotations(input *[]interface{}) []string {
}
return annotations
}

func expandDataFactoryVariables(input map[string]interface{}) map[string]*datafactory.VariableSpecification {
output := make(map[string]*datafactory.VariableSpecification)

for k, v := range input {
// schema.TypeMap surfaces its values as strings, so this assertion is safe
output[k] = &datafactory.VariableSpecification{
Type: datafactory.VariableTypeString,
DefaultValue: v.(string),
}
}

return output
}

func flattenDataFactoryVariables(input map[string]*datafactory.VariableSpecification) map[string]interface{} {
output := make(map[string]interface{})

for k, v := range input {
if v != nil {
// we only support string parameters at this time
val, ok := v.DefaultValue.(string)
if !ok {
log.Printf("[DEBUG] Skipping variable %q since it's not a string", k)
continue
}

output[k] = val
}
}

return output
}
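
A note on these helpers: only string defaults survive the round trip. Below is a minimal standalone sketch, reproducing the flatten helper above (with the skip applied) so it runs on its own, and assuming the SDK's VariableTypeBool constant; it shows a non-string variable being dropped.

package main

import (
    "fmt"
    "log"

    "github.com/Azure/azure-sdk-for-go/services/datafactory/mgmt/2018-06-01/datafactory"
)

// Standalone reproduction of the flatten helper above, for illustration only.
func flattenDataFactoryVariables(input map[string]*datafactory.VariableSpecification) map[string]interface{} {
    output := make(map[string]interface{})
    for k, v := range input {
        if v != nil {
            // only string defaults survive the round trip
            val, ok := v.DefaultValue.(string)
            if !ok {
                log.Printf("[DEBUG] Skipping variable %q since it's not a string", k)
                continue
            }
            output[k] = val
        }
    }
    return output
}

func main() {
    input := map[string]*datafactory.VariableSpecification{
        "environment": {Type: datafactory.VariableTypeString, DefaultValue: "staging"},
        "enabled":     {Type: datafactory.VariableTypeBool, DefaultValue: true},
    }
    // Prints map[environment:staging]; "enabled" is logged at DEBUG and omitted.
    fmt.Println(flattenDataFactoryVariables(input))
}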
1 change: 1 addition & 0 deletions azurerm/provider.go
@@ -223,6 +223,7 @@ func Provider() terraform.ResourceProvider {
"azurerm_data_factory": resourceArmDataFactory(),
"azurerm_data_factory_dataset_sql_server_table": resourceArmDataFactoryDatasetSQLServerTable(),
"azurerm_data_factory_linked_service_sql_server": resourceArmDataFactoryLinkedServiceSQLServer(),
"azurerm_data_factory_pipeline": resourceArmDataFactoryPipeline(),
"azurerm_data_lake_analytics_account": resourceArmDataLakeAnalyticsAccount(),
"azurerm_data_lake_analytics_firewall_rule": resourceArmDataLakeAnalyticsFirewallRule(),
"azurerm_data_lake_store_file": resourceArmDataLakeStoreFile(),
205 changes: 205 additions & 0 deletions azurerm/resource_arm_data_factory_pipeline.go
@@ -0,0 +1,205 @@
package azurerm

import (
"fmt"
"log"
"regexp"

"github.com/Azure/azure-sdk-for-go/services/datafactory/mgmt/2018-06-01/datafactory"
"github.com/hashicorp/terraform/helper/schema"
"github.com/hashicorp/terraform/helper/validation"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
)

func resourceArmDataFactoryPipeline() *schema.Resource {
return &schema.Resource{
Create: resourceArmDataFactoryPipelineCreateUpdate,
Read: resourceArmDataFactoryPipelineRead,
Update: resourceArmDataFactoryPipelineCreateUpdate,
Delete: resourceArmDataFactoryPipelineDelete,
Importer: &schema.ResourceImporter{
State: schema.ImportStatePassthrough,
},

Schema: map[string]*schema.Schema{
"name": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
ValidateFunc: validateAzureRMDataFactoryPipelineName,
},

"data_factory_name": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
ValidateFunc: validation.StringMatch(
regexp.MustCompile(`^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$`),
`Invalid data_factory_name, see https://docs.microsoft.com/en-us/azure/data-factory/naming-rules`,
),
},

"resource_group_name": resourceGroupNameSchema(),

"parameters": {
Type: schema.TypeMap,
Optional: true,
},

"variables": {
Type: schema.TypeMap,
Optional: true,
},

"description": {
Type: schema.TypeString,
Optional: true,
},

"annotations": {
Type: schema.TypeList,
Optional: true,
Elem: &schema.Schema{
Type: schema.TypeString,
},
},
},
}
}

func resourceArmDataFactoryPipelineCreateUpdate(d *schema.ResourceData, meta interface{}) error {
client := meta.(*ArmClient).dataFactoryPipelineClient
ctx := meta.(*ArmClient).StopContext

log.Printf("[INFO] preparing arguments for Data Factory Pipeline creation.")

resourceGroupName := d.Get("resource_group_name").(string)
name := d.Get("name").(string)
dataFactoryName := d.Get("data_factory_name").(string)

if requireResourcesToBeImported && d.IsNewResource() {
existing, err := client.Get(ctx, resourceGroupName, dataFactoryName, name, "")
if err != nil {
if !utils.ResponseWasNotFound(existing.Response) {
return fmt.Errorf("Error checking for presence of existing Data Factory Pipeline %q (Resource Group %q / Data Factory %q): %s", name, resourceGroupName, dataFactoryName, err)
}
}

if existing.ID != nil && *existing.ID != "" {
return tf.ImportAsExistsError("azurerm_data_factory_pipeline", *existing.ID)
}
}

description := d.Get("description").(string)
pipeline := &datafactory.Pipeline{
Parameters: expandDataFactoryParameters(d.Get("parameters").(map[string]interface{})),
Variables: expandDataFactoryVariables(d.Get("variables").(map[string]interface{})),
Description: &description,
}

if v, ok := d.GetOk("annotations"); ok {
annotations := v.([]interface{})
pipeline.Annotations = &annotations
} else {
annotations := make([]interface{}, 0)
pipeline.Annotations = &annotations
}

config := datafactory.PipelineResource{
Pipeline: pipeline,
}

if _, err := client.CreateOrUpdate(ctx, resourceGroupName, dataFactoryName, name, config, ""); err != nil {
return fmt.Errorf("Error creating Data Factory Pipeline %q (Resource Group %q / Data Factory %q): %+v", name, resourceGroupName, dataFactoryName, err)
}

read, err := client.Get(ctx, resourceGroupName, dataFactoryName, name, "")
if err != nil {
return fmt.Errorf("Error retrieving Data Factory Pipeline %q (Resource Group %q / Data Factory %q): %+v", name, resourceGroupName, dataFactoryName, err)
}

if read.ID == nil {
return fmt.Errorf("Cannot read Data Factory Pipeline %q (Resource Group %q / Data Factory %q) ID", name, resourceGroupName, dataFactoryName)
}

d.SetId(*read.ID)

return resourceArmDataFactoryPipelineRead(d, meta)
}

func resourceArmDataFactoryPipelineRead(d *schema.ResourceData, meta interface{}) error {
client := meta.(*ArmClient).dataFactoryPipelineClient
ctx := meta.(*ArmClient).StopContext

id, err := parseAzureResourceID(d.Id())
if err != nil {
return err
}
dataFactoryName := id.Path["factories"]
name := id.Path["pipelines"]

resp, err := client.Get(ctx, id.ResourceGroup, dataFactoryName, name, "")
if err != nil {
if utils.ResponseWasNotFound(resp.Response) {
d.SetId("")
log.Printf("[DEBUG] Data Factory Pipeline %q was not found in Resource Group %q - removing from state!", name, id.ResourceGroup)
return nil
}
return fmt.Errorf("Error reading the state of Data Factory Pipeline %q: %+v", name, err)
}

d.Set("name", resp.Name)
d.Set("resource_group_name", id.ResourceGroup)
d.Set("data_factory_name", dataFactoryName)

if props := resp.Pipeline; props != nil {
d.Set("description", props.Description)

parameters := flattenDataFactoryParameters(props.Parameters)
if err := d.Set("parameters", parameters); err != nil {
return fmt.Errorf("Error setting `parameters`: %+v", err)
}

annotations := flattenDataFactoryAnnotations(props.Annotations)
if err := d.Set("annotations", annotations); err != nil {
return fmt.Errorf("Error setting `annotations`: %+v", err)
}

variables := flattenDataFactoryVariables(props.Variables)
if err := d.Set("variables", variables); err != nil {
return fmt.Errorf("Error setting `variables`: %+v", err)
}
}

return nil
}
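
Since the importer uses schema.ImportStatePassthrough, the raw ARM resource ID is handed straight to this Read function. The sketch below is illustrative only: the GUID and names are placeholders, and the key/value pairing merely mimics enough of parseAzureResourceID (not shown in this diff) to show where the id.Path lookups come from.

package main

import (
    "fmt"
    "strings"
)

func main() {
    // Placeholder ID with the segment layout Read/Delete expect.
    id := "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example-rg" +
        "/providers/Microsoft.DataFactory/factories/example-df/pipelines/example-pipeline"

    // Pair up path segments as key/value, mimicking parseAzureResourceID.
    parts := strings.Split(strings.Trim(id, "/"), "/")
    path := map[string]string{}
    for i := 0; i+1 < len(parts); i += 2 {
        path[parts[i]] = parts[i+1]
    }

    // Mirrors id.ResourceGroup, id.Path["factories"], id.Path["pipelines"].
    fmt.Println(path["resourceGroups"], path["factories"], path["pipelines"])
}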

func resourceArmDataFactoryPipelineDelete(d *schema.ResourceData, meta interface{}) error {
client := meta.(*ArmClient).dataFactoryPipelineClient
ctx := meta.(*ArmClient).StopContext

id, err := parseAzureResourceID(d.Id())
if err != nil {
return err
}
dataFactoryName := id.Path["factories"]
name := id.Path["pipelines"]
resourceGroupName := id.ResourceGroup

if _, err = client.Delete(ctx, resourceGroupName, dataFactoryName, name); err != nil {
return fmt.Errorf("Error deleting Data Factory Pipeline %q (Resource Group %q / Data Factory %q): %+v", name, resourceGroupName, dataFactoryName, err)
}

return nil
}

func validateAzureRMDataFactoryPipelineName(v interface{}, k string) (warnings []string, errors []error) {
value := v.(string)
if regexp.MustCompile(`[.+?/<>*%&:\\]`).MatchString(value) {
errors = append(errors, fmt.Errorf("any of '.', '+', '?', '/', '<', '>', '*', '%%', '&', ':', '\\' are not allowed in %q: %q", k, value))
}

return warnings, errors
}
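
A quick standalone check of the validator's character class, assuming the intent is to reject any name containing one of the listed characters:

package main

import (
    "fmt"
    "regexp"
)

func main() {
    // Same character class as validateAzureRMDataFactoryPipelineName above.
    invalid := regexp.MustCompile(`[.+?/<>*%&:\\]`)

    for _, name := range []string{"example-pipeline", "bad?name", `back\slash`} {
        fmt.Printf("%q allowed: %t\n", name, !invalid.MatchString(name))
    }
}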