Start of pipeline resource
mbfrahry committed Apr 1, 2019
1 parent 50a6b55 commit 06bd322
Showing 4 changed files with 359 additions and 1 deletion.
7 changes: 6 additions & 1 deletion azurerm/config.go
@@ -212,7 +212,8 @@ type ArmClient struct {
 	sqlVirtualNetworkRulesClient sql.VirtualNetworkRulesClient
 
 	// Data Factory
-	dataFactoryClient datafactory.FactoriesClient
+	dataFactoryClient         datafactory.FactoriesClient
+	dataFactoryPipelineClient datafactory.PipelinesClient
 
 	// Data Lake Store
 	dataLakeStoreAccountClient storeAccount.AccountsClient
@@ -817,6 +818,10 @@ func (c *ArmClient) registerDataFactoryClients(endpoint, subscriptionId string, auth autorest.Authorizer) {
 	dataFactoryClient := datafactory.NewFactoriesClientWithBaseURI(endpoint, subscriptionId)
 	c.configureClient(&dataFactoryClient.Client, auth)
 	c.dataFactoryClient = dataFactoryClient
+
+	dataFactoryPipelineClient := datafactory.NewPipelinesClientWithBaseURI(endpoint, subscriptionId)
+	c.configureClient(&dataFactoryPipelineClient.Client, auth)
+	c.dataFactoryPipelineClient = dataFactoryPipelineClient
 }
 
 func (c *ArmClient) registerDataLakeStoreClients(endpoint, subscriptionId string, auth autorest.Authorizer) {
1 change: 1 addition & 0 deletions azurerm/provider.go
@@ -206,6 +206,7 @@ func Provider() terraform.ResourceProvider {
"azurerm_container_service": resourceArmContainerService(),
"azurerm_cosmosdb_account": resourceArmCosmosDBAccount(),
"azurerm_data_factory_v2": resourceArmDataFactoryV2(),
"azurerm_data_factory_pipeline": resourceArmDataFactoryPipeline(),
"azurerm_data_lake_analytics_account": resourceArmDataLakeAnalyticsAccount(),
"azurerm_data_lake_analytics_firewall_rule": resourceArmDataLakeAnalyticsFirewallRule(),
"azurerm_data_lake_store_file": resourceArmDataLakeStoreFile(),
203 changes: 203 additions & 0 deletions azurerm/resource_arm_data_factory_pipeline.go
@@ -0,0 +1,203 @@
package azurerm

import (
	"fmt"
	"log"
	"regexp"

	"github.com/Azure/azure-sdk-for-go/services/datafactory/mgmt/2018-06-01/datafactory"
	"github.com/hashicorp/terraform/helper/schema"
	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf"
	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
)

func resourceArmDataFactoryPipeline() *schema.Resource {
	return &schema.Resource{
		Create:   resourceArmDataFactoryPipelineCreateUpdate,
		Read:     resourceArmDataFactoryPipelineRead,
		Update:   resourceArmDataFactoryPipelineCreateUpdate,
		Delete:   resourceArmDataFactoryPipelineDelete,
		Importer: &schema.ResourceImporter{
			State: schema.ImportStatePassthrough,
		},

		Schema: map[string]*schema.Schema{
			"name": {
				Type:         schema.TypeString,
				Required:     true,
				ForceNew:     true,
				ValidateFunc: validateAzureRMDataFactoryPipelineName,
			},

			"data_factory_name": {
				Type:     schema.TypeString,
				Required: true,
				ForceNew: true,
			},

			"resource_group_name": resourceGroupNameSchema(),

			"parameters": {
				Type:     schema.TypeMap,
				Optional: true,
			},

			/*
				"activity": {
					Type:     schema.TypeList,
					Required: true,
					Elem: &schema.Resource{
						Schema: map[string]*schema.Schema{},
					},
				},*/
		},
	}
}

func resourceArmDataFactoryPipelineCreateUpdate(d *schema.ResourceData, meta interface{}) error {
	client := meta.(*ArmClient).dataFactoryPipelineClient
	ctx := meta.(*ArmClient).StopContext

	log.Printf("[INFO] preparing arguments for Data Factory Pipeline creation.")

	resourceGroupName := d.Get("resource_group_name").(string)
	name := d.Get("name").(string)
	dataFactoryName := d.Get("data_factory_name").(string)

	if requireResourcesToBeImported && d.IsNewResource() {
		existing, err := client.Get(ctx, resourceGroupName, dataFactoryName, name, "")
		if err != nil {
			if !utils.ResponseWasNotFound(existing.Response) {
				return fmt.Errorf("Error checking for presence of existing Data Factory Pipeline %q (Resource Group %q / Data Factory %q): %s", name, resourceGroupName, dataFactoryName, err)
			}
		}

		if existing.ID != nil && *existing.ID != "" {
			return tf.ImportAsExistsError("azurerm_data_factory_pipeline", *existing.ID)
		}
	}

	parameters := expandDataFactoryPipelineParameters(d.Get("parameters").(map[string]interface{}))

	pipeline := &datafactory.Pipeline{
		Parameters: parameters,
	}

	config := datafactory.PipelineResource{
		Pipeline: pipeline,
	}

	_, err := client.CreateOrUpdate(ctx, resourceGroupName, dataFactoryName, name, config, "")
	if err != nil {
		return fmt.Errorf("Error creating Data Factory Pipeline %q (Resource Group %q / Data Factory %q): %+v", name, resourceGroupName, dataFactoryName, err)
	}

	read, err := client.Get(ctx, resourceGroupName, dataFactoryName, name, "")
	if err != nil {
		return fmt.Errorf("Error retrieving Data Factory Pipeline %q (Resource Group %q / Data Factory %q): %+v", name, resourceGroupName, dataFactoryName, err)
	}

	if read.ID == nil {
		return fmt.Errorf("Cannot read Data Factory Pipeline %q (Resource Group %q / Data Factory %q) ID", name, resourceGroupName, dataFactoryName)
	}

	d.SetId(*read.ID)

	return resourceArmDataFactoryPipelineRead(d, meta)
}

func resourceArmDataFactoryPipelineRead(d *schema.ResourceData, meta interface{}) error {
	client := meta.(*ArmClient).dataFactoryPipelineClient
	ctx := meta.(*ArmClient).StopContext

	id, err := parseAzureResourceID(d.Id())
	if err != nil {
		return err
	}
	dataFactoryName := id.Path["factories"]
	name := id.Path["pipelines"]
	resourceGroupName := id.ResourceGroup

	resp, err := client.Get(ctx, resourceGroupName, dataFactoryName, name, "")
	if err != nil {
		if utils.ResponseWasNotFound(resp.Response) {
			d.SetId("")
			log.Printf("[DEBUG] Data Factory Pipeline %q was not found in Resource Group %q - removing from state!", name, resourceGroupName)
			return nil
		}
		return fmt.Errorf("Error reading the state of Data Factory Pipeline %q: %+v", name, err)
	}

	d.Set("name", resp.Name)
	d.Set("resource_group_name", resourceGroupName)
	d.Set("data_factory_name", dataFactoryName)

	if props := resp.Pipeline; props != nil {
		parameters := flattenDataFactoryPipelineParameters(props.Parameters)
		if err := d.Set("parameters", parameters); err != nil {
			return fmt.Errorf("Error setting `parameters`: %+v", err)
		}
	}

	return nil
}

func resourceArmDataFactoryPipelineDelete(d *schema.ResourceData, meta interface{}) error {
	client := meta.(*ArmClient).dataFactoryPipelineClient
	ctx := meta.(*ArmClient).StopContext

	id, err := parseAzureResourceID(d.Id())
	if err != nil {
		return err
	}
	dataFactoryName := id.Path["factories"]
	name := id.Path["pipelines"]
	resourceGroupName := id.ResourceGroup

	_, err = client.Delete(ctx, resourceGroupName, dataFactoryName, name)
	if err != nil {
		return fmt.Errorf("Error deleting Data Factory Pipeline %q (Resource Group %q / Data Factory %q): %+v", name, resourceGroupName, dataFactoryName, err)
	}

	return nil
}

func validateAzureRMDataFactoryPipelineName(v interface{}, k string) (warnings []string, errors []error) {
	value := v.(string)
	if regexp.MustCompile(`^[.+?/<>*%&:\\]+$`).MatchString(value) {
		errors = append(errors, fmt.Errorf("any of '.', '+', '?', '/', '<', '>', '*', '%%', '&', ':', '\\', are not allowed in %q: %q", k, value))
	}

	return warnings, errors
}

func expandDataFactoryPipelineParameters(input map[string]interface{}) map[string]*datafactory.ParameterSpecification {
	output := make(map[string]*datafactory.ParameterSpecification)

	for k, v := range input {
		output[k] = &datafactory.ParameterSpecification{
			Type:         datafactory.ParameterTypeString,
			DefaultValue: v.(string),
		}
	}

	return output
}

func flattenDataFactoryPipelineParameters(input map[string]*datafactory.ParameterSpecification) map[string]interface{} {
	output := make(map[string]interface{})

	for k, v := range input {
		if v != nil {
			// we only support string parameters at this time
			val, ok := v.DefaultValue.(string)
			if !ok {
				log.Printf("[DEBUG] Skipping parameter %q since it's not a string", k)
			}

			output[k] = val
		}
	}

	return output
}
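
For reference, here is a minimal standalone sketch (not part of this commit; the main wrapper and the sample values are illustrative) of how a Terraform parameters map round-trips through the expand and flatten helpers above: every entry is stored as a string-typed ParameterSpecification whose DefaultValue carries the configured value, and only string defaults are read back for state.

package main

import (
	"fmt"

	"github.com/Azure/azure-sdk-for-go/services/datafactory/mgmt/2018-06-01/datafactory"
)

func main() {
	// Raw map as Terraform passes it to the resource's Create/Update function.
	raw := map[string]interface{}{"test": "testparameter"}

	// Expand step: each entry becomes a string-typed ParameterSpecification
	// with the configured value stored as the default, mirroring
	// expandDataFactoryPipelineParameters.
	expanded := make(map[string]*datafactory.ParameterSpecification)
	for k, v := range raw {
		expanded[k] = &datafactory.ParameterSpecification{
			Type:         datafactory.ParameterTypeString,
			DefaultValue: v.(string),
		}
	}

	// Flatten step: read the string default back out for Terraform state,
	// as flattenDataFactoryPipelineParameters does for string parameters.
	flattened := make(map[string]interface{})
	for k, v := range expanded {
		if v == nil {
			continue
		}
		if s, ok := v.DefaultValue.(string); ok {
			flattened[k] = s
		}
	}

	fmt.Println(flattened) // map[test:testparameter]
}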
149 changes: 149 additions & 0 deletions azurerm/resource_arm_data_factory_pipeline_test.go
@@ -0,0 +1,149 @@
package azurerm

import (
	"fmt"
	"testing"

	"github.com/hashicorp/terraform/helper/resource"
	"github.com/hashicorp/terraform/terraform"
	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf"
	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
)

func TestAccAzureRMDataFactoryPipeline_basic(t *testing.T) {
	resourceName := "azurerm_data_factory_pipeline.test"
	ri := tf.AccRandTimeInt()
	config := testAccAzureRMDataFactoryPipeline_basic(ri, testLocation())
	config2 := testAccAzureRMDataFactoryPipeline_update(ri, testLocation())

	resource.ParallelTest(t, resource.TestCase{
		PreCheck:     func() { testAccPreCheck(t) },
		Providers:    testAccProviders,
		CheckDestroy: testCheckAzureRMDataFactoryPipelineDestroy,
		Steps: []resource.TestStep{
			{
				Config: config,
				Check: resource.ComposeTestCheckFunc(
					testCheckAzureRMDataFactoryPipelineExists(resourceName),
					resource.TestCheckResourceAttr(resourceName, "parameters.%", "1"),
					resource.TestCheckResourceAttr(resourceName, "parameters.test", "testparameter"),
				),
			},
			{
				Config: config2,
				Check: resource.ComposeTestCheckFunc(
					testCheckAzureRMDataFactoryPipelineExists(resourceName),
					resource.TestCheckResourceAttr(resourceName, "parameters.%", "2"),
					resource.TestCheckResourceAttr(resourceName, "parameters.test", "testparameter"),
					resource.TestCheckResourceAttr(resourceName, "parameters.test2", "testparameter2"),
				),
			},
			{
				ResourceName:      resourceName,
				ImportState:       true,
				ImportStateVerify: true,
			},
		},
	})
}

func testCheckAzureRMDataFactoryPipelineDestroy(s *terraform.State) error {
	client := testAccProvider.Meta().(*ArmClient).dataFactoryPipelineClient
	for _, rs := range s.RootModule().Resources {
		if rs.Type != "azurerm_data_factory_pipeline" {
			continue
		}

		name := rs.Primary.Attributes["name"]
		dataFactoryName := rs.Primary.Attributes["data_factory_name"]
		resourceGroup := rs.Primary.Attributes["resource_group_name"]

		ctx := testAccProvider.Meta().(*ArmClient).StopContext
		resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "")

		if err != nil {
			if !utils.ResponseWasNotFound(resp.Response) {
				return err
			}
		}

		return nil
	}
	return nil
}

func testCheckAzureRMDataFactoryPipelineExists(resourceName string) resource.TestCheckFunc {
	return func(s *terraform.State) error {
		rs, ok := s.RootModule().Resources[resourceName]
		if !ok {
			return fmt.Errorf("Not found: %s", resourceName)
		}

		name := rs.Primary.Attributes["name"]
		dataFactoryName := rs.Primary.Attributes["data_factory_name"]
		resourceGroup := rs.Primary.Attributes["resource_group_name"]

		client := testAccProvider.Meta().(*ArmClient).dataFactoryPipelineClient
		ctx := testAccProvider.Meta().(*ArmClient).StopContext
		resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "")
		if err != nil {
			if utils.ResponseWasNotFound(resp.Response) {
				return fmt.Errorf("Bad: Data Factory Pipeline %q (Resource Group %q / Data Factory %q) does not exist", name, resourceGroup, dataFactoryName)
			}
			return fmt.Errorf("Bad: Get on DataFactoryPipelineClient: %+v", err)
		}

		return nil
	}
}

func testAccAzureRMDataFactoryPipeline_basic(rInt int, location string) string {
	return fmt.Sprintf(`
resource "azurerm_resource_group" "test" {
  name     = "acctestrg-%d"
  location = "%s"
}
resource "azurerm_data_factory_v2" "test" {
  name                = "acctestdfv2%d"
  location            = "${azurerm_resource_group.test.location}"
  resource_group_name = "${azurerm_resource_group.test.name}"
}
resource "azurerm_data_factory_pipeline" "test" {
  name                = "acctest%d"
  resource_group_name = "${azurerm_resource_group.test.name}"
  data_factory_name   = "${azurerm_data_factory_v2.test.name}"
  parameters = {
    test = "testparameter"
  }
}
`, rInt, location, rInt, rInt)
}

func testAccAzureRMDataFactoryPipeline_update(rInt int, location string) string {
	return fmt.Sprintf(`
resource "azurerm_resource_group" "test" {
  name     = "acctestrg-%d"
  location = "%s"
}
resource "azurerm_data_factory_v2" "test" {
  name                = "acctestdfv2%d"
  location            = "${azurerm_resource_group.test.location}"
  resource_group_name = "${azurerm_resource_group.test.name}"
}
resource "azurerm_data_factory_pipeline" "test" {
  name                = "acctest%d"
  resource_group_name = "${azurerm_resource_group.test.name}"
  data_factory_name   = "${azurerm_data_factory_v2.test.name}"
  parameters = {
    test  = "testparameter"
    test2 = "testparameter2"
  }
}
`, rInt, location, rInt, rInt)
}
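
Below is a small in-package test sketch (hypothetical, not included in this commit) that documents the behavior of validateAzureRMDataFactoryPipelineName as written: because the character class is anchored with ^ and $, only names consisting entirely of the listed characters produce a validation error.

package azurerm

import "testing"

func TestValidateAzureRMDataFactoryPipelineName(t *testing.T) {
	// Names paired with whether the validator is expected to accept them.
	cases := map[string]bool{
		"acctestPipeline":  true,  // ordinary name, no error
		"pipeline.number1": true,  // contains '.', but the anchored pattern does not reject it
		"?*&:":             false, // consists only of disallowed characters, so it errors
	}

	for name, expectValid := range cases {
		_, errors := validateAzureRMDataFactoryPipelineName(name, "name")
		if valid := len(errors) == 0; valid != expectValid {
			t.Fatalf("expected valid=%t for %q, got %t", expectValid, name, valid)
		}
	}
}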
