diff --git a/azurerm/config.go b/azurerm/config.go index 941a5b15a0ba..f3c9ab86fa50 100644 --- a/azurerm/config.go +++ b/azurerm/config.go @@ -18,7 +18,8 @@ import ( "github.com/Azure/azure-sdk-for-go/services/containerregistry/mgmt/2017-10-01/containerregistry" "github.com/Azure/azure-sdk-for-go/services/containerservice/mgmt/2018-03-31/containerservice" "github.com/Azure/azure-sdk-for-go/services/cosmos-db/mgmt/2015-04-08/documentdb" - "github.com/Azure/azure-sdk-for-go/services/datalake/store/mgmt/2016-11-01/account" + analyticsAccount "github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account" + storeAccount "github.com/Azure/azure-sdk-for-go/services/datalake/store/mgmt/2016-11-01/account" "github.com/Azure/azure-sdk-for-go/services/eventgrid/mgmt/2018-01-01/eventgrid" "github.com/Azure/azure-sdk-for-go/services/eventhub/mgmt/2017-04-01/eventhub" "github.com/Azure/azure-sdk-for-go/services/graphrbac/1.6/graphrbac" @@ -143,8 +144,12 @@ type ArmClient struct { sqlVirtualNetworkRulesClient sql.VirtualNetworkRulesClient // Data Lake Store - dataLakeStoreAccountClient account.AccountsClient - dataLakeStoreFirewallRulesClient account.FirewallRulesClient + dataLakeStoreAccountClient storeAccount.AccountsClient + dataLakeStoreFirewallRulesClient storeAccount.FirewallRulesClient + + // Data Lake Analytics + dataLakeAnalyticsAccountClient analyticsAccount.AccountsClient + dataLakeAnalyticsFirewallRulesClient analyticsAccount.FirewallRulesClient // KeyVault keyVaultClient keyvault.VaultsClient @@ -664,13 +669,21 @@ func (c *ArmClient) registerDatabases(endpoint, subscriptionId string, auth auto } func (c *ArmClient) registerDataLakeStoreClients(endpoint, subscriptionId string, auth autorest.Authorizer, sender autorest.Sender) { - accountClient := account.NewAccountsClientWithBaseURI(endpoint, subscriptionId) - c.configureClient(&accountClient.Client, auth) - c.dataLakeStoreAccountClient = accountClient + storeAccountClient := storeAccount.NewAccountsClientWithBaseURI(endpoint, subscriptionId) + c.configureClient(&storeAccountClient.Client, auth) + c.dataLakeStoreAccountClient = storeAccountClient + + storeFirewallRulesClient := storeAccount.NewFirewallRulesClientWithBaseURI(endpoint, subscriptionId) + c.configureClient(&storeFirewallRulesClient.Client, auth) + c.dataLakeStoreFirewallRulesClient = storeFirewallRulesClient + + analyticsAccountClient := analyticsAccount.NewAccountsClientWithBaseURI(endpoint, subscriptionId) + c.configureClient(&analyticsAccountClient.Client, auth) + c.dataLakeAnalyticsAccountClient = analyticsAccountClient - firewallRulesClient := account.NewFirewallRulesClientWithBaseURI(endpoint, subscriptionId) - c.configureClient(&firewallRulesClient.Client, auth) - c.dataLakeStoreFirewallRulesClient = firewallRulesClient + analyticsFirewallRulesClient := analyticsAccount.NewFirewallRulesClientWithBaseURI(endpoint, subscriptionId) + c.configureClient(&analyticsFirewallRulesClient.Client, auth) + c.dataLakeAnalyticsFirewallRulesClient = analyticsFirewallRulesClient } func (c *ArmClient) registerDeviceClients(endpoint, subscriptionId string, auth autorest.Authorizer, sender autorest.Sender) {
diff --git a/azurerm/helpers/azure/datalake.go b/azurerm/helpers/azure/datalake.go new file mode 100644 index 000000000000..1d606ea51bf7 --- /dev/null +++ b/azurerm/helpers/azure/datalake.go @@ -0,0 +1,23 @@ +package azure + +import ( + "regexp" + + "github.com/hashicorp/terraform/helper/schema" + 
"github.com/hashicorp/terraform/helper/validation" +) + +//store and analytic account names are the same +func ValidateDataLakeAccountName() schema.SchemaValidateFunc { + return validation.StringMatch( + regexp.MustCompile(`\A([a-z0-9]{3,24})\z`), + "Name can only consist of lowercase letters and numbers and must be between 3 and 24 characters long", + ) +} + +func ValidateDataLakeFirewallRuleName() schema.SchemaValidateFunc { + return validation.StringMatch( + regexp.MustCompile(`\A([-_a-zA-Z0-9]{3,50})\z`), + "Name can only consist of letters, numbers, underscores and hyphens and must be between 3 and 50 characters long", + ) +} diff --git a/azurerm/provider.go b/azurerm/provider.go index 071bc38b3335..f518ad0925fe 100644 --- a/azurerm/provider.go +++ b/azurerm/provider.go @@ -138,6 +138,8 @@ func Provider() terraform.ResourceProvider { "azurerm_container_service": resourceArmContainerService(), "azurerm_container_group": resourceArmContainerGroup(), "azurerm_cosmosdb_account": resourceArmCosmosDBAccount(), + "azurerm_data_lake_analytics_account": resourceArmDataLakeAnalyticsAccount(), + "azurerm_data_lake_analytics_firewall_rule": resourceArmDataLakeAnalyticsFirewallRule(), "azurerm_data_lake_store": resourceArmDataLakeStore(), "azurerm_data_lake_store_firewall_rule": resourceArmDataLakeStoreFirewallRule(), "azurerm_dns_a_record": resourceArmDnsARecord(), diff --git a/azurerm/resource_arm_data_lake_analytics_account.go b/azurerm/resource_arm_data_lake_analytics_account.go new file mode 100644 index 000000000000..28a9c476ba91 --- /dev/null +++ b/azurerm/resource_arm_data_lake_analytics_account.go @@ -0,0 +1,222 @@ +package azurerm + +import ( + "fmt" + "log" + + "github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/response" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" + + "github.com/hashicorp/terraform/helper/schema" + "github.com/hashicorp/terraform/helper/validation" +) + +func resourceArmDataLakeAnalyticsAccount() *schema.Resource { + return &schema.Resource{ + Create: resourceArmDateLakeAnalyticsAccountCreate, + Read: resourceArmDateLakeAnalyticsAccountRead, + Update: resourceArmDateLakeAnalyticsAccountUpdate, + Delete: resourceArmDateLakeAnalyticsAccountDelete, + + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateDataLakeAccountName(), + }, + + "location": locationSchema(), + + "resource_group_name": resourceGroupNameSchema(), + + "tier": { + Type: schema.TypeString, + Optional: true, + Default: string(account.Consumption), + DiffSuppressFunc: suppress.CaseDifference, + ValidateFunc: validation.StringInSlice([]string{ + string(account.Consumption), + string(account.Commitment100000AUHours), + string(account.Commitment10000AUHours), + string(account.Commitment1000AUHours), + string(account.Commitment100AUHours), + string(account.Commitment500000AUHours), + string(account.Commitment50000AUHours), + string(account.Commitment5000AUHours), + string(account.Commitment500AUHours), + }, true), + }, + + "default_store_account_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + 
ValidateFunc: azure.ValidateDataLakeAccountName(), + }, + + "tags": tagsSchema(), + }, + } +} + +func resourceArmDateLakeAnalyticsAccountCreate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*ArmClient).dataLakeAnalyticsAccountClient + ctx := meta.(*ArmClient).StopContext + + name := d.Get("name").(string) + location := azureRMNormalizeLocation(d.Get("location").(string)) + resourceGroup := d.Get("resource_group_name").(string) + storeAccountName := d.Get("default_store_account_name").(string) + tier := d.Get("tier").(string) + tags := d.Get("tags").(map[string]interface{}) + + log.Printf("[INFO] preparing arguments for Azure ARM Data Lake Analytics Account creation %q (Resource Group %q)", name, resourceGroup) + + dateLakeAnalyticsAccount := account.CreateDataLakeAnalyticsAccountParameters{ + Location: &location, + Tags: expandTags(tags), + CreateDataLakeAnalyticsAccountProperties: &account.CreateDataLakeAnalyticsAccountProperties{ + NewTier: account.TierType(tier), + DefaultDataLakeStoreAccount: &storeAccountName, + DataLakeStoreAccounts: &[]account.AddDataLakeStoreWithAccountParameters{ + { + Name: &storeAccountName, + }, + }, + }, + } + + future, err := client.Create(ctx, resourceGroup, name, dateLakeAnalyticsAccount) + if err != nil { + return fmt.Errorf("Error issuing create request for Data Lake Analytics Account %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + err = future.WaitForCompletion(ctx, client.Client) + if err != nil { + return fmt.Errorf("Error creating Data Lake Analytics Account %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + read, err := client.Get(ctx, resourceGroup, name) + if err != nil { + return fmt.Errorf("Error retrieving Data Lake Analytics Account %q (Resource Group %q): %+v", name, resourceGroup, err) + } + if read.ID == nil { + return fmt.Errorf("Cannot read Data Lake Analytics Account %s (resource group %s) ID", name, resourceGroup) + } + + d.SetId(*read.ID) + + return resourceArmDateLakeAnalyticsAccountRead(d, meta) +} + +func resourceArmDateLakeAnalyticsAccountUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*ArmClient).dataLakeAnalyticsAccountClient + ctx := meta.(*ArmClient).StopContext + + name := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + storeAccountName := d.Get("default_store_account_name").(string) + newTier := d.Get("tier").(string) + newTags := d.Get("tags").(map[string]interface{}) + + props := &account.UpdateDataLakeAnalyticsAccountParameters{ + Tags: expandTags(newTags), + UpdateDataLakeAnalyticsAccountProperties: &account.UpdateDataLakeAnalyticsAccountProperties{ + NewTier: account.TierType(newTier), + DataLakeStoreAccounts: &[]account.UpdateDataLakeStoreWithAccountParameters{ + { + Name: &storeAccountName, + }, + }, + }, + } + + future, err := client.Update(ctx, resourceGroup, name, props) + if err != nil { + return fmt.Errorf("Error issuing update request for Data Lake Analytics Account %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + err = future.WaitForCompletion(ctx, client.Client) + if err != nil { + return fmt.Errorf("Error waiting for the update of Data Lake Analytics Account %q (Resource Group %q) to complete: %+v", name, resourceGroup, err) + } + + return resourceArmDateLakeAnalyticsAccountRead(d, meta) +} + +func resourceArmDateLakeAnalyticsAccountRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*ArmClient).dataLakeAnalyticsAccountClient + ctx := meta.(*ArmClient).StopContext + + id, err 
:= parseAzureResourceID(d.Id()) + if err != nil { + return err + } + + resourceGroup := id.ResourceGroup + name := id.Path["accounts"] + + resp, err := client.Get(ctx, resourceGroup, name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[WARN] Data Lake Analytics Account '%s' was not found (resource group '%s')", name, resourceGroup) + d.SetId("") + return nil + } + return fmt.Errorf("Error making Read request on Azure Data Lake Analytics Account %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + d.Set("name", name) + d.Set("resource_group_name", resourceGroup) + if location := resp.Location; location != nil { + d.Set("location", azureRMNormalizeLocation(*location)) + } + + if properties := resp.DataLakeAnalyticsAccountProperties; properties != nil { + d.Set("tier", string(properties.CurrentTier)) + d.Set("default_store_account_name", properties.DefaultDataLakeStoreAccount) + } + + flattenAndSetTags(d, resp.Tags) + + return nil +} + +func resourceArmDateLakeAnalyticsAccountDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*ArmClient).dataLakeAnalyticsAccountClient + ctx := meta.(*ArmClient).StopContext + + id, err := parseAzureResourceID(d.Id()) + if err != nil { + return err + } + + resourceGroup := id.ResourceGroup + name := id.Path["accounts"] + future, err := client.Delete(ctx, resourceGroup, name) + if err != nil { + if response.WasNotFound(future.Response()) { + return nil + } + return fmt.Errorf("Error issuing delete request for Data Lake Analytics Account %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + err = future.WaitForCompletion(ctx, client.Client) + if err != nil { + if response.WasNotFound(future.Response()) { + return nil + } + return fmt.Errorf("Error deleting Data Lake Analytics Account %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + return nil +} diff --git a/azurerm/resource_arm_data_lake_analytics_account_test.go b/azurerm/resource_arm_data_lake_analytics_account_test.go new file mode 100644 index 000000000000..30dd8303df92 --- /dev/null +++ b/azurerm/resource_arm_data_lake_analytics_account_test.go @@ -0,0 +1,218 @@ +package azurerm + +import ( + "fmt" + "net/http" + "strconv" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +func TestAccAzureRMDataLakeAnalyticsAccount_basic(t *testing.T) { + resourceName := "azurerm_data_lake_analytics_account.test" + ri := acctest.RandInt() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testCheckAzureRMDataLakeAnalyticsAccountDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataLakeAnalyticsAccount_basic(ri, testLocation()), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataLakeAnalyticsAccountExists(resourceName), + resource.TestCheckResourceAttr(resourceName, "tier", "Consumption"), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccAzureRMDataLakeAnalyticsAccount_tier(t *testing.T) { + resourceName := "azurerm_data_lake_analytics_account.test" + ri := acctest.RandInt() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testCheckAzureRMDataLakeAnalyticsAccountDestroy, + Steps: []resource.TestStep{ + { + Config: 
testAccAzureRMDataLakeAnalyticsAccount_tier(ri, testLocation()), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataLakeAnalyticsAccountExists(resourceName), + resource.TestCheckResourceAttr(resourceName, "tier", "Commitment_100AUHours"), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccAzureRMDataLakeAnalyticsAccount_withTags(t *testing.T) { + resourceName := "azurerm_data_lake_analytics_account.test" + ri := acctest.RandInt() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testCheckAzureRMDataLakeAnalyticsAccountDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataLakeAnalyticsAccount_withTags(ri, testLocation()), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataLakeAnalyticsAccountExists(resourceName), + resource.TestCheckResourceAttr(resourceName, "tags.%", "2"), + ), + }, + { + Config: testAccAzureRMDataLakeAnalyticsAccount_withTagsUpdate(ri, testLocation()), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataLakeAnalyticsAccountExists(resourceName), + resource.TestCheckResourceAttr(resourceName, "tags.%", "1"), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testCheckAzureRMDataLakeAnalyticsAccountExists(name string) resource.TestCheckFunc { + return func(s *terraform.State) error { + // Ensure we have enough information in state to look up in API + rs, ok := s.RootModule().Resources[name] + if !ok { + return fmt.Errorf("Not found: %s", name) + } + + accountName := rs.Primary.Attributes["name"] + resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] + if !hasResourceGroup { + return fmt.Errorf("Bad: no resource group found in state for data lake analytics account: %s", name) + } + + conn := testAccProvider.Meta().(*ArmClient).dataLakeAnalyticsAccountClient + ctx := testAccProvider.Meta().(*ArmClient).StopContext + + resp, err := conn.Get(ctx, resourceGroup, accountName) + if err != nil { + return fmt.Errorf("Bad: Get on dataLakeAnalyticsAccountClient: %+v", err) + } + + if resp.StatusCode == http.StatusNotFound { + return fmt.Errorf("Bad: Data Lake Analytics Account %q (resource group: %q) does not exist", accountName, resourceGroup) + } + + return nil + } +} + +func testCheckAzureRMDataLakeAnalyticsAccountDestroy(s *terraform.State) error { + conn := testAccProvider.Meta().(*ArmClient).dataLakeAnalyticsAccountClient + ctx := testAccProvider.Meta().(*ArmClient).StopContext + + for _, rs := range s.RootModule().Resources { + if rs.Type != "azurerm_data_lake_analytics_account" { + continue + } + + accountName := rs.Primary.Attributes["name"] + resourceGroup := rs.Primary.Attributes["resource_group_name"] + + resp, err := conn.Get(ctx, resourceGroup, accountName) + if err != nil { + if resp.StatusCode == http.StatusNotFound { + return nil + } + + return err + } + + return fmt.Errorf("Data Lake Analytics Account still exists:\n%#v", resp) + } + + return nil +} + +func testAccAzureRMDataLakeAnalyticsAccount_basic(rInt int, location string) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_lake_analytics_account" "test" { + name = "acctest%s" + resource_group_name = "${azurerm_resource_group.test.name}" + location = "${azurerm_resource_group.test.location}" + + default_store_account_name = "${azurerm_data_lake_store.test.name}" +} +`, testAccAzureRMDataLakeStore_basic(rInt, location), 
strconv.Itoa(rInt)[0:15]) +} + +func testAccAzureRMDataLakeAnalyticsAccount_tier(rInt int, location string) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_lake_analytics_account" "test" { + name = "acctest%s" + resource_group_name = "${azurerm_resource_group.test.name}" + location = "${azurerm_resource_group.test.location}" + + tier = "Commitment_100AUHours" + + default_store_account_name = "${azurerm_data_lake_store.test.name}" +} +`, testAccAzureRMDataLakeStore_basic(rInt, location), strconv.Itoa(rInt)[0:15]) +} + +func testAccAzureRMDataLakeAnalyticsAccount_withTags(rInt int, location string) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_lake_analytics_account" "test" { + name = "acctest%s" + resource_group_name = "${azurerm_resource_group.test.name}" + location = "${azurerm_resource_group.test.location}" + + default_store_account_name = "${azurerm_data_lake_store.test.name}" + + tags { + environment = "Production" + cost_center = "MSFT" + } +} +`, testAccAzureRMDataLakeStore_basic(rInt, location), strconv.Itoa(rInt)[0:15]) +} + +func testAccAzureRMDataLakeAnalyticsAccount_withTagsUpdate(rInt int, location string) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_lake_analytics_account" "test" { + name = "acctest%s" + resource_group_name = "${azurerm_resource_group.test.name}" + location = "${azurerm_resource_group.test.location}" + + default_store_account_name = "${azurerm_data_lake_store.test.name}" + + tags { + environment = "staging" + } +} +`, testAccAzureRMDataLakeStore_basic(rInt, location), strconv.Itoa(rInt)[0:15]) +} diff --git a/azurerm/resource_arm_data_lake_analytics_firewall_rule.go b/azurerm/resource_arm_data_lake_analytics_firewall_rule.go new file mode 100644 index 000000000000..8a4d01c11e4e --- /dev/null +++ b/azurerm/resource_arm_data_lake_analytics_firewall_rule.go @@ -0,0 +1,153 @@ +package azurerm + +import ( + "fmt" + "log" + + "github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account" + + "github.com/hashicorp/terraform/helper/schema" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/response" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceArmDataLakeAnalyticsFirewallRule() *schema.Resource { + return &schema.Resource{ + Create: resourceArmDateLakeAnalyticsFirewallRuleCreateUpdate, + Read: resourceArmDateLakeAnalyticsFirewallRuleRead, + Update: resourceArmDateLakeAnalyticsFirewallRuleCreateUpdate, + Delete: resourceArmDateLakeAnalyticsFirewallRuleDelete, + + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateDataLakeFirewallRuleName(), + }, + + "account_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateDataLakeAccountName(), + }, + + "resource_group_name": resourceGroupNameSchema(), + + "start_ip_address": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.IPv4Address, + }, + + "end_ip_address": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.IPv4Address, + }, + }, + } +} + +func resourceArmDateLakeAnalyticsFirewallRuleCreateUpdate(d *schema.ResourceData, meta interface{}) error { + 
client := meta.(*ArmClient).dataLakeAnalyticsFirewallRulesClient + ctx := meta.(*ArmClient).StopContext + + name := d.Get("name").(string) + accountName := d.Get("account_name").(string) + resourceGroup := d.Get("resource_group_name").(string) + startIPAddress := d.Get("start_ip_address").(string) + endIPAddress := d.Get("end_ip_address").(string) + + log.Printf("[INFO] preparing arguments for Data Lake Analytics Firewall Rule creation %q (Resource Group %q)", name, resourceGroup) + + firewallRuleParameters := account.CreateOrUpdateFirewallRuleParameters{ + CreateOrUpdateFirewallRuleProperties: &account.CreateOrUpdateFirewallRuleProperties{ + StartIPAddress: utils.String(startIPAddress), + EndIPAddress: utils.String(endIPAddress), + }, + } + + _, err := client.CreateOrUpdate(ctx, resourceGroup, accountName, name, firewallRuleParameters) + if err != nil { + return fmt.Errorf("Error issuing create request for Data Lake Analytics Firewall Rule %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + read, err := client.Get(ctx, resourceGroup, accountName, name) + if err != nil { + return fmt.Errorf("Error retrieving Data Lake Analytics Firewall Rule %q (Account %q / Resource Group %q): %+v", name, accountName, resourceGroup, err) + } + if read.ID == nil { + return fmt.Errorf("Cannot read Data Lake Analytics Firewall Rule %q (Account %q / Resource Group %q) ID", name, accountName, resourceGroup) + } + + d.SetId(*read.ID) + + return resourceArmDateLakeAnalyticsFirewallRuleRead(d, meta) +} + +func resourceArmDateLakeAnalyticsFirewallRuleRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*ArmClient).dataLakeAnalyticsFirewallRulesClient + ctx := meta.(*ArmClient).StopContext + + id, err := parseAzureResourceID(d.Id()) + if err != nil { + return err + } + resourceGroup := id.ResourceGroup + accountName := id.Path["accounts"] + name := id.Path["firewallRules"] + + resp, err := client.Get(ctx, resourceGroup, accountName, name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[WARN] Data Lake Analytics Firewall Rule %q was not found (Account %q / Resource Group %q)", name, accountName, resourceGroup) + d.SetId("") + return nil + } + return fmt.Errorf("Error making Read request on Azure Data Lake Analytics Firewall Rule %q (Account %q / Resource Group %q): %+v", name, accountName, resourceGroup, err) + } + + d.Set("name", name) + d.Set("account_name", accountName) + d.Set("resource_group_name", resourceGroup) + + if props := resp.FirewallRuleProperties; props != nil { + d.Set("start_ip_address", props.StartIPAddress) + d.Set("end_ip_address", props.EndIPAddress) + } + + return nil +} + +func resourceArmDateLakeAnalyticsFirewallRuleDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*ArmClient).dataLakeAnalyticsFirewallRulesClient + ctx := meta.(*ArmClient).StopContext + + id, err := parseAzureResourceID(d.Id()) + if err != nil { + return err + } + + resourceGroup := id.ResourceGroup + accountName := id.Path["accounts"] + name := id.Path["firewallRules"] + + resp, err := client.Delete(ctx, resourceGroup, accountName, name) + if err != nil { + if response.WasNotFound(resp.Response) { + return nil + } + return fmt.Errorf("Error issuing delete request for Data Lake Analytics Firewall Rule %q (Account %q / Resource Group %q): %+v", name, accountName, resourceGroup, err) + } + + return nil +} diff --git a/azurerm/resource_arm_data_lake_analytics_firewall_rule_test.go b/azurerm/resource_arm_data_lake_analytics_firewall_rule_test.go new file mode 100644 index 
000000000000..c5e044612cd6 --- /dev/null +++ b/azurerm/resource_arm_data_lake_analytics_firewall_rule_test.go @@ -0,0 +1,186 @@ +package azurerm + +import ( + "fmt" + "net/http" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + "strconv" +) + +func TestAccAzureRMDataLakeAnalyticsFirewallRule_basic(t *testing.T) { + resourceName := "azurerm_data_lake_analytics_firewall_rule.test" + ri := acctest.RandInt() + startIP := "1.1.1.1" + endIP := "2.2.2.2" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testCheckAzureRMDataLakeAnalyticsFirewallRuleDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataLakeAnalyticsFirewallRule_basic(ri, testLocation(), startIP, endIP), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataLakeAnalyticsFirewallRuleExists(resourceName), + resource.TestCheckResourceAttr(resourceName, "start_ip_address", startIP), + resource.TestCheckResourceAttr(resourceName, "end_ip_address", endIP), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccAzureRMDataLakeAnalyticsFirewallRule_update(t *testing.T) { + resourceName := "azurerm_data_lake_analytics_firewall_rule.test" + ri := acctest.RandInt() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testCheckAzureRMDataLakeAnalyticsFirewallRuleDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataLakeAnalyticsFirewallRule_basic(ri, testLocation(), "1.1.1.1", "2.2.2.2"), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataLakeAnalyticsFirewallRuleExists(resourceName), + resource.TestCheckResourceAttr(resourceName, "start_ip_address", "1.1.1.1"), + resource.TestCheckResourceAttr(resourceName, "end_ip_address", "2.2.2.2"), + ), + }, + { + Config: testAccAzureRMDataLakeAnalyticsFirewallRule_basic(ri, testLocation(), "2.2.2.2", "3.3.3.3"), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataLakeAnalyticsFirewallRuleExists(resourceName), + resource.TestCheckResourceAttr(resourceName, "start_ip_address", "2.2.2.2"), + resource.TestCheckResourceAttr(resourceName, "end_ip_address", "3.3.3.3"), + ), + }, + }, + }) +} + +func TestAccAzureRMDataLakeAnalyticsFirewallRule_azureServices(t *testing.T) { + resourceName := "azurerm_data_lake_analytics_firewall_rule.test" + ri := acctest.RandInt() + azureServicesIP := "0.0.0.0" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testCheckAzureRMDataLakeAnalyticsFirewallRuleDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataLakeAnalyticsFirewallRule_basic(ri, testLocation(), azureServicesIP, azureServicesIP), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataLakeAnalyticsFirewallRuleExists(resourceName), + resource.TestCheckResourceAttr(resourceName, "start_ip_address", azureServicesIP), + resource.TestCheckResourceAttr(resourceName, "end_ip_address", azureServicesIP), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testCheckAzureRMDataLakeAnalyticsFirewallRuleExists(name string) resource.TestCheckFunc { + return func(s *terraform.State) error { + // Ensure we have enough information in state to look up in API + rs, ok := 
s.RootModule().Resources[name] + if !ok { + return fmt.Errorf("Not found: %s", name) + } + + firewallRuleName := rs.Primary.Attributes["name"] + accountName := rs.Primary.Attributes["account_name"] + resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] + if !hasResourceGroup { + return fmt.Errorf("Bad: no resource group found in state for data lake analytics firewall rule: %s", name) + } + + conn := testAccProvider.Meta().(*ArmClient).dataLakeAnalyticsFirewallRulesClient + ctx := testAccProvider.Meta().(*ArmClient).StopContext + + resp, err := conn.Get(ctx, resourceGroup, accountName, firewallRuleName) + if err != nil { + return fmt.Errorf("Bad: Get on dataLakeAnalyticsFirewallRulesClient: %+v", err) + } + + if resp.StatusCode == http.StatusNotFound { + return fmt.Errorf("Bad: Data Lake Analytics Firewall Rule %q (Account %q / Resource Group: %q) does not exist", firewallRuleName, accountName, resourceGroup) + } + + return nil + } +} + +func testCheckAzureRMDataLakeAnalyticsFirewallRuleDestroy(s *terraform.State) error { + conn := testAccProvider.Meta().(*ArmClient).dataLakeAnalyticsFirewallRulesClient + ctx := testAccProvider.Meta().(*ArmClient).StopContext + + for _, rs := range s.RootModule().Resources { + if rs.Type != "azurerm_data_lake_analytics_firewall_rule" { + continue + } + + firewallRuleName := rs.Primary.Attributes["name"] + accountName := rs.Primary.Attributes["account_name"] + resourceGroup := rs.Primary.Attributes["resource_group_name"] + + resp, err := conn.Get(ctx, resourceGroup, accountName, firewallRuleName) + if err != nil { + if resp.StatusCode == http.StatusNotFound { + return nil + } + + return err + } + + return fmt.Errorf("Data Lake Analytics Firewall Rule still exists:\n%#v", resp) + } + + return nil +} + +func testAccAzureRMDataLakeAnalyticsFirewallRule_basic(rInt int, location, startIP, endIP string) string { + return fmt.Sprintf(` +resource "azurerm_resource_group" "test" { + name = "acctestRG-%[1]d" + location = "%[2]s" +} + +resource "azurerm_data_lake_store" "test" { + name = "acctest%[3]s" + resource_group_name = "${azurerm_resource_group.test.name}" + location = "${azurerm_resource_group.test.location}" +} + +resource "azurerm_data_lake_analytics_account" "test" { + name = "acctest%[3]s" + resource_group_name = "${azurerm_resource_group.test.name}" + location = "${azurerm_resource_group.test.location}" + + default_store_account_name = "${azurerm_data_lake_store.test.name}" +} + +resource "azurerm_data_lake_analytics_firewall_rule" "test" { + name = "acctest" + account_name = "${azurerm_data_lake_analytics_account.test.name}" + resource_group_name = "${azurerm_resource_group.test.name}" + start_ip_address = "%[4]s" + end_ip_address = "%[5]s" +} +`, rInt, location, strconv.Itoa(rInt)[0:15], startIP, endIP) +} diff --git a/azurerm/resource_arm_data_lake_store.go b/azurerm/resource_arm_data_lake_store.go index 523f2cc39247..4c413672f763 100644 --- a/azurerm/resource_arm_data_lake_store.go +++ b/azurerm/resource_arm_data_lake_store.go @@ -3,13 +3,15 @@ package azurerm import ( "fmt" "log" - "regexp" "github.com/Azure/azure-sdk-for-go/services/datalake/store/mgmt/2016-11-01/account" - "github.com/hashicorp/terraform/helper/schema" - "github.com/hashicorp/terraform/helper/validation" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/response" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" + + 
"github.com/hashicorp/terraform/helper/schema" + "github.com/hashicorp/terraform/helper/validation" ) func resourceArmDataLakeStore() *schema.Resource { @@ -24,13 +26,10 @@ func resourceArmDataLakeStore() *schema.Resource { Schema: map[string]*schema.Schema{ "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringMatch( - regexp.MustCompile(`\A([a-z0-9]{3,24})\z`), - "Name can only consist of lowercase letters and numbers, and must be between 3 and 24 characters long", - ), + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateDataLakeAccountName(), }, "location": locationSchema(), @@ -62,14 +61,14 @@ func resourceArmDateLakeStoreCreate(d *schema.ResourceData, meta interface{}) er client := meta.(*ArmClient).dataLakeStoreAccountClient ctx := meta.(*ArmClient).StopContext - log.Printf("[INFO] preparing arguments for Azure ARM Date Lake Store creation.") - name := d.Get("name").(string) location := azureRMNormalizeLocation(d.Get("location").(string)) resourceGroup := d.Get("resource_group_name").(string) tier := d.Get("tier").(string) tags := d.Get("tags").(map[string]interface{}) + log.Printf("[INFO] preparing arguments for Azure ARM Date Lake Store creation %q (Resource Group %q)", name, resourceGroup) + dateLakeStore := account.CreateDataLakeStoreAccountParameters{ Location: &location, Tags: expandTags(tags), diff --git a/azurerm/resource_arm_data_lake_store_firewall_rule.go b/azurerm/resource_arm_data_lake_store_firewall_rule.go index 5c126e41f72d..4f8993ffa9ef 100644 --- a/azurerm/resource_arm_data_lake_store_firewall_rule.go +++ b/azurerm/resource_arm_data_lake_store_firewall_rule.go @@ -6,6 +6,7 @@ import ( "github.com/Azure/azure-sdk-for-go/services/datalake/store/mgmt/2016-11-01/account" "github.com/hashicorp/terraform/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/response" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" @@ -23,15 +24,17 @@ func resourceArmDataLakeStoreFirewallRule() *schema.Resource { Schema: map[string]*schema.Schema{ "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateDataLakeFirewallRuleName(), }, "account_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateDataLakeAccountName(), }, "resource_group_name": resourceGroupNameSchema(), @@ -55,14 +58,14 @@ func resourceArmDateLakeStoreAccountFirewallRuleCreateUpdate(d *schema.ResourceD client := meta.(*ArmClient).dataLakeStoreFirewallRulesClient ctx := meta.(*ArmClient).StopContext - log.Printf("[INFO] preparing arguments for Date Lake Store Firewall Rule creation.") - name := d.Get("name").(string) accountName := d.Get("account_name").(string) resourceGroup := d.Get("resource_group_name").(string) startIPAddress := d.Get("start_ip_address").(string) endIPAddress := d.Get("end_ip_address").(string) + log.Printf("[INFO] preparing arguments for Date Lake Store Firewall Rule creation %q (Resource Group %q)", name, resourceGroup) + dateLakeStore := account.CreateOrUpdateFirewallRuleParameters{ CreateOrUpdateFirewallRuleProperties: &account.CreateOrUpdateFirewallRuleProperties{ 
StartIPAddress: utils.String(startIPAddress), @@ -134,6 +137,7 @@ func resourceArmDateLakeStoreAccountFirewallRuleDelete(d *schema.ResourceData, m resourceGroup := id.ResourceGroup accountName := id.Path["accounts"] name := id.Path["firewallRules"] + resp, err := client.Delete(ctx, resourceGroup, accountName, name) if err != nil { if response.WasNotFound(resp.Response) { diff --git a/azurerm/resource_arm_data_lake_store_firewall_rule_test.go b/azurerm/resource_arm_data_lake_store_firewall_rule_test.go index 16a7196ed427..54a5976d7f9d 100644 --- a/azurerm/resource_arm_data_lake_store_firewall_rule_test.go +++ b/azurerm/resource_arm_data_lake_store_firewall_rule_test.go @@ -8,23 +8,22 @@ import ( "github.com/hashicorp/terraform/helper/acctest" "github.com/hashicorp/terraform/helper/resource" "github.com/hashicorp/terraform/terraform" + "strconv" ) func TestAccAzureRMDataLakeStoreFirewallRule_basic(t *testing.T) { resourceName := "azurerm_data_lake_store_firewall_rule.test" + ri := acctest.RandInt() startIP := "1.1.1.1" endIP := "2.2.2.2" - ri := acctest.RandInt() - rs := acctest.RandString(4) - resource.Test(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, Providers: testAccProviders, CheckDestroy: testCheckAzureRMDataLakeStoreFirewallRuleDestroy, Steps: []resource.TestStep{ { - Config: testAccAzureRMDataLakeStoreFirewallRule_basic(ri, rs, testLocation(), startIP, endIP), + Config: testAccAzureRMDataLakeStoreFirewallRule_basic(ri, testLocation(), startIP, endIP), Check: resource.ComposeTestCheckFunc( testCheckAzureRMDataLakeStoreFirewallRuleExists(resourceName), resource.TestCheckResourceAttr(resourceName, "start_ip_address", startIP), @@ -43,7 +42,6 @@ func TestAccAzureRMDataLakeStoreFirewallRule_basic(t *testing.T) { func TestAccAzureRMDataLakeStoreFirewallRule_update(t *testing.T) { resourceName := "azurerm_data_lake_store_firewall_rule.test" ri := acctest.RandInt() - rs := acctest.RandString(4) resource.Test(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, @@ -51,7 +49,7 @@ func TestAccAzureRMDataLakeStoreFirewallRule_update(t *testing.T) { CheckDestroy: testCheckAzureRMDataLakeStoreFirewallRuleDestroy, Steps: []resource.TestStep{ { - Config: testAccAzureRMDataLakeStoreFirewallRule_basic(ri, rs, testLocation(), "1.1.1.1", "2.2.2.2"), + Config: testAccAzureRMDataLakeStoreFirewallRule_basic(ri, testLocation(), "1.1.1.1", "2.2.2.2"), Check: resource.ComposeTestCheckFunc( testCheckAzureRMDataLakeStoreFirewallRuleExists(resourceName), resource.TestCheckResourceAttr(resourceName, "start_ip_address", "1.1.1.1"), @@ -59,7 +57,7 @@ func TestAccAzureRMDataLakeStoreFirewallRule_update(t *testing.T) { ), }, { - Config: testAccAzureRMDataLakeStoreFirewallRule_basic(ri, rs, testLocation(), "2.2.2.2", "3.3.3.3"), + Config: testAccAzureRMDataLakeStoreFirewallRule_basic(ri, testLocation(), "2.2.2.2", "3.3.3.3"), Check: resource.ComposeTestCheckFunc( testCheckAzureRMDataLakeStoreFirewallRuleExists(resourceName), resource.TestCheckResourceAttr(resourceName, "start_ip_address", "2.2.2.2"), @@ -72,9 +70,8 @@ func TestAccAzureRMDataLakeStoreFirewallRule_update(t *testing.T) { func TestAccAzureRMDataLakeStoreFirewallRule_azureServices(t *testing.T) { resourceName := "azurerm_data_lake_store_firewall_rule.test" - azureServicesIP := "0.0.0.0" ri := acctest.RandInt() - rs := acctest.RandString(4) + azureServicesIP := "0.0.0.0" resource.Test(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, @@ -82,7 +79,7 @@ func TestAccAzureRMDataLakeStoreFirewallRule_azureServices(t 
*testing.T) { CheckDestroy: testCheckAzureRMDataLakeStoreFirewallRuleDestroy, Steps: []resource.TestStep{ { - Config: testAccAzureRMDataLakeStoreFirewallRule_basic(ri, rs, testLocation(), azureServicesIP, azureServicesIP), + Config: testAccAzureRMDataLakeStoreFirewallRule_basic(ri, testLocation(), azureServicesIP, azureServicesIP), Check: resource.ComposeTestCheckFunc( testCheckAzureRMDataLakeStoreFirewallRuleExists(resourceName), resource.TestCheckResourceAttr(resourceName, "start_ip_address", azureServicesIP), @@ -157,7 +154,7 @@ func testCheckAzureRMDataLakeStoreFirewallRuleDestroy(s *terraform.State) error return nil } -func testAccAzureRMDataLakeStoreFirewallRule_basic(rInt int, rs, location, startIP, endIP string) string { +func testAccAzureRMDataLakeStoreFirewallRule_basic(rInt int, location, startIP, endIP string) string { return fmt.Sprintf(` resource "azurerm_resource_group" "test" { name = "acctestRG-%d" @@ -165,17 +162,17 @@ resource "azurerm_resource_group" "test" { } resource "azurerm_data_lake_store" "test" { - name = "unlikely23exst2acct%s" + name = "acctest%s" resource_group_name = "${azurerm_resource_group.test.name}" - location = "%s" + location = "${azurerm_resource_group.test.location}" } resource "azurerm_data_lake_store_firewall_rule" "test" { - name = "example" + name = "acctest" account_name = "${azurerm_data_lake_store.test.name}" resource_group_name = "${azurerm_resource_group.test.name}" start_ip_address = "%s" end_ip_address = "%s" } -`, rInt, location, rs, location, startIP, endIP) +`, rInt, location, strconv.Itoa(rInt)[0:15], startIP, endIP) } diff --git a/azurerm/resource_arm_data_lake_store_test.go b/azurerm/resource_arm_data_lake_store_test.go index c0dc71fc5fc3..9ef5d65b9978 100644 --- a/azurerm/resource_arm_data_lake_store_test.go +++ b/azurerm/resource_arm_data_lake_store_test.go @@ -3,6 +3,7 @@ package azurerm import ( "fmt" "net/http" + "strconv" "testing" "github.com/hashicorp/terraform/helper/acctest" @@ -13,8 +14,6 @@ import ( func TestAccAzureRMDataLakeStore_basic(t *testing.T) { resourceName := "azurerm_data_lake_store.test" ri := acctest.RandInt() - rs := acctest.RandString(4) - config := testAccAzureRMDataLakeStore_basic(ri, rs, testLocation()) resource.Test(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, @@ -22,7 +21,7 @@ func TestAccAzureRMDataLakeStore_basic(t *testing.T) { CheckDestroy: testCheckAzureRMDataLakeStoreDestroy, Steps: []resource.TestStep{ { - Config: config, + Config: testAccAzureRMDataLakeStore_basic(ri, testLocation()), Check: resource.ComposeTestCheckFunc( testCheckAzureRMDataLakeStoreExists(resourceName), resource.TestCheckResourceAttr(resourceName, "tier", "Consumption"), @@ -40,8 +39,6 @@ func TestAccAzureRMDataLakeStore_basic(t *testing.T) { func TestAccAzureRMDataLakeStore_tier(t *testing.T) { resourceName := "azurerm_data_lake_store.test" ri := acctest.RandInt() - rs := acctest.RandString(4) - config := testAccAzureRMDataLakeStore_tier(ri, rs, testLocation()) resource.Test(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, @@ -49,7 +46,7 @@ func TestAccAzureRMDataLakeStore_tier(t *testing.T) { CheckDestroy: testCheckAzureRMDataLakeStoreDestroy, Steps: []resource.TestStep{ { - Config: config, + Config: testAccAzureRMDataLakeStore_tier(ri, testLocation()), Check: resource.ComposeTestCheckFunc( testCheckAzureRMDataLakeStoreExists(resourceName), resource.TestCheckResourceAttr(resourceName, "tier", "Commitment_1TB"), @@ -67,10 +64,6 @@ func TestAccAzureRMDataLakeStore_tier(t *testing.T) { func 
TestAccAzureRMDataLakeStore_withTags(t *testing.T) { resourceName := "azurerm_data_lake_store.test" ri := acctest.RandInt() - rs := acctest.RandString(4) - location := testLocation() - preConfig := testAccAzureRMDataLakeStore_withTags(ri, rs, location) - postConfig := testAccAzureRMDataLakeStore_withTagsUpdate(ri, rs, location) resource.Test(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, @@ -78,14 +71,14 @@ func TestAccAzureRMDataLakeStore_withTags(t *testing.T) { CheckDestroy: testCheckAzureRMDataLakeStoreDestroy, Steps: []resource.TestStep{ { - Config: preConfig, + Config: testAccAzureRMDataLakeStore_withTags(ri, testLocation()), Check: resource.ComposeTestCheckFunc( testCheckAzureRMDataLakeStoreExists(resourceName), resource.TestCheckResourceAttr(resourceName, "tags.%", "2"), ), }, { - Config: postConfig, + Config: testAccAzureRMDataLakeStore_withTagsUpdate(ri, testLocation()), Check: resource.ComposeTestCheckFunc( testCheckAzureRMDataLakeStoreExists(resourceName), resource.TestCheckResourceAttr(resourceName, "tags.%", "1"), @@ -157,7 +150,7 @@ func testCheckAzureRMDataLakeStoreDestroy(s *terraform.State) error { return nil } -func testAccAzureRMDataLakeStore_basic(rInt int, rs string, location string) string { +func testAccAzureRMDataLakeStore_basic(rInt int, location string) string { return fmt.Sprintf(` resource "azurerm_resource_group" "test" { name = "acctestRG-%d" @@ -165,64 +158,65 @@ resource "azurerm_resource_group" "test" { } resource "azurerm_data_lake_store" "test" { - name = "unlikely23exst2acct%s" + name = "acctest%s" resource_group_name = "${azurerm_resource_group.test.name}" - location = "%s" + location = "${azurerm_resource_group.test.location}" } -`, rInt, location, rs, location) +`, rInt, location, strconv.Itoa(rInt)[0:15]) } -func testAccAzureRMDataLakeStore_tier(rInt int, rs string, location string) string { +func testAccAzureRMDataLakeStore_tier(rInt int, location string) string { return fmt.Sprintf(` resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" +name = "acctestRG-%d" location = "%s" } resource "azurerm_data_lake_store" "test" { - name = "unlikely23exst2acct%s" + name = "acctest%s" resource_group_name = "${azurerm_resource_group.test.name}" - location = "%s" + location = "${azurerm_resource_group.test.location}" + tier = "Commitment_1TB" } -`, rInt, location, rs, location) +`, rInt, location, strconv.Itoa(rInt)[0:15]) } -func testAccAzureRMDataLakeStore_withTags(rInt int, rs string, location string) string { +func testAccAzureRMDataLakeStore_withTags(rInt int, location string) string { return fmt.Sprintf(` resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" +name = "acctestRG-%d" location = "%s" } resource "azurerm_data_lake_store" "test" { - name = "unlikely23exst2acct%s" + name = "acctest%s" resource_group_name = "${azurerm_resource_group.test.name}" - location = "%s" + location = "${azurerm_resource_group.test.location}" tags { environment = "Production" cost_center = "MSFT" } } -`, rInt, location, rs, location) +`, rInt, location, strconv.Itoa(rInt)[0:15]) } -func testAccAzureRMDataLakeStore_withTagsUpdate(rInt int, rs string, location string) string { +func testAccAzureRMDataLakeStore_withTagsUpdate(rInt int, location string) string { return fmt.Sprintf(` resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" +name = "acctestRG-%d" location = "%s" } resource "azurerm_data_lake_store" "test" { - name = "unlikely23exst2acct%s" + name = "acctest%s" resource_group_name = 
"${azurerm_resource_group.test.name}" - location = "%s" + location = "${azurerm_resource_group.test.location}" tags { environment = "staging" } } -`, rInt, location, rs, location) +`, rInt, location, strconv.Itoa(rInt)[0:15]) } diff --git a/examples/datalake/main.tf b/examples/datalake/main.tf new file mode 100644 index 000000000000..81d19af1a783 --- /dev/null +++ b/examples/datalake/main.tf @@ -0,0 +1,42 @@ +resource "azurerm_resource_group" "example" { + name = "${var.resource_group}" + location = "${var.location}" +} + +resource "random_integer" "ri" { + min = 10000 + max = 99999 +} + +resource "azurerm_data_lake_store" "example" { + name = "tfexdlstore${random_integer.ri.result}" + resource_group_name = "${azurerm_resource_group.example.name}" + location = "${azurerm_resource_group.example.location}" + + tier = "Consumption" +} + +resource "azurerm_data_lake_store_firewall_rule" "test" { + name = "tfex-datalakestore-fwrule" + account_name = "${azurerm_data_lake_store.example.name}" + resource_group_name = "${azurerm_resource_group.example.name}" + start_ip_address = "0.0.0.0" + end_ip_address = "0.0.0.0" +} + +resource "azurerm_data_lake_analytics_account" "example" { + name = "tfexdlanalytics${random_integer.ri.result}" + resource_group_name = "${azurerm_resource_group.example.name}" + location = "${azurerm_resource_group.example.location}" + tier = "Consumption" + + default_store_account_name = "${azurerm_data_lake_store.example.name}" +} + +resource "azurerm_data_lake_analytics_firewall_rule" "test" { + name = "tfex-datalakestore-fwrule" + account_name = "${azurerm_data_lake_analytics_account.example.name}" + resource_group_name = "${azurerm_resource_group.example.name}" + start_ip_address = "0.0.0.0" + end_ip_address = "0.0.0.0" +} \ No newline at end of file diff --git a/examples/datalake/outputs.tf b/examples/datalake/outputs.tf new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/examples/datalake/variables.tf b/examples/datalake/variables.tf new file mode 100644 index 000000000000..eb526f1debdd --- /dev/null +++ b/examples/datalake/variables.tf @@ -0,0 +1,9 @@ +variable "resource_group" { + description = "The name of the resource group in which to create the Service Bus" + default = "tfex-datalake" +} + +variable "location" { + description = "The location/region where the Service Bus is created. Changing this forces a new resource to be created." 
+ default = "westeurope" +} diff --git a/examples/eventhub/variables.tf b/examples/eventhub/variables.tf index 5fec1720c26d..d9128a00744c 100644 --- a/examples/eventhub/variables.tf +++ b/examples/eventhub/variables.tf @@ -1,6 +1,6 @@ variable "resource_group" { description = "The name of the resource group in which to create the Service Bus" - default = "tfex-servicebus-topic_subscription" + default = "tfex-eventhub" } variable "location" { diff --git a/examples/servicebus/variables.tf b/examples/servicebus/variables.tf index 5fec1720c26d..25326c12e2cd 100644 --- a/examples/servicebus/variables.tf +++ b/examples/servicebus/variables.tf @@ -1,6 +1,6 @@ variable "resource_group" { description = "The name of the resource group in which to create the Service Bus" - default = "tfex-servicebus-topic_subscription" + default = "tfex-servicebus" } variable "location" { diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account/accounts.go b/vendor/github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account/accounts.go new file mode 100644 index 000000000000..87a86a01a267 --- /dev/null +++ b/vendor/github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account/accounts.go @@ -0,0 +1,690 @@ +package account + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +import ( + "context" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/Azure/go-autorest/autorest/validation" + "net/http" +) + +// AccountsClient is the creates an Azure Data Lake Analytics account management client. +type AccountsClient struct { + BaseClient +} + +// NewAccountsClient creates an instance of the AccountsClient client. +func NewAccountsClient(subscriptionID string) AccountsClient { + return NewAccountsClientWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewAccountsClientWithBaseURI creates an instance of the AccountsClient client. +func NewAccountsClientWithBaseURI(baseURI string, subscriptionID string) AccountsClient { + return AccountsClient{NewWithBaseURI(baseURI, subscriptionID)} +} + +// CheckNameAvailability checks whether the specified account name is available or taken. +// Parameters: +// location - the resource location without whitespace. +// parameters - parameters supplied to check the Data Lake Analytics account name availability. 
+func (client AccountsClient) CheckNameAvailability(ctx context.Context, location string, parameters CheckNameAvailabilityParameters) (result NameAvailabilityInformation, err error) { + if err := validation.Validate([]validation.Validation{ + {TargetValue: parameters, + Constraints: []validation.Constraint{{Target: "parameters.Name", Name: validation.Null, Rule: true, Chain: nil}, + {Target: "parameters.Type", Name: validation.Null, Rule: true, Chain: nil}}}}); err != nil { + return result, validation.NewError("account.AccountsClient", "CheckNameAvailability", err.Error()) + } + + req, err := client.CheckNameAvailabilityPreparer(ctx, location, parameters) + if err != nil { + err = autorest.NewErrorWithError(err, "account.AccountsClient", "CheckNameAvailability", nil, "Failure preparing request") + return + } + + resp, err := client.CheckNameAvailabilitySender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "account.AccountsClient", "CheckNameAvailability", resp, "Failure sending request") + return + } + + result, err = client.CheckNameAvailabilityResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.AccountsClient", "CheckNameAvailability", resp, "Failure responding to request") + } + + return +} + +// CheckNameAvailabilityPreparer prepares the CheckNameAvailability request. +func (client AccountsClient) CheckNameAvailabilityPreparer(ctx context.Context, location string, parameters CheckNameAvailabilityParameters) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "location": autorest.Encode("path", location), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPost(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.DataLakeAnalytics/locations/{location}/checkNameAvailability", pathParameters), + autorest.WithJSON(parameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// CheckNameAvailabilitySender sends the CheckNameAvailability request. The method will close the +// http.Response Body if it receives an error. +func (client AccountsClient) CheckNameAvailabilitySender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// CheckNameAvailabilityResponder handles the response to the CheckNameAvailability request. The method always +// closes the http.Response Body. +func (client AccountsClient) CheckNameAvailabilityResponder(resp *http.Response) (result NameAvailabilityInformation, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Create creates the specified Data Lake Analytics account. This supplies the user with computation services for Data +// Lake Analytics workloads. +// Parameters: +// resourceGroupName - the name of the Azure resource group. +// accountName - the name of the Data Lake Analytics account. 
+// parameters - parameters supplied to create a new Data Lake Analytics account. +func (client AccountsClient) Create(ctx context.Context, resourceGroupName string, accountName string, parameters CreateDataLakeAnalyticsAccountParameters) (result AccountsCreateFutureType, err error) { + if err := validation.Validate([]validation.Validation{ + {TargetValue: parameters, + Constraints: []validation.Constraint{{Target: "parameters.Location", Name: validation.Null, Rule: true, Chain: nil}, + {Target: "parameters.CreateDataLakeAnalyticsAccountProperties", Name: validation.Null, Rule: true, + Chain: []validation.Constraint{{Target: "parameters.CreateDataLakeAnalyticsAccountProperties.DefaultDataLakeStoreAccount", Name: validation.Null, Rule: true, Chain: nil}, + {Target: "parameters.CreateDataLakeAnalyticsAccountProperties.DataLakeStoreAccounts", Name: validation.Null, Rule: true, Chain: nil}, + {Target: "parameters.CreateDataLakeAnalyticsAccountProperties.MaxJobCount", Name: validation.Null, Rule: false, + Chain: []validation.Constraint{{Target: "parameters.CreateDataLakeAnalyticsAccountProperties.MaxJobCount", Name: validation.InclusiveMinimum, Rule: 1, Chain: nil}}}, + {Target: "parameters.CreateDataLakeAnalyticsAccountProperties.MaxDegreeOfParallelism", Name: validation.Null, Rule: false, + Chain: []validation.Constraint{{Target: "parameters.CreateDataLakeAnalyticsAccountProperties.MaxDegreeOfParallelism", Name: validation.InclusiveMinimum, Rule: 1, Chain: nil}}}, + {Target: "parameters.CreateDataLakeAnalyticsAccountProperties.MaxDegreeOfParallelismPerJob", Name: validation.Null, Rule: false, + Chain: []validation.Constraint{{Target: "parameters.CreateDataLakeAnalyticsAccountProperties.MaxDegreeOfParallelismPerJob", Name: validation.InclusiveMinimum, Rule: 1, Chain: nil}}}, + {Target: "parameters.CreateDataLakeAnalyticsAccountProperties.MinPriorityPerJob", Name: validation.Null, Rule: false, + Chain: []validation.Constraint{{Target: "parameters.CreateDataLakeAnalyticsAccountProperties.MinPriorityPerJob", Name: validation.InclusiveMinimum, Rule: 1, Chain: nil}}}, + {Target: "parameters.CreateDataLakeAnalyticsAccountProperties.QueryStoreRetention", Name: validation.Null, Rule: false, + Chain: []validation.Constraint{{Target: "parameters.CreateDataLakeAnalyticsAccountProperties.QueryStoreRetention", Name: validation.InclusiveMaximum, Rule: int64(180), Chain: nil}, + {Target: "parameters.CreateDataLakeAnalyticsAccountProperties.QueryStoreRetention", Name: validation.InclusiveMinimum, Rule: 1, Chain: nil}, + }}, + }}}}}); err != nil { + return result, validation.NewError("account.AccountsClient", "Create", err.Error()) + } + + req, err := client.CreatePreparer(ctx, resourceGroupName, accountName, parameters) + if err != nil { + err = autorest.NewErrorWithError(err, "account.AccountsClient", "Create", nil, "Failure preparing request") + return + } + + result, err = client.CreateSender(req) + if err != nil { + err = autorest.NewErrorWithError(err, "account.AccountsClient", "Create", result.Response(), "Failure sending request") + return + } + + return +} + +// CreatePreparer prepares the Create request. 
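// The Create call above validates the required CreateDataLakeAnalyticsAccountParameters
// fields client-side (Location, DefaultDataLakeStoreAccount, DataLakeStoreAccounts and the
// per-job bounds) and returns an AccountsCreateFutureType for the long-running PUT. The
// sketch below is illustrative caller code, not part of this generated package: the helper
// name is invented, the parameters are assumed to be built elsewhere, and WaitForCompletionRef
// is assumed to exist on the vendored go-autorest Future (older snapshots name it
// WaitForCompletion).
package example

import (
	"context"
	"fmt"

	"github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account"
)

// createAnalyticsAccount issues the PUT, blocks until the service reports the account as
// provisioned, then reads the final state back with Get.
func createAnalyticsAccount(ctx context.Context, client account.AccountsClient, resourceGroup, name string, params account.CreateDataLakeAnalyticsAccountParameters) (account.DataLakeAnalyticsAccount, error) {
	future, err := client.Create(ctx, resourceGroup, name, params)
	if err != nil {
		return account.DataLakeAnalyticsAccount{}, fmt.Errorf("Error creating Data Lake Analytics account %q: %+v", name, err)
	}

	// Poll the future until the long-running operation completes; client.Client is the
	// embedded autorest.Client.
	if err := future.WaitForCompletionRef(ctx, client.Client); err != nil {
		return account.DataLakeAnalyticsAccount{}, fmt.Errorf("Error waiting for creation of Data Lake Analytics account %q: %+v", name, err)
	}

	return client.Get(ctx, resourceGroup, name)
}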
+func (client AccountsClient) CreatePreparer(ctx context.Context, resourceGroupName string, accountName string, parameters CreateDataLakeAnalyticsAccountParameters) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPut(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}", pathParameters), + autorest.WithJSON(parameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// CreateSender sends the Create request. The method will close the +// http.Response Body if it receives an error. +func (client AccountsClient) CreateSender(req *http.Request) (future AccountsCreateFutureType, err error) { + var resp *http.Response + resp, err = autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) + if err != nil { + return + } + err = autorest.Respond(resp, azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated)) + if err != nil { + return + } + future.Future, err = azure.NewFutureFromResponse(resp) + return +} + +// CreateResponder handles the response to the Create request. The method always +// closes the http.Response Body. +func (client AccountsClient) CreateResponder(resp *http.Response) (result DataLakeAnalyticsAccount, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Delete begins the delete process for the Data Lake Analytics account object specified by the account name. +// Parameters: +// resourceGroupName - the name of the Azure resource group. +// accountName - the name of the Data Lake Analytics account. +func (client AccountsClient) Delete(ctx context.Context, resourceGroupName string, accountName string) (result AccountsDeleteFutureType, err error) { + req, err := client.DeletePreparer(ctx, resourceGroupName, accountName) + if err != nil { + err = autorest.NewErrorWithError(err, "account.AccountsClient", "Delete", nil, "Failure preparing request") + return + } + + result, err = client.DeleteSender(req) + if err != nil { + err = autorest.NewErrorWithError(err, "account.AccountsClient", "Delete", result.Response(), "Failure sending request") + return + } + + return +} + +// DeletePreparer prepares the Delete request. 
+func (client AccountsClient) DeletePreparer(ctx context.Context, resourceGroupName string, accountName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsDelete(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// DeleteSender sends the Delete request. The method will close the +// http.Response Body if it receives an error. +func (client AccountsClient) DeleteSender(req *http.Request) (future AccountsDeleteFutureType, err error) { + var resp *http.Response + resp, err = autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) + if err != nil { + return + } + err = autorest.Respond(resp, azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent)) + if err != nil { + return + } + future.Future, err = azure.NewFutureFromResponse(resp) + return +} + +// DeleteResponder handles the response to the Delete request. The method always +// closes the http.Response Body. +func (client AccountsClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent), + autorest.ByClosing()) + result.Response = resp + return +} + +// Get gets details of the specified Data Lake Analytics account. +// Parameters: +// resourceGroupName - the name of the Azure resource group. +// accountName - the name of the Data Lake Analytics account. +func (client AccountsClient) Get(ctx context.Context, resourceGroupName string, accountName string) (result DataLakeAnalyticsAccount, err error) { + req, err := client.GetPreparer(ctx, resourceGroupName, accountName) + if err != nil { + err = autorest.NewErrorWithError(err, "account.AccountsClient", "Get", nil, "Failure preparing request") + return + } + + resp, err := client.GetSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "account.AccountsClient", "Get", resp, "Failure sending request") + return + } + + result, err = client.GetResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.AccountsClient", "Get", resp, "Failure responding to request") + } + + return +} + +// GetPreparer prepares the Get request. 
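// Get surfaces any non-200 status as an error, but the GetResponder that follows still
// stores the raw HTTP response on the returned model, so a caller can tell a 404 apart
// from a transient failure. Illustrative sketch only; the helper name and the nil checks
// around the embedded *http.Response are assumptions, not provider code.
package example

import (
	"context"
	"net/http"

	"github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account"
)

// accountExists reads an account and reports a 404 as "not found" rather than an error.
func accountExists(ctx context.Context, client account.AccountsClient, resourceGroup, name string) (bool, error) {
	resp, err := client.Get(ctx, resourceGroup, name)
	if err != nil {
		// The embedded autorest.Response carries the *http.Response even on failure.
		if resp.Response.Response != nil && resp.Response.Response.StatusCode == http.StatusNotFound {
			return false, nil
		}
		return false, err
	}
	return true, nil
}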
+func (client AccountsClient) GetPreparer(ctx context.Context, resourceGroupName string, accountName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// GetSender sends the Get request. The method will close the +// http.Response Body if it receives an error. +func (client AccountsClient) GetSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// GetResponder handles the response to the Get request. The method always +// closes the http.Response Body. +func (client AccountsClient) GetResponder(resp *http.Response) (result DataLakeAnalyticsAccount, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// List gets the first page of Data Lake Analytics accounts, if any, within the current subscription. This includes a +// link to the next page, if any. +// Parameters: +// filter - oData filter. Optional. +// top - the number of items to return. Optional. +// skip - the number of items to skip over before returning elements. Optional. +// selectParameter - oData Select statement. Limits the properties on each entry to just those requested, e.g. +// Categories?$select=CategoryName,Description. Optional. +// orderby - orderBy clause. One or more comma-separated expressions with an optional "asc" (the default) or +// "desc" depending on the order you'd like the values sorted, e.g. Categories?$orderby=CategoryName desc. +// Optional. +// count - the Boolean value of true or false to request a count of the matching resources included with the +// resources in the response, e.g. Categories?$count=true. Optional. 
+func (client AccountsClient) List(ctx context.Context, filter string, top *int32, skip *int32, selectParameter string, orderby string, count *bool) (result DataLakeAnalyticsAccountListResultPage, err error) { + if err := validation.Validate([]validation.Validation{ + {TargetValue: top, + Constraints: []validation.Constraint{{Target: "top", Name: validation.Null, Rule: false, + Chain: []validation.Constraint{{Target: "top", Name: validation.InclusiveMinimum, Rule: 1, Chain: nil}}}}}, + {TargetValue: skip, + Constraints: []validation.Constraint{{Target: "skip", Name: validation.Null, Rule: false, + Chain: []validation.Constraint{{Target: "skip", Name: validation.InclusiveMinimum, Rule: 1, Chain: nil}}}}}}); err != nil { + return result, validation.NewError("account.AccountsClient", "List", err.Error()) + } + + result.fn = client.listNextResults + req, err := client.ListPreparer(ctx, filter, top, skip, selectParameter, orderby, count) + if err != nil { + err = autorest.NewErrorWithError(err, "account.AccountsClient", "List", nil, "Failure preparing request") + return + } + + resp, err := client.ListSender(req) + if err != nil { + result.dlaalr.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "account.AccountsClient", "List", resp, "Failure sending request") + return + } + + result.dlaalr, err = client.ListResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.AccountsClient", "List", resp, "Failure responding to request") + } + + return +} + +// ListPreparer prepares the List request. +func (client AccountsClient) ListPreparer(ctx context.Context, filter string, top *int32, skip *int32, selectParameter string, orderby string, count *bool) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + if len(filter) > 0 { + queryParameters["$filter"] = autorest.Encode("query", filter) + } + if top != nil { + queryParameters["$top"] = autorest.Encode("query", *top) + } + if skip != nil { + queryParameters["$skip"] = autorest.Encode("query", *skip) + } + if len(selectParameter) > 0 { + queryParameters["$select"] = autorest.Encode("query", selectParameter) + } + if len(orderby) > 0 { + queryParameters["$orderby"] = autorest.Encode("query", orderby) + } + if count != nil { + queryParameters["$count"] = autorest.Encode("query", *count) + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.DataLakeAnalytics/accounts", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListSender sends the List request. The method will close the +// http.Response Body if it receives an error. +func (client AccountsClient) ListSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// ListResponder handles the response to the List request. The method always +// closes the http.Response Body. 
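// The optional OData arguments to List are plain Go values: ListPreparer above only adds
// $filter, $top, $skip, $select, $orderby and $count to the query string when they are
// non-empty or non-nil, and the client-side validation requires top and skip, when set,
// to be at least 1. A minimal sketch of requesting a filtered first page; the helper name
// is illustrative.
package example

import (
	"context"

	"github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account"
)

// firstPageOfAccounts requests at most `top` accounts matching an OData filter. The
// remaining options are passed as zero values so they are omitted from the request.
func firstPageOfAccounts(ctx context.Context, client account.AccountsClient, filter string, top int32) (account.DataLakeAnalyticsAccountListResultPage, error) {
	return client.List(ctx, filter, &top, nil, "", "", nil)
}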
+func (client AccountsClient) ListResponder(resp *http.Response) (result DataLakeAnalyticsAccountListResult, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listNextResults retrieves the next set of results, if any. +func (client AccountsClient) listNextResults(lastResults DataLakeAnalyticsAccountListResult) (result DataLakeAnalyticsAccountListResult, err error) { + req, err := lastResults.dataLakeAnalyticsAccountListResultPreparer() + if err != nil { + return result, autorest.NewErrorWithError(err, "account.AccountsClient", "listNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "account.AccountsClient", "listNextResults", resp, "Failure sending next results request") + } + result, err = client.ListResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.AccountsClient", "listNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListComplete enumerates all values, automatically crossing page boundaries as required. +func (client AccountsClient) ListComplete(ctx context.Context, filter string, top *int32, skip *int32, selectParameter string, orderby string, count *bool) (result DataLakeAnalyticsAccountListResultIterator, err error) { + result.page, err = client.List(ctx, filter, top, skip, selectParameter, orderby, count) + return +} + +// ListByResourceGroup gets the first page of Data Lake Analytics accounts, if any, within a specific resource group. +// This includes a link to the next page, if any. +// Parameters: +// resourceGroupName - the name of the Azure resource group. +// filter - oData filter. Optional. +// top - the number of items to return. Optional. +// skip - the number of items to skip over before returning elements. Optional. +// selectParameter - oData Select statement. Limits the properties on each entry to just those requested, e.g. +// Categories?$select=CategoryName,Description. Optional. +// orderby - orderBy clause. One or more comma-separated expressions with an optional "asc" (the default) or +// "desc" depending on the order you'd like the values sorted, e.g. Categories?$orderby=CategoryName desc. +// Optional. +// count - the Boolean value of true or false to request a count of the matching resources included with the +// resources in the response, e.g. Categories?$count=true. Optional. 
+func (client AccountsClient) ListByResourceGroup(ctx context.Context, resourceGroupName string, filter string, top *int32, skip *int32, selectParameter string, orderby string, count *bool) (result DataLakeAnalyticsAccountListResultPage, err error) { + if err := validation.Validate([]validation.Validation{ + {TargetValue: top, + Constraints: []validation.Constraint{{Target: "top", Name: validation.Null, Rule: false, + Chain: []validation.Constraint{{Target: "top", Name: validation.InclusiveMinimum, Rule: 1, Chain: nil}}}}}, + {TargetValue: skip, + Constraints: []validation.Constraint{{Target: "skip", Name: validation.Null, Rule: false, + Chain: []validation.Constraint{{Target: "skip", Name: validation.InclusiveMinimum, Rule: 1, Chain: nil}}}}}}); err != nil { + return result, validation.NewError("account.AccountsClient", "ListByResourceGroup", err.Error()) + } + + result.fn = client.listByResourceGroupNextResults + req, err := client.ListByResourceGroupPreparer(ctx, resourceGroupName, filter, top, skip, selectParameter, orderby, count) + if err != nil { + err = autorest.NewErrorWithError(err, "account.AccountsClient", "ListByResourceGroup", nil, "Failure preparing request") + return + } + + resp, err := client.ListByResourceGroupSender(req) + if err != nil { + result.dlaalr.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "account.AccountsClient", "ListByResourceGroup", resp, "Failure sending request") + return + } + + result.dlaalr, err = client.ListByResourceGroupResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.AccountsClient", "ListByResourceGroup", resp, "Failure responding to request") + } + + return +} + +// ListByResourceGroupPreparer prepares the ListByResourceGroup request. +func (client AccountsClient) ListByResourceGroupPreparer(ctx context.Context, resourceGroupName string, filter string, top *int32, skip *int32, selectParameter string, orderby string, count *bool) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + if len(filter) > 0 { + queryParameters["$filter"] = autorest.Encode("query", filter) + } + if top != nil { + queryParameters["$top"] = autorest.Encode("query", *top) + } + if skip != nil { + queryParameters["$skip"] = autorest.Encode("query", *skip) + } + if len(selectParameter) > 0 { + queryParameters["$select"] = autorest.Encode("query", selectParameter) + } + if len(orderby) > 0 { + queryParameters["$orderby"] = autorest.Encode("query", orderby) + } + if count != nil { + queryParameters["$count"] = autorest.Encode("query", *count) + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListByResourceGroupSender sends the ListByResourceGroup request. The method will close the +// http.Response Body if it receives an error. 
+func (client AccountsClient) ListByResourceGroupSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// ListByResourceGroupResponder handles the response to the ListByResourceGroup request. The method always +// closes the http.Response Body. +func (client AccountsClient) ListByResourceGroupResponder(resp *http.Response) (result DataLakeAnalyticsAccountListResult, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listByResourceGroupNextResults retrieves the next set of results, if any. +func (client AccountsClient) listByResourceGroupNextResults(lastResults DataLakeAnalyticsAccountListResult) (result DataLakeAnalyticsAccountListResult, err error) { + req, err := lastResults.dataLakeAnalyticsAccountListResultPreparer() + if err != nil { + return result, autorest.NewErrorWithError(err, "account.AccountsClient", "listByResourceGroupNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListByResourceGroupSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "account.AccountsClient", "listByResourceGroupNextResults", resp, "Failure sending next results request") + } + result, err = client.ListByResourceGroupResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.AccountsClient", "listByResourceGroupNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListByResourceGroupComplete enumerates all values, automatically crossing page boundaries as required. +func (client AccountsClient) ListByResourceGroupComplete(ctx context.Context, resourceGroupName string, filter string, top *int32, skip *int32, selectParameter string, orderby string, count *bool) (result DataLakeAnalyticsAccountListResultIterator, err error) { + result.page, err = client.ListByResourceGroup(ctx, resourceGroupName, filter, top, skip, selectParameter, orderby, count) + return +} + +// Update updates the Data Lake Analytics account object specified by the accountName with the contents of the account +// object. +// Parameters: +// resourceGroupName - the name of the Azure resource group. +// accountName - the name of the Data Lake Analytics account. +// parameters - parameters supplied to the update Data Lake Analytics account operation. +func (client AccountsClient) Update(ctx context.Context, resourceGroupName string, accountName string, parameters *UpdateDataLakeAnalyticsAccountParameters) (result AccountsUpdateFutureType, err error) { + req, err := client.UpdatePreparer(ctx, resourceGroupName, accountName, parameters) + if err != nil { + err = autorest.NewErrorWithError(err, "account.AccountsClient", "Update", nil, "Failure preparing request") + return + } + + result, err = client.UpdateSender(req) + if err != nil { + err = autorest.NewErrorWithError(err, "account.AccountsClient", "Update", result.Response(), "Failure sending request") + return + } + + return +} + +// UpdatePreparer prepares the Update request. 
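// ListByResourceGroupComplete above wires listByResourceGroupNextResults into the returned
// iterator so page boundaries are crossed automatically. The sketch below walks every
// result for one resource group; the iterator methods (NotDone, Value, Next) and the Name
// field on the listed items live in this package's models file, which is not part of this
// hunk, so they are assumed to follow the usual AutoRest-generated shape.
package example

import (
	"context"

	"github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account"
)

// accountNamesInGroup collects the names of every Data Lake Analytics account in a
// resource group, fetching further pages as needed.
func accountNamesInGroup(ctx context.Context, client account.AccountsClient, resourceGroup string) ([]string, error) {
	iter, err := client.ListByResourceGroupComplete(ctx, resourceGroup, "", nil, nil, "", "", nil)
	if err != nil {
		return nil, err
	}

	names := make([]string, 0)
	for iter.NotDone() {
		if v := iter.Value(); v.Name != nil {
			names = append(names, *v.Name)
		}
		if err := iter.Next(); err != nil {
			return nil, err
		}
	}
	return names, nil
}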
+func (client AccountsClient) UpdatePreparer(ctx context.Context, resourceGroupName string, accountName string, parameters *UpdateDataLakeAnalyticsAccountParameters) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPatch(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + if parameters != nil { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithJSON(parameters)) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// UpdateSender sends the Update request. The method will close the +// http.Response Body if it receives an error. +func (client AccountsClient) UpdateSender(req *http.Request) (future AccountsUpdateFutureType, err error) { + var resp *http.Response + resp, err = autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) + if err != nil { + return + } + err = autorest.Respond(resp, azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated, http.StatusAccepted)) + if err != nil { + return + } + future.Future, err = azure.NewFutureFromResponse(resp) + return +} + +// UpdateResponder handles the response to the Update request. The method always +// closes the http.Response Body. +func (client AccountsClient) UpdateResponder(resp *http.Response) (result DataLakeAnalyticsAccount, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated, http.StatusAccepted), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account/client.go b/vendor/github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account/client.go new file mode 100644 index 000000000000..76477f63fec1 --- /dev/null +++ b/vendor/github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account/client.go @@ -0,0 +1,51 @@ +// Package account implements the Azure ARM Account service API version 2016-11-01. +// +// Creates an Azure Data Lake Analytics account management client. +package account + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. 
+// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +import ( + "github.com/Azure/go-autorest/autorest" +) + +const ( + // DefaultBaseURI is the default URI used for the service Account + DefaultBaseURI = "https://management.azure.com" +) + +// BaseClient is the base client for Account. +type BaseClient struct { + autorest.Client + BaseURI string + SubscriptionID string +} + +// New creates an instance of the BaseClient client. +func New(subscriptionID string) BaseClient { + return NewWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewWithBaseURI creates an instance of the BaseClient client. +func NewWithBaseURI(baseURI string, subscriptionID string) BaseClient { + return BaseClient{ + Client: autorest.NewClientWithUserAgent(UserAgent()), + BaseURI: baseURI, + SubscriptionID: subscriptionID, + } +} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account/computepolicies.go b/vendor/github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account/computepolicies.go new file mode 100644 index 000000000000..6a92f7dadfc0 --- /dev/null +++ b/vendor/github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account/computepolicies.go @@ -0,0 +1,435 @@ +package account + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +import ( + "context" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/Azure/go-autorest/autorest/validation" + "net/http" +) + +// ComputePoliciesClient is the creates an Azure Data Lake Analytics account management client. +type ComputePoliciesClient struct { + BaseClient +} + +// NewComputePoliciesClient creates an instance of the ComputePoliciesClient client. +func NewComputePoliciesClient(subscriptionID string) ComputePoliciesClient { + return NewComputePoliciesClientWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewComputePoliciesClientWithBaseURI creates an instance of the ComputePoliciesClient client. +func NewComputePoliciesClientWithBaseURI(baseURI string, subscriptionID string) ComputePoliciesClient { + return ComputePoliciesClient{NewWithBaseURI(baseURI, subscriptionID)} +} + +// CreateOrUpdate creates or updates the specified compute policy. During update, the compute policy with the specified +// name will be replaced with this new compute policy. An account supports, at most, 50 policies +// Parameters: +// resourceGroupName - the name of the Azure resource group. +// accountName - the name of the Data Lake Analytics account. +// computePolicyName - the name of the compute policy to create or update. +// parameters - parameters supplied to create or update the compute policy. 
The max degree of parallelism per +// job property, min priority per job property, or both must be present. +func (client ComputePoliciesClient) CreateOrUpdate(ctx context.Context, resourceGroupName string, accountName string, computePolicyName string, parameters CreateOrUpdateComputePolicyParameters) (result ComputePolicy, err error) { + if err := validation.Validate([]validation.Validation{ + {TargetValue: parameters, + Constraints: []validation.Constraint{{Target: "parameters.CreateOrUpdateComputePolicyProperties", Name: validation.Null, Rule: true, + Chain: []validation.Constraint{{Target: "parameters.CreateOrUpdateComputePolicyProperties.ObjectID", Name: validation.Null, Rule: true, Chain: nil}, + {Target: "parameters.CreateOrUpdateComputePolicyProperties.MaxDegreeOfParallelismPerJob", Name: validation.Null, Rule: false, + Chain: []validation.Constraint{{Target: "parameters.CreateOrUpdateComputePolicyProperties.MaxDegreeOfParallelismPerJob", Name: validation.InclusiveMinimum, Rule: 1, Chain: nil}}}, + {Target: "parameters.CreateOrUpdateComputePolicyProperties.MinPriorityPerJob", Name: validation.Null, Rule: false, + Chain: []validation.Constraint{{Target: "parameters.CreateOrUpdateComputePolicyProperties.MinPriorityPerJob", Name: validation.InclusiveMinimum, Rule: 1, Chain: nil}}}, + }}}}}); err != nil { + return result, validation.NewError("account.ComputePoliciesClient", "CreateOrUpdate", err.Error()) + } + + req, err := client.CreateOrUpdatePreparer(ctx, resourceGroupName, accountName, computePolicyName, parameters) + if err != nil { + err = autorest.NewErrorWithError(err, "account.ComputePoliciesClient", "CreateOrUpdate", nil, "Failure preparing request") + return + } + + resp, err := client.CreateOrUpdateSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "account.ComputePoliciesClient", "CreateOrUpdate", resp, "Failure sending request") + return + } + + result, err = client.CreateOrUpdateResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.ComputePoliciesClient", "CreateOrUpdate", resp, "Failure responding to request") + } + + return +} + +// CreateOrUpdatePreparer prepares the CreateOrUpdate request. +func (client ComputePoliciesClient) CreateOrUpdatePreparer(ctx context.Context, resourceGroupName string, accountName string, computePolicyName string, parameters CreateOrUpdateComputePolicyParameters) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "computePolicyName": autorest.Encode("path", computePolicyName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPut(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/computePolicies/{computePolicyName}", pathParameters), + autorest.WithJSON(parameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// CreateOrUpdateSender sends the CreateOrUpdate request. 
The method will close the +// http.Response Body if it receives an error. +func (client ComputePoliciesClient) CreateOrUpdateSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// CreateOrUpdateResponder handles the response to the CreateOrUpdate request. The method always +// closes the http.Response Body. +func (client ComputePoliciesClient) CreateOrUpdateResponder(resp *http.Response) (result ComputePolicy, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Delete deletes the specified compute policy from the specified Data Lake Analytics account +// Parameters: +// resourceGroupName - the name of the Azure resource group. +// accountName - the name of the Data Lake Analytics account. +// computePolicyName - the name of the compute policy to delete. +func (client ComputePoliciesClient) Delete(ctx context.Context, resourceGroupName string, accountName string, computePolicyName string) (result autorest.Response, err error) { + req, err := client.DeletePreparer(ctx, resourceGroupName, accountName, computePolicyName) + if err != nil { + err = autorest.NewErrorWithError(err, "account.ComputePoliciesClient", "Delete", nil, "Failure preparing request") + return + } + + resp, err := client.DeleteSender(req) + if err != nil { + result.Response = resp + err = autorest.NewErrorWithError(err, "account.ComputePoliciesClient", "Delete", resp, "Failure sending request") + return + } + + result, err = client.DeleteResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.ComputePoliciesClient", "Delete", resp, "Failure responding to request") + } + + return +} + +// DeletePreparer prepares the Delete request. +func (client ComputePoliciesClient) DeletePreparer(ctx context.Context, resourceGroupName string, accountName string, computePolicyName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "computePolicyName": autorest.Encode("path", computePolicyName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsDelete(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/computePolicies/{computePolicyName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// DeleteSender sends the Delete request. The method will close the +// http.Response Body if it receives an error. +func (client ComputePoliciesClient) DeleteSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// DeleteResponder handles the response to the Delete request. The method always +// closes the http.Response Body. 
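// Compute policy operations, unlike account Create and Delete above, complete
// synchronously: CreateOrUpdate returns the ComputePolicy directly and Delete returns a
// plain autorest.Response (200 or 204 on success). The sketch below is illustrative caller
// code; the helper names are invented and the CreateOrUpdateComputePolicyParameters value
// is assumed to be built elsewhere, since its field types are defined in models.go rather
// than in this file.
package example

import (
	"context"
	"fmt"

	"github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account"
)

// applyComputePolicy creates or fully replaces a named compute policy on an account.
func applyComputePolicy(ctx context.Context, client account.ComputePoliciesClient, resourceGroup, accountName, policyName string, params account.CreateOrUpdateComputePolicyParameters) (account.ComputePolicy, error) {
	policy, err := client.CreateOrUpdate(ctx, resourceGroup, accountName, policyName, params)
	if err != nil {
		return account.ComputePolicy{}, fmt.Errorf("Error creating compute policy %q on account %q: %+v", policyName, accountName, err)
	}
	return policy, nil
}

// removeComputePolicy deletes a compute policy from the account.
func removeComputePolicy(ctx context.Context, client account.ComputePoliciesClient, resourceGroup, accountName, policyName string) error {
	if _, err := client.Delete(ctx, resourceGroup, accountName, policyName); err != nil {
		return fmt.Errorf("Error deleting compute policy %q from account %q: %+v", policyName, accountName, err)
	}
	return nil
}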
+func (client ComputePoliciesClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusNoContent), + autorest.ByClosing()) + result.Response = resp + return +} + +// Get gets the specified Data Lake Analytics compute policy. +// Parameters: +// resourceGroupName - the name of the Azure resource group. +// accountName - the name of the Data Lake Analytics account. +// computePolicyName - the name of the compute policy to retrieve. +func (client ComputePoliciesClient) Get(ctx context.Context, resourceGroupName string, accountName string, computePolicyName string) (result ComputePolicy, err error) { + req, err := client.GetPreparer(ctx, resourceGroupName, accountName, computePolicyName) + if err != nil { + err = autorest.NewErrorWithError(err, "account.ComputePoliciesClient", "Get", nil, "Failure preparing request") + return + } + + resp, err := client.GetSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "account.ComputePoliciesClient", "Get", resp, "Failure sending request") + return + } + + result, err = client.GetResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.ComputePoliciesClient", "Get", resp, "Failure responding to request") + } + + return +} + +// GetPreparer prepares the Get request. +func (client ComputePoliciesClient) GetPreparer(ctx context.Context, resourceGroupName string, accountName string, computePolicyName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "computePolicyName": autorest.Encode("path", computePolicyName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/computePolicies/{computePolicyName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// GetSender sends the Get request. The method will close the +// http.Response Body if it receives an error. +func (client ComputePoliciesClient) GetSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// GetResponder handles the response to the Get request. The method always +// closes the http.Response Body. +func (client ComputePoliciesClient) GetResponder(resp *http.Response) (result ComputePolicy, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// ListByAccount lists the Data Lake Analytics compute policies within the specified Data Lake Analytics account. An +// account supports, at most, 50 policies +// Parameters: +// resourceGroupName - the name of the Azure resource group. +// accountName - the name of the Data Lake Analytics account. 
+func (client ComputePoliciesClient) ListByAccount(ctx context.Context, resourceGroupName string, accountName string) (result ComputePolicyListResultPage, err error) { + result.fn = client.listByAccountNextResults + req, err := client.ListByAccountPreparer(ctx, resourceGroupName, accountName) + if err != nil { + err = autorest.NewErrorWithError(err, "account.ComputePoliciesClient", "ListByAccount", nil, "Failure preparing request") + return + } + + resp, err := client.ListByAccountSender(req) + if err != nil { + result.cplr.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "account.ComputePoliciesClient", "ListByAccount", resp, "Failure sending request") + return + } + + result.cplr, err = client.ListByAccountResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.ComputePoliciesClient", "ListByAccount", resp, "Failure responding to request") + } + + return +} + +// ListByAccountPreparer prepares the ListByAccount request. +func (client ComputePoliciesClient) ListByAccountPreparer(ctx context.Context, resourceGroupName string, accountName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/computePolicies", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListByAccountSender sends the ListByAccount request. The method will close the +// http.Response Body if it receives an error. +func (client ComputePoliciesClient) ListByAccountSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// ListByAccountResponder handles the response to the ListByAccount request. The method always +// closes the http.Response Body. +func (client ComputePoliciesClient) ListByAccountResponder(resp *http.Response) (result ComputePolicyListResult, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listByAccountNextResults retrieves the next set of results, if any. 
+func (client ComputePoliciesClient) listByAccountNextResults(lastResults ComputePolicyListResult) (result ComputePolicyListResult, err error) { + req, err := lastResults.computePolicyListResultPreparer() + if err != nil { + return result, autorest.NewErrorWithError(err, "account.ComputePoliciesClient", "listByAccountNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListByAccountSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "account.ComputePoliciesClient", "listByAccountNextResults", resp, "Failure sending next results request") + } + result, err = client.ListByAccountResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.ComputePoliciesClient", "listByAccountNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListByAccountComplete enumerates all values, automatically crossing page boundaries as required. +func (client ComputePoliciesClient) ListByAccountComplete(ctx context.Context, resourceGroupName string, accountName string) (result ComputePolicyListResultIterator, err error) { + result.page, err = client.ListByAccount(ctx, resourceGroupName, accountName) + return +} + +// Update updates the specified compute policy. +// Parameters: +// resourceGroupName - the name of the Azure resource group. +// accountName - the name of the Data Lake Analytics account. +// computePolicyName - the name of the compute policy to update. +// parameters - parameters supplied to update the compute policy. +func (client ComputePoliciesClient) Update(ctx context.Context, resourceGroupName string, accountName string, computePolicyName string, parameters *UpdateComputePolicyParameters) (result ComputePolicy, err error) { + req, err := client.UpdatePreparer(ctx, resourceGroupName, accountName, computePolicyName, parameters) + if err != nil { + err = autorest.NewErrorWithError(err, "account.ComputePoliciesClient", "Update", nil, "Failure preparing request") + return + } + + resp, err := client.UpdateSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "account.ComputePoliciesClient", "Update", resp, "Failure sending request") + return + } + + result, err = client.UpdateResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.ComputePoliciesClient", "Update", resp, "Failure responding to request") + } + + return +} + +// UpdatePreparer prepares the Update request. 
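// Update takes *UpdateComputePolicyParameters rather than a value: the UpdatePreparer that
// follows only attaches a JSON body when the pointer is non-nil, so a PATCH can carry just
// the fields being changed. A thin illustrative wrapper; the helper name is invented and
// the parameters are assumed to be built by the caller from models.go types.
package example

import (
	"context"

	"github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account"
)

// patchComputePolicy sends a partial update for an existing compute policy.
func patchComputePolicy(ctx context.Context, client account.ComputePoliciesClient, resourceGroup, accountName, policyName string, params *account.UpdateComputePolicyParameters) (account.ComputePolicy, error) {
	return client.Update(ctx, resourceGroup, accountName, policyName, params)
}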
+func (client ComputePoliciesClient) UpdatePreparer(ctx context.Context, resourceGroupName string, accountName string, computePolicyName string, parameters *UpdateComputePolicyParameters) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "computePolicyName": autorest.Encode("path", computePolicyName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPatch(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/computePolicies/{computePolicyName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + if parameters != nil { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithJSON(parameters)) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// UpdateSender sends the Update request. The method will close the +// http.Response Body if it receives an error. +func (client ComputePoliciesClient) UpdateSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// UpdateResponder handles the response to the Update request. The method always +// closes the http.Response Body. +func (client ComputePoliciesClient) UpdateResponder(resp *http.Response) (result ComputePolicy, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account/datalakestoreaccounts.go b/vendor/github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account/datalakestoreaccounts.go new file mode 100644 index 000000000000..a260fc76201e --- /dev/null +++ b/vendor/github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account/datalakestoreaccounts.go @@ -0,0 +1,386 @@ +package account + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ +import ( + "context" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/Azure/go-autorest/autorest/validation" + "net/http" +) + +// DataLakeStoreAccountsClient is the creates an Azure Data Lake Analytics account management client. +type DataLakeStoreAccountsClient struct { + BaseClient +} + +// NewDataLakeStoreAccountsClient creates an instance of the DataLakeStoreAccountsClient client. +func NewDataLakeStoreAccountsClient(subscriptionID string) DataLakeStoreAccountsClient { + return NewDataLakeStoreAccountsClientWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewDataLakeStoreAccountsClientWithBaseURI creates an instance of the DataLakeStoreAccountsClient client. +func NewDataLakeStoreAccountsClientWithBaseURI(baseURI string, subscriptionID string) DataLakeStoreAccountsClient { + return DataLakeStoreAccountsClient{NewWithBaseURI(baseURI, subscriptionID)} +} + +// Add updates the specified Data Lake Analytics account to include the additional Data Lake Store account. +// Parameters: +// resourceGroupName - the name of the Azure resource group. +// accountName - the name of the Data Lake Analytics account. +// dataLakeStoreAccountName - the name of the Data Lake Store account to add. +// parameters - the details of the Data Lake Store account. +func (client DataLakeStoreAccountsClient) Add(ctx context.Context, resourceGroupName string, accountName string, dataLakeStoreAccountName string, parameters *AddDataLakeStoreParameters) (result autorest.Response, err error) { + req, err := client.AddPreparer(ctx, resourceGroupName, accountName, dataLakeStoreAccountName, parameters) + if err != nil { + err = autorest.NewErrorWithError(err, "account.DataLakeStoreAccountsClient", "Add", nil, "Failure preparing request") + return + } + + resp, err := client.AddSender(req) + if err != nil { + result.Response = resp + err = autorest.NewErrorWithError(err, "account.DataLakeStoreAccountsClient", "Add", resp, "Failure sending request") + return + } + + result, err = client.AddResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.DataLakeStoreAccountsClient", "Add", resp, "Failure responding to request") + } + + return +} + +// AddPreparer prepares the Add request. 
+func (client DataLakeStoreAccountsClient) AddPreparer(ctx context.Context, resourceGroupName string, accountName string, dataLakeStoreAccountName string, parameters *AddDataLakeStoreParameters) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "dataLakeStoreAccountName": autorest.Encode("path", dataLakeStoreAccountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPut(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/dataLakeStoreAccounts/{dataLakeStoreAccountName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + if parameters != nil { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithJSON(parameters)) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// AddSender sends the Add request. The method will close the +// http.Response Body if it receives an error. +func (client DataLakeStoreAccountsClient) AddSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// AddResponder handles the response to the Add request. The method always +// closes the http.Response Body. +func (client DataLakeStoreAccountsClient) AddResponder(resp *http.Response) (result autorest.Response, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByClosing()) + result.Response = resp + return +} + +// Delete updates the Data Lake Analytics account specified to remove the specified Data Lake Store account. +// Parameters: +// resourceGroupName - the name of the Azure resource group. +// accountName - the name of the Data Lake Analytics account. +// dataLakeStoreAccountName - the name of the Data Lake Store account to remove +func (client DataLakeStoreAccountsClient) Delete(ctx context.Context, resourceGroupName string, accountName string, dataLakeStoreAccountName string) (result autorest.Response, err error) { + req, err := client.DeletePreparer(ctx, resourceGroupName, accountName, dataLakeStoreAccountName) + if err != nil { + err = autorest.NewErrorWithError(err, "account.DataLakeStoreAccountsClient", "Delete", nil, "Failure preparing request") + return + } + + resp, err := client.DeleteSender(req) + if err != nil { + result.Response = resp + err = autorest.NewErrorWithError(err, "account.DataLakeStoreAccountsClient", "Delete", resp, "Failure sending request") + return + } + + result, err = client.DeleteResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.DataLakeStoreAccountsClient", "Delete", resp, "Failure responding to request") + } + + return +} + +// DeletePreparer prepares the Delete request. 
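// Linking a Data Lake Store account to an analytics account is a synchronous PUT on the
// dataLakeStoreAccounts sub-resource, and removing the link is a plain DELETE; neither
// returns a future. Add accepts a nil *AddDataLakeStoreParameters because AddPreparer above
// only attaches a JSON body when parameters is non-nil. Illustrative sketch only; the
// helper names are invented.
package example

import (
	"context"
	"fmt"

	"github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account"
)

// linkStoreAccount attaches an existing Data Lake Store account to an analytics account,
// sending the PUT without a body.
func linkStoreAccount(ctx context.Context, client account.DataLakeStoreAccountsClient, resourceGroup, analyticsName, storeName string) error {
	if _, err := client.Add(ctx, resourceGroup, analyticsName, storeName, nil); err != nil {
		return fmt.Errorf("Error adding Data Lake Store account %q to %q: %+v", storeName, analyticsName, err)
	}
	return nil
}

// unlinkStoreAccount removes the association; only the link on the analytics account is
// affected, not the Data Lake Store account itself.
func unlinkStoreAccount(ctx context.Context, client account.DataLakeStoreAccountsClient, resourceGroup, analyticsName, storeName string) error {
	if _, err := client.Delete(ctx, resourceGroup, analyticsName, storeName); err != nil {
		return fmt.Errorf("Error removing Data Lake Store account %q from %q: %+v", storeName, analyticsName, err)
	}
	return nil
}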
+func (client DataLakeStoreAccountsClient) DeletePreparer(ctx context.Context, resourceGroupName string, accountName string, dataLakeStoreAccountName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "dataLakeStoreAccountName": autorest.Encode("path", dataLakeStoreAccountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsDelete(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/dataLakeStoreAccounts/{dataLakeStoreAccountName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// DeleteSender sends the Delete request. The method will close the +// http.Response Body if it receives an error. +func (client DataLakeStoreAccountsClient) DeleteSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// DeleteResponder handles the response to the Delete request. The method always +// closes the http.Response Body. +func (client DataLakeStoreAccountsClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByClosing()) + result.Response = resp + return +} + +// Get gets the specified Data Lake Store account details in the specified Data Lake Analytics account. +// Parameters: +// resourceGroupName - the name of the Azure resource group. +// accountName - the name of the Data Lake Analytics account. +// dataLakeStoreAccountName - the name of the Data Lake Store account to retrieve +func (client DataLakeStoreAccountsClient) Get(ctx context.Context, resourceGroupName string, accountName string, dataLakeStoreAccountName string) (result DataLakeStoreAccountInformation, err error) { + req, err := client.GetPreparer(ctx, resourceGroupName, accountName, dataLakeStoreAccountName) + if err != nil { + err = autorest.NewErrorWithError(err, "account.DataLakeStoreAccountsClient", "Get", nil, "Failure preparing request") + return + } + + resp, err := client.GetSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "account.DataLakeStoreAccountsClient", "Get", resp, "Failure sending request") + return + } + + result, err = client.GetResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.DataLakeStoreAccountsClient", "Get", resp, "Failure responding to request") + } + + return +} + +// GetPreparer prepares the Get request. 
+func (client DataLakeStoreAccountsClient) GetPreparer(ctx context.Context, resourceGroupName string, accountName string, dataLakeStoreAccountName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "dataLakeStoreAccountName": autorest.Encode("path", dataLakeStoreAccountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/dataLakeStoreAccounts/{dataLakeStoreAccountName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// GetSender sends the Get request. The method will close the +// http.Response Body if it receives an error. +func (client DataLakeStoreAccountsClient) GetSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// GetResponder handles the response to the Get request. The method always +// closes the http.Response Body. +func (client DataLakeStoreAccountsClient) GetResponder(resp *http.Response) (result DataLakeStoreAccountInformation, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// ListByAccount gets the first page of Data Lake Store accounts linked to the specified Data Lake Analytics account. +// The response includes a link to the next page, if any. +// Parameters: +// resourceGroupName - the name of the Azure resource group. +// accountName - the name of the Data Lake Analytics account. +// filter - oData filter. Optional. +// top - the number of items to return. Optional. +// skip - the number of items to skip over before returning elements. Optional. +// selectParameter - oData Select statement. Limits the properties on each entry to just those requested, e.g. +// Categories?$select=CategoryName,Description. Optional. +// orderby - orderBy clause. One or more comma-separated expressions with an optional "asc" (the default) or +// "desc" depending on the order you'd like the values sorted, e.g. Categories?$orderby=CategoryName desc. +// Optional. +// count - the Boolean value of true or false to request a count of the matching resources included with the +// resources in the response, e.g. Categories?$count=true. Optional. 
+func (client DataLakeStoreAccountsClient) ListByAccount(ctx context.Context, resourceGroupName string, accountName string, filter string, top *int32, skip *int32, selectParameter string, orderby string, count *bool) (result DataLakeStoreAccountInformationListResultPage, err error) { + if err := validation.Validate([]validation.Validation{ + {TargetValue: top, + Constraints: []validation.Constraint{{Target: "top", Name: validation.Null, Rule: false, + Chain: []validation.Constraint{{Target: "top", Name: validation.InclusiveMinimum, Rule: 1, Chain: nil}}}}}, + {TargetValue: skip, + Constraints: []validation.Constraint{{Target: "skip", Name: validation.Null, Rule: false, + Chain: []validation.Constraint{{Target: "skip", Name: validation.InclusiveMinimum, Rule: 1, Chain: nil}}}}}}); err != nil { + return result, validation.NewError("account.DataLakeStoreAccountsClient", "ListByAccount", err.Error()) + } + + result.fn = client.listByAccountNextResults + req, err := client.ListByAccountPreparer(ctx, resourceGroupName, accountName, filter, top, skip, selectParameter, orderby, count) + if err != nil { + err = autorest.NewErrorWithError(err, "account.DataLakeStoreAccountsClient", "ListByAccount", nil, "Failure preparing request") + return + } + + resp, err := client.ListByAccountSender(req) + if err != nil { + result.dlsailr.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "account.DataLakeStoreAccountsClient", "ListByAccount", resp, "Failure sending request") + return + } + + result.dlsailr, err = client.ListByAccountResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.DataLakeStoreAccountsClient", "ListByAccount", resp, "Failure responding to request") + } + + return +} + +// ListByAccountPreparer prepares the ListByAccount request. +func (client DataLakeStoreAccountsClient) ListByAccountPreparer(ctx context.Context, resourceGroupName string, accountName string, filter string, top *int32, skip *int32, selectParameter string, orderby string, count *bool) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + if len(filter) > 0 { + queryParameters["$filter"] = autorest.Encode("query", filter) + } + if top != nil { + queryParameters["$top"] = autorest.Encode("query", *top) + } + if skip != nil { + queryParameters["$skip"] = autorest.Encode("query", *skip) + } + if len(selectParameter) > 0 { + queryParameters["$select"] = autorest.Encode("query", selectParameter) + } + if len(orderby) > 0 { + queryParameters["$orderby"] = autorest.Encode("query", orderby) + } + if count != nil { + queryParameters["$count"] = autorest.Encode("query", *count) + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/dataLakeStoreAccounts", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListByAccountSender sends the ListByAccount request. The method will close the +// http.Response Body if it receives an error. 
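A usage sketch (editorial, not generated code): ListByAccountPreparer above only adds an OData option to the query string when it is actually set, and ListByAccountComplete, defined a little further on, wraps the paging into an iterator. The sketch below assumes the generated DataLakeStoreAccountInformationListResultIterator exposes the usual NotDone/Next methods used by the other iterators in this package; resource names are hypothetical.

package example // illustrative only; not part of the generated SDK

import (
    "context"

    "github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account"
)

// countLinkedStores pages through every Data Lake Store account linked to a
// Data Lake Analytics account, requesting at most 10 items per page. The unused
// OData options (filter, skip, select, orderby, count) are passed as zero values,
// which the preparer simply leaves out of the query string.
func countLinkedStores(ctx context.Context, client account.DataLakeStoreAccountsClient) (int, error) {
    top := int32(10)
    iter, err := client.ListByAccountComplete(ctx, "example-resources", "exampleadla", "", &top, nil, "", "", nil)
    if err != nil {
        return 0, err
    }
    n := 0
    for iter.NotDone() {
        n++
        if err := iter.Next(); err != nil {
            return n, err
        }
    }
    return n, nil
}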
+func (client DataLakeStoreAccountsClient) ListByAccountSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// ListByAccountResponder handles the response to the ListByAccount request. The method always +// closes the http.Response Body. +func (client DataLakeStoreAccountsClient) ListByAccountResponder(resp *http.Response) (result DataLakeStoreAccountInformationListResult, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listByAccountNextResults retrieves the next set of results, if any. +func (client DataLakeStoreAccountsClient) listByAccountNextResults(lastResults DataLakeStoreAccountInformationListResult) (result DataLakeStoreAccountInformationListResult, err error) { + req, err := lastResults.dataLakeStoreAccountInformationListResultPreparer() + if err != nil { + return result, autorest.NewErrorWithError(err, "account.DataLakeStoreAccountsClient", "listByAccountNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListByAccountSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "account.DataLakeStoreAccountsClient", "listByAccountNextResults", resp, "Failure sending next results request") + } + result, err = client.ListByAccountResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.DataLakeStoreAccountsClient", "listByAccountNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListByAccountComplete enumerates all values, automatically crossing page boundaries as required. +func (client DataLakeStoreAccountsClient) ListByAccountComplete(ctx context.Context, resourceGroupName string, accountName string, filter string, top *int32, skip *int32, selectParameter string, orderby string, count *bool) (result DataLakeStoreAccountInformationListResultIterator, err error) { + result.page, err = client.ListByAccount(ctx, resourceGroupName, accountName, filter, top, skip, selectParameter, orderby, count) + return +} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account/firewallrules.go b/vendor/github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account/firewallrules.go new file mode 100644 index 000000000000..b5cea63864f6 --- /dev/null +++ b/vendor/github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account/firewallrules.go @@ -0,0 +1,430 @@ +package account + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. 
+// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +import ( + "context" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/Azure/go-autorest/autorest/validation" + "net/http" +) + +// FirewallRulesClient is the creates an Azure Data Lake Analytics account management client. +type FirewallRulesClient struct { + BaseClient +} + +// NewFirewallRulesClient creates an instance of the FirewallRulesClient client. +func NewFirewallRulesClient(subscriptionID string) FirewallRulesClient { + return NewFirewallRulesClientWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewFirewallRulesClientWithBaseURI creates an instance of the FirewallRulesClient client. +func NewFirewallRulesClientWithBaseURI(baseURI string, subscriptionID string) FirewallRulesClient { + return FirewallRulesClient{NewWithBaseURI(baseURI, subscriptionID)} +} + +// CreateOrUpdate creates or updates the specified firewall rule. During update, the firewall rule with the specified +// name will be replaced with this new firewall rule. +// Parameters: +// resourceGroupName - the name of the Azure resource group. +// accountName - the name of the Data Lake Analytics account. +// firewallRuleName - the name of the firewall rule to create or update. +// parameters - parameters supplied to create or update the firewall rule. +func (client FirewallRulesClient) CreateOrUpdate(ctx context.Context, resourceGroupName string, accountName string, firewallRuleName string, parameters CreateOrUpdateFirewallRuleParameters) (result FirewallRule, err error) { + if err := validation.Validate([]validation.Validation{ + {TargetValue: parameters, + Constraints: []validation.Constraint{{Target: "parameters.CreateOrUpdateFirewallRuleProperties", Name: validation.Null, Rule: true, + Chain: []validation.Constraint{{Target: "parameters.CreateOrUpdateFirewallRuleProperties.StartIPAddress", Name: validation.Null, Rule: true, Chain: nil}, + {Target: "parameters.CreateOrUpdateFirewallRuleProperties.EndIPAddress", Name: validation.Null, Rule: true, Chain: nil}, + }}}}}); err != nil { + return result, validation.NewError("account.FirewallRulesClient", "CreateOrUpdate", err.Error()) + } + + req, err := client.CreateOrUpdatePreparer(ctx, resourceGroupName, accountName, firewallRuleName, parameters) + if err != nil { + err = autorest.NewErrorWithError(err, "account.FirewallRulesClient", "CreateOrUpdate", nil, "Failure preparing request") + return + } + + resp, err := client.CreateOrUpdateSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "account.FirewallRulesClient", "CreateOrUpdate", resp, "Failure sending request") + return + } + + result, err = client.CreateOrUpdateResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.FirewallRulesClient", "CreateOrUpdate", resp, "Failure responding to request") + } + + return +} + +// CreateOrUpdatePreparer prepares the CreateOrUpdate request. 
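Illustrative only, not part of the vendored file: CreateOrUpdate above validates client-side that a rule carries both a start and an end address before any request is sent. A hedged sketch of a caller, assuming the CreateOrUpdateFirewallRuleProperties struct defined later in this diff uses *string fields for the two addresses; the subscription ID and resource names are hypothetical.

package example // illustrative only; not part of the generated SDK

import (
    "context"

    "github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account"
    "github.com/Azure/go-autorest/autorest"
    "github.com/Azure/go-autorest/autorest/to"
)

// allowRange creates (or replaces) a firewall rule on a Data Lake Analytics account.
func allowRange(ctx context.Context, authorizer autorest.Authorizer) error {
    client := account.NewFirewallRulesClient("00000000-0000-0000-0000-000000000000") // hypothetical subscription ID
    client.Authorizer = authorizer

    params := account.CreateOrUpdateFirewallRuleParameters{
        CreateOrUpdateFirewallRuleProperties: &account.CreateOrUpdateFirewallRuleProperties{
            StartIPAddress: to.StringPtr("10.0.0.1"),
            EndIPAddress:   to.StringPtr("10.0.0.50"),
        },
    }
    _, err := client.CreateOrUpdate(ctx, "example-resources", "exampleadla", "example-rule", params)
    return err
}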
+func (client FirewallRulesClient) CreateOrUpdatePreparer(ctx context.Context, resourceGroupName string, accountName string, firewallRuleName string, parameters CreateOrUpdateFirewallRuleParameters) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "firewallRuleName": autorest.Encode("path", firewallRuleName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPut(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/firewallRules/{firewallRuleName}", pathParameters), + autorest.WithJSON(parameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// CreateOrUpdateSender sends the CreateOrUpdate request. The method will close the +// http.Response Body if it receives an error. +func (client FirewallRulesClient) CreateOrUpdateSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// CreateOrUpdateResponder handles the response to the CreateOrUpdate request. The method always +// closes the http.Response Body. +func (client FirewallRulesClient) CreateOrUpdateResponder(resp *http.Response) (result FirewallRule, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Delete deletes the specified firewall rule from the specified Data Lake Analytics account +// Parameters: +// resourceGroupName - the name of the Azure resource group. +// accountName - the name of the Data Lake Analytics account. +// firewallRuleName - the name of the firewall rule to delete. +func (client FirewallRulesClient) Delete(ctx context.Context, resourceGroupName string, accountName string, firewallRuleName string) (result autorest.Response, err error) { + req, err := client.DeletePreparer(ctx, resourceGroupName, accountName, firewallRuleName) + if err != nil { + err = autorest.NewErrorWithError(err, "account.FirewallRulesClient", "Delete", nil, "Failure preparing request") + return + } + + resp, err := client.DeleteSender(req) + if err != nil { + result.Response = resp + err = autorest.NewErrorWithError(err, "account.FirewallRulesClient", "Delete", resp, "Failure sending request") + return + } + + result, err = client.DeleteResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.FirewallRulesClient", "Delete", resp, "Failure responding to request") + } + + return +} + +// DeletePreparer prepares the Delete request. 
+func (client FirewallRulesClient) DeletePreparer(ctx context.Context, resourceGroupName string, accountName string, firewallRuleName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "firewallRuleName": autorest.Encode("path", firewallRuleName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsDelete(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/firewallRules/{firewallRuleName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// DeleteSender sends the Delete request. The method will close the +// http.Response Body if it receives an error. +func (client FirewallRulesClient) DeleteSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// DeleteResponder handles the response to the Delete request. The method always +// closes the http.Response Body. +func (client FirewallRulesClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusNoContent), + autorest.ByClosing()) + result.Response = resp + return +} + +// Get gets the specified Data Lake Analytics firewall rule. +// Parameters: +// resourceGroupName - the name of the Azure resource group. +// accountName - the name of the Data Lake Analytics account. +// firewallRuleName - the name of the firewall rule to retrieve. +func (client FirewallRulesClient) Get(ctx context.Context, resourceGroupName string, accountName string, firewallRuleName string) (result FirewallRule, err error) { + req, err := client.GetPreparer(ctx, resourceGroupName, accountName, firewallRuleName) + if err != nil { + err = autorest.NewErrorWithError(err, "account.FirewallRulesClient", "Get", nil, "Failure preparing request") + return + } + + resp, err := client.GetSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "account.FirewallRulesClient", "Get", resp, "Failure sending request") + return + } + + result, err = client.GetResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.FirewallRulesClient", "Get", resp, "Failure responding to request") + } + + return +} + +// GetPreparer prepares the Get request. 
+func (client FirewallRulesClient) GetPreparer(ctx context.Context, resourceGroupName string, accountName string, firewallRuleName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "firewallRuleName": autorest.Encode("path", firewallRuleName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/firewallRules/{firewallRuleName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// GetSender sends the Get request. The method will close the +// http.Response Body if it receives an error. +func (client FirewallRulesClient) GetSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// GetResponder handles the response to the Get request. The method always +// closes the http.Response Body. +func (client FirewallRulesClient) GetResponder(resp *http.Response) (result FirewallRule, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// ListByAccount lists the Data Lake Analytics firewall rules within the specified Data Lake Analytics account. +// Parameters: +// resourceGroupName - the name of the Azure resource group. +// accountName - the name of the Data Lake Analytics account. +func (client FirewallRulesClient) ListByAccount(ctx context.Context, resourceGroupName string, accountName string) (result FirewallRuleListResultPage, err error) { + result.fn = client.listByAccountNextResults + req, err := client.ListByAccountPreparer(ctx, resourceGroupName, accountName) + if err != nil { + err = autorest.NewErrorWithError(err, "account.FirewallRulesClient", "ListByAccount", nil, "Failure preparing request") + return + } + + resp, err := client.ListByAccountSender(req) + if err != nil { + result.frlr.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "account.FirewallRulesClient", "ListByAccount", resp, "Failure sending request") + return + } + + result.frlr, err = client.ListByAccountResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.FirewallRulesClient", "ListByAccount", resp, "Failure responding to request") + } + + return +} + +// ListByAccountPreparer prepares the ListByAccount request. 
+func (client FirewallRulesClient) ListByAccountPreparer(ctx context.Context, resourceGroupName string, accountName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/firewallRules", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListByAccountSender sends the ListByAccount request. The method will close the +// http.Response Body if it receives an error. +func (client FirewallRulesClient) ListByAccountSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// ListByAccountResponder handles the response to the ListByAccount request. The method always +// closes the http.Response Body. +func (client FirewallRulesClient) ListByAccountResponder(resp *http.Response) (result FirewallRuleListResult, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listByAccountNextResults retrieves the next set of results, if any. +func (client FirewallRulesClient) listByAccountNextResults(lastResults FirewallRuleListResult) (result FirewallRuleListResult, err error) { + req, err := lastResults.firewallRuleListResultPreparer() + if err != nil { + return result, autorest.NewErrorWithError(err, "account.FirewallRulesClient", "listByAccountNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListByAccountSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "account.FirewallRulesClient", "listByAccountNextResults", resp, "Failure sending next results request") + } + result, err = client.ListByAccountResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.FirewallRulesClient", "listByAccountNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListByAccountComplete enumerates all values, automatically crossing page boundaries as required. +func (client FirewallRulesClient) ListByAccountComplete(ctx context.Context, resourceGroupName string, accountName string) (result FirewallRuleListResultIterator, err error) { + result.page, err = client.ListByAccount(ctx, resourceGroupName, accountName) + return +} + +// Update updates the specified firewall rule. +// Parameters: +// resourceGroupName - the name of the Azure resource group. +// accountName - the name of the Data Lake Analytics account. +// firewallRuleName - the name of the firewall rule to update. +// parameters - parameters supplied to update the firewall rule. 
+func (client FirewallRulesClient) Update(ctx context.Context, resourceGroupName string, accountName string, firewallRuleName string, parameters *UpdateFirewallRuleParameters) (result FirewallRule, err error) { + req, err := client.UpdatePreparer(ctx, resourceGroupName, accountName, firewallRuleName, parameters) + if err != nil { + err = autorest.NewErrorWithError(err, "account.FirewallRulesClient", "Update", nil, "Failure preparing request") + return + } + + resp, err := client.UpdateSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "account.FirewallRulesClient", "Update", resp, "Failure sending request") + return + } + + result, err = client.UpdateResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.FirewallRulesClient", "Update", resp, "Failure responding to request") + } + + return +} + +// UpdatePreparer prepares the Update request. +func (client FirewallRulesClient) UpdatePreparer(ctx context.Context, resourceGroupName string, accountName string, firewallRuleName string, parameters *UpdateFirewallRuleParameters) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "firewallRuleName": autorest.Encode("path", firewallRuleName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPatch(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/firewallRules/{firewallRuleName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + if parameters != nil { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithJSON(parameters)) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// UpdateSender sends the Update request. The method will close the +// http.Response Body if it receives an error. +func (client FirewallRulesClient) UpdateSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// UpdateResponder handles the response to the Update request. The method always +// closes the http.Response Body. +func (client FirewallRulesClient) UpdateResponder(resp *http.Response) (result FirewallRule, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account/locations.go b/vendor/github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account/locations.go new file mode 100644 index 000000000000..48fc897a05a9 --- /dev/null +++ b/vendor/github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account/locations.go @@ -0,0 +1,105 @@ +package account + +// Copyright (c) Microsoft and contributors. All rights reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +import ( + "context" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "net/http" +) + +// LocationsClient is the creates an Azure Data Lake Analytics account management client. +type LocationsClient struct { + BaseClient +} + +// NewLocationsClient creates an instance of the LocationsClient client. +func NewLocationsClient(subscriptionID string) LocationsClient { + return NewLocationsClientWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewLocationsClientWithBaseURI creates an instance of the LocationsClient client. +func NewLocationsClientWithBaseURI(baseURI string, subscriptionID string) LocationsClient { + return LocationsClient{NewWithBaseURI(baseURI, subscriptionID)} +} + +// GetCapability gets subscription-level properties and limits for Data Lake Analytics specified by resource location. +// Parameters: +// location - the resource location without whitespace. +func (client LocationsClient) GetCapability(ctx context.Context, location string) (result CapabilityInformation, err error) { + req, err := client.GetCapabilityPreparer(ctx, location) + if err != nil { + err = autorest.NewErrorWithError(err, "account.LocationsClient", "GetCapability", nil, "Failure preparing request") + return + } + + resp, err := client.GetCapabilitySender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "account.LocationsClient", "GetCapability", resp, "Failure sending request") + return + } + + result, err = client.GetCapabilityResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.LocationsClient", "GetCapability", resp, "Failure responding to request") + } + + return +} + +// GetCapabilityPreparer prepares the GetCapability request. +func (client LocationsClient) GetCapabilityPreparer(ctx context.Context, location string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "location": autorest.Encode("path", location), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.DataLakeAnalytics/locations/{location}/capability", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// GetCapabilitySender sends the GetCapability request. The method will close the +// http.Response Body if it receives an error. 
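A sketch of querying regional capability with the client above (editorial, not generated code). The responder just below also accepts a 404, so callers should nil-check the returned fields; the subscription ID and region here are hypothetical.

package example // illustrative only; not part of the generated SDK

import (
    "context"
    "fmt"

    "github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account"
    "github.com/Azure/go-autorest/autorest"
)

// printCapability reports how many Data Lake Analytics accounts the subscription
// currently uses in a region relative to its limit.
func printCapability(ctx context.Context, authorizer autorest.Authorizer) error {
    client := account.NewLocationsClient("00000000-0000-0000-0000-000000000000") // hypothetical subscription ID
    client.Authorizer = authorizer

    info, err := client.GetCapability(ctx, "westeurope")
    if err != nil {
        return err
    }
    if info.AccountCount != nil && info.MaxAccountCount != nil {
        fmt.Printf("using %d of %d accounts\n", *info.AccountCount, *info.MaxAccountCount)
    }
    return nil
}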
+func (client LocationsClient) GetCapabilitySender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// GetCapabilityResponder handles the response to the GetCapability request. The method always +// closes the http.Response Body. +func (client LocationsClient) GetCapabilityResponder(resp *http.Response) (result CapabilityInformation, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusNotFound), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account/models.go b/vendor/github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account/models.go new file mode 100644 index 000000000000..56af0c25b8b8 --- /dev/null +++ b/vendor/github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account/models.go @@ -0,0 +1,2794 @@ +package account + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +import ( + "encoding/json" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/Azure/go-autorest/autorest/date" + "github.com/Azure/go-autorest/autorest/to" + "github.com/satori/go.uuid" + "net/http" +) + +// AADObjectType enumerates the values for aad object type. +type AADObjectType string + +const ( + // Group ... + Group AADObjectType = "Group" + // ServicePrincipal ... + ServicePrincipal AADObjectType = "ServicePrincipal" + // User ... + User AADObjectType = "User" +) + +// PossibleAADObjectTypeValues returns an array of possible values for the AADObjectType const type. +func PossibleAADObjectTypeValues() []AADObjectType { + return []AADObjectType{Group, ServicePrincipal, User} +} + +// DataLakeAnalyticsAccountState enumerates the values for data lake analytics account state. +type DataLakeAnalyticsAccountState string + +const ( + // Active ... + Active DataLakeAnalyticsAccountState = "Active" + // Suspended ... + Suspended DataLakeAnalyticsAccountState = "Suspended" +) + +// PossibleDataLakeAnalyticsAccountStateValues returns an array of possible values for the DataLakeAnalyticsAccountState const type. +func PossibleDataLakeAnalyticsAccountStateValues() []DataLakeAnalyticsAccountState { + return []DataLakeAnalyticsAccountState{Active, Suspended} +} + +// DataLakeAnalyticsAccountStatus enumerates the values for data lake analytics account status. +type DataLakeAnalyticsAccountStatus string + +const ( + // Canceled ... + Canceled DataLakeAnalyticsAccountStatus = "Canceled" + // Creating ... 
+ Creating DataLakeAnalyticsAccountStatus = "Creating" + // Deleted ... + Deleted DataLakeAnalyticsAccountStatus = "Deleted" + // Deleting ... + Deleting DataLakeAnalyticsAccountStatus = "Deleting" + // Failed ... + Failed DataLakeAnalyticsAccountStatus = "Failed" + // Patching ... + Patching DataLakeAnalyticsAccountStatus = "Patching" + // Resuming ... + Resuming DataLakeAnalyticsAccountStatus = "Resuming" + // Running ... + Running DataLakeAnalyticsAccountStatus = "Running" + // Succeeded ... + Succeeded DataLakeAnalyticsAccountStatus = "Succeeded" + // Suspending ... + Suspending DataLakeAnalyticsAccountStatus = "Suspending" + // Undeleting ... + Undeleting DataLakeAnalyticsAccountStatus = "Undeleting" +) + +// PossibleDataLakeAnalyticsAccountStatusValues returns an array of possible values for the DataLakeAnalyticsAccountStatus const type. +func PossibleDataLakeAnalyticsAccountStatusValues() []DataLakeAnalyticsAccountStatus { + return []DataLakeAnalyticsAccountStatus{Canceled, Creating, Deleted, Deleting, Failed, Patching, Resuming, Running, Succeeded, Suspending, Undeleting} +} + +// FirewallAllowAzureIpsState enumerates the values for firewall allow azure ips state. +type FirewallAllowAzureIpsState string + +const ( + // Disabled ... + Disabled FirewallAllowAzureIpsState = "Disabled" + // Enabled ... + Enabled FirewallAllowAzureIpsState = "Enabled" +) + +// PossibleFirewallAllowAzureIpsStateValues returns an array of possible values for the FirewallAllowAzureIpsState const type. +func PossibleFirewallAllowAzureIpsStateValues() []FirewallAllowAzureIpsState { + return []FirewallAllowAzureIpsState{Disabled, Enabled} +} + +// FirewallState enumerates the values for firewall state. +type FirewallState string + +const ( + // FirewallStateDisabled ... + FirewallStateDisabled FirewallState = "Disabled" + // FirewallStateEnabled ... + FirewallStateEnabled FirewallState = "Enabled" +) + +// PossibleFirewallStateValues returns an array of possible values for the FirewallState const type. +func PossibleFirewallStateValues() []FirewallState { + return []FirewallState{FirewallStateDisabled, FirewallStateEnabled} +} + +// OperationOrigin enumerates the values for operation origin. +type OperationOrigin string + +const ( + // OperationOriginSystem ... + OperationOriginSystem OperationOrigin = "system" + // OperationOriginUser ... + OperationOriginUser OperationOrigin = "user" + // OperationOriginUsersystem ... + OperationOriginUsersystem OperationOrigin = "user,system" +) + +// PossibleOperationOriginValues returns an array of possible values for the OperationOrigin const type. +func PossibleOperationOriginValues() []OperationOrigin { + return []OperationOrigin{OperationOriginSystem, OperationOriginUser, OperationOriginUsersystem} +} + +// SubscriptionState enumerates the values for subscription state. +type SubscriptionState string + +const ( + // SubscriptionStateDeleted ... + SubscriptionStateDeleted SubscriptionState = "Deleted" + // SubscriptionStateRegistered ... + SubscriptionStateRegistered SubscriptionState = "Registered" + // SubscriptionStateSuspended ... + SubscriptionStateSuspended SubscriptionState = "Suspended" + // SubscriptionStateUnregistered ... + SubscriptionStateUnregistered SubscriptionState = "Unregistered" + // SubscriptionStateWarned ... + SubscriptionStateWarned SubscriptionState = "Warned" +) + +// PossibleSubscriptionStateValues returns an array of possible values for the SubscriptionState const type. 
+func PossibleSubscriptionStateValues() []SubscriptionState { + return []SubscriptionState{SubscriptionStateDeleted, SubscriptionStateRegistered, SubscriptionStateSuspended, SubscriptionStateUnregistered, SubscriptionStateWarned} +} + +// TierType enumerates the values for tier type. +type TierType string + +const ( + // Commitment100000AUHours ... + Commitment100000AUHours TierType = "Commitment_100000AUHours" + // Commitment10000AUHours ... + Commitment10000AUHours TierType = "Commitment_10000AUHours" + // Commitment1000AUHours ... + Commitment1000AUHours TierType = "Commitment_1000AUHours" + // Commitment100AUHours ... + Commitment100AUHours TierType = "Commitment_100AUHours" + // Commitment500000AUHours ... + Commitment500000AUHours TierType = "Commitment_500000AUHours" + // Commitment50000AUHours ... + Commitment50000AUHours TierType = "Commitment_50000AUHours" + // Commitment5000AUHours ... + Commitment5000AUHours TierType = "Commitment_5000AUHours" + // Commitment500AUHours ... + Commitment500AUHours TierType = "Commitment_500AUHours" + // Consumption ... + Consumption TierType = "Consumption" +) + +// PossibleTierTypeValues returns an array of possible values for the TierType const type. +func PossibleTierTypeValues() []TierType { + return []TierType{Commitment100000AUHours, Commitment10000AUHours, Commitment1000AUHours, Commitment100AUHours, Commitment500000AUHours, Commitment50000AUHours, Commitment5000AUHours, Commitment500AUHours, Consumption} +} + +// AccountsCreateFutureType an abstraction for monitoring and retrieving the results of a long-running operation. +type AccountsCreateFutureType struct { + azure.Future +} + +// Result returns the result of the asynchronous operation. +// If the operation has not completed it will return an error. +func (future *AccountsCreateFutureType) Result(client AccountsClient) (dlaa DataLakeAnalyticsAccount, err error) { + var done bool + done, err = future.Done(client) + if err != nil { + err = autorest.NewErrorWithError(err, "account.AccountsCreateFutureType", "Result", future.Response(), "Polling failure") + return + } + if !done { + err = azure.NewAsyncOpIncompleteError("account.AccountsCreateFutureType") + return + } + sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) + if dlaa.Response.Response, err = future.GetResult(sender); err == nil && dlaa.Response.Response.StatusCode != http.StatusNoContent { + dlaa, err = client.CreateResponder(dlaa.Response.Response) + if err != nil { + err = autorest.NewErrorWithError(err, "account.AccountsCreateFutureType", "Result", dlaa.Response.Response, "Failure responding to request") + } + } + return +} + +// AccountsDeleteFutureType an abstraction for monitoring and retrieving the results of a long-running operation. +type AccountsDeleteFutureType struct { + azure.Future +} + +// Result returns the result of the asynchronous operation. +// If the operation has not completed it will return an error. 
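As an aside (not part of the vendored file): the future types above expose Done for polling and Result for the final value. Below is a deliberately naive sketch of waiting for an account create to finish, however the AccountsCreateFutureType was obtained; production code would rather use the SDK's own wait helpers and honor the service's retry hints.

package example // illustrative only; not part of the generated SDK

import (
    "context"
    "time"

    "github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account"
)

// waitForCreate polls the long-running create until it is done, then asks
// Result for the final DataLakeAnalyticsAccount.
func waitForCreate(ctx context.Context, client account.AccountsClient, future account.AccountsCreateFutureType) (account.DataLakeAnalyticsAccount, error) {
    for {
        done, err := future.Done(client)
        if err != nil {
            return account.DataLakeAnalyticsAccount{}, err
        }
        if done {
            return future.Result(client)
        }
        select {
        case <-ctx.Done():
            return account.DataLakeAnalyticsAccount{}, ctx.Err()
        case <-time.After(time.Second): // fixed delay is a simplification for illustration
        }
    }
}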
+func (future *AccountsDeleteFutureType) Result(client AccountsClient) (ar autorest.Response, err error) { + var done bool + done, err = future.Done(client) + if err != nil { + err = autorest.NewErrorWithError(err, "account.AccountsDeleteFutureType", "Result", future.Response(), "Polling failure") + return + } + if !done { + err = azure.NewAsyncOpIncompleteError("account.AccountsDeleteFutureType") + return + } + ar.Response = future.Response() + return +} + +// AccountsUpdateFutureType an abstraction for monitoring and retrieving the results of a long-running operation. +type AccountsUpdateFutureType struct { + azure.Future +} + +// Result returns the result of the asynchronous operation. +// If the operation has not completed it will return an error. +func (future *AccountsUpdateFutureType) Result(client AccountsClient) (dlaa DataLakeAnalyticsAccount, err error) { + var done bool + done, err = future.Done(client) + if err != nil { + err = autorest.NewErrorWithError(err, "account.AccountsUpdateFutureType", "Result", future.Response(), "Polling failure") + return + } + if !done { + err = azure.NewAsyncOpIncompleteError("account.AccountsUpdateFutureType") + return + } + sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) + if dlaa.Response.Response, err = future.GetResult(sender); err == nil && dlaa.Response.Response.StatusCode != http.StatusNoContent { + dlaa, err = client.UpdateResponder(dlaa.Response.Response) + if err != nil { + err = autorest.NewErrorWithError(err, "account.AccountsUpdateFutureType", "Result", dlaa.Response.Response, "Failure responding to request") + } + } + return +} + +// AddDataLakeStoreParameters the parameters used to add a new Data Lake Store account. +type AddDataLakeStoreParameters struct { + // AddDataLakeStoreProperties - The Data Lake Store account properties to use when adding a new Data Lake Store account. + *AddDataLakeStoreProperties `json:"properties,omitempty"` +} + +// MarshalJSON is the custom marshaler for AddDataLakeStoreParameters. +func (adlsp AddDataLakeStoreParameters) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if adlsp.AddDataLakeStoreProperties != nil { + objectMap["properties"] = adlsp.AddDataLakeStoreProperties + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for AddDataLakeStoreParameters struct. +func (adlsp *AddDataLakeStoreParameters) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var addDataLakeStoreProperties AddDataLakeStoreProperties + err = json.Unmarshal(*v, &addDataLakeStoreProperties) + if err != nil { + return err + } + adlsp.AddDataLakeStoreProperties = &addDataLakeStoreProperties + } + } + } + + return nil +} + +// AddDataLakeStoreProperties the Data Lake Store account properties to use when adding a new Data Lake Store +// account. +type AddDataLakeStoreProperties struct { + // Suffix - The optional suffix for the Data Lake Store account. + Suffix *string `json:"suffix,omitempty"` +} + +// AddDataLakeStoreWithAccountParameters the parameters used to add a new Data Lake Store account while creating a +// new Data Lake Analytics account. +type AddDataLakeStoreWithAccountParameters struct { + // Name - The unique name of the Data Lake Store account to add. 
+ Name *string `json:"name,omitempty"` + // AddDataLakeStoreProperties - The Data Lake Store account properties to use when adding a new Data Lake Store account. + *AddDataLakeStoreProperties `json:"properties,omitempty"` +} + +// MarshalJSON is the custom marshaler for AddDataLakeStoreWithAccountParameters. +func (adlswap AddDataLakeStoreWithAccountParameters) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if adlswap.Name != nil { + objectMap["name"] = adlswap.Name + } + if adlswap.AddDataLakeStoreProperties != nil { + objectMap["properties"] = adlswap.AddDataLakeStoreProperties + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for AddDataLakeStoreWithAccountParameters struct. +func (adlswap *AddDataLakeStoreWithAccountParameters) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + adlswap.Name = &name + } + case "properties": + if v != nil { + var addDataLakeStoreProperties AddDataLakeStoreProperties + err = json.Unmarshal(*v, &addDataLakeStoreProperties) + if err != nil { + return err + } + adlswap.AddDataLakeStoreProperties = &addDataLakeStoreProperties + } + } + } + + return nil +} + +// AddStorageAccountParameters the parameters used to add a new Azure Storage account. +type AddStorageAccountParameters struct { + // AddStorageAccountProperties - The Azure Storage account properties to use when adding a new Azure Storage account. + *AddStorageAccountProperties `json:"properties,omitempty"` +} + +// MarshalJSON is the custom marshaler for AddStorageAccountParameters. +func (asap AddStorageAccountParameters) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if asap.AddStorageAccountProperties != nil { + objectMap["properties"] = asap.AddStorageAccountProperties + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for AddStorageAccountParameters struct. +func (asap *AddStorageAccountParameters) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var addStorageAccountProperties AddStorageAccountProperties + err = json.Unmarshal(*v, &addStorageAccountProperties) + if err != nil { + return err + } + asap.AddStorageAccountProperties = &addStorageAccountProperties + } + } + } + + return nil +} + +// AddStorageAccountProperties the Azure Storage account properties to use when adding a new Azure Storage account. +type AddStorageAccountProperties struct { + // AccessKey - The access key associated with this Azure Storage account that will be used to connect to it. + AccessKey *string `json:"accessKey,omitempty"` + // Suffix - The optional suffix for the storage account. + Suffix *string `json:"suffix,omitempty"` +} + +// AddStorageAccountWithAccountParameters the parameters used to add a new Azure Storage account while creating a +// new Data Lake Analytics account. +type AddStorageAccountWithAccountParameters struct { + // Name - The unique name of the Azure Storage account to add. + Name *string `json:"name,omitempty"` + // AddStorageAccountProperties - The Azure Storage account properties to use when adding a new Azure Storage account. 
+ *AddStorageAccountProperties `json:"properties,omitempty"` +} + +// MarshalJSON is the custom marshaler for AddStorageAccountWithAccountParameters. +func (asawap AddStorageAccountWithAccountParameters) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if asawap.Name != nil { + objectMap["name"] = asawap.Name + } + if asawap.AddStorageAccountProperties != nil { + objectMap["properties"] = asawap.AddStorageAccountProperties + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for AddStorageAccountWithAccountParameters struct. +func (asawap *AddStorageAccountWithAccountParameters) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + asawap.Name = &name + } + case "properties": + if v != nil { + var addStorageAccountProperties AddStorageAccountProperties + err = json.Unmarshal(*v, &addStorageAccountProperties) + if err != nil { + return err + } + asawap.AddStorageAccountProperties = &addStorageAccountProperties + } + } + } + + return nil +} + +// CapabilityInformation subscription-level properties and limits for Data Lake Analytics. +type CapabilityInformation struct { + autorest.Response `json:"-"` + // SubscriptionID - The subscription credentials that uniquely identifies the subscription. + SubscriptionID *uuid.UUID `json:"subscriptionId,omitempty"` + // State - The subscription state. Possible values include: 'SubscriptionStateRegistered', 'SubscriptionStateSuspended', 'SubscriptionStateDeleted', 'SubscriptionStateUnregistered', 'SubscriptionStateWarned' + State SubscriptionState `json:"state,omitempty"` + // MaxAccountCount - The maximum supported number of accounts under this subscription. + MaxAccountCount *int32 `json:"maxAccountCount,omitempty"` + // AccountCount - The current number of accounts under this subscription. + AccountCount *int32 `json:"accountCount,omitempty"` + // MigrationState - The Boolean value of true or false to indicate the maintenance state. + MigrationState *bool `json:"migrationState,omitempty"` +} + +// CheckNameAvailabilityParameters data Lake Analytics account name availability check parameters. +type CheckNameAvailabilityParameters struct { + // Name - The Data Lake Analytics name to check availability for. + Name *string `json:"name,omitempty"` + // Type - The resource type. Note: This should not be set by the user, as the constant value is Microsoft.DataLakeAnalytics/accounts + Type *string `json:"type,omitempty"` +} + +// ComputePolicy data Lake Analytics compute policy information. +type ComputePolicy struct { + autorest.Response `json:"-"` + // ComputePolicyProperties - The compute policy properties. + *ComputePolicyProperties `json:"properties,omitempty"` + // ID - The resource identifier. + ID *string `json:"id,omitempty"` + // Name - The resource name. + Name *string `json:"name,omitempty"` + // Type - The resource type. + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for ComputePolicy. 
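Editorial illustration, not generated code: the custom MarshalJSON methods above re-nest the flattened *Properties embeds under a "properties" key so the wire format matches what ARM expects. A small sketch with hypothetical values:

package example // illustrative only; not part of the generated SDK

import (
    "encoding/json"

    "github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account"
    "github.com/Azure/go-autorest/autorest/to"
)

// flattenedJSON shows how the embedded properties are re-nested on marshaling.
func flattenedJSON() (string, error) {
    p := account.AddStorageAccountWithAccountParameters{
        Name: to.StringPtr("examplestorage"),
        AddStorageAccountProperties: &account.AddStorageAccountProperties{
            AccessKey: to.StringPtr("fake-access-key"),
        },
    }
    b, err := json.Marshal(p)
    // b is roughly: {"name":"examplestorage","properties":{"accessKey":"fake-access-key"}}
    return string(b), err
}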
+func (cp ComputePolicy) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if cp.ComputePolicyProperties != nil { + objectMap["properties"] = cp.ComputePolicyProperties + } + if cp.ID != nil { + objectMap["id"] = cp.ID + } + if cp.Name != nil { + objectMap["name"] = cp.Name + } + if cp.Type != nil { + objectMap["type"] = cp.Type + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for ComputePolicy struct. +func (cp *ComputePolicy) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var computePolicyProperties ComputePolicyProperties + err = json.Unmarshal(*v, &computePolicyProperties) + if err != nil { + return err + } + cp.ComputePolicyProperties = &computePolicyProperties + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + cp.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + cp.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + cp.Type = &typeVar + } + } + } + + return nil +} + +// ComputePolicyListResult the list of compute policies in the account. +type ComputePolicyListResult struct { + autorest.Response `json:"-"` + // Value - The results of the list operation. + Value *[]ComputePolicy `json:"value,omitempty"` + // NextLink - The link (url) to the next page of results. + NextLink *string `json:"nextLink,omitempty"` +} + +// ComputePolicyListResultIterator provides access to a complete listing of ComputePolicy values. +type ComputePolicyListResultIterator struct { + i int + page ComputePolicyListResultPage +} + +// Next advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +func (iter *ComputePolicyListResultIterator) Next() error { + iter.i++ + if iter.i < len(iter.page.Values()) { + return nil + } + err := iter.page.Next() + if err != nil { + iter.i-- + return err + } + iter.i = 0 + return nil +} + +// NotDone returns true if the enumeration should be started or is not yet complete. +func (iter ComputePolicyListResultIterator) NotDone() bool { + return iter.page.NotDone() && iter.i < len(iter.page.Values()) +} + +// Response returns the raw server response from the last page request. +func (iter ComputePolicyListResultIterator) Response() ComputePolicyListResult { + return iter.page.Response() +} + +// Value returns the current value or a zero-initialized value if the +// iterator has advanced beyond the end of the collection. +func (iter ComputePolicyListResultIterator) Value() ComputePolicy { + if !iter.page.NotDone() { + return ComputePolicy{} + } + return iter.page.Values()[iter.i] +} + +// IsEmpty returns true if the ListResult contains no values. +func (cplr ComputePolicyListResult) IsEmpty() bool { + return cplr.Value == nil || len(*cplr.Value) == 0 +} + +// computePolicyListResultPreparer prepares a request to retrieve the next set of results. +// It returns nil if no more results exist. 
+func (cplr ComputePolicyListResult) computePolicyListResultPreparer() (*http.Request, error) { + if cplr.NextLink == nil || len(to.String(cplr.NextLink)) < 1 { + return nil, nil + } + return autorest.Prepare(&http.Request{}, + autorest.AsJSON(), + autorest.AsGet(), + autorest.WithBaseURL(to.String(cplr.NextLink))) +} + +// ComputePolicyListResultPage contains a page of ComputePolicy values. +type ComputePolicyListResultPage struct { + fn func(ComputePolicyListResult) (ComputePolicyListResult, error) + cplr ComputePolicyListResult +} + +// Next advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +func (page *ComputePolicyListResultPage) Next() error { + next, err := page.fn(page.cplr) + if err != nil { + return err + } + page.cplr = next + return nil +} + +// NotDone returns true if the page enumeration should be started or is not yet complete. +func (page ComputePolicyListResultPage) NotDone() bool { + return !page.cplr.IsEmpty() +} + +// Response returns the raw server response from the last page request. +func (page ComputePolicyListResultPage) Response() ComputePolicyListResult { + return page.cplr +} + +// Values returns the slice of values for the current page or nil if there are no values. +func (page ComputePolicyListResultPage) Values() []ComputePolicy { + if page.cplr.IsEmpty() { + return nil + } + return *page.cplr.Value +} + +// ComputePolicyProperties the compute policy properties. +type ComputePolicyProperties struct { + // ObjectID - The AAD object identifier for the entity to create a policy for. + ObjectID *uuid.UUID `json:"objectId,omitempty"` + // ObjectType - The type of AAD object the object identifier refers to. Possible values include: 'User', 'Group', 'ServicePrincipal' + ObjectType AADObjectType `json:"objectType,omitempty"` + // MaxDegreeOfParallelismPerJob - The maximum degree of parallelism per job this user can use to submit jobs. + MaxDegreeOfParallelismPerJob *int32 `json:"maxDegreeOfParallelismPerJob,omitempty"` + // MinPriorityPerJob - The minimum priority per job this user can use to submit jobs. + MinPriorityPerJob *int32 `json:"minPriorityPerJob,omitempty"` +} + +// CreateComputePolicyWithAccountParameters the parameters used to create a new compute policy while creating a new +// Data Lake Analytics account. +type CreateComputePolicyWithAccountParameters struct { + // Name - The unique name of the compute policy to create. + Name *string `json:"name,omitempty"` + // CreateOrUpdateComputePolicyProperties - The compute policy properties to use when creating a new compute policy. + *CreateOrUpdateComputePolicyProperties `json:"properties,omitempty"` +} + +// MarshalJSON is the custom marshaler for CreateComputePolicyWithAccountParameters. +func (ccpwap CreateComputePolicyWithAccountParameters) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if ccpwap.Name != nil { + objectMap["name"] = ccpwap.Name + } + if ccpwap.CreateOrUpdateComputePolicyProperties != nil { + objectMap["properties"] = ccpwap.CreateOrUpdateComputePolicyProperties + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for CreateComputePolicyWithAccountParameters struct. 
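A short sketch (editorial, not generated code): the iterator defined above advances within the current page and lets Next fetch further pages transparently. Draining one, assuming the caller already obtained it from a list call:

package example // illustrative only; not part of the generated SDK

import "github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account"

// collectPolicies drains an already-initialised ComputePolicyListResultIterator.
func collectPolicies(iter account.ComputePolicyListResultIterator) ([]account.ComputePolicy, error) {
    var policies []account.ComputePolicy
    for iter.NotDone() {
        policies = append(policies, iter.Value())
        if err := iter.Next(); err != nil {
            return policies, err
        }
    }
    return policies, nil
}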
+func (ccpwap *CreateComputePolicyWithAccountParameters) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + ccpwap.Name = &name + } + case "properties": + if v != nil { + var createOrUpdateComputePolicyProperties CreateOrUpdateComputePolicyProperties + err = json.Unmarshal(*v, &createOrUpdateComputePolicyProperties) + if err != nil { + return err + } + ccpwap.CreateOrUpdateComputePolicyProperties = &createOrUpdateComputePolicyProperties + } + } + } + + return nil +} + +// CreateDataLakeAnalyticsAccountParameters the parameters to use for creating a Data Lake Analytics account. +type CreateDataLakeAnalyticsAccountParameters struct { + // Location - The resource location. + Location *string `json:"location,omitempty"` + // Tags - The resource tags. + Tags map[string]*string `json:"tags"` + // CreateDataLakeAnalyticsAccountProperties - The Data Lake Analytics account properties to use for creating. + *CreateDataLakeAnalyticsAccountProperties `json:"properties,omitempty"` +} + +// MarshalJSON is the custom marshaler for CreateDataLakeAnalyticsAccountParameters. +func (cdlaap CreateDataLakeAnalyticsAccountParameters) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if cdlaap.Location != nil { + objectMap["location"] = cdlaap.Location + } + if cdlaap.Tags != nil { + objectMap["tags"] = cdlaap.Tags + } + if cdlaap.CreateDataLakeAnalyticsAccountProperties != nil { + objectMap["properties"] = cdlaap.CreateDataLakeAnalyticsAccountProperties + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for CreateDataLakeAnalyticsAccountParameters struct. +func (cdlaap *CreateDataLakeAnalyticsAccountParameters) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "location": + if v != nil { + var location string + err = json.Unmarshal(*v, &location) + if err != nil { + return err + } + cdlaap.Location = &location + } + case "tags": + if v != nil { + var tags map[string]*string + err = json.Unmarshal(*v, &tags) + if err != nil { + return err + } + cdlaap.Tags = tags + } + case "properties": + if v != nil { + var createDataLakeAnalyticsAccountProperties CreateDataLakeAnalyticsAccountProperties + err = json.Unmarshal(*v, &createDataLakeAnalyticsAccountProperties) + if err != nil { + return err + } + cdlaap.CreateDataLakeAnalyticsAccountProperties = &createDataLakeAnalyticsAccountProperties + } + } + } + + return nil +} + +// CreateDataLakeAnalyticsAccountProperties ... +type CreateDataLakeAnalyticsAccountProperties struct { + // DefaultDataLakeStoreAccount - The default Data Lake Store account associated with this account. + DefaultDataLakeStoreAccount *string `json:"defaultDataLakeStoreAccount,omitempty"` + // DataLakeStoreAccounts - The list of Data Lake Store accounts associated with this account. + DataLakeStoreAccounts *[]AddDataLakeStoreWithAccountParameters `json:"dataLakeStoreAccounts,omitempty"` + // StorageAccounts - The list of Azure Blob Storage accounts associated with this account. + StorageAccounts *[]AddStorageAccountWithAccountParameters `json:"storageAccounts,omitempty"` + // ComputePolicies - The list of compute policies associated with this account. 
+ ComputePolicies *[]CreateComputePolicyWithAccountParameters `json:"computePolicies,omitempty"` + // FirewallRules - The list of firewall rules associated with this account. + FirewallRules *[]CreateFirewallRuleWithAccountParameters `json:"firewallRules,omitempty"` + // FirewallState - The current state of the IP address firewall for this account. Possible values include: 'FirewallStateEnabled', 'FirewallStateDisabled' + FirewallState FirewallState `json:"firewallState,omitempty"` + // FirewallAllowAzureIps - The current state of allowing or disallowing IPs originating within Azure through the firewall. If the firewall is disabled, this is not enforced. Possible values include: 'Enabled', 'Disabled' + FirewallAllowAzureIps FirewallAllowAzureIpsState `json:"firewallAllowAzureIps,omitempty"` + // NewTier - The commitment tier for the next month. Possible values include: 'Consumption', 'Commitment100AUHours', 'Commitment500AUHours', 'Commitment1000AUHours', 'Commitment5000AUHours', 'Commitment10000AUHours', 'Commitment50000AUHours', 'Commitment100000AUHours', 'Commitment500000AUHours' + NewTier TierType `json:"newTier,omitempty"` + // MaxJobCount - The maximum supported jobs running under the account at the same time. + MaxJobCount *int32 `json:"maxJobCount,omitempty"` + // MaxDegreeOfParallelism - The maximum supported degree of parallelism for this account. + MaxDegreeOfParallelism *int32 `json:"maxDegreeOfParallelism,omitempty"` + // MaxDegreeOfParallelismPerJob - The maximum supported degree of parallelism per job for this account. + MaxDegreeOfParallelismPerJob *int32 `json:"maxDegreeOfParallelismPerJob,omitempty"` + // MinPriorityPerJob - The minimum supported priority per job for this account. + MinPriorityPerJob *int32 `json:"minPriorityPerJob,omitempty"` + // QueryStoreRetention - The number of days that job metadata is retained. + QueryStoreRetention *int32 `json:"queryStoreRetention,omitempty"` +} + +// CreateFirewallRuleWithAccountParameters the parameters used to create a new firewall rule while creating a new +// Data Lake Analytics account. +type CreateFirewallRuleWithAccountParameters struct { + // Name - The unique name of the firewall rule to create. + Name *string `json:"name,omitempty"` + // CreateOrUpdateFirewallRuleProperties - The firewall rule properties to use when creating a new firewall rule. + *CreateOrUpdateFirewallRuleProperties `json:"properties,omitempty"` +} + +// MarshalJSON is the custom marshaler for CreateFirewallRuleWithAccountParameters. +func (cfrwap CreateFirewallRuleWithAccountParameters) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if cfrwap.Name != nil { + objectMap["name"] = cfrwap.Name + } + if cfrwap.CreateOrUpdateFirewallRuleProperties != nil { + objectMap["properties"] = cfrwap.CreateOrUpdateFirewallRuleProperties + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for CreateFirewallRuleWithAccountParameters struct. 
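// -----------------------------------------------------------------------------
// Editorial sketch (not part of the vendored file): how a caller might populate
// the Create* parameter types defined above when provisioning an account. The
// resource names, region and IP range below are placeholders; a real create
// request typically also lists the default store under DataLakeStoreAccounts.
// -----------------------------------------------------------------------------
package main

import (
	"encoding/json"
	"fmt"

	"github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account"
	"github.com/Azure/go-autorest/autorest/to"
)

func main() {
	params := account.CreateDataLakeAnalyticsAccountParameters{
		Location: to.StringPtr("eastus2"),
		Tags:     map[string]*string{"environment": to.StringPtr("test")},
		CreateDataLakeAnalyticsAccountProperties: &account.CreateDataLakeAnalyticsAccountProperties{
			// Every Data Lake Analytics account references a default Data Lake Store account.
			DefaultDataLakeStoreAccount: to.StringPtr("exampledatalakestore"),
			NewTier:                     account.Consumption,
			FirewallState:               account.FirewallStateEnabled,
			FirewallAllowAzureIps:       account.Enabled,
			FirewallRules: &[]account.CreateFirewallRuleWithAccountParameters{{
				Name: to.StringPtr("office"),
				CreateOrUpdateFirewallRuleProperties: &account.CreateOrUpdateFirewallRuleProperties{
					StartIPAddress: to.StringPtr("203.0.113.0"),
					EndIPAddress:   to.StringPtr("203.0.113.255"),
				},
			}},
		},
	}

	// The custom MarshalJSON methods above nest the embedded *Properties structs
	// under a "properties" key, which is the shape the ARM API expects.
	body, _ := json.MarshalIndent(params, "", "  ")
	fmt.Println(string(body))
}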
+func (cfrwap *CreateFirewallRuleWithAccountParameters) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + cfrwap.Name = &name + } + case "properties": + if v != nil { + var createOrUpdateFirewallRuleProperties CreateOrUpdateFirewallRuleProperties + err = json.Unmarshal(*v, &createOrUpdateFirewallRuleProperties) + if err != nil { + return err + } + cfrwap.CreateOrUpdateFirewallRuleProperties = &createOrUpdateFirewallRuleProperties + } + } + } + + return nil +} + +// CreateOrUpdateComputePolicyParameters the parameters used to create a new compute policy. +type CreateOrUpdateComputePolicyParameters struct { + // CreateOrUpdateComputePolicyProperties - The compute policy properties to use when creating a new compute policy. + *CreateOrUpdateComputePolicyProperties `json:"properties,omitempty"` +} + +// MarshalJSON is the custom marshaler for CreateOrUpdateComputePolicyParameters. +func (coucpp CreateOrUpdateComputePolicyParameters) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if coucpp.CreateOrUpdateComputePolicyProperties != nil { + objectMap["properties"] = coucpp.CreateOrUpdateComputePolicyProperties + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for CreateOrUpdateComputePolicyParameters struct. +func (coucpp *CreateOrUpdateComputePolicyParameters) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var createOrUpdateComputePolicyProperties CreateOrUpdateComputePolicyProperties + err = json.Unmarshal(*v, &createOrUpdateComputePolicyProperties) + if err != nil { + return err + } + coucpp.CreateOrUpdateComputePolicyProperties = &createOrUpdateComputePolicyProperties + } + } + } + + return nil +} + +// CreateOrUpdateComputePolicyProperties the compute policy properties to use when creating a new compute policy. +type CreateOrUpdateComputePolicyProperties struct { + // ObjectID - The AAD object identifier for the entity to create a policy for. + ObjectID *uuid.UUID `json:"objectId,omitempty"` + // ObjectType - The type of AAD object the object identifier refers to. Possible values include: 'User', 'Group', 'ServicePrincipal' + ObjectType AADObjectType `json:"objectType,omitempty"` + // MaxDegreeOfParallelismPerJob - The maximum degree of parallelism per job this user can use to submit jobs. This property, the min priority per job property, or both must be passed. + MaxDegreeOfParallelismPerJob *int32 `json:"maxDegreeOfParallelismPerJob,omitempty"` + // MinPriorityPerJob - The minimum priority per job this user can use to submit jobs. This property, the max degree of parallelism per job property, or both must be passed. + MinPriorityPerJob *int32 `json:"minPriorityPerJob,omitempty"` +} + +// CreateOrUpdateFirewallRuleParameters the parameters used to create a new firewall rule. +type CreateOrUpdateFirewallRuleParameters struct { + // CreateOrUpdateFirewallRuleProperties - The firewall rule properties to use when creating a new firewall rule. + *CreateOrUpdateFirewallRuleProperties `json:"properties,omitempty"` +} + +// MarshalJSON is the custom marshaler for CreateOrUpdateFirewallRuleParameters. 
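// -----------------------------------------------------------------------------
// Editorial sketch (not part of the vendored file): assembling the standalone
// compute-policy parameters defined above. Per the field comments, at least one
// of MaxDegreeOfParallelismPerJob or MinPriorityPerJob must be supplied;
// ObjectID (a *uuid.UUID identifying the AAD user, group or service principal)
// is also needed for a real create call but is omitted here to keep the sketch
// free of the uuid dependency.
// -----------------------------------------------------------------------------
package main

import (
	"encoding/json"
	"fmt"

	"github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account"
	"github.com/Azure/go-autorest/autorest/to"
)

func main() {
	policy := account.CreateOrUpdateComputePolicyParameters{
		CreateOrUpdateComputePolicyProperties: &account.CreateOrUpdateComputePolicyProperties{
			ObjectType:                   account.User, // 'User', 'Group' or 'ServicePrincipal'
			MaxDegreeOfParallelismPerJob: to.Int32Ptr(10),
			MinPriorityPerJob:            to.Int32Ptr(100),
		},
	}

	// The custom marshaler wraps the embedded properties under "properties":
	// {"properties":{"objectType":"User","maxDegreeOfParallelismPerJob":10,"minPriorityPerJob":100}}
	body, _ := json.Marshal(policy)
	fmt.Println(string(body))
}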
+func (coufrp CreateOrUpdateFirewallRuleParameters) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if coufrp.CreateOrUpdateFirewallRuleProperties != nil { + objectMap["properties"] = coufrp.CreateOrUpdateFirewallRuleProperties + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for CreateOrUpdateFirewallRuleParameters struct. +func (coufrp *CreateOrUpdateFirewallRuleParameters) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var createOrUpdateFirewallRuleProperties CreateOrUpdateFirewallRuleProperties + err = json.Unmarshal(*v, &createOrUpdateFirewallRuleProperties) + if err != nil { + return err + } + coufrp.CreateOrUpdateFirewallRuleProperties = &createOrUpdateFirewallRuleProperties + } + } + } + + return nil +} + +// CreateOrUpdateFirewallRuleProperties the firewall rule properties to use when creating a new firewall rule. +type CreateOrUpdateFirewallRuleProperties struct { + // StartIPAddress - The start IP address for the firewall rule. This can be either ipv4 or ipv6. Start and End should be in the same protocol. + StartIPAddress *string `json:"startIpAddress,omitempty"` + // EndIPAddress - The end IP address for the firewall rule. This can be either ipv4 or ipv6. Start and End should be in the same protocol. + EndIPAddress *string `json:"endIpAddress,omitempty"` +} + +// DataLakeAnalyticsAccount a Data Lake Analytics account object, containing all information associated with the +// named Data Lake Analytics account. +type DataLakeAnalyticsAccount struct { + autorest.Response `json:"-"` + // DataLakeAnalyticsAccountProperties - The properties defined by Data Lake Analytics all properties are specific to each resource provider. + *DataLakeAnalyticsAccountProperties `json:"properties,omitempty"` + // ID - The resource identifer. + ID *string `json:"id,omitempty"` + // Name - The resource name. + Name *string `json:"name,omitempty"` + // Type - The resource type. + Type *string `json:"type,omitempty"` + // Location - The resource location. + Location *string `json:"location,omitempty"` + // Tags - The resource tags. + Tags map[string]*string `json:"tags"` +} + +// MarshalJSON is the custom marshaler for DataLakeAnalyticsAccount. +func (dlaa DataLakeAnalyticsAccount) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if dlaa.DataLakeAnalyticsAccountProperties != nil { + objectMap["properties"] = dlaa.DataLakeAnalyticsAccountProperties + } + if dlaa.ID != nil { + objectMap["id"] = dlaa.ID + } + if dlaa.Name != nil { + objectMap["name"] = dlaa.Name + } + if dlaa.Type != nil { + objectMap["type"] = dlaa.Type + } + if dlaa.Location != nil { + objectMap["location"] = dlaa.Location + } + if dlaa.Tags != nil { + objectMap["tags"] = dlaa.Tags + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for DataLakeAnalyticsAccount struct. 
+func (dlaa *DataLakeAnalyticsAccount) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var dataLakeAnalyticsAccountProperties DataLakeAnalyticsAccountProperties + err = json.Unmarshal(*v, &dataLakeAnalyticsAccountProperties) + if err != nil { + return err + } + dlaa.DataLakeAnalyticsAccountProperties = &dataLakeAnalyticsAccountProperties + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + dlaa.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + dlaa.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + dlaa.Type = &typeVar + } + case "location": + if v != nil { + var location string + err = json.Unmarshal(*v, &location) + if err != nil { + return err + } + dlaa.Location = &location + } + case "tags": + if v != nil { + var tags map[string]*string + err = json.Unmarshal(*v, &tags) + if err != nil { + return err + } + dlaa.Tags = tags + } + } + } + + return nil +} + +// DataLakeAnalyticsAccountBasic a Data Lake Analytics account object, containing all information associated with +// the named Data Lake Analytics account. +type DataLakeAnalyticsAccountBasic struct { + // DataLakeAnalyticsAccountPropertiesBasic - The properties defined by Data Lake Analytics all properties are specific to each resource provider. + *DataLakeAnalyticsAccountPropertiesBasic `json:"properties,omitempty"` + // ID - The resource identifer. + ID *string `json:"id,omitempty"` + // Name - The resource name. + Name *string `json:"name,omitempty"` + // Type - The resource type. + Type *string `json:"type,omitempty"` + // Location - The resource location. + Location *string `json:"location,omitempty"` + // Tags - The resource tags. + Tags map[string]*string `json:"tags"` +} + +// MarshalJSON is the custom marshaler for DataLakeAnalyticsAccountBasic. +func (dlaab DataLakeAnalyticsAccountBasic) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if dlaab.DataLakeAnalyticsAccountPropertiesBasic != nil { + objectMap["properties"] = dlaab.DataLakeAnalyticsAccountPropertiesBasic + } + if dlaab.ID != nil { + objectMap["id"] = dlaab.ID + } + if dlaab.Name != nil { + objectMap["name"] = dlaab.Name + } + if dlaab.Type != nil { + objectMap["type"] = dlaab.Type + } + if dlaab.Location != nil { + objectMap["location"] = dlaab.Location + } + if dlaab.Tags != nil { + objectMap["tags"] = dlaab.Tags + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for DataLakeAnalyticsAccountBasic struct. 
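// -----------------------------------------------------------------------------
// Editorial sketch (not part of the vendored file): what the custom
// UnmarshalJSON above buys a caller. The nested "properties" object in an ARM
// response is promoted onto the embedded *DataLakeAnalyticsAccountProperties
// field (defined further down), so account settings can be read directly. The
// JSON below is a hand-written, abbreviated body, not a captured API response.
// -----------------------------------------------------------------------------
package main

import (
	"encoding/json"
	"fmt"

	"github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account"
	"github.com/Azure/go-autorest/autorest/to"
)

func main() {
	raw := []byte(`{
		"name": "exampledlanalytics",
		"type": "Microsoft.DataLakeAnalytics/accounts",
		"location": "eastus2",
		"properties": {
			"defaultDataLakeStoreAccount": "exampledatalakestore",
			"firewallState": "Enabled",
			"endpoint": "exampledlanalytics.azuredatalakeanalytics.net"
		}
	}`)

	var dlaa account.DataLakeAnalyticsAccount
	if err := json.Unmarshal(raw, &dlaa); err != nil {
		panic(err)
	}

	if props := dlaa.DataLakeAnalyticsAccountProperties; props != nil {
		fmt.Println(to.String(dlaa.Name), to.String(props.DefaultDataLakeStoreAccount), props.FirewallState, to.String(props.Endpoint))
	}
}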
+func (dlaab *DataLakeAnalyticsAccountBasic) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var dataLakeAnalyticsAccountPropertiesBasic DataLakeAnalyticsAccountPropertiesBasic + err = json.Unmarshal(*v, &dataLakeAnalyticsAccountPropertiesBasic) + if err != nil { + return err + } + dlaab.DataLakeAnalyticsAccountPropertiesBasic = &dataLakeAnalyticsAccountPropertiesBasic + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + dlaab.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + dlaab.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + dlaab.Type = &typeVar + } + case "location": + if v != nil { + var location string + err = json.Unmarshal(*v, &location) + if err != nil { + return err + } + dlaab.Location = &location + } + case "tags": + if v != nil { + var tags map[string]*string + err = json.Unmarshal(*v, &tags) + if err != nil { + return err + } + dlaab.Tags = tags + } + } + } + + return nil +} + +// DataLakeAnalyticsAccountListResult data Lake Analytics account list information. +type DataLakeAnalyticsAccountListResult struct { + autorest.Response `json:"-"` + // Value - The results of the list operation. + Value *[]DataLakeAnalyticsAccountBasic `json:"value,omitempty"` + // NextLink - The link (url) to the next page of results. + NextLink *string `json:"nextLink,omitempty"` +} + +// DataLakeAnalyticsAccountListResultIterator provides access to a complete listing of +// DataLakeAnalyticsAccountBasic values. +type DataLakeAnalyticsAccountListResultIterator struct { + i int + page DataLakeAnalyticsAccountListResultPage +} + +// Next advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +func (iter *DataLakeAnalyticsAccountListResultIterator) Next() error { + iter.i++ + if iter.i < len(iter.page.Values()) { + return nil + } + err := iter.page.Next() + if err != nil { + iter.i-- + return err + } + iter.i = 0 + return nil +} + +// NotDone returns true if the enumeration should be started or is not yet complete. +func (iter DataLakeAnalyticsAccountListResultIterator) NotDone() bool { + return iter.page.NotDone() && iter.i < len(iter.page.Values()) +} + +// Response returns the raw server response from the last page request. +func (iter DataLakeAnalyticsAccountListResultIterator) Response() DataLakeAnalyticsAccountListResult { + return iter.page.Response() +} + +// Value returns the current value or a zero-initialized value if the +// iterator has advanced beyond the end of the collection. +func (iter DataLakeAnalyticsAccountListResultIterator) Value() DataLakeAnalyticsAccountBasic { + if !iter.page.NotDone() { + return DataLakeAnalyticsAccountBasic{} + } + return iter.page.Values()[iter.i] +} + +// IsEmpty returns true if the ListResult contains no values. +func (dlaalr DataLakeAnalyticsAccountListResult) IsEmpty() bool { + return dlaalr.Value == nil || len(*dlaalr.Value) == 0 +} + +// dataLakeAnalyticsAccountListResultPreparer prepares a request to retrieve the next set of results. +// It returns nil if no more results exist. 
+func (dlaalr DataLakeAnalyticsAccountListResult) dataLakeAnalyticsAccountListResultPreparer() (*http.Request, error) { + if dlaalr.NextLink == nil || len(to.String(dlaalr.NextLink)) < 1 { + return nil, nil + } + return autorest.Prepare(&http.Request{}, + autorest.AsJSON(), + autorest.AsGet(), + autorest.WithBaseURL(to.String(dlaalr.NextLink))) +} + +// DataLakeAnalyticsAccountListResultPage contains a page of DataLakeAnalyticsAccountBasic values. +type DataLakeAnalyticsAccountListResultPage struct { + fn func(DataLakeAnalyticsAccountListResult) (DataLakeAnalyticsAccountListResult, error) + dlaalr DataLakeAnalyticsAccountListResult +} + +// Next advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +func (page *DataLakeAnalyticsAccountListResultPage) Next() error { + next, err := page.fn(page.dlaalr) + if err != nil { + return err + } + page.dlaalr = next + return nil +} + +// NotDone returns true if the page enumeration should be started or is not yet complete. +func (page DataLakeAnalyticsAccountListResultPage) NotDone() bool { + return !page.dlaalr.IsEmpty() +} + +// Response returns the raw server response from the last page request. +func (page DataLakeAnalyticsAccountListResultPage) Response() DataLakeAnalyticsAccountListResult { + return page.dlaalr +} + +// Values returns the slice of values for the current page or nil if there are no values. +func (page DataLakeAnalyticsAccountListResultPage) Values() []DataLakeAnalyticsAccountBasic { + if page.dlaalr.IsEmpty() { + return nil + } + return *page.dlaalr.Value +} + +// DataLakeAnalyticsAccountProperties the account specific properties that are associated with an underlying Data +// Lake Analytics account. Returned only when retrieving a specific account. +type DataLakeAnalyticsAccountProperties struct { + // DefaultDataLakeStoreAccount - The default Data Lake Store account associated with this account. + DefaultDataLakeStoreAccount *string `json:"defaultDataLakeStoreAccount,omitempty"` + // DataLakeStoreAccounts - The list of Data Lake Store accounts associated with this account. + DataLakeStoreAccounts *[]DataLakeStoreAccountInformation `json:"dataLakeStoreAccounts,omitempty"` + // StorageAccounts - The list of Azure Blob Storage accounts associated with this account. + StorageAccounts *[]StorageAccountInformation `json:"storageAccounts,omitempty"` + // ComputePolicies - The list of compute policies associated with this account. + ComputePolicies *[]ComputePolicy `json:"computePolicies,omitempty"` + // FirewallRules - The list of firewall rules associated with this account. + FirewallRules *[]FirewallRule `json:"firewallRules,omitempty"` + // FirewallState - The current state of the IP address firewall for this account. Possible values include: 'FirewallStateEnabled', 'FirewallStateDisabled' + FirewallState FirewallState `json:"firewallState,omitempty"` + // FirewallAllowAzureIps - The current state of allowing or disallowing IPs originating within Azure through the firewall. If the firewall is disabled, this is not enforced. Possible values include: 'Enabled', 'Disabled' + FirewallAllowAzureIps FirewallAllowAzureIpsState `json:"firewallAllowAzureIps,omitempty"` + // NewTier - The commitment tier for the next month. 
Possible values include: 'Consumption', 'Commitment100AUHours', 'Commitment500AUHours', 'Commitment1000AUHours', 'Commitment5000AUHours', 'Commitment10000AUHours', 'Commitment50000AUHours', 'Commitment100000AUHours', 'Commitment500000AUHours' + NewTier TierType `json:"newTier,omitempty"` + // CurrentTier - The commitment tier in use for the current month. Possible values include: 'Consumption', 'Commitment100AUHours', 'Commitment500AUHours', 'Commitment1000AUHours', 'Commitment5000AUHours', 'Commitment10000AUHours', 'Commitment50000AUHours', 'Commitment100000AUHours', 'Commitment500000AUHours' + CurrentTier TierType `json:"currentTier,omitempty"` + // MaxJobCount - The maximum supported jobs running under the account at the same time. + MaxJobCount *int32 `json:"maxJobCount,omitempty"` + // SystemMaxJobCount - The system defined maximum supported jobs running under the account at the same time, which restricts the maximum number of running jobs the user can set for the account. + SystemMaxJobCount *int32 `json:"systemMaxJobCount,omitempty"` + // MaxDegreeOfParallelism - The maximum supported degree of parallelism for this account. + MaxDegreeOfParallelism *int32 `json:"maxDegreeOfParallelism,omitempty"` + // SystemMaxDegreeOfParallelism - The system defined maximum supported degree of parallelism for this account, which restricts the maximum value of parallelism the user can set for the account. + SystemMaxDegreeOfParallelism *int32 `json:"systemMaxDegreeOfParallelism,omitempty"` + // MaxDegreeOfParallelismPerJob - The maximum supported degree of parallelism per job for this account. + MaxDegreeOfParallelismPerJob *int32 `json:"maxDegreeOfParallelismPerJob,omitempty"` + // MinPriorityPerJob - The minimum supported priority per job for this account. + MinPriorityPerJob *int32 `json:"minPriorityPerJob,omitempty"` + // QueryStoreRetention - The number of days that job metadata is retained. + QueryStoreRetention *int32 `json:"queryStoreRetention,omitempty"` + // AccountID - The unique identifier associated with this Data Lake Analytics account. + AccountID *uuid.UUID `json:"accountId,omitempty"` + // ProvisioningState - The provisioning status of the Data Lake Analytics account. Possible values include: 'Failed', 'Creating', 'Running', 'Succeeded', 'Patching', 'Suspending', 'Resuming', 'Deleting', 'Deleted', 'Undeleting', 'Canceled' + ProvisioningState DataLakeAnalyticsAccountStatus `json:"provisioningState,omitempty"` + // State - The state of the Data Lake Analytics account. Possible values include: 'Active', 'Suspended' + State DataLakeAnalyticsAccountState `json:"state,omitempty"` + // CreationTime - The account creation time. + CreationTime *date.Time `json:"creationTime,omitempty"` + // LastModifiedTime - The account last modified time. + LastModifiedTime *date.Time `json:"lastModifiedTime,omitempty"` + // Endpoint - The full CName endpoint for this account. + Endpoint *string `json:"endpoint,omitempty"` +} + +// DataLakeAnalyticsAccountPropertiesBasic the basic account specific properties that are associated with an +// underlying Data Lake Analytics account. +type DataLakeAnalyticsAccountPropertiesBasic struct { + // AccountID - The unique identifier associated with this Data Lake Analytics account. + AccountID *uuid.UUID `json:"accountId,omitempty"` + // ProvisioningState - The provisioning status of the Data Lake Analytics account. 
Possible values include: 'Failed', 'Creating', 'Running', 'Succeeded', 'Patching', 'Suspending', 'Resuming', 'Deleting', 'Deleted', 'Undeleting', 'Canceled' + ProvisioningState DataLakeAnalyticsAccountStatus `json:"provisioningState,omitempty"` + // State - The state of the Data Lake Analytics account. Possible values include: 'Active', 'Suspended' + State DataLakeAnalyticsAccountState `json:"state,omitempty"` + // CreationTime - The account creation time. + CreationTime *date.Time `json:"creationTime,omitempty"` + // LastModifiedTime - The account last modified time. + LastModifiedTime *date.Time `json:"lastModifiedTime,omitempty"` + // Endpoint - The full CName endpoint for this account. + Endpoint *string `json:"endpoint,omitempty"` +} + +// DataLakeStoreAccountInformation data Lake Store account information. +type DataLakeStoreAccountInformation struct { + autorest.Response `json:"-"` + // DataLakeStoreAccountInformationProperties - The Data Lake Store account properties. + *DataLakeStoreAccountInformationProperties `json:"properties,omitempty"` + // ID - The resource identifier. + ID *string `json:"id,omitempty"` + // Name - The resource name. + Name *string `json:"name,omitempty"` + // Type - The resource type. + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for DataLakeStoreAccountInformation. +func (dlsai DataLakeStoreAccountInformation) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if dlsai.DataLakeStoreAccountInformationProperties != nil { + objectMap["properties"] = dlsai.DataLakeStoreAccountInformationProperties + } + if dlsai.ID != nil { + objectMap["id"] = dlsai.ID + } + if dlsai.Name != nil { + objectMap["name"] = dlsai.Name + } + if dlsai.Type != nil { + objectMap["type"] = dlsai.Type + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for DataLakeStoreAccountInformation struct. +func (dlsai *DataLakeStoreAccountInformation) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var dataLakeStoreAccountInformationProperties DataLakeStoreAccountInformationProperties + err = json.Unmarshal(*v, &dataLakeStoreAccountInformationProperties) + if err != nil { + return err + } + dlsai.DataLakeStoreAccountInformationProperties = &dataLakeStoreAccountInformationProperties + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + dlsai.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + dlsai.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + dlsai.Type = &typeVar + } + } + } + + return nil +} + +// DataLakeStoreAccountInformationListResult data Lake Store account list information. +type DataLakeStoreAccountInformationListResult struct { + autorest.Response `json:"-"` + // Value - The results of the list operation. + Value *[]DataLakeStoreAccountInformation `json:"value,omitempty"` + // NextLink - The link (url) to the next page of results. + NextLink *string `json:"nextLink,omitempty"` +} + +// DataLakeStoreAccountInformationListResultIterator provides access to a complete listing of +// DataLakeStoreAccountInformation values. 
+type DataLakeStoreAccountInformationListResultIterator struct { + i int + page DataLakeStoreAccountInformationListResultPage +} + +// Next advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +func (iter *DataLakeStoreAccountInformationListResultIterator) Next() error { + iter.i++ + if iter.i < len(iter.page.Values()) { + return nil + } + err := iter.page.Next() + if err != nil { + iter.i-- + return err + } + iter.i = 0 + return nil +} + +// NotDone returns true if the enumeration should be started or is not yet complete. +func (iter DataLakeStoreAccountInformationListResultIterator) NotDone() bool { + return iter.page.NotDone() && iter.i < len(iter.page.Values()) +} + +// Response returns the raw server response from the last page request. +func (iter DataLakeStoreAccountInformationListResultIterator) Response() DataLakeStoreAccountInformationListResult { + return iter.page.Response() +} + +// Value returns the current value or a zero-initialized value if the +// iterator has advanced beyond the end of the collection. +func (iter DataLakeStoreAccountInformationListResultIterator) Value() DataLakeStoreAccountInformation { + if !iter.page.NotDone() { + return DataLakeStoreAccountInformation{} + } + return iter.page.Values()[iter.i] +} + +// IsEmpty returns true if the ListResult contains no values. +func (dlsailr DataLakeStoreAccountInformationListResult) IsEmpty() bool { + return dlsailr.Value == nil || len(*dlsailr.Value) == 0 +} + +// dataLakeStoreAccountInformationListResultPreparer prepares a request to retrieve the next set of results. +// It returns nil if no more results exist. +func (dlsailr DataLakeStoreAccountInformationListResult) dataLakeStoreAccountInformationListResultPreparer() (*http.Request, error) { + if dlsailr.NextLink == nil || len(to.String(dlsailr.NextLink)) < 1 { + return nil, nil + } + return autorest.Prepare(&http.Request{}, + autorest.AsJSON(), + autorest.AsGet(), + autorest.WithBaseURL(to.String(dlsailr.NextLink))) +} + +// DataLakeStoreAccountInformationListResultPage contains a page of DataLakeStoreAccountInformation values. +type DataLakeStoreAccountInformationListResultPage struct { + fn func(DataLakeStoreAccountInformationListResult) (DataLakeStoreAccountInformationListResult, error) + dlsailr DataLakeStoreAccountInformationListResult +} + +// Next advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +func (page *DataLakeStoreAccountInformationListResultPage) Next() error { + next, err := page.fn(page.dlsailr) + if err != nil { + return err + } + page.dlsailr = next + return nil +} + +// NotDone returns true if the page enumeration should be started or is not yet complete. +func (page DataLakeStoreAccountInformationListResultPage) NotDone() bool { + return !page.dlsailr.IsEmpty() +} + +// Response returns the raw server response from the last page request. +func (page DataLakeStoreAccountInformationListResultPage) Response() DataLakeStoreAccountInformationListResult { + return page.dlsailr +} + +// Values returns the slice of values for the current page or nil if there are no values. +func (page DataLakeStoreAccountInformationListResultPage) Values() []DataLakeStoreAccountInformation { + if page.dlsailr.IsEmpty() { + return nil + } + return *page.dlsailr.Value +} + +// DataLakeStoreAccountInformationProperties the Data Lake Store account properties. 
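// -----------------------------------------------------------------------------
// Editorial sketch (not part of the vendored file): reading a list result
// directly rather than through the iterator/page helpers. Value is nil until a
// response has been decoded, IsEmpty only inspects the current page, and a
// non-nil NextLink signals that further pages exist. How the result is obtained
// (a generated client list call) is out of scope here; the Suffix property used
// below is defined just after this point.
// -----------------------------------------------------------------------------
package datalakeexamples

import (
	"fmt"

	"github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account"
	"github.com/Azure/go-autorest/autorest/to"
)

func printLinkedStores(result account.DataLakeStoreAccountInformationListResult) {
	if result.IsEmpty() {
		fmt.Println("no linked Data Lake Store accounts on this page")
		return
	}
	for _, info := range *result.Value {
		suffix := ""
		if info.DataLakeStoreAccountInformationProperties != nil {
			suffix = to.String(info.DataLakeStoreAccountInformationProperties.Suffix)
		}
		fmt.Println(to.String(info.Name), suffix)
	}
	if result.NextLink != nil {
		fmt.Println("more results at:", to.String(result.NextLink))
	}
}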
+type DataLakeStoreAccountInformationProperties struct { + // Suffix - The optional suffix for the Data Lake Store account. + Suffix *string `json:"suffix,omitempty"` +} + +// FirewallRule data Lake Analytics firewall rule information. +type FirewallRule struct { + autorest.Response `json:"-"` + // FirewallRuleProperties - The firewall rule properties. + *FirewallRuleProperties `json:"properties,omitempty"` + // ID - The resource identifier. + ID *string `json:"id,omitempty"` + // Name - The resource name. + Name *string `json:"name,omitempty"` + // Type - The resource type. + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for FirewallRule. +func (fr FirewallRule) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if fr.FirewallRuleProperties != nil { + objectMap["properties"] = fr.FirewallRuleProperties + } + if fr.ID != nil { + objectMap["id"] = fr.ID + } + if fr.Name != nil { + objectMap["name"] = fr.Name + } + if fr.Type != nil { + objectMap["type"] = fr.Type + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for FirewallRule struct. +func (fr *FirewallRule) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var firewallRuleProperties FirewallRuleProperties + err = json.Unmarshal(*v, &firewallRuleProperties) + if err != nil { + return err + } + fr.FirewallRuleProperties = &firewallRuleProperties + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + fr.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + fr.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + fr.Type = &typeVar + } + } + } + + return nil +} + +// FirewallRuleListResult data Lake Analytics firewall rule list information. +type FirewallRuleListResult struct { + autorest.Response `json:"-"` + // Value - The results of the list operation. + Value *[]FirewallRule `json:"value,omitempty"` + // NextLink - The link (url) to the next page of results. + NextLink *string `json:"nextLink,omitempty"` +} + +// FirewallRuleListResultIterator provides access to a complete listing of FirewallRule values. +type FirewallRuleListResultIterator struct { + i int + page FirewallRuleListResultPage +} + +// Next advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +func (iter *FirewallRuleListResultIterator) Next() error { + iter.i++ + if iter.i < len(iter.page.Values()) { + return nil + } + err := iter.page.Next() + if err != nil { + iter.i-- + return err + } + iter.i = 0 + return nil +} + +// NotDone returns true if the enumeration should be started or is not yet complete. +func (iter FirewallRuleListResultIterator) NotDone() bool { + return iter.page.NotDone() && iter.i < len(iter.page.Values()) +} + +// Response returns the raw server response from the last page request. +func (iter FirewallRuleListResultIterator) Response() FirewallRuleListResult { + return iter.page.Response() +} + +// Value returns the current value or a zero-initialized value if the +// iterator has advanced beyond the end of the collection. 
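// -----------------------------------------------------------------------------
// Editorial sketch (not part of the vendored file): consuming the iterator
// contract (NotDone / Value / Next) defined for FirewallRule listings; Value
// and FirewallRuleProperties appear just below. FirewallRulesClient is the
// generated client from this same package, but the ListByAccountComplete
// method name is an assumption based on the usual codegen pattern for packages
// that define *Iterator types, not a signature shown in this diff.
// -----------------------------------------------------------------------------
package datalakeexamples

import (
	"context"
	"fmt"

	"github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account"
	"github.com/Azure/go-autorest/autorest/to"
)

func printFirewallRules(ctx context.Context, client account.FirewallRulesClient, resourceGroup, accountName string) error {
	// Assumed generated method returning a FirewallRuleListResultIterator.
	iter, err := client.ListByAccountComplete(ctx, resourceGroup, accountName)
	if err != nil {
		return err
	}
	for iter.NotDone() {
		rule := iter.Value()
		if props := rule.FirewallRuleProperties; props != nil {
			fmt.Printf("%s: %s - %s\n", to.String(rule.Name), to.String(props.StartIPAddress), to.String(props.EndIPAddress))
		}
		// Next fetches the following page transparently once the current one
		// is exhausted and returns an error if that request fails.
		if err := iter.Next(); err != nil {
			return err
		}
	}
	return nil
}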
+func (iter FirewallRuleListResultIterator) Value() FirewallRule { + if !iter.page.NotDone() { + return FirewallRule{} + } + return iter.page.Values()[iter.i] +} + +// IsEmpty returns true if the ListResult contains no values. +func (frlr FirewallRuleListResult) IsEmpty() bool { + return frlr.Value == nil || len(*frlr.Value) == 0 +} + +// firewallRuleListResultPreparer prepares a request to retrieve the next set of results. +// It returns nil if no more results exist. +func (frlr FirewallRuleListResult) firewallRuleListResultPreparer() (*http.Request, error) { + if frlr.NextLink == nil || len(to.String(frlr.NextLink)) < 1 { + return nil, nil + } + return autorest.Prepare(&http.Request{}, + autorest.AsJSON(), + autorest.AsGet(), + autorest.WithBaseURL(to.String(frlr.NextLink))) +} + +// FirewallRuleListResultPage contains a page of FirewallRule values. +type FirewallRuleListResultPage struct { + fn func(FirewallRuleListResult) (FirewallRuleListResult, error) + frlr FirewallRuleListResult +} + +// Next advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +func (page *FirewallRuleListResultPage) Next() error { + next, err := page.fn(page.frlr) + if err != nil { + return err + } + page.frlr = next + return nil +} + +// NotDone returns true if the page enumeration should be started or is not yet complete. +func (page FirewallRuleListResultPage) NotDone() bool { + return !page.frlr.IsEmpty() +} + +// Response returns the raw server response from the last page request. +func (page FirewallRuleListResultPage) Response() FirewallRuleListResult { + return page.frlr +} + +// Values returns the slice of values for the current page or nil if there are no values. +func (page FirewallRuleListResultPage) Values() []FirewallRule { + if page.frlr.IsEmpty() { + return nil + } + return *page.frlr.Value +} + +// FirewallRuleProperties the firewall rule properties. +type FirewallRuleProperties struct { + // StartIPAddress - The start IP address for the firewall rule. This can be either ipv4 or ipv6. Start and End should be in the same protocol. + StartIPAddress *string `json:"startIpAddress,omitempty"` + // EndIPAddress - The end IP address for the firewall rule. This can be either ipv4 or ipv6. Start and End should be in the same protocol. + EndIPAddress *string `json:"endIpAddress,omitempty"` +} + +// NameAvailabilityInformation data Lake Analytics account name availability result information. +type NameAvailabilityInformation struct { + autorest.Response `json:"-"` + // NameAvailable - The Boolean value of true or false to indicate whether the Data Lake Analytics account name is available or not. + NameAvailable *bool `json:"nameAvailable,omitempty"` + // Reason - The reason why the Data Lake Analytics account name is not available, if nameAvailable is false. + Reason *string `json:"reason,omitempty"` + // Message - The message describing why the Data Lake Analytics account name is not available, if nameAvailable is false. + Message *string `json:"message,omitempty"` +} + +// Operation an available operation for Data Lake Analytics. +type Operation struct { + // Name - The name of the operation. + Name *string `json:"name,omitempty"` + // Display - The display information for the operation. + Display *OperationDisplay `json:"display,omitempty"` + // Origin - The intended executor of the operation. 
Possible values include: 'OperationOriginUser', 'OperationOriginSystem', 'OperationOriginUsersystem' + Origin OperationOrigin `json:"origin,omitempty"` +} + +// OperationDisplay the display information for a particular operation. +type OperationDisplay struct { + // Provider - The resource provider of the operation. + Provider *string `json:"provider,omitempty"` + // Resource - The resource type of the operation. + Resource *string `json:"resource,omitempty"` + // Operation - A friendly name of the operation. + Operation *string `json:"operation,omitempty"` + // Description - A friendly description of the operation. + Description *string `json:"description,omitempty"` +} + +// OperationListResult the list of available operations for Data Lake Analytics. +type OperationListResult struct { + autorest.Response `json:"-"` + // Value - The results of the list operation. + Value *[]Operation `json:"value,omitempty"` + // NextLink - The link (url) to the next page of results. + NextLink *string `json:"nextLink,omitempty"` +} + +// Resource the resource model definition. +type Resource struct { + // ID - The resource identifer. + ID *string `json:"id,omitempty"` + // Name - The resource name. + Name *string `json:"name,omitempty"` + // Type - The resource type. + Type *string `json:"type,omitempty"` + // Location - The resource location. + Location *string `json:"location,omitempty"` + // Tags - The resource tags. + Tags map[string]*string `json:"tags"` +} + +// MarshalJSON is the custom marshaler for Resource. +func (r Resource) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if r.ID != nil { + objectMap["id"] = r.ID + } + if r.Name != nil { + objectMap["name"] = r.Name + } + if r.Type != nil { + objectMap["type"] = r.Type + } + if r.Location != nil { + objectMap["location"] = r.Location + } + if r.Tags != nil { + objectMap["tags"] = r.Tags + } + return json.Marshal(objectMap) +} + +// SasTokenInformation SAS token information. +type SasTokenInformation struct { + // AccessToken - The access token for the associated Azure Storage Container. + AccessToken *string `json:"accessToken,omitempty"` +} + +// SasTokenInformationListResult the SAS response that contains the storage account, container and associated SAS +// token for connection use. +type SasTokenInformationListResult struct { + autorest.Response `json:"-"` + // Value - The results of the list operation. + Value *[]SasTokenInformation `json:"value,omitempty"` + // NextLink - The link (url) to the next page of results. + NextLink *string `json:"nextLink,omitempty"` +} + +// SasTokenInformationListResultIterator provides access to a complete listing of SasTokenInformation values. +type SasTokenInformationListResultIterator struct { + i int + page SasTokenInformationListResultPage +} + +// Next advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +func (iter *SasTokenInformationListResultIterator) Next() error { + iter.i++ + if iter.i < len(iter.page.Values()) { + return nil + } + err := iter.page.Next() + if err != nil { + iter.i-- + return err + } + iter.i = 0 + return nil +} + +// NotDone returns true if the enumeration should be started or is not yet complete. +func (iter SasTokenInformationListResultIterator) NotDone() bool { + return iter.page.NotDone() && iter.i < len(iter.page.Values()) +} + +// Response returns the raw server response from the last page request. 
+func (iter SasTokenInformationListResultIterator) Response() SasTokenInformationListResult { + return iter.page.Response() +} + +// Value returns the current value or a zero-initialized value if the +// iterator has advanced beyond the end of the collection. +func (iter SasTokenInformationListResultIterator) Value() SasTokenInformation { + if !iter.page.NotDone() { + return SasTokenInformation{} + } + return iter.page.Values()[iter.i] +} + +// IsEmpty returns true if the ListResult contains no values. +func (stilr SasTokenInformationListResult) IsEmpty() bool { + return stilr.Value == nil || len(*stilr.Value) == 0 +} + +// sasTokenInformationListResultPreparer prepares a request to retrieve the next set of results. +// It returns nil if no more results exist. +func (stilr SasTokenInformationListResult) sasTokenInformationListResultPreparer() (*http.Request, error) { + if stilr.NextLink == nil || len(to.String(stilr.NextLink)) < 1 { + return nil, nil + } + return autorest.Prepare(&http.Request{}, + autorest.AsJSON(), + autorest.AsGet(), + autorest.WithBaseURL(to.String(stilr.NextLink))) +} + +// SasTokenInformationListResultPage contains a page of SasTokenInformation values. +type SasTokenInformationListResultPage struct { + fn func(SasTokenInformationListResult) (SasTokenInformationListResult, error) + stilr SasTokenInformationListResult +} + +// Next advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +func (page *SasTokenInformationListResultPage) Next() error { + next, err := page.fn(page.stilr) + if err != nil { + return err + } + page.stilr = next + return nil +} + +// NotDone returns true if the page enumeration should be started or is not yet complete. +func (page SasTokenInformationListResultPage) NotDone() bool { + return !page.stilr.IsEmpty() +} + +// Response returns the raw server response from the last page request. +func (page SasTokenInformationListResultPage) Response() SasTokenInformationListResult { + return page.stilr +} + +// Values returns the slice of values for the current page or nil if there are no values. +func (page SasTokenInformationListResultPage) Values() []SasTokenInformation { + if page.stilr.IsEmpty() { + return nil + } + return *page.stilr.Value +} + +// StorageAccountInformation azure Storage account information. +type StorageAccountInformation struct { + autorest.Response `json:"-"` + // StorageAccountInformationProperties - The Azure Storage account properties. + *StorageAccountInformationProperties `json:"properties,omitempty"` + // ID - The resource identifier. + ID *string `json:"id,omitempty"` + // Name - The resource name. + Name *string `json:"name,omitempty"` + // Type - The resource type. + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for StorageAccountInformation. +func (sai StorageAccountInformation) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if sai.StorageAccountInformationProperties != nil { + objectMap["properties"] = sai.StorageAccountInformationProperties + } + if sai.ID != nil { + objectMap["id"] = sai.ID + } + if sai.Name != nil { + objectMap["name"] = sai.Name + } + if sai.Type != nil { + objectMap["type"] = sai.Type + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for StorageAccountInformation struct. 
+func (sai *StorageAccountInformation) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var storageAccountInformationProperties StorageAccountInformationProperties + err = json.Unmarshal(*v, &storageAccountInformationProperties) + if err != nil { + return err + } + sai.StorageAccountInformationProperties = &storageAccountInformationProperties + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + sai.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + sai.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + sai.Type = &typeVar + } + } + } + + return nil +} + +// StorageAccountInformationListResult azure Storage account list information. +type StorageAccountInformationListResult struct { + autorest.Response `json:"-"` + // Value - The results of the list operation. + Value *[]StorageAccountInformation `json:"value,omitempty"` + // NextLink - The link (url) to the next page of results. + NextLink *string `json:"nextLink,omitempty"` +} + +// StorageAccountInformationListResultIterator provides access to a complete listing of StorageAccountInformation +// values. +type StorageAccountInformationListResultIterator struct { + i int + page StorageAccountInformationListResultPage +} + +// Next advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +func (iter *StorageAccountInformationListResultIterator) Next() error { + iter.i++ + if iter.i < len(iter.page.Values()) { + return nil + } + err := iter.page.Next() + if err != nil { + iter.i-- + return err + } + iter.i = 0 + return nil +} + +// NotDone returns true if the enumeration should be started or is not yet complete. +func (iter StorageAccountInformationListResultIterator) NotDone() bool { + return iter.page.NotDone() && iter.i < len(iter.page.Values()) +} + +// Response returns the raw server response from the last page request. +func (iter StorageAccountInformationListResultIterator) Response() StorageAccountInformationListResult { + return iter.page.Response() +} + +// Value returns the current value or a zero-initialized value if the +// iterator has advanced beyond the end of the collection. +func (iter StorageAccountInformationListResultIterator) Value() StorageAccountInformation { + if !iter.page.NotDone() { + return StorageAccountInformation{} + } + return iter.page.Values()[iter.i] +} + +// IsEmpty returns true if the ListResult contains no values. +func (sailr StorageAccountInformationListResult) IsEmpty() bool { + return sailr.Value == nil || len(*sailr.Value) == 0 +} + +// storageAccountInformationListResultPreparer prepares a request to retrieve the next set of results. +// It returns nil if no more results exist. +func (sailr StorageAccountInformationListResult) storageAccountInformationListResultPreparer() (*http.Request, error) { + if sailr.NextLink == nil || len(to.String(sailr.NextLink)) < 1 { + return nil, nil + } + return autorest.Prepare(&http.Request{}, + autorest.AsJSON(), + autorest.AsGet(), + autorest.WithBaseURL(to.String(sailr.NextLink))) +} + +// StorageAccountInformationListResultPage contains a page of StorageAccountInformation values. 
+type StorageAccountInformationListResultPage struct { + fn func(StorageAccountInformationListResult) (StorageAccountInformationListResult, error) + sailr StorageAccountInformationListResult +} + +// Next advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +func (page *StorageAccountInformationListResultPage) Next() error { + next, err := page.fn(page.sailr) + if err != nil { + return err + } + page.sailr = next + return nil +} + +// NotDone returns true if the page enumeration should be started or is not yet complete. +func (page StorageAccountInformationListResultPage) NotDone() bool { + return !page.sailr.IsEmpty() +} + +// Response returns the raw server response from the last page request. +func (page StorageAccountInformationListResultPage) Response() StorageAccountInformationListResult { + return page.sailr +} + +// Values returns the slice of values for the current page or nil if there are no values. +func (page StorageAccountInformationListResultPage) Values() []StorageAccountInformation { + if page.sailr.IsEmpty() { + return nil + } + return *page.sailr.Value +} + +// StorageAccountInformationProperties the Azure Storage account properties. +type StorageAccountInformationProperties struct { + // Suffix - The optional suffix for the storage account. + Suffix *string `json:"suffix,omitempty"` +} + +// StorageContainer azure Storage blob container information. +type StorageContainer struct { + autorest.Response `json:"-"` + // StorageContainerProperties - The properties of the blob container. + *StorageContainerProperties `json:"properties,omitempty"` + // ID - The resource identifier. + ID *string `json:"id,omitempty"` + // Name - The resource name. + Name *string `json:"name,omitempty"` + // Type - The resource type. + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for StorageContainer. +func (sc StorageContainer) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if sc.StorageContainerProperties != nil { + objectMap["properties"] = sc.StorageContainerProperties + } + if sc.ID != nil { + objectMap["id"] = sc.ID + } + if sc.Name != nil { + objectMap["name"] = sc.Name + } + if sc.Type != nil { + objectMap["type"] = sc.Type + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for StorageContainer struct. +func (sc *StorageContainer) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var storageContainerProperties StorageContainerProperties + err = json.Unmarshal(*v, &storageContainerProperties) + if err != nil { + return err + } + sc.StorageContainerProperties = &storageContainerProperties + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + sc.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + sc.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + sc.Type = &typeVar + } + } + } + + return nil +} + +// StorageContainerListResult the list of blob containers associated with the storage account attached to the Data +// Lake Analytics account. 
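// -----------------------------------------------------------------------------
// Editorial sketch (not part of the vendored file): the page-level counterpart
// to the iterator loop, using the NotDone / Values / Next methods defined above
// for StorageAccountInformation pages. A page value is normally returned by the
// generated StorageAccountsClient list call, whose exact signature is not shown
// in this diff, so it is taken as a parameter here.
// -----------------------------------------------------------------------------
package datalakeexamples

import (
	"github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account"
	"github.com/Azure/go-autorest/autorest/to"
)

func linkedStorageAccountNames(page account.StorageAccountInformationListResultPage) ([]string, error) {
	var names []string
	for page.NotDone() {
		// Values returns the current page's slice (nil when the page is empty).
		for _, sa := range page.Values() {
			names = append(names, to.String(sa.Name))
		}
		// Next follows nextLink and swaps in the next page, or returns an error.
		if err := page.Next(); err != nil {
			return nil, err
		}
	}
	return names, nil
}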
+type StorageContainerListResult struct { + autorest.Response `json:"-"` + // Value - The results of the list operation. + Value *[]StorageContainer `json:"value,omitempty"` + // NextLink - The link (url) to the next page of results. + NextLink *string `json:"nextLink,omitempty"` +} + +// StorageContainerListResultIterator provides access to a complete listing of StorageContainer values. +type StorageContainerListResultIterator struct { + i int + page StorageContainerListResultPage +} + +// Next advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +func (iter *StorageContainerListResultIterator) Next() error { + iter.i++ + if iter.i < len(iter.page.Values()) { + return nil + } + err := iter.page.Next() + if err != nil { + iter.i-- + return err + } + iter.i = 0 + return nil +} + +// NotDone returns true if the enumeration should be started or is not yet complete. +func (iter StorageContainerListResultIterator) NotDone() bool { + return iter.page.NotDone() && iter.i < len(iter.page.Values()) +} + +// Response returns the raw server response from the last page request. +func (iter StorageContainerListResultIterator) Response() StorageContainerListResult { + return iter.page.Response() +} + +// Value returns the current value or a zero-initialized value if the +// iterator has advanced beyond the end of the collection. +func (iter StorageContainerListResultIterator) Value() StorageContainer { + if !iter.page.NotDone() { + return StorageContainer{} + } + return iter.page.Values()[iter.i] +} + +// IsEmpty returns true if the ListResult contains no values. +func (sclr StorageContainerListResult) IsEmpty() bool { + return sclr.Value == nil || len(*sclr.Value) == 0 +} + +// storageContainerListResultPreparer prepares a request to retrieve the next set of results. +// It returns nil if no more results exist. +func (sclr StorageContainerListResult) storageContainerListResultPreparer() (*http.Request, error) { + if sclr.NextLink == nil || len(to.String(sclr.NextLink)) < 1 { + return nil, nil + } + return autorest.Prepare(&http.Request{}, + autorest.AsJSON(), + autorest.AsGet(), + autorest.WithBaseURL(to.String(sclr.NextLink))) +} + +// StorageContainerListResultPage contains a page of StorageContainer values. +type StorageContainerListResultPage struct { + fn func(StorageContainerListResult) (StorageContainerListResult, error) + sclr StorageContainerListResult +} + +// Next advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +func (page *StorageContainerListResultPage) Next() error { + next, err := page.fn(page.sclr) + if err != nil { + return err + } + page.sclr = next + return nil +} + +// NotDone returns true if the page enumeration should be started or is not yet complete. +func (page StorageContainerListResultPage) NotDone() bool { + return !page.sclr.IsEmpty() +} + +// Response returns the raw server response from the last page request. +func (page StorageContainerListResultPage) Response() StorageContainerListResult { + return page.sclr +} + +// Values returns the slice of values for the current page or nil if there are no values. +func (page StorageContainerListResultPage) Values() []StorageContainer { + if page.sclr.IsEmpty() { + return nil + } + return *page.sclr.Value +} + +// StorageContainerProperties azure Storage blob container properties information. 
+type StorageContainerProperties struct { + // LastModifiedTime - The last modified time of the blob container. + LastModifiedTime *date.Time `json:"lastModifiedTime,omitempty"` +} + +// SubResource the resource model definition for a nested resource. +type SubResource struct { + // ID - The resource identifier. + ID *string `json:"id,omitempty"` + // Name - The resource name. + Name *string `json:"name,omitempty"` + // Type - The resource type. + Type *string `json:"type,omitempty"` +} + +// UpdateComputePolicyParameters the parameters used to update a compute policy. +type UpdateComputePolicyParameters struct { + // UpdateComputePolicyProperties - The compute policy properties to use when updating a compute policy. + *UpdateComputePolicyProperties `json:"properties,omitempty"` +} + +// MarshalJSON is the custom marshaler for UpdateComputePolicyParameters. +func (ucpp UpdateComputePolicyParameters) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if ucpp.UpdateComputePolicyProperties != nil { + objectMap["properties"] = ucpp.UpdateComputePolicyProperties + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for UpdateComputePolicyParameters struct. +func (ucpp *UpdateComputePolicyParameters) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var updateComputePolicyProperties UpdateComputePolicyProperties + err = json.Unmarshal(*v, &updateComputePolicyProperties) + if err != nil { + return err + } + ucpp.UpdateComputePolicyProperties = &updateComputePolicyProperties + } + } + } + + return nil +} + +// UpdateComputePolicyProperties the compute policy properties to use when updating a compute policy. +type UpdateComputePolicyProperties struct { + // ObjectID - The AAD object identifier for the entity to create a policy for. + ObjectID *uuid.UUID `json:"objectId,omitempty"` + // ObjectType - The type of AAD object the object identifier refers to. Possible values include: 'User', 'Group', 'ServicePrincipal' + ObjectType AADObjectType `json:"objectType,omitempty"` + // MaxDegreeOfParallelismPerJob - The maximum degree of parallelism per job this user can use to submit jobs. This property, the min priority per job property, or both must be passed. + MaxDegreeOfParallelismPerJob *int32 `json:"maxDegreeOfParallelismPerJob,omitempty"` + // MinPriorityPerJob - The minimum priority per job this user can use to submit jobs. This property, the max degree of parallelism per job property, or both must be passed. + MinPriorityPerJob *int32 `json:"minPriorityPerJob,omitempty"` +} + +// UpdateComputePolicyWithAccountParameters the parameters used to update a compute policy while updating a Data +// Lake Analytics account. +type UpdateComputePolicyWithAccountParameters struct { + // Name - The unique name of the compute policy to update. + Name *string `json:"name,omitempty"` + // UpdateComputePolicyProperties - The compute policy properties to use when updating a compute policy. + *UpdateComputePolicyProperties `json:"properties,omitempty"` +} + +// MarshalJSON is the custom marshaler for UpdateComputePolicyWithAccountParameters. 
+func (ucpwap UpdateComputePolicyWithAccountParameters) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if ucpwap.Name != nil { + objectMap["name"] = ucpwap.Name + } + if ucpwap.UpdateComputePolicyProperties != nil { + objectMap["properties"] = ucpwap.UpdateComputePolicyProperties + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for UpdateComputePolicyWithAccountParameters struct. +func (ucpwap *UpdateComputePolicyWithAccountParameters) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + ucpwap.Name = &name + } + case "properties": + if v != nil { + var updateComputePolicyProperties UpdateComputePolicyProperties + err = json.Unmarshal(*v, &updateComputePolicyProperties) + if err != nil { + return err + } + ucpwap.UpdateComputePolicyProperties = &updateComputePolicyProperties + } + } + } + + return nil +} + +// UpdateDataLakeAnalyticsAccountParameters the parameters that can be used to update an existing Data Lake +// Analytics account. +type UpdateDataLakeAnalyticsAccountParameters struct { + // Tags - The resource tags. + Tags map[string]*string `json:"tags"` + // UpdateDataLakeAnalyticsAccountProperties - The properties that can be updated in an existing Data Lake Analytics account. + *UpdateDataLakeAnalyticsAccountProperties `json:"properties,omitempty"` +} + +// MarshalJSON is the custom marshaler for UpdateDataLakeAnalyticsAccountParameters. +func (udlaap UpdateDataLakeAnalyticsAccountParameters) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if udlaap.Tags != nil { + objectMap["tags"] = udlaap.Tags + } + if udlaap.UpdateDataLakeAnalyticsAccountProperties != nil { + objectMap["properties"] = udlaap.UpdateDataLakeAnalyticsAccountProperties + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for UpdateDataLakeAnalyticsAccountParameters struct. +func (udlaap *UpdateDataLakeAnalyticsAccountParameters) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "tags": + if v != nil { + var tags map[string]*string + err = json.Unmarshal(*v, &tags) + if err != nil { + return err + } + udlaap.Tags = tags + } + case "properties": + if v != nil { + var updateDataLakeAnalyticsAccountProperties UpdateDataLakeAnalyticsAccountProperties + err = json.Unmarshal(*v, &updateDataLakeAnalyticsAccountProperties) + if err != nil { + return err + } + udlaap.UpdateDataLakeAnalyticsAccountProperties = &updateDataLakeAnalyticsAccountProperties + } + } + } + + return nil +} + +// UpdateDataLakeAnalyticsAccountProperties the properties to update that are associated with an underlying Data +// Lake Analytics account. +type UpdateDataLakeAnalyticsAccountProperties struct { + // DataLakeStoreAccounts - The list of Data Lake Store accounts associated with this account. + DataLakeStoreAccounts *[]UpdateDataLakeStoreWithAccountParameters `json:"dataLakeStoreAccounts,omitempty"` + // StorageAccounts - The list of Azure Blob storage accounts associated with this account. 
+ StorageAccounts *[]UpdateStorageAccountWithAccountParameters `json:"storageAccounts,omitempty"` + // ComputePolicies - The list of compute policies associated with this account. + ComputePolicies *[]UpdateComputePolicyWithAccountParameters `json:"computePolicies,omitempty"` + // FirewallRules - The list of firewall rules associated with this account. + FirewallRules *[]UpdateFirewallRuleWithAccountParameters `json:"firewallRules,omitempty"` + // FirewallState - The current state of the IP address firewall for this account. Disabling the firewall does not remove existing rules, they will just be ignored until the firewall is re-enabled. Possible values include: 'FirewallStateEnabled', 'FirewallStateDisabled' + FirewallState FirewallState `json:"firewallState,omitempty"` + // FirewallAllowAzureIps - The current state of allowing or disallowing IPs originating within Azure through the firewall. If the firewall is disabled, this is not enforced. Possible values include: 'Enabled', 'Disabled' + FirewallAllowAzureIps FirewallAllowAzureIpsState `json:"firewallAllowAzureIps,omitempty"` + // NewTier - The commitment tier to use for next month. Possible values include: 'Consumption', 'Commitment100AUHours', 'Commitment500AUHours', 'Commitment1000AUHours', 'Commitment5000AUHours', 'Commitment10000AUHours', 'Commitment50000AUHours', 'Commitment100000AUHours', 'Commitment500000AUHours' + NewTier TierType `json:"newTier,omitempty"` + // MaxJobCount - The maximum supported jobs running under the account at the same time. + MaxJobCount *int32 `json:"maxJobCount,omitempty"` + // MaxDegreeOfParallelism - The maximum supported degree of parallelism for this account. + MaxDegreeOfParallelism *int32 `json:"maxDegreeOfParallelism,omitempty"` + // MaxDegreeOfParallelismPerJob - The maximum supported degree of parallelism per job for this account. + MaxDegreeOfParallelismPerJob *int32 `json:"maxDegreeOfParallelismPerJob,omitempty"` + // MinPriorityPerJob - The minimum supported priority per job for this account. + MinPriorityPerJob *int32 `json:"minPriorityPerJob,omitempty"` + // QueryStoreRetention - The number of days that job metadata is retained. + QueryStoreRetention *int32 `json:"queryStoreRetention,omitempty"` +} + +// UpdateDataLakeStoreProperties the Data Lake Store account properties to use when updating a Data Lake Store +// account. +type UpdateDataLakeStoreProperties struct { + // Suffix - The optional suffix for the Data Lake Store account. + Suffix *string `json:"suffix,omitempty"` +} + +// UpdateDataLakeStoreWithAccountParameters the parameters used to update a Data Lake Store account while updating +// a Data Lake Analytics account. +type UpdateDataLakeStoreWithAccountParameters struct { + // Name - The unique name of the Data Lake Store account to update. + Name *string `json:"name,omitempty"` + // UpdateDataLakeStoreProperties - The Data Lake Store account properties to use when updating a Data Lake Store account. + *UpdateDataLakeStoreProperties `json:"properties,omitempty"` +} + +// MarshalJSON is the custom marshaler for UpdateDataLakeStoreWithAccountParameters. 
+func (udlswap UpdateDataLakeStoreWithAccountParameters) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if udlswap.Name != nil { + objectMap["name"] = udlswap.Name + } + if udlswap.UpdateDataLakeStoreProperties != nil { + objectMap["properties"] = udlswap.UpdateDataLakeStoreProperties + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for UpdateDataLakeStoreWithAccountParameters struct. +func (udlswap *UpdateDataLakeStoreWithAccountParameters) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + udlswap.Name = &name + } + case "properties": + if v != nil { + var updateDataLakeStoreProperties UpdateDataLakeStoreProperties + err = json.Unmarshal(*v, &updateDataLakeStoreProperties) + if err != nil { + return err + } + udlswap.UpdateDataLakeStoreProperties = &updateDataLakeStoreProperties + } + } + } + + return nil +} + +// UpdateFirewallRuleParameters the parameters used to update a firewall rule. +type UpdateFirewallRuleParameters struct { + // UpdateFirewallRuleProperties - The firewall rule properties to use when updating a firewall rule. + *UpdateFirewallRuleProperties `json:"properties,omitempty"` +} + +// MarshalJSON is the custom marshaler for UpdateFirewallRuleParameters. +func (ufrp UpdateFirewallRuleParameters) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if ufrp.UpdateFirewallRuleProperties != nil { + objectMap["properties"] = ufrp.UpdateFirewallRuleProperties + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for UpdateFirewallRuleParameters struct. +func (ufrp *UpdateFirewallRuleParameters) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var updateFirewallRuleProperties UpdateFirewallRuleProperties + err = json.Unmarshal(*v, &updateFirewallRuleProperties) + if err != nil { + return err + } + ufrp.UpdateFirewallRuleProperties = &updateFirewallRuleProperties + } + } + } + + return nil +} + +// UpdateFirewallRuleProperties the firewall rule properties to use when updating a firewall rule. +type UpdateFirewallRuleProperties struct { + // StartIPAddress - The start IP address for the firewall rule. This can be either ipv4 or ipv6. Start and End should be in the same protocol. + StartIPAddress *string `json:"startIpAddress,omitempty"` + // EndIPAddress - The end IP address for the firewall rule. This can be either ipv4 or ipv6. Start and End should be in the same protocol. + EndIPAddress *string `json:"endIpAddress,omitempty"` +} + +// UpdateFirewallRuleWithAccountParameters the parameters used to update a firewall rule while updating a Data Lake +// Analytics account. +type UpdateFirewallRuleWithAccountParameters struct { + // Name - The unique name of the firewall rule to update. + Name *string `json:"name,omitempty"` + // UpdateFirewallRuleProperties - The firewall rule properties to use when updating a firewall rule. + *UpdateFirewallRuleProperties `json:"properties,omitempty"` +} + +// MarshalJSON is the custom marshaler for UpdateFirewallRuleWithAccountParameters. 
+func (ufrwap UpdateFirewallRuleWithAccountParameters) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if ufrwap.Name != nil { + objectMap["name"] = ufrwap.Name + } + if ufrwap.UpdateFirewallRuleProperties != nil { + objectMap["properties"] = ufrwap.UpdateFirewallRuleProperties + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for UpdateFirewallRuleWithAccountParameters struct. +func (ufrwap *UpdateFirewallRuleWithAccountParameters) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + ufrwap.Name = &name + } + case "properties": + if v != nil { + var updateFirewallRuleProperties UpdateFirewallRuleProperties + err = json.Unmarshal(*v, &updateFirewallRuleProperties) + if err != nil { + return err + } + ufrwap.UpdateFirewallRuleProperties = &updateFirewallRuleProperties + } + } + } + + return nil +} + +// UpdateStorageAccountParameters the parameters used to update an Azure Storage account. +type UpdateStorageAccountParameters struct { + // UpdateStorageAccountProperties - The Azure Storage account properties to use when updating an Azure Storage account. + *UpdateStorageAccountProperties `json:"properties,omitempty"` +} + +// MarshalJSON is the custom marshaler for UpdateStorageAccountParameters. +func (usap UpdateStorageAccountParameters) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if usap.UpdateStorageAccountProperties != nil { + objectMap["properties"] = usap.UpdateStorageAccountProperties + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for UpdateStorageAccountParameters struct. +func (usap *UpdateStorageAccountParameters) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var updateStorageAccountProperties UpdateStorageAccountProperties + err = json.Unmarshal(*v, &updateStorageAccountProperties) + if err != nil { + return err + } + usap.UpdateStorageAccountProperties = &updateStorageAccountProperties + } + } + } + + return nil +} + +// UpdateStorageAccountProperties the Azure Storage account properties to use when updating an Azure Storage +// account. +type UpdateStorageAccountProperties struct { + // AccessKey - The updated access key associated with this Azure Storage account that will be used to connect to it. + AccessKey *string `json:"accessKey,omitempty"` + // Suffix - The optional suffix for the storage account. + Suffix *string `json:"suffix,omitempty"` +} + +// UpdateStorageAccountWithAccountParameters the parameters used to update an Azure Storage account while updating +// a Data Lake Analytics account. +type UpdateStorageAccountWithAccountParameters struct { + // Name - The unique name of the Azure Storage account to update. + Name *string `json:"name,omitempty"` + // UpdateStorageAccountProperties - The Azure Storage account properties to use when updating an Azure Storage account. + *UpdateStorageAccountProperties `json:"properties,omitempty"` +} + +// MarshalJSON is the custom marshaler for UpdateStorageAccountWithAccountParameters. 
+func (usawap UpdateStorageAccountWithAccountParameters) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if usawap.Name != nil { + objectMap["name"] = usawap.Name + } + if usawap.UpdateStorageAccountProperties != nil { + objectMap["properties"] = usawap.UpdateStorageAccountProperties + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for UpdateStorageAccountWithAccountParameters struct. +func (usawap *UpdateStorageAccountWithAccountParameters) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + usawap.Name = &name + } + case "properties": + if v != nil { + var updateStorageAccountProperties UpdateStorageAccountProperties + err = json.Unmarshal(*v, &updateStorageAccountProperties) + if err != nil { + return err + } + usawap.UpdateStorageAccountProperties = &updateStorageAccountProperties + } + } + } + + return nil +} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account/operations.go b/vendor/github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account/operations.go new file mode 100644 index 000000000000..f91cf3757448 --- /dev/null +++ b/vendor/github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account/operations.go @@ -0,0 +1,98 @@ +package account + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +import ( + "context" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "net/http" +) + +// OperationsClient is the creates an Azure Data Lake Analytics account management client. +type OperationsClient struct { + BaseClient +} + +// NewOperationsClient creates an instance of the OperationsClient client. +func NewOperationsClient(subscriptionID string) OperationsClient { + return NewOperationsClientWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewOperationsClientWithBaseURI creates an instance of the OperationsClient client. +func NewOperationsClientWithBaseURI(baseURI string, subscriptionID string) OperationsClient { + return OperationsClient{NewWithBaseURI(baseURI, subscriptionID)} +} + +// List lists all of the available Data Lake Analytics REST API operations. 
+func (client OperationsClient) List(ctx context.Context) (result OperationListResult, err error) { + req, err := client.ListPreparer(ctx) + if err != nil { + err = autorest.NewErrorWithError(err, "account.OperationsClient", "List", nil, "Failure preparing request") + return + } + + resp, err := client.ListSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "account.OperationsClient", "List", resp, "Failure sending request") + return + } + + result, err = client.ListResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.OperationsClient", "List", resp, "Failure responding to request") + } + + return +} + +// ListPreparer prepares the List request. +func (client OperationsClient) ListPreparer(ctx context.Context) (*http.Request, error) { + const APIVersion = "2016-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPath("/providers/Microsoft.DataLakeAnalytics/operations"), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListSender sends the List request. The method will close the +// http.Response Body if it receives an error. +func (client OperationsClient) ListSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) +} + +// ListResponder handles the response to the List request. The method always +// closes the http.Response Body. +func (client OperationsClient) ListResponder(resp *http.Response) (result OperationListResult, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account/storageaccounts.go b/vendor/github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account/storageaccounts.go new file mode 100644 index 000000000000..63882466a8d6 --- /dev/null +++ b/vendor/github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account/storageaccounts.go @@ -0,0 +1,736 @@ +package account + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ +import ( + "context" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/Azure/go-autorest/autorest/validation" + "net/http" +) + +// StorageAccountsClient is the creates an Azure Data Lake Analytics account management client. +type StorageAccountsClient struct { + BaseClient +} + +// NewStorageAccountsClient creates an instance of the StorageAccountsClient client. +func NewStorageAccountsClient(subscriptionID string) StorageAccountsClient { + return NewStorageAccountsClientWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewStorageAccountsClientWithBaseURI creates an instance of the StorageAccountsClient client. +func NewStorageAccountsClientWithBaseURI(baseURI string, subscriptionID string) StorageAccountsClient { + return StorageAccountsClient{NewWithBaseURI(baseURI, subscriptionID)} +} + +// Add updates the specified Data Lake Analytics account to add an Azure Storage account. +// Parameters: +// resourceGroupName - the name of the Azure resource group. +// accountName - the name of the Data Lake Analytics account. +// storageAccountName - the name of the Azure Storage account to add +// parameters - the parameters containing the access key and optional suffix for the Azure Storage Account. +func (client StorageAccountsClient) Add(ctx context.Context, resourceGroupName string, accountName string, storageAccountName string, parameters AddStorageAccountParameters) (result autorest.Response, err error) { + if err := validation.Validate([]validation.Validation{ + {TargetValue: parameters, + Constraints: []validation.Constraint{{Target: "parameters.AddStorageAccountProperties", Name: validation.Null, Rule: true, + Chain: []validation.Constraint{{Target: "parameters.AddStorageAccountProperties.AccessKey", Name: validation.Null, Rule: true, Chain: nil}}}}}}); err != nil { + return result, validation.NewError("account.StorageAccountsClient", "Add", err.Error()) + } + + req, err := client.AddPreparer(ctx, resourceGroupName, accountName, storageAccountName, parameters) + if err != nil { + err = autorest.NewErrorWithError(err, "account.StorageAccountsClient", "Add", nil, "Failure preparing request") + return + } + + resp, err := client.AddSender(req) + if err != nil { + result.Response = resp + err = autorest.NewErrorWithError(err, "account.StorageAccountsClient", "Add", resp, "Failure sending request") + return + } + + result, err = client.AddResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.StorageAccountsClient", "Add", resp, "Failure responding to request") + } + + return +} + +// AddPreparer prepares the Add request. 
+func (client StorageAccountsClient) AddPreparer(ctx context.Context, resourceGroupName string, accountName string, storageAccountName string, parameters AddStorageAccountParameters) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "storageAccountName": autorest.Encode("path", storageAccountName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPut(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/storageAccounts/{storageAccountName}", pathParameters), + autorest.WithJSON(parameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// AddSender sends the Add request. The method will close the +// http.Response Body if it receives an error. +func (client StorageAccountsClient) AddSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// AddResponder handles the response to the Add request. The method always +// closes the http.Response Body. +func (client StorageAccountsClient) AddResponder(resp *http.Response) (result autorest.Response, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByClosing()) + result.Response = resp + return +} + +// Delete updates the specified Data Lake Analytics account to remove an Azure Storage account. +// Parameters: +// resourceGroupName - the name of the Azure resource group. +// accountName - the name of the Data Lake Analytics account. +// storageAccountName - the name of the Azure Storage account to remove +func (client StorageAccountsClient) Delete(ctx context.Context, resourceGroupName string, accountName string, storageAccountName string) (result autorest.Response, err error) { + req, err := client.DeletePreparer(ctx, resourceGroupName, accountName, storageAccountName) + if err != nil { + err = autorest.NewErrorWithError(err, "account.StorageAccountsClient", "Delete", nil, "Failure preparing request") + return + } + + resp, err := client.DeleteSender(req) + if err != nil { + result.Response = resp + err = autorest.NewErrorWithError(err, "account.StorageAccountsClient", "Delete", resp, "Failure sending request") + return + } + + result, err = client.DeleteResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.StorageAccountsClient", "Delete", resp, "Failure responding to request") + } + + return +} + +// DeletePreparer prepares the Delete request. 
+func (client StorageAccountsClient) DeletePreparer(ctx context.Context, resourceGroupName string, accountName string, storageAccountName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "storageAccountName": autorest.Encode("path", storageAccountName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsDelete(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/storageAccounts/{storageAccountName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// DeleteSender sends the Delete request. The method will close the +// http.Response Body if it receives an error. +func (client StorageAccountsClient) DeleteSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// DeleteResponder handles the response to the Delete request. The method always +// closes the http.Response Body. +func (client StorageAccountsClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByClosing()) + result.Response = resp + return +} + +// Get gets the specified Azure Storage account linked to the given Data Lake Analytics account. +// Parameters: +// resourceGroupName - the name of the Azure resource group. +// accountName - the name of the Data Lake Analytics account. +// storageAccountName - the name of the Azure Storage account for which to retrieve the details. +func (client StorageAccountsClient) Get(ctx context.Context, resourceGroupName string, accountName string, storageAccountName string) (result StorageAccountInformation, err error) { + req, err := client.GetPreparer(ctx, resourceGroupName, accountName, storageAccountName) + if err != nil { + err = autorest.NewErrorWithError(err, "account.StorageAccountsClient", "Get", nil, "Failure preparing request") + return + } + + resp, err := client.GetSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "account.StorageAccountsClient", "Get", resp, "Failure sending request") + return + } + + result, err = client.GetResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.StorageAccountsClient", "Get", resp, "Failure responding to request") + } + + return +} + +// GetPreparer prepares the Get request. 
+func (client StorageAccountsClient) GetPreparer(ctx context.Context, resourceGroupName string, accountName string, storageAccountName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "storageAccountName": autorest.Encode("path", storageAccountName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/storageAccounts/{storageAccountName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// GetSender sends the Get request. The method will close the +// http.Response Body if it receives an error. +func (client StorageAccountsClient) GetSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// GetResponder handles the response to the Get request. The method always +// closes the http.Response Body. +func (client StorageAccountsClient) GetResponder(resp *http.Response) (result StorageAccountInformation, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// GetStorageContainer gets the specified Azure Storage container associated with the given Data Lake Analytics and +// Azure Storage accounts. +// Parameters: +// resourceGroupName - the name of the Azure resource group. +// accountName - the name of the Data Lake Analytics account. +// storageAccountName - the name of the Azure storage account from which to retrieve the blob container. +// containerName - the name of the Azure storage container to retrieve +func (client StorageAccountsClient) GetStorageContainer(ctx context.Context, resourceGroupName string, accountName string, storageAccountName string, containerName string) (result StorageContainer, err error) { + req, err := client.GetStorageContainerPreparer(ctx, resourceGroupName, accountName, storageAccountName, containerName) + if err != nil { + err = autorest.NewErrorWithError(err, "account.StorageAccountsClient", "GetStorageContainer", nil, "Failure preparing request") + return + } + + resp, err := client.GetStorageContainerSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "account.StorageAccountsClient", "GetStorageContainer", resp, "Failure sending request") + return + } + + result, err = client.GetStorageContainerResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.StorageAccountsClient", "GetStorageContainer", resp, "Failure responding to request") + } + + return +} + +// GetStorageContainerPreparer prepares the GetStorageContainer request. 
+func (client StorageAccountsClient) GetStorageContainerPreparer(ctx context.Context, resourceGroupName string, accountName string, storageAccountName string, containerName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "containerName": autorest.Encode("path", containerName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "storageAccountName": autorest.Encode("path", storageAccountName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/storageAccounts/{storageAccountName}/containers/{containerName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// GetStorageContainerSender sends the GetStorageContainer request. The method will close the +// http.Response Body if it receives an error. +func (client StorageAccountsClient) GetStorageContainerSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// GetStorageContainerResponder handles the response to the GetStorageContainer request. The method always +// closes the http.Response Body. +func (client StorageAccountsClient) GetStorageContainerResponder(resp *http.Response) (result StorageContainer, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// ListByAccount gets the first page of Azure Storage accounts, if any, linked to the specified Data Lake Analytics +// account. The response includes a link to the next page, if any. +// Parameters: +// resourceGroupName - the name of the Azure resource group. +// accountName - the name of the Data Lake Analytics account. +// filter - the OData filter. Optional. +// top - the number of items to return. Optional. +// skip - the number of items to skip over before returning elements. Optional. +// selectParameter - oData Select statement. Limits the properties on each entry to just those requested, e.g. +// Categories?$select=CategoryName,Description. Optional. +// orderby - orderBy clause. One or more comma-separated expressions with an optional "asc" (the default) or +// "desc" depending on the order you'd like the values sorted, e.g. Categories?$orderby=CategoryName desc. +// Optional. +// count - the Boolean value of true or false to request a count of the matching resources included with the +// resources in the response, e.g. Categories?$count=true. Optional. 
+func (client StorageAccountsClient) ListByAccount(ctx context.Context, resourceGroupName string, accountName string, filter string, top *int32, skip *int32, selectParameter string, orderby string, count *bool) (result StorageAccountInformationListResultPage, err error) { + if err := validation.Validate([]validation.Validation{ + {TargetValue: top, + Constraints: []validation.Constraint{{Target: "top", Name: validation.Null, Rule: false, + Chain: []validation.Constraint{{Target: "top", Name: validation.InclusiveMinimum, Rule: 1, Chain: nil}}}}}, + {TargetValue: skip, + Constraints: []validation.Constraint{{Target: "skip", Name: validation.Null, Rule: false, + Chain: []validation.Constraint{{Target: "skip", Name: validation.InclusiveMinimum, Rule: 1, Chain: nil}}}}}}); err != nil { + return result, validation.NewError("account.StorageAccountsClient", "ListByAccount", err.Error()) + } + + result.fn = client.listByAccountNextResults + req, err := client.ListByAccountPreparer(ctx, resourceGroupName, accountName, filter, top, skip, selectParameter, orderby, count) + if err != nil { + err = autorest.NewErrorWithError(err, "account.StorageAccountsClient", "ListByAccount", nil, "Failure preparing request") + return + } + + resp, err := client.ListByAccountSender(req) + if err != nil { + result.sailr.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "account.StorageAccountsClient", "ListByAccount", resp, "Failure sending request") + return + } + + result.sailr, err = client.ListByAccountResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.StorageAccountsClient", "ListByAccount", resp, "Failure responding to request") + } + + return +} + +// ListByAccountPreparer prepares the ListByAccount request. +func (client StorageAccountsClient) ListByAccountPreparer(ctx context.Context, resourceGroupName string, accountName string, filter string, top *int32, skip *int32, selectParameter string, orderby string, count *bool) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + if len(filter) > 0 { + queryParameters["$filter"] = autorest.Encode("query", filter) + } + if top != nil { + queryParameters["$top"] = autorest.Encode("query", *top) + } + if skip != nil { + queryParameters["$skip"] = autorest.Encode("query", *skip) + } + if len(selectParameter) > 0 { + queryParameters["$select"] = autorest.Encode("query", selectParameter) + } + if len(orderby) > 0 { + queryParameters["$orderby"] = autorest.Encode("query", orderby) + } + if count != nil { + queryParameters["$count"] = autorest.Encode("query", *count) + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/storageAccounts", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListByAccountSender sends the ListByAccount request. The method will close the +// http.Response Body if it receives an error. 
+func (client StorageAccountsClient) ListByAccountSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// ListByAccountResponder handles the response to the ListByAccount request. The method always +// closes the http.Response Body. +func (client StorageAccountsClient) ListByAccountResponder(resp *http.Response) (result StorageAccountInformationListResult, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listByAccountNextResults retrieves the next set of results, if any. +func (client StorageAccountsClient) listByAccountNextResults(lastResults StorageAccountInformationListResult) (result StorageAccountInformationListResult, err error) { + req, err := lastResults.storageAccountInformationListResultPreparer() + if err != nil { + return result, autorest.NewErrorWithError(err, "account.StorageAccountsClient", "listByAccountNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListByAccountSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "account.StorageAccountsClient", "listByAccountNextResults", resp, "Failure sending next results request") + } + result, err = client.ListByAccountResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.StorageAccountsClient", "listByAccountNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListByAccountComplete enumerates all values, automatically crossing page boundaries as required. +func (client StorageAccountsClient) ListByAccountComplete(ctx context.Context, resourceGroupName string, accountName string, filter string, top *int32, skip *int32, selectParameter string, orderby string, count *bool) (result StorageAccountInformationListResultIterator, err error) { + result.page, err = client.ListByAccount(ctx, resourceGroupName, accountName, filter, top, skip, selectParameter, orderby, count) + return +} + +// ListSasTokens gets the SAS token associated with the specified Data Lake Analytics and Azure Storage account and +// container combination. +// Parameters: +// resourceGroupName - the name of the Azure resource group. +// accountName - the name of the Data Lake Analytics account. +// storageAccountName - the name of the Azure storage account for which the SAS token is being requested. +// containerName - the name of the Azure storage container for which the SAS token is being requested. 
+func (client StorageAccountsClient) ListSasTokens(ctx context.Context, resourceGroupName string, accountName string, storageAccountName string, containerName string) (result SasTokenInformationListResultPage, err error) { + result.fn = client.listSasTokensNextResults + req, err := client.ListSasTokensPreparer(ctx, resourceGroupName, accountName, storageAccountName, containerName) + if err != nil { + err = autorest.NewErrorWithError(err, "account.StorageAccountsClient", "ListSasTokens", nil, "Failure preparing request") + return + } + + resp, err := client.ListSasTokensSender(req) + if err != nil { + result.stilr.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "account.StorageAccountsClient", "ListSasTokens", resp, "Failure sending request") + return + } + + result.stilr, err = client.ListSasTokensResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.StorageAccountsClient", "ListSasTokens", resp, "Failure responding to request") + } + + return +} + +// ListSasTokensPreparer prepares the ListSasTokens request. +func (client StorageAccountsClient) ListSasTokensPreparer(ctx context.Context, resourceGroupName string, accountName string, storageAccountName string, containerName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "containerName": autorest.Encode("path", containerName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "storageAccountName": autorest.Encode("path", storageAccountName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsPost(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/storageAccounts/{storageAccountName}/containers/{containerName}/listSasTokens", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListSasTokensSender sends the ListSasTokens request. The method will close the +// http.Response Body if it receives an error. +func (client StorageAccountsClient) ListSasTokensSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// ListSasTokensResponder handles the response to the ListSasTokens request. The method always +// closes the http.Response Body. +func (client StorageAccountsClient) ListSasTokensResponder(resp *http.Response) (result SasTokenInformationListResult, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listSasTokensNextResults retrieves the next set of results, if any. 
+func (client StorageAccountsClient) listSasTokensNextResults(lastResults SasTokenInformationListResult) (result SasTokenInformationListResult, err error) { + req, err := lastResults.sasTokenInformationListResultPreparer() + if err != nil { + return result, autorest.NewErrorWithError(err, "account.StorageAccountsClient", "listSasTokensNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListSasTokensSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "account.StorageAccountsClient", "listSasTokensNextResults", resp, "Failure sending next results request") + } + result, err = client.ListSasTokensResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.StorageAccountsClient", "listSasTokensNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListSasTokensComplete enumerates all values, automatically crossing page boundaries as required. +func (client StorageAccountsClient) ListSasTokensComplete(ctx context.Context, resourceGroupName string, accountName string, storageAccountName string, containerName string) (result SasTokenInformationListResultIterator, err error) { + result.page, err = client.ListSasTokens(ctx, resourceGroupName, accountName, storageAccountName, containerName) + return +} + +// ListStorageContainers lists the Azure Storage containers, if any, associated with the specified Data Lake Analytics +// and Azure Storage account combination. The response includes a link to the next page of results, if any. +// Parameters: +// resourceGroupName - the name of the Azure resource group. +// accountName - the name of the Data Lake Analytics account. +// storageAccountName - the name of the Azure storage account from which to list blob containers. +func (client StorageAccountsClient) ListStorageContainers(ctx context.Context, resourceGroupName string, accountName string, storageAccountName string) (result StorageContainerListResultPage, err error) { + result.fn = client.listStorageContainersNextResults + req, err := client.ListStorageContainersPreparer(ctx, resourceGroupName, accountName, storageAccountName) + if err != nil { + err = autorest.NewErrorWithError(err, "account.StorageAccountsClient", "ListStorageContainers", nil, "Failure preparing request") + return + } + + resp, err := client.ListStorageContainersSender(req) + if err != nil { + result.sclr.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "account.StorageAccountsClient", "ListStorageContainers", resp, "Failure sending request") + return + } + + result.sclr, err = client.ListStorageContainersResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.StorageAccountsClient", "ListStorageContainers", resp, "Failure responding to request") + } + + return +} + +// ListStorageContainersPreparer prepares the ListStorageContainers request. 
+func (client StorageAccountsClient) ListStorageContainersPreparer(ctx context.Context, resourceGroupName string, accountName string, storageAccountName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "storageAccountName": autorest.Encode("path", storageAccountName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/storageAccounts/{storageAccountName}/containers", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListStorageContainersSender sends the ListStorageContainers request. The method will close the +// http.Response Body if it receives an error. +func (client StorageAccountsClient) ListStorageContainersSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// ListStorageContainersResponder handles the response to the ListStorageContainers request. The method always +// closes the http.Response Body. +func (client StorageAccountsClient) ListStorageContainersResponder(resp *http.Response) (result StorageContainerListResult, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listStorageContainersNextResults retrieves the next set of results, if any. +func (client StorageAccountsClient) listStorageContainersNextResults(lastResults StorageContainerListResult) (result StorageContainerListResult, err error) { + req, err := lastResults.storageContainerListResultPreparer() + if err != nil { + return result, autorest.NewErrorWithError(err, "account.StorageAccountsClient", "listStorageContainersNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListStorageContainersSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "account.StorageAccountsClient", "listStorageContainersNextResults", resp, "Failure sending next results request") + } + result, err = client.ListStorageContainersResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.StorageAccountsClient", "listStorageContainersNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListStorageContainersComplete enumerates all values, automatically crossing page boundaries as required. 
+func (client StorageAccountsClient) ListStorageContainersComplete(ctx context.Context, resourceGroupName string, accountName string, storageAccountName string) (result StorageContainerListResultIterator, err error) { + result.page, err = client.ListStorageContainers(ctx, resourceGroupName, accountName, storageAccountName) + return +} + +// Update updates the Data Lake Analytics account to replace Azure Storage blob account details, such as the access key +// and/or suffix. +// Parameters: +// resourceGroupName - the name of the Azure resource group. +// accountName - the name of the Data Lake Analytics account. +// storageAccountName - the Azure Storage account to modify +// parameters - the parameters containing the access key and suffix to update the storage account with, if any. +// Passing nothing results in no change. +func (client StorageAccountsClient) Update(ctx context.Context, resourceGroupName string, accountName string, storageAccountName string, parameters *UpdateStorageAccountParameters) (result autorest.Response, err error) { + req, err := client.UpdatePreparer(ctx, resourceGroupName, accountName, storageAccountName, parameters) + if err != nil { + err = autorest.NewErrorWithError(err, "account.StorageAccountsClient", "Update", nil, "Failure preparing request") + return + } + + resp, err := client.UpdateSender(req) + if err != nil { + result.Response = resp + err = autorest.NewErrorWithError(err, "account.StorageAccountsClient", "Update", resp, "Failure sending request") + return + } + + result, err = client.UpdateResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "account.StorageAccountsClient", "Update", resp, "Failure responding to request") + } + + return +} + +// UpdatePreparer prepares the Update request. +func (client StorageAccountsClient) UpdatePreparer(ctx context.Context, resourceGroupName string, accountName string, storageAccountName string, parameters *UpdateStorageAccountParameters) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "storageAccountName": autorest.Encode("path", storageAccountName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPatch(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/storageAccounts/{storageAccountName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + if parameters != nil { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithJSON(parameters)) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// UpdateSender sends the Update request. The method will close the +// http.Response Body if it receives an error. +func (client StorageAccountsClient) UpdateSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// UpdateResponder handles the response to the Update request. The method always +// closes the http.Response Body. 
+func (client StorageAccountsClient) UpdateResponder(resp *http.Response) (result autorest.Response, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByClosing()) + result.Response = resp + return +} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account/version.go b/vendor/github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account/version.go new file mode 100644 index 000000000000..1f7dbe76a206 --- /dev/null +++ b/vendor/github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account/version.go @@ -0,0 +1,30 @@ +package account + +import "github.com/Azure/azure-sdk-for-go/version" + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +// UserAgent returns the UserAgent string to use when sending http.Requests. +func UserAgent() string { + return "Azure-SDK-For-Go/" + version.Number + " account/2016-11-01" +} + +// Version returns the semantic version (see http://semver.org) of the client. +func Version() string { + return version.Number +} diff --git a/vendor/vendor.json b/vendor/vendor.json index d039934f1ea0..3f50a5088e23 100644 --- a/vendor/vendor.json +++ b/vendor/vendor.json @@ -66,6 +66,14 @@ "version": "=v18.0.0", "versionExact": "v18.0.0" }, + { + "checksumSHA1": "9oNfXIzF5S8ninqAOSfpVf5rNEY=", + "path": "github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account", + "revision": "fbe7db0e3f9793ba3e5704efbab84f51436c136e", + "revisionTime": "2018-07-03T19:15:42Z", + "version": "=v18.0.0", + "versionExact": "v18.0.0" + }, { "checksumSHA1": "koKSQ4PgeyIgykEPNEeE2tzRm18=", "path": "github.com/Azure/azure-sdk-for-go/services/datalake/store/mgmt/2016-11-01/account", diff --git a/website/azurerm.erb b/website/azurerm.erb index 79136d1caebd..db7ba8164715 100644 --- a/website/azurerm.erb +++ b/website/azurerm.erb @@ -434,6 +434,14 @@ azurerm_data_lake_store_firewall_rule + > + azurerm_data_lake_analytics_account + + + > + azurerm_data_lake_analytics_firewall_rule + + diff --git a/website/docs/r/data_lake_analytics_account.html.markdown b/website/docs/r/data_lake_analytics_account.html.markdown new file mode 100644 index 000000000000..ca97cfebe9fc --- /dev/null +++ b/website/docs/r/data_lake_analytics_account.html.markdown @@ -0,0 +1,65 @@ +--- +layout: "azurerm" +page_title: "Azure Resource Manager: azurerm_data_lake_analytics_account" +sidebar_current: "docs-azurerm-resource-data-lake-analytics-account-x" +description: |- + Manage an Azure Data Lake Analytics Account. +--- + +# azurerm_data_lake_analytics_account + +Manage an Azure Data Lake Analytics Account. 
+
+## Example Usage
+
+```hcl
+resource "azurerm_resource_group" "example" {
+  name     = "example"
+  location = "northeurope"
+}
+
+resource "azurerm_data_lake_store" "example" {
+  name                = "consumptiondatalake"
+  resource_group_name = "${azurerm_resource_group.example.name}"
+  location            = "${azurerm_resource_group.example.location}"
+}
+
+resource "azurerm_data_lake_analytics_account" "example" {
+  name                = "exampledatalakeaccount"
+  resource_group_name = "${azurerm_resource_group.example.name}"
+  location            = "${azurerm_resource_group.example.location}"
+
+  default_store_account_name = "${azurerm_data_lake_store.example.name}"
+}
+
+```
+
+## Argument Reference
+
+The following arguments are supported:
+
+* `name` - (Required) Specifies the name of the Data Lake Analytics Account. Changing this forces a new resource to be created. Must be between 3 and 24 characters.
+
+* `resource_group_name` - (Required) The name of the resource group in which to create the Data Lake Analytics Account.
+
+* `location` - (Required) Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
+
+* `default_store_account_name` - (Required) Specifies the Data Lake Store to use by default. Changing this forces a new resource to be created.
+
+* `tier` - (Optional) The monthly commitment tier for the Data Lake Analytics Account. Accepted values are `Consumption`, `Commitment_100000AUHours`, `Commitment_10000AUHours`, `Commitment_1000AUHours`, `Commitment_100AUHours`, `Commitment_500000AUHours`, `Commitment_50000AUHours`, `Commitment_5000AUHours`, or `Commitment_500AUHours`.
+
+* `tags` - (Optional) A mapping of tags to assign to the resource.
+
+## Attributes Reference
+
+The following attributes are exported:
+
+* `id` - The Data Lake Analytics Account ID.
+
+## Import
+
+Data Lake Analytics Accounts can be imported using the `resource id`, e.g.
+
+```shell
+terraform import azurerm_data_lake_analytics_account.test /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.DataLakeAnalytics/accounts/mydatalakeaccount
+```
diff --git a/website/docs/r/data_lake_analytics_firewall_rule.html.markdown b/website/docs/r/data_lake_analytics_firewall_rule.html.markdown
new file mode 100644
index 000000000000..cceeae36d808
--- /dev/null
+++ b/website/docs/r/data_lake_analytics_firewall_rule.html.markdown
@@ -0,0 +1,70 @@
+---
+layout: "azurerm"
+page_title: "Azure Resource Manager: azurerm_data_lake_analytics_firewall_rule"
+sidebar_current: "docs-azurerm-resource-data-lake-analytics-firewall-rule"
+description: |-
+  Manage an Azure Data Lake Analytics Firewall Rule.
+---
+
+# azurerm_data_lake_analytics_firewall_rule
+
+Manage an Azure Data Lake Analytics Firewall Rule.
+
+## Example Usage
+
+```hcl
+resource "azurerm_resource_group" "example" {
+  name     = "example"
+  location = "northeurope"
+}
+
+resource "azurerm_data_lake_store" "example" {
+  name                = "consumptiondatalake"
+  resource_group_name = "${azurerm_resource_group.example.name}"
+  location            = "${azurerm_resource_group.example.location}"
+}
+
+resource "azurerm_data_lake_analytics_account" "example" {
+  name                = "exampledatalakeaccount"
+  resource_group_name = "${azurerm_resource_group.example.name}"
+  location            = "${azurerm_resource_group.example.location}"
+
+  default_store_account_name = "${azurerm_data_lake_store.example.name}"
+}
+
+resource "azurerm_data_lake_analytics_firewall_rule" "example" {
+  name                = "office-ip-range"
+  account_name        = "${azurerm_data_lake_analytics_account.example.name}"
+  resource_group_name = "${azurerm_resource_group.example.name}"
+  start_ip_address    = "1.2.3.4"
+  end_ip_address      = "2.3.4.5"
+}
+```
+
+## Argument Reference
+
+The following arguments are supported:
+
+* `name` - (Required) Specifies the name of the Data Lake Analytics Firewall Rule. Changing this forces a new resource to be created.
+
+* `resource_group_name` - (Required) The name of the resource group in which the Data Lake Analytics Account exists.
+
+* `account_name` - (Required) Specifies the name of the Data Lake Analytics Account for which the Firewall Rule should take effect.
+
+* `start_ip_address` - (Required) The Start IP address for the firewall rule.
+
+* `end_ip_address` - (Required) The End IP Address for the firewall rule.
+
+## Attributes Reference
+
+The following attributes are exported:
+
+* `id` - The Data Lake Analytics Firewall Rule ID.
+
+## Import
+
+Data Lake Analytics Firewall Rules can be imported using the `resource id`, e.g.
+
+```shell
+terraform import azurerm_data_lake_analytics_firewall_rule.rule1 /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.DataLakeAnalytics/accounts/mydatalakeaccount/firewallRules/rule1
+```
diff --git a/website/docs/r/data_lake_store.html.markdown b/website/docs/r/data_lake_store.html.markdown
index d36296e063bf..3a421467c774 100644
--- a/website/docs/r/data_lake_store.html.markdown
+++ b/website/docs/r/data_lake_store.html.markdown
@@ -13,15 +13,15 @@ Manage an Azure Data Lake Store.
 ## Example Usage
 
 ```hcl
-resource "azurerm_resource_group" "test" {
-  name     = "test"
+resource "azurerm_resource_group" "example" {
+  name     = "example"
   location = "northeurope"
 }
 
-resource "azurerm_data_lake_store" "consumption" {
+resource "azurerm_data_lake_store" "example" {
   name                = "consumptiondatalake"
-  resource_group_name = "${azurerm_resource_group.test.name}"
-  location            = "${azurerm_resource_group.test.location}"
+  resource_group_name = "${azurerm_resource_group.example.name}"
+  location            = "${azurerm_resource_group.example.location}"
 }
 ```
 
diff --git a/website/docs/r/data_lake_store_firewall_rule.html.markdown b/website/docs/r/data_lake_store_firewall_rule.html.markdown
index 69018d1d4b3f..b25446e7c691 100644
--- a/website/docs/r/data_lake_store_firewall_rule.html.markdown
+++ b/website/docs/r/data_lake_store_firewall_rule.html.markdown
@@ -13,23 +13,23 @@ Manage a Azure Data Lake Store Firewall Rule.
## Example Usage ```hcl -resource "azurerm_resource_group" "test" { - name = "test" +resource "azurerm_resource_group" "example" { + name = "example" location = "northeurope" } -resource "azurerm_data_lake_store" "test" { +resource "azurerm_data_lake_store" "example" { name = "consumptiondatalake" - resource_group_name = "${azurerm_resource_group.test.name}" - location = "${azurerm_resource_group.test.location}" + resource_group_name = "${azurerm_resource_group.example.name}" + location = "${azurerm_resource_group.example.location}" } -resource "azurerm_data_lake_store_firewall_rule" "test" { - name = "office-ip-range" - account_name = "${azurerm_data_lake_store.test.name}" - resource_group_name = "${azurerm_resource_group.test.name}" - start_ip_address = "1.2.3.4" - end_ip_address = "2.3.4.5" +resource "azurerm_data_lake_store_firewall_rule" "example" { + name = "office-ip-range" + account_name = "${azurerm_data_lake_store.example.name}" + resource_group_name = "${azurerm_resource_group.example.name}" + start_ip_address = "1.2.3.4" + end_ip_address = "2.3.4.5" } ```
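As context for the AutoRest-generated pagination helpers vendored in this change, the sketch below shows one way the new `StorageAccountsClient.ListStorageContainersComplete` iterator could be consumed. It is illustrative only and not part of the diff: the package name, function name, variable names, and the `Name` field access are assumptions; the constructor and iterator methods follow the usual generated-client pattern.

```go
package example

import (
	"context"
	"fmt"

	"github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account"
	"github.com/Azure/go-autorest/autorest"
)

// listContainers prints every Azure Storage blob container linked to a Data
// Lake Analytics account, letting the ...Complete iterator cross page
// boundaries on our behalf.
func listContainers(ctx context.Context, authorizer autorest.Authorizer, subscriptionID, resourceGroup, accountName, storageAccountName string) error {
	client := account.NewStorageAccountsClient(subscriptionID)
	client.Authorizer = authorizer

	iter, err := client.ListStorageContainersComplete(ctx, resourceGroup, accountName, storageAccountName)
	if err != nil {
		return fmt.Errorf("Error listing storage containers for Data Lake Analytics account %q: %+v", accountName, err)
	}

	for iter.NotDone() {
		container := iter.Value()
		// Name is assumed to be exposed as *string on the generated StorageContainer model.
		if container.Name != nil {
			fmt.Println(*container.Name)
		}
		if err := iter.Next(); err != nil {
			return err
		}
	}

	return nil
}
```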