diff --git a/sdk/datafactory/azure-mgmt-datafactory/README.md b/sdk/datafactory/azure-mgmt-datafactory/README.md index 3e020b923eff..69a8e167839c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/README.md +++ b/sdk/datafactory/azure-mgmt-datafactory/README.md @@ -2,7 +2,7 @@ This is the Microsoft Azure Data Factory Management Client Library. This package has been tested with Python 2.7, 3.5, 3.6, 3.7 and 3.8. -For a more complete view of Azure libraries, see the [Github repo](https://github.com/Azure/azure-sdk-for-python/sdk) +For a more complete view of Azure libraries, see the [Github repo](https://github.com/Azure/azure-sdk-for-python/) # Usage diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index 3ca7ea25399a..9b42ef789e63 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -103,6 +103,7 @@ from ._models_py3 import AzureTableSource from ._models_py3 import AzureTableStorageLinkedService from ._models_py3 import BinaryDataset + from ._models_py3 import BinaryReadSettings from ._models_py3 import BinarySink from ._models_py3 import BinarySource from ._models_py3 import BlobEventsTrigger @@ -119,6 +120,7 @@ from ._models_py3 import CommonDataServiceForAppsSink from ._models_py3 import CommonDataServiceForAppsSource from ._models_py3 import ComponentSetup + from ._models_py3 import CompressionReadSettings from ._models_py3 import ConcurLinkedService from ._models_py3 import ConcurObjectDataset from ._models_py3 import ConcurSource @@ -218,6 +220,7 @@ from ._models_py3 import ExecutePipelineActivity from ._models_py3 import ExecuteSSISPackageActivity from ._models_py3 import ExecutionActivity + from ._models_py3 import ExportSettings from ._models_py3 import ExposureControlRequest from ._models_py3 import ExposureControlResponse from ._models_py3 import Expression @@ -288,6 +291,7 @@ from ._models_py3 import ImpalaLinkedService from ._models_py3 import ImpalaObjectDataset from ._models_py3 import ImpalaSource + from ._models_py3 import ImportSettings from ._models_py3 import InformixLinkedService from ._models_py3 import InformixSink from ._models_py3 import InformixSource @@ -317,6 +321,7 @@ from ._models_py3 import JiraSource from ._models_py3 import JsonDataset from ._models_py3 import JsonFormat + from ._models_py3 import JsonReadSettings from ._models_py3 import JsonSink from ._models_py3 import JsonSource from ._models_py3 import JsonWriteSettings @@ -394,6 +399,7 @@ from ._models_py3 import OrcFormat from ._models_py3 import OrcSink from ._models_py3 import OrcSource + from ._models_py3 import PackageStore from ._models_py3 import ParameterSpecification from ._models_py3 import ParquetDataset from ._models_py3 import ParquetFormat @@ -488,10 +494,19 @@ from ._models_py3 import SftpReadSettings from ._models_py3 import SftpServerLinkedService from ._models_py3 import SftpWriteSettings + from ._models_py3 import SharePointOnlineListLinkedService + from ._models_py3 import SharePointOnlineListResourceDataset + from ._models_py3 import SharePointOnlineListSource from ._models_py3 import ShopifyLinkedService from ._models_py3 import ShopifyObjectDataset from ._models_py3 import ShopifySource from ._models_py3 import SkipErrorFile + from ._models_py3 import SnowflakeDataset + from ._models_py3 import SnowflakeExportCopyCommand + 
from ._models_py3 import SnowflakeImportCopyCommand + from ._models_py3 import SnowflakeLinkedService + from ._models_py3 import SnowflakeSink + from ._models_py3 import SnowflakeSource from ._models_py3 import SparkLinkedService from ._models_py3 import SparkObjectDataset from ._models_py3 import SparkSource @@ -499,6 +514,7 @@ from ._models_py3 import SqlDWSource from ._models_py3 import SqlMISink from ._models_py3 import SqlMISource + from ._models_py3 import SqlPartitionSettings from ._models_py3 import SqlServerLinkedService from ._models_py3 import SqlServerSink from ._models_py3 import SqlServerSource @@ -580,6 +596,7 @@ from ._models_py3 import XeroLinkedService from ._models_py3 import XeroObjectDataset from ._models_py3 import XeroSource + from ._models_py3 import ZipDeflateReadSettings from ._models_py3 import ZohoLinkedService from ._models_py3 import ZohoObjectDataset from ._models_py3 import ZohoSource @@ -677,6 +694,7 @@ from ._models import AzureTableSource from ._models import AzureTableStorageLinkedService from ._models import BinaryDataset + from ._models import BinaryReadSettings from ._models import BinarySink from ._models import BinarySource from ._models import BlobEventsTrigger @@ -693,6 +711,7 @@ from ._models import CommonDataServiceForAppsSink from ._models import CommonDataServiceForAppsSource from ._models import ComponentSetup + from ._models import CompressionReadSettings from ._models import ConcurLinkedService from ._models import ConcurObjectDataset from ._models import ConcurSource @@ -792,6 +811,7 @@ from ._models import ExecutePipelineActivity from ._models import ExecuteSSISPackageActivity from ._models import ExecutionActivity + from ._models import ExportSettings from ._models import ExposureControlRequest from ._models import ExposureControlResponse from ._models import Expression @@ -862,6 +882,7 @@ from ._models import ImpalaLinkedService from ._models import ImpalaObjectDataset from ._models import ImpalaSource + from ._models import ImportSettings from ._models import InformixLinkedService from ._models import InformixSink from ._models import InformixSource @@ -891,6 +912,7 @@ from ._models import JiraSource from ._models import JsonDataset from ._models import JsonFormat + from ._models import JsonReadSettings from ._models import JsonSink from ._models import JsonSource from ._models import JsonWriteSettings @@ -968,6 +990,7 @@ from ._models import OrcFormat from ._models import OrcSink from ._models import OrcSource + from ._models import PackageStore from ._models import ParameterSpecification from ._models import ParquetDataset from ._models import ParquetFormat @@ -1062,10 +1085,19 @@ from ._models import SftpReadSettings from ._models import SftpServerLinkedService from ._models import SftpWriteSettings + from ._models import SharePointOnlineListLinkedService + from ._models import SharePointOnlineListResourceDataset + from ._models import SharePointOnlineListSource from ._models import ShopifyLinkedService from ._models import ShopifyObjectDataset from ._models import ShopifySource from ._models import SkipErrorFile + from ._models import SnowflakeDataset + from ._models import SnowflakeExportCopyCommand + from ._models import SnowflakeImportCopyCommand + from ._models import SnowflakeLinkedService + from ._models import SnowflakeSink + from ._models import SnowflakeSource from ._models import SparkLinkedService from ._models import SparkObjectDataset from ._models import SparkSource @@ -1073,6 +1105,7 @@ from ._models import 
SqlDWSource from ._models import SqlMISink from ._models import SqlMISource + from ._models import SqlPartitionSettings from ._models import SqlServerLinkedService from ._models import SqlServerSink from ._models import SqlServerSource @@ -1154,6 +1187,7 @@ from ._models import XeroLinkedService from ._models import XeroObjectDataset from ._models import XeroSource + from ._models import ZipDeflateReadSettings from ._models import ZohoLinkedService from ._models import ZohoObjectDataset from ._models import ZohoSource @@ -1221,6 +1255,7 @@ NetezzaPartitionOption, CassandraSourceReadConsistencyLevels, TeradataPartitionOption, + SqlPartitionOption, StoredProcedureParameterType, SapTablePartitionOption, SapHanaPartitionOption, @@ -1339,6 +1374,7 @@ 'AzureTableSource', 'AzureTableStorageLinkedService', 'BinaryDataset', + 'BinaryReadSettings', 'BinarySink', 'BinarySource', 'BlobEventsTrigger', @@ -1355,6 +1391,7 @@ 'CommonDataServiceForAppsSink', 'CommonDataServiceForAppsSource', 'ComponentSetup', + 'CompressionReadSettings', 'ConcurLinkedService', 'ConcurObjectDataset', 'ConcurSource', @@ -1454,6 +1491,7 @@ 'ExecutePipelineActivity', 'ExecuteSSISPackageActivity', 'ExecutionActivity', + 'ExportSettings', 'ExposureControlRequest', 'ExposureControlResponse', 'Expression', @@ -1524,6 +1562,7 @@ 'ImpalaLinkedService', 'ImpalaObjectDataset', 'ImpalaSource', + 'ImportSettings', 'InformixLinkedService', 'InformixSink', 'InformixSource', @@ -1553,6 +1592,7 @@ 'JiraSource', 'JsonDataset', 'JsonFormat', + 'JsonReadSettings', 'JsonSink', 'JsonSource', 'JsonWriteSettings', @@ -1630,6 +1670,7 @@ 'OrcFormat', 'OrcSink', 'OrcSource', + 'PackageStore', 'ParameterSpecification', 'ParquetDataset', 'ParquetFormat', @@ -1724,10 +1765,19 @@ 'SftpReadSettings', 'SftpServerLinkedService', 'SftpWriteSettings', + 'SharePointOnlineListLinkedService', + 'SharePointOnlineListResourceDataset', + 'SharePointOnlineListSource', 'ShopifyLinkedService', 'ShopifyObjectDataset', 'ShopifySource', 'SkipErrorFile', + 'SnowflakeDataset', + 'SnowflakeExportCopyCommand', + 'SnowflakeImportCopyCommand', + 'SnowflakeLinkedService', + 'SnowflakeSink', + 'SnowflakeSource', 'SparkLinkedService', 'SparkObjectDataset', 'SparkSource', @@ -1735,6 +1785,7 @@ 'SqlDWSource', 'SqlMISink', 'SqlMISource', + 'SqlPartitionSettings', 'SqlServerLinkedService', 'SqlServerSink', 'SqlServerSource', @@ -1816,6 +1867,7 @@ 'XeroLinkedService', 'XeroObjectDataset', 'XeroSource', + 'ZipDeflateReadSettings', 'ZohoLinkedService', 'ZohoObjectDataset', 'ZohoSource', @@ -1882,6 +1934,7 @@ 'NetezzaPartitionOption', 'CassandraSourceReadConsistencyLevels', 'TeradataPartitionOption', + 'SqlPartitionOption', 'StoredProcedureParameterType', 'SapTablePartitionOption', 'SapHanaPartitionOption', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py index e30a4c509021..df4eaae84bb6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py @@ -433,6 +433,13 @@ class TeradataPartitionOption(str, Enum): dynamic_range = "DynamicRange" +class SqlPartitionOption(str, Enum): + + none = "None" + physical_partitions_of_table = "PhysicalPartitionsOfTable" + dynamic_range = "DynamicRange" + + class StoredProcedureParameterType(str, 
Enum): string = "String" @@ -466,6 +473,7 @@ class SsisPackageLocationType(str, Enum): ssisdb = "SSISDB" file = "File" inline_package = "InlinePackage" + package_store = "PackageStore" class HDInsightActivityDebugInfoOption(str, Enum): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py index 7b71e5868005..93bf2d77c2a5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py @@ -330,24 +330,25 @@ class LinkedService(Model): resource. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureFunctionLinkedService, - AzureDataExplorerLinkedService, SapTableLinkedService, - GoogleAdWordsLinkedService, OracleServiceCloudLinkedService, - DynamicsAXLinkedService, ResponsysLinkedService, - AzureDatabricksLinkedService, AzureDataLakeAnalyticsLinkedService, - HDInsightOnDemandLinkedService, SalesforceMarketingCloudLinkedService, - NetezzaLinkedService, VerticaLinkedService, ZohoLinkedService, - XeroLinkedService, SquareLinkedService, SparkLinkedService, - ShopifyLinkedService, ServiceNowLinkedService, QuickBooksLinkedService, - PrestoLinkedService, PhoenixLinkedService, PaypalLinkedService, - MarketoLinkedService, AzureMariaDBLinkedService, MariaDBLinkedService, - MagentoLinkedService, JiraLinkedService, ImpalaLinkedService, - HubspotLinkedService, HiveLinkedService, HBaseLinkedService, - GreenplumLinkedService, GoogleBigQueryLinkedService, EloquaLinkedService, - DrillLinkedService, CouchbaseLinkedService, ConcurLinkedService, - AzurePostgreSqlLinkedService, AmazonMWSLinkedService, SapHanaLinkedService, - SapBWLinkedService, SftpServerLinkedService, FtpServerLinkedService, - HttpLinkedService, AzureSearchLinkedService, CustomDataSourceLinkedService, + sub-classes are: SharePointOnlineListLinkedService, SnowflakeLinkedService, + AzureFunctionLinkedService, AzureDataExplorerLinkedService, + SapTableLinkedService, GoogleAdWordsLinkedService, + OracleServiceCloudLinkedService, DynamicsAXLinkedService, + ResponsysLinkedService, AzureDatabricksLinkedService, + AzureDataLakeAnalyticsLinkedService, HDInsightOnDemandLinkedService, + SalesforceMarketingCloudLinkedService, NetezzaLinkedService, + VerticaLinkedService, ZohoLinkedService, XeroLinkedService, + SquareLinkedService, SparkLinkedService, ShopifyLinkedService, + ServiceNowLinkedService, QuickBooksLinkedService, PrestoLinkedService, + PhoenixLinkedService, PaypalLinkedService, MarketoLinkedService, + AzureMariaDBLinkedService, MariaDBLinkedService, MagentoLinkedService, + JiraLinkedService, ImpalaLinkedService, HubspotLinkedService, + HiveLinkedService, HBaseLinkedService, GreenplumLinkedService, + GoogleBigQueryLinkedService, EloquaLinkedService, DrillLinkedService, + CouchbaseLinkedService, ConcurLinkedService, AzurePostgreSqlLinkedService, + AmazonMWSLinkedService, SapHanaLinkedService, SapBWLinkedService, + SftpServerLinkedService, FtpServerLinkedService, HttpLinkedService, + AzureSearchLinkedService, CustomDataSourceLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, RestServiceLinkedService, SapOpenHubLinkedService, SapEccLinkedService, SapCloudForCustomerLinkedService, SalesforceServiceCloudLinkedService, @@ -402,7 +403,7 @@ class LinkedService(Model): } _subtype_map = { - 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 
'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureMLService': 'AzureMLServiceLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 
'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + 'type': {'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureMLService': 'AzureMLServiceLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'AzureFileStorage': 
'AzureFileStorageLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} } def __init__(self, **kwargs): @@ -514,7 +515,8 @@ class Dataset(Model): data stores, such as tables, files, folders, and documents. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: GoogleAdWordsObjectDataset, AzureDataExplorerTableDataset, + sub-classes are: SharePointOnlineListResourceDataset, SnowflakeDataset, + GoogleAdWordsObjectDataset, AzureDataExplorerTableDataset, OracleServiceCloudObjectDataset, DynamicsAXResourceDataset, ResponsysObjectDataset, SalesforceMarketingCloudObjectDataset, VerticaTableDataset, NetezzaTableDataset, ZohoObjectDataset, @@ -595,7 +597,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 
'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'Orc': 'OrcDataset', 'Json': 'JsonDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'SharePointOnlineListResource': 'SharePointOnlineListResourceDataset', 'SnowflakeTable': 'SnowflakeDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 
'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'Orc': 'OrcDataset', 'Json': 'JsonDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, **kwargs): @@ -675,10 +677,11 @@ class CopySource(Model): """A copy activity source. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: HttpSource, AzureBlobFSSource, AzureDataLakeStoreSource, - Office365Source, CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, - WebSource, OracleSource, AzureDataExplorerSource, HdfsSource, - FileSystemSource, RestSource, SalesforceServiceCloudSource, ODataSource, + sub-classes are: SharePointOnlineListSource, SnowflakeSource, HttpSource, + AzureBlobFSSource, AzureDataLakeStoreSource, Office365Source, + CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, WebSource, + OracleSource, AzureDataExplorerSource, HdfsSource, FileSystemSource, + RestSource, SalesforceServiceCloudSource, ODataSource, MicrosoftAccessSource, RelationalSource, CommonDataServiceForAppsSource, DynamicsCrmSource, DynamicsSource, CosmosDbSqlApiSource, DocumentDbCollectionSource, BlobSource, TabularSource, BinarySource, @@ -717,7 +720,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'ODataSource': 'ODataSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'TabularSource': 'TabularSource', 'BinarySource': 'BinarySource', 'OrcSource': 'OrcSource', 'JsonSource': 'JsonSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} + 'type': {'SharePointOnlineListSource': 'SharePointOnlineListSource', 'SnowflakeSource': 'SnowflakeSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'ODataSource': 'ODataSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'TabularSource': 'TabularSource', 'BinarySource': 'BinarySource', 'OrcSource': 'OrcSource', 'JsonSource': 'JsonSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} } def __init__(self, **kwargs): @@ -1395,6 +1398,10 @@ class AmazonS3ReadSettings(StoreReadSettings): :param enable_partition_discovery: Indicates 
whether to enable partition discovery. :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition + discovery starts from. Type: string (or Expression with resultType + string). + :type partition_root_path: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). :type modified_datetime_start: object @@ -1417,6 +1424,7 @@ class AmazonS3ReadSettings(StoreReadSettings): 'prefix': {'key': 'prefix', 'type': 'object'}, 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } @@ -1429,6 +1437,7 @@ def __init__(self, **kwargs): self.prefix = kwargs.get('prefix', None) self.file_list_path = kwargs.get('file_list_path', None) self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.partition_root_path = kwargs.get('partition_root_path', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) self.type = 'AmazonS3ReadSettings' @@ -1689,7 +1698,7 @@ class CopySink(Model): SalesforceSink, AzureDataExplorerSink, CommonDataServiceForAppsSink, DynamicsCrmSink, DynamicsSink, MicrosoftAccessSink, InformixSink, OdbcSink, AzureSearchIndexSink, AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, - SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, + SnowflakeSink, SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, CosmosDbSqlApiSink, DocumentDbCollectionSink, FileSystemSink, BlobSink, BinarySink, ParquetSink, AvroSink, AzureTableSink, AzureQueueSink, SapCloudForCustomerSink, AzureMySqlSink, AzurePostgreSqlSink, OrcSink, @@ -1737,7 +1746,7 @@ class CopySink(Model): } _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'OrcSink': 'OrcSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 
'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'OrcSink': 'OrcSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} } def __init__(self, **kwargs): @@ -2310,6 +2319,10 @@ class AzureBlobFSReadSettings(StoreReadSettings): :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition + discovery starts from. Type: string (or Expression with resultType + string). + :type partition_root_path: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). :type modified_datetime_start: object @@ -2331,6 +2344,7 @@ class AzureBlobFSReadSettings(StoreReadSettings): 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } @@ -2342,6 +2356,7 @@ def __init__(self, **kwargs): self.wildcard_file_name = kwargs.get('wildcard_file_name', None) self.file_list_path = kwargs.get('file_list_path', None) self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.partition_root_path = kwargs.get('partition_root_path', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) self.type = 'AzureBlobFSReadSettings' @@ -2705,6 +2720,10 @@ class AzureBlobStorageReadSettings(StoreReadSettings): :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition + discovery starts from. Type: string (or Expression with resultType + string). + :type partition_root_path: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). 
:type modified_datetime_start: object @@ -2727,6 +2746,7 @@ class AzureBlobStorageReadSettings(StoreReadSettings): 'prefix': {'key': 'prefix', 'type': 'object'}, 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } @@ -2739,6 +2759,7 @@ def __init__(self, **kwargs): self.prefix = kwargs.get('prefix', None) self.file_list_path = kwargs.get('file_list_path', None) self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.partition_root_path = kwargs.get('partition_root_path', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) self.type = 'AzureBlobStorageReadSettings' @@ -3613,6 +3634,10 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition + discovery starts from. Type: string (or Expression with resultType + string). + :type partition_root_path: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). :type modified_datetime_start: object @@ -3634,6 +3659,7 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } @@ -3645,6 +3671,7 @@ def __init__(self, **kwargs): self.wildcard_file_name = kwargs.get('wildcard_file_name', None) self.file_list_path = kwargs.get('file_list_path', None) self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.partition_root_path = kwargs.get('partition_root_path', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) self.type = 'AzureDataLakeStoreReadSettings' @@ -3815,14 +3842,34 @@ class AzureFileStorageLinkedService(LinkedService): :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str - :param host: Required. Host name of the server. Type: string (or - Expression with resultType string). + :param host: Host name of the server. Type: string (or Expression with + resultType string). :type host: object :param user_id: User ID to logon the server. Type: string (or Expression with resultType string). :type user_id: object :param password: Password to logon the server. :type password: ~azure.mgmt.datafactory.models.SecretBase + :param connection_string: The connection string. It is mutually exclusive + with sasUri property. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param account_key: The Azure key vault secret reference of accountKey in + connection string. 
+ :type account_key: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param sas_uri: SAS URI of the Azure File resource. It is mutually + exclusive with connectionString property. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type sas_uri: object + :param sas_token: The Azure key vault secret reference of sasToken in sas + uri. + :type sas_token: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param file_share: The azure file share name. It is required when auth + with accountKey/sasToken. Type: string (or Expression with resultType + string). + :type file_share: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -3831,7 +3878,6 @@ class AzureFileStorageLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'host': {'required': True}, } _attribute_map = { @@ -3844,6 +3890,11 @@ class AzureFileStorageLinkedService(LinkedService): 'host': {'key': 'typeProperties.host', 'type': 'object'}, 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, + 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, + 'file_share': {'key': 'typeProperties.fileShare', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -3852,6 +3903,11 @@ def __init__(self, **kwargs): self.host = kwargs.get('host', None) self.user_id = kwargs.get('user_id', None) self.password = kwargs.get('password', None) + self.connection_string = kwargs.get('connection_string', None) + self.account_key = kwargs.get('account_key', None) + self.sas_uri = kwargs.get('sas_uri', None) + self.sas_token = kwargs.get('sas_token', None) + self.file_share = kwargs.get('file_share', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'AzureFileStorage' @@ -3914,6 +3970,9 @@ class AzureFileStorageReadSettings(StoreReadSettings): :param wildcard_file_name: Azure File Storage wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param prefix: The prefix filter for the Azure File name starting from + root path. Type: string (or Expression with resultType string). + :type prefix: object :param file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). @@ -3921,6 +3980,10 @@ class AzureFileStorageReadSettings(StoreReadSettings): :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition + discovery starts from. Type: string (or Expression with resultType + string). + :type partition_root_path: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). 
:type modified_datetime_start: object @@ -3940,8 +4003,10 @@ class AzureFileStorageReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } @@ -3951,8 +4016,10 @@ def __init__(self, **kwargs): self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.prefix = kwargs.get('prefix', None) self.file_list_path = kwargs.get('file_list_path', None) self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.partition_root_path = kwargs.get('partition_root_path', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) self.type = 'AzureFileStorageReadSettings' @@ -5980,6 +6047,15 @@ class AzureSqlSource(TabularSource): ~azure.mgmt.datafactory.models.StoredProcedureParameter] :param produce_additional_types: Which additional types to produce. :type produce_additional_types: object + :param partition_option: The partition mechanism that will be used for Sql + read in parallel. Possible values include: 'None', + 'PhysicalPartitionsOfTable', 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.SqlPartitionOption + :param partition_settings: The settings that will be leveraged for Sql + source partitioning. + :type partition_settings: + ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { @@ -5998,6 +6074,8 @@ class AzureSqlSource(TabularSource): 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__(self, **kwargs): @@ -6006,6 +6084,8 @@ def __init__(self, **kwargs): self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) self.produce_additional_types = kwargs.get('produce_additional_types', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) self.type = 'AzureSqlSource' @@ -6478,6 +6558,72 @@ def __init__(self, **kwargs): self.type = 'Binary' +class FormatReadSettings(Model): + """Format read settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: JsonReadSettings, DelimitedTextReadSettings, + BinaryReadSettings + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'JsonReadSettings': 'JsonReadSettings', 'DelimitedTextReadSettings': 'DelimitedTextReadSettings', 'BinaryReadSettings': 'BinaryReadSettings'} + } + + def __init__(self, **kwargs): + super(FormatReadSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = None + + +class BinaryReadSettings(FormatReadSettings): + """Binary read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param compression_properties: Compression settings. + :type compression_properties: + ~azure.mgmt.datafactory.models.CompressionReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'compression_properties': {'key': 'compressionProperties', 'type': 'CompressionReadSettings'}, + } + + def __init__(self, **kwargs): + super(BinaryReadSettings, self).__init__(**kwargs) + self.compression_properties = kwargs.get('compression_properties', None) + self.type = 'BinaryReadSettings' + + class BinarySink(CopySink): """A copy activity Binary sink. @@ -6554,6 +6700,8 @@ class BinarySource(CopySource): :type type: str :param store_settings: Binary store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :param format_settings: Binary format settings. + :type format_settings: ~azure.mgmt.datafactory.models.BinaryReadSettings """ _validation = { @@ -6567,11 +6715,13 @@ class BinarySource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'BinaryReadSettings'}, } def __init__(self, **kwargs): super(BinarySource, self).__init__(**kwargs) self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) self.type = 'BinarySource' @@ -7682,6 +7832,40 @@ def __init__(self, **kwargs): self.type = 'ComponentSetup' +class CompressionReadSettings(Model): + """Compression read settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ZipDeflateReadSettings + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} + } + + def __init__(self, **kwargs): + super(CompressionReadSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = None + + class ConcurLinkedService(LinkedService): """Concur Service linked service. @@ -10503,40 +10687,6 @@ def __init__(self, **kwargs): self.type = 'DelimitedText' -class FormatReadSettings(Model): - """Format read settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DelimitedTextReadSettings - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'DelimitedTextReadSettings': 'DelimitedTextReadSettings'} - } - - def __init__(self, **kwargs): - super(FormatReadSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = None - - class DelimitedTextReadSettings(FormatReadSettings): """Delimited text read settings. @@ -10551,6 +10701,9 @@ class DelimitedTextReadSettings(FormatReadSettings): when reading data from input files. Type: integer (or Expression with resultType integer). :type skip_line_count: object + :param compression_properties: Compression settings. + :type compression_properties: + ~azure.mgmt.datafactory.models.CompressionReadSettings """ _validation = { @@ -10561,11 +10714,13 @@ class DelimitedTextReadSettings(FormatReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, + 'compression_properties': {'key': 'compressionProperties', 'type': 'CompressionReadSettings'}, } def __init__(self, **kwargs): super(DelimitedTextReadSettings, self).__init__(**kwargs) self.skip_line_count = kwargs.get('skip_line_count', None) + self.compression_properties = kwargs.get('compression_properties', None) self.type = 'DelimitedTextReadSettings' @@ -11379,6 +11534,12 @@ class DynamicsAXSource(TabularSource): :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP + response. It is the timeout to get a response, not the timeout to read + response data. Default value: 00:05:00. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type http_request_timeout: object """ _validation = { @@ -11394,11 +11555,13 @@ class DynamicsAXSource(TabularSource): 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } def __init__(self, **kwargs): super(DynamicsAXSource, self).__init__(**kwargs) self.query = kwargs.get('query', None) + self.http_request_timeout = kwargs.get('http_request_timeout', None) self.type = 'DynamicsAXSource' @@ -12520,6 +12683,40 @@ def __init__(self, **kwargs): self.type = 'ExecuteSSISPackage' +class ExportSettings(Model): + """Export command settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SnowflakeExportCopyCommand + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand'} + } + + def __init__(self, **kwargs): + super(ExportSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = None + + class ExposureControlRequest(Model): """The exposure control request. @@ -13068,12 +13265,20 @@ class FileServerReadSettings(StoreReadSettings): :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition + discovery starts from. Type: string (or Expression with resultType + string). + :type partition_root_path: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). :type modified_datetime_start: object :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). :type modified_datetime_end: object + :param file_filter: Specify a filter to be used to select a subset of + files in the folderPath rather than all files. Type: string (or Expression + with resultType string). 
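
The httpRequestTimeout property on DynamicsAXSource follows the usual ADF TimeSpan pattern; it bounds the time to receive a response, not the time to read the response body. A hedged sketch, with the surrounding copy activity omitted and the query value made up:

    from azure.mgmt.datafactory.models import DynamicsAXSource

    source = DynamicsAXSource(
        query="$top=100",                 # illustrative OData-style query
        http_request_timeout="00:10:00",  # fail if no response within 10 minutes
    )
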
+ :type file_filter: object """ _validation = { @@ -13089,8 +13294,10 @@ class FileServerReadSettings(StoreReadSettings): 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + 'file_filter': {'key': 'fileFilter', 'type': 'object'}, } def __init__(self, **kwargs): @@ -13100,8 +13307,10 @@ def __init__(self, **kwargs): self.wildcard_file_name = kwargs.get('wildcard_file_name', None) self.file_list_path = kwargs.get('file_list_path', None) self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.partition_root_path = kwargs.get('partition_root_path', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + self.file_filter = kwargs.get('file_filter', None) self.type = 'FileServerReadSettings' @@ -13468,6 +13677,13 @@ class FtpReadSettings(StoreReadSettings): :param wildcard_file_name: Ftp wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition + discovery starts from. Type: string (or Expression with resultType + string). + :type partition_root_path: object :param file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). @@ -13488,6 +13704,8 @@ class FtpReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'}, } @@ -13497,6 +13715,8 @@ def __init__(self, **kwargs): self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.partition_root_path = kwargs.get('partition_root_path', None) self.file_list_path = kwargs.get('file_list_path', None) self.use_binary_transfer = kwargs.get('use_binary_transfer', None) self.type = 'FtpReadSettings' @@ -14367,6 +14587,10 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition + discovery starts from. Type: string (or Expression with resultType + string). + :type partition_root_path: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). 
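
FileServerReadSettings gains partition discovery and a fileFilter. A minimal sketch; the folder layout is hypothetical:

    from azure.mgmt.datafactory.models import FileServerReadSettings

    read_settings = FileServerReadSettings(
        recursive=True,
        wildcard_folder_path="sales/*",   # hypothetical layout
        enable_partition_discovery=True,
        partition_root_path="sales",      # partition columns are inferred below this path
        file_filter="*.csv",              # only copy CSV files under folderPath
    )
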
:type modified_datetime_start: object @@ -14389,6 +14613,7 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): 'prefix': {'key': 'prefix', 'type': 'object'}, 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } @@ -14401,6 +14626,7 @@ def __init__(self, **kwargs): self.prefix = kwargs.get('prefix', None) self.file_list_path = kwargs.get('file_list_path', None) self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.partition_root_path = kwargs.get('partition_root_path', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) self.type = 'GoogleCloudStorageReadSettings' @@ -14941,6 +15167,10 @@ class HdfsReadSettings(StoreReadSettings): :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition + discovery starts from. Type: string (or Expression with resultType + string). + :type partition_root_path: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). :type modified_datetime_start: object @@ -14964,6 +15194,7 @@ class HdfsReadSettings(StoreReadSettings): 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, @@ -14976,6 +15207,7 @@ def __init__(self, **kwargs): self.wildcard_file_name = kwargs.get('wildcard_file_name', None) self.file_list_path = kwargs.get('file_list_path', None) self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.partition_root_path = kwargs.get('partition_root_path', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) self.distcp_settings = kwargs.get('distcp_settings', None) @@ -16254,6 +16486,13 @@ class HttpReadSettings(StoreReadSettings): :param request_timeout: Specifies the timeout for a HTTP client to get HTTP response from HTTP server. :type request_timeout: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition + discovery starts from. Type: string (or Expression with resultType + string). 
+ :type partition_root_path: object """ _validation = { @@ -16268,6 +16507,8 @@ class HttpReadSettings(StoreReadSettings): 'request_body': {'key': 'requestBody', 'type': 'object'}, 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, 'request_timeout': {'key': 'requestTimeout', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, } def __init__(self, **kwargs): @@ -16276,6 +16517,8 @@ def __init__(self, **kwargs): self.request_body = kwargs.get('request_body', None) self.additional_headers = kwargs.get('additional_headers', None) self.request_timeout = kwargs.get('request_timeout', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.partition_root_path = kwargs.get('partition_root_path', None) self.type = 'HttpReadSettings' @@ -16857,6 +17100,40 @@ def __init__(self, **kwargs): self.type = 'ImpalaSource' +class ImportSettings(Model): + """Import command settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SnowflakeImportCopyCommand + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} + } + + def __init__(self, **kwargs): + super(ImportSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = None + + class InformixLinkedService(LinkedService): """Informix linked service. @@ -17660,6 +17937,8 @@ class IntegrationRuntimeSsisProperties(Model): properties for a SSIS integration runtime. :type express_custom_setup_properties: list[~azure.mgmt.datafactory.models.CustomSetupBase] + :param package_stores: Package stores for the SSIS Integration Runtime. + :type package_stores: list[~azure.mgmt.datafactory.models.PackageStore] """ _attribute_map = { @@ -17670,6 +17949,7 @@ class IntegrationRuntimeSsisProperties(Model): 'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'}, 'edition': {'key': 'edition', 'type': 'str'}, 'express_custom_setup_properties': {'key': 'expressCustomSetupProperties', 'type': '[CustomSetupBase]'}, + 'package_stores': {'key': 'packageStores', 'type': '[PackageStore]'}, } def __init__(self, **kwargs): @@ -17681,6 +17961,7 @@ def __init__(self, **kwargs): self.data_proxy_properties = kwargs.get('data_proxy_properties', None) self.edition = kwargs.get('edition', None) self.express_custom_setup_properties = kwargs.get('express_custom_setup_properties', None) + self.package_stores = kwargs.get('package_stores', None) class IntegrationRuntimeStatus(Model): @@ -18169,24 +18450,55 @@ def __init__(self, **kwargs): self.type = 'JsonFormat' -class JsonSink(CopySink): - """A copy activity Json sink. +class JsonReadSettings(FormatReadSettings): + """Json read settings. All required parameters must be populated in order to send to Azure. 
:param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object + :param type: Required. Constant filled by server. + :type type: str + :param compression_properties: Compression settings. + :type compression_properties: + ~azure.mgmt.datafactory.models.CompressionReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'compression_properties': {'key': 'compressionProperties', 'type': 'CompressionReadSettings'}, + } + + def __init__(self, **kwargs): + super(JsonReadSettings, self).__init__(**kwargs) + self.compression_properties = kwargs.get('compression_properties', None) + self.type = 'JsonReadSettings' + + +class JsonSink(CopySink): + """A copy activity Json sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). @@ -18249,6 +18561,8 @@ class JsonSource(CopySource): :type type: str :param store_settings: Json store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :param format_settings: Json format settings. + :type format_settings: ~azure.mgmt.datafactory.models.JsonReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). @@ -18267,12 +18581,14 @@ class JsonSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'JsonReadSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): super(JsonSource, self).__init__(**kwargs) self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) self.additional_columns = kwargs.get('additional_columns', None) self.type = 'JsonSource' @@ -20801,6 +21117,12 @@ class ODataSource(CopySource): :param query: OData query. For example, "$top=1". 
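
JsonSource now pairs store settings with the new JsonReadSettings format settings, so compressed JSON can be decompressed in-flight. A sketch; the choice of FileServerReadSettings as the store is arbitrary:

    from azure.mgmt.datafactory.models import (
        FileServerReadSettings,
        JsonReadSettings,
        JsonSource,
        ZipDeflateReadSettings,
    )

    json_source = JsonSource(
        store_settings=FileServerReadSettings(recursive=True),
        format_settings=JsonReadSettings(
            compression_properties=ZipDeflateReadSettings(
                preserve_zip_file_name_as_folder=False,
            ),
        ),
    )
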
Type: string (or Expression with resultType string). :type query: object + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP + response. It is the timeout to get a response, not the timeout to read + response data. Default value: 00:05:00. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). @@ -20819,12 +21141,14 @@ class ODataSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): super(ODataSource, self).__init__(**kwargs) self.query = kwargs.get('query', None) + self.http_request_timeout = kwargs.get('http_request_timeout', None) self.additional_columns = kwargs.get('additional_columns', None) self.type = 'ODataSource' @@ -22181,6 +22505,35 @@ def __init__(self, **kwargs): self.type = 'OrcSource' +class PackageStore(Model): + """Package store for the SSIS integration runtime. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. The name of the package store + :type name: str + :param package_store_linked_service: Required. The package store linked + service reference. + :type package_store_linked_service: + ~azure.mgmt.datafactory.models.EntityReference + """ + + _validation = { + 'name': {'required': True}, + 'package_store_linked_service': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'package_store_linked_service': {'key': 'packageStoreLinkedService', 'type': 'EntityReference'}, + } + + def __init__(self, **kwargs): + super(PackageStore, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.package_store_linked_service = kwargs.get('package_store_linked_service', None) + + class ParameterSpecification(Model): """Definition of a single parameter for an entity. @@ -25787,6 +26140,12 @@ class SapCloudForCustomerSink(CopySink): 'Insert'. Possible values include: 'Insert', 'Update' :type write_behavior: str or ~azure.mgmt.datafactory.models.SapCloudForCustomerSinkWriteBehavior + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP + response. It is the timeout to get a response, not the timeout to read + response data. Default value: 00:05:00. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
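
A package store is just a name plus a linked-service reference, attached to the SSIS integration runtime through the new packageStores list. A sketch; the EntityReference keyword arguments come from the existing model and are assumed here, and the names are placeholders:

    from azure.mgmt.datafactory.models import (
        EntityReference,
        IntegrationRuntimeSsisProperties,
        PackageStore,
    )

    ssis_properties = IntegrationRuntimeSsisProperties(
        package_stores=[
            PackageStore(
                name="MyPackageStore",  # placeholder store name
                package_store_linked_service=EntityReference(
                    type="LinkedServiceReference",  # assumed enum value
                    reference_name="MyFileShareLinkedService",
                ),
            ),
        ],
    )
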
+ :type http_request_timeout: object """ _validation = { @@ -25802,11 +26161,13 @@ class SapCloudForCustomerSink(CopySink): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } def __init__(self, **kwargs): super(SapCloudForCustomerSink, self).__init__(**kwargs) self.write_behavior = kwargs.get('write_behavior', None) + self.http_request_timeout = kwargs.get('http_request_timeout', None) self.type = 'SapCloudForCustomerSink' @@ -25843,6 +26204,12 @@ class SapCloudForCustomerSource(TabularSource): :param query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP + response. It is the timeout to get a response, not the timeout to read + response data. Default value: 00:05:00. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object """ _validation = { @@ -25858,11 +26225,13 @@ class SapCloudForCustomerSource(TabularSource): 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } def __init__(self, **kwargs): super(SapCloudForCustomerSource, self).__init__(**kwargs) self.query = kwargs.get('query', None) + self.http_request_timeout = kwargs.get('http_request_timeout', None) self.type = 'SapCloudForCustomerSource' @@ -26024,6 +26393,12 @@ class SapEccSource(TabularSource): :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP + response. It is the timeout to get a response, not the timeout to read + response data. Default value: 00:05:00. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object """ _validation = { @@ -26039,11 +26414,13 @@ class SapEccSource(TabularSource): 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } def __init__(self, **kwargs): super(SapEccSource, self).__init__(**kwargs) self.query = kwargs.get('query', None) + self.http_request_timeout = kwargs.get('http_request_timeout', None) self.type = 'SapEccSource' @@ -26071,8 +26448,8 @@ class SapHanaLinkedService(LinkedService): :param connection_string: SAP HANA ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object - :param server: Required. Host name of the SAP HANA server. Type: string - (or Expression with resultType string). + :param server: Host name of the SAP HANA server. Type: string (or + Expression with resultType string). :type server: object :param authentication_type: The authentication type to be used to connect to the SAP HANA server. 
Possible values include: 'Basic', 'Windows' @@ -26091,7 +26468,6 @@ class SapHanaLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'server': {'required': True}, } _attribute_map = { @@ -27630,6 +28006,13 @@ class SftpReadSettings(StoreReadSettings): :param wildcard_file_name: Sftp wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition + discovery starts from. Type: string (or Expression with resultType + string). + :type partition_root_path: object :param file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). @@ -27653,6 +28036,8 @@ class SftpReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, @@ -27663,6 +28048,8 @@ def __init__(self, **kwargs): self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.partition_root_path = kwargs.get('partition_root_path', None) self.file_list_path = kwargs.get('file_list_path', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) @@ -27822,6 +28209,195 @@ def __init__(self, **kwargs): self.type = 'SftpWriteSettings' +class SharePointOnlineListLinkedService(LinkedService): + """SharePoint Online List linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param site_url: Required. The URL of the SharePoint Online site. For + example, https://contoso.sharepoint.com/sites/siteName. Type: string (or + Expression with resultType string). + :type site_url: object + :param tenant_id: Required. The tenant ID under which your application + resides. You can find it from Azure portal Active Directory overview page. + Type: string (or Expression with resultType string). 
+ :type tenant_id: object + :param service_principal_id: Required. The application (client) ID of your + application registered in Azure Active Directory. Make sure to grant + SharePoint site permission to this application. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. The client secret of your + application registered in Azure Active Directory. Type: string (or + Expression with resultType string). + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'site_url': {'required': True}, + 'tenant_id': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'site_url': {'key': 'typeProperties.siteUrl', 'type': 'object'}, + 'tenant_id': {'key': 'typeProperties.tenantId', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SharePointOnlineListLinkedService, self).__init__(**kwargs) + self.site_url = kwargs.get('site_url', None) + self.tenant_id = kwargs.get('tenant_id', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'SharePointOnlineList' + + +class SharePointOnlineListResourceDataset(Dataset): + """The sharepoint online list resource dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. 
If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param list_name: The name of the SharePoint Online list. Type: string (or + Expression with resultType string). + :type list_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'list_name': {'key': 'typeProperties.listName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SharePointOnlineListResourceDataset, self).__init__(**kwargs) + self.list_name = kwargs.get('list_name', None) + self.type = 'SharePointOnlineListResource' + + +class SharePointOnlineListSource(CopySource): + """A copy activity source for sharePoint online list source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: The OData query to filter the data in SharePoint Online + list. For example, "$top=1". Type: string (or Expression with resultType + string). + :type query: object + :param http_request_timeout: The wait time to get a response from + SharePoint Online. Default value is 5 minutes (00:05:00). Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type http_request_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SharePointOnlineListSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.http_request_timeout = kwargs.get('http_request_timeout', None) + self.type = 'SharePointOnlineListSource' + + class ShopifyLinkedService(LinkedService): """Shopify Service linked service. @@ -28035,22 +28611,339 @@ def __init__(self, **kwargs): self.data_inconsistency = kwargs.get('data_inconsistency', None) -class SparkLinkedService(LinkedService): - """Spark Server linked service. +class SnowflakeDataset(Dataset): + """The snowflake dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Dataset description. :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param snowflake_dataset_schema: The schema name of the Snowflake + database. Type: string (or Expression with resultType string). + :type snowflake_dataset_schema: object + :param table: The table name of the Snowflake database. Type: string (or + Expression with resultType string). 
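
The three SharePoint Online List models line up as linked service, dataset, and copy source. A hedged end-to-end sketch; SecureString and LinkedServiceReference are pre-existing models assumed here, and every identifier is a placeholder:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        SecureString,
        SharePointOnlineListLinkedService,
        SharePointOnlineListResourceDataset,
        SharePointOnlineListSource,
    )

    linked_service = SharePointOnlineListLinkedService(
        site_url="https://contoso.sharepoint.com/sites/siteName",
        tenant_id="<tenant-id>",
        service_principal_id="<app-id>",
        service_principal_key=SecureString(value="<client-secret>"),
    )

    dataset = SharePointOnlineListResourceDataset(
        linked_service_name=LinkedServiceReference(reference_name="SharePointOnlineList"),
        list_name="MyList",
    )

    source = SharePointOnlineListSource(
        query="$top=10",
        http_request_timeout="00:05:00",
    )
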
+ :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'snowflake_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SnowflakeDataset, self).__init__(**kwargs) + self.snowflake_dataset_schema = kwargs.get('snowflake_dataset_schema', None) + self.table = kwargs.get('table', None) + self.type = 'SnowflakeTable' + + +class SnowflakeExportCopyCommand(ExportSettings): + """Snowflake export command settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param additional_copy_options: Additional copy options directly passed to + snowflake Copy Command. Type: key value pairs (value should be string + type) (or Expression with resultType object). Example: + "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": + "'HH24:MI:SS.FF'" } + :type additional_copy_options: dict[str, object] + :param additional_format_options: Additional format options directly + passed to snowflake Copy Command. Type: key value pairs (value should be + string type) (or Expression with resultType object). Example: + "additionalFormatOptions": { "OVERWRITE": "TRUE", "MAX_FILE_SIZE": + "'FALSE'" } + :type additional_format_options: dict[str, object] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'additional_copy_options': {'key': 'additionalCopyOptions', 'type': '{object}'}, + 'additional_format_options': {'key': 'additionalFormatOptions', 'type': '{object}'}, + } + + def __init__(self, **kwargs): + super(SnowflakeExportCopyCommand, self).__init__(**kwargs) + self.additional_copy_options = kwargs.get('additional_copy_options', None) + self.additional_format_options = kwargs.get('additional_format_options', None) + self.type = 'SnowflakeExportCopyCommand' + + +class SnowflakeImportCopyCommand(ImportSettings): + """Snowflake import command settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param additional_copy_options: Additional copy options directly passed to + snowflake Copy Command. Type: key value pairs (value should be string + type) (or Expression with resultType object). 
Example: + "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": + "'HH24:MI:SS.FF'" } + :type additional_copy_options: dict[str, object] + :param additional_format_options: Additional format options directly + passed to snowflake Copy Command. Type: key value pairs (value should be + string type) (or Expression with resultType object). Example: + "additionalFormatOptions": { "FORCE": "TRUE", "LOAD_UNCERTAIN_FILES": + "'FALSE'" } + :type additional_format_options: dict[str, object] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'additional_copy_options': {'key': 'additionalCopyOptions', 'type': '{object}'}, + 'additional_format_options': {'key': 'additionalFormatOptions', 'type': '{object}'}, + } + + def __init__(self, **kwargs): + super(SnowflakeImportCopyCommand, self).__init__(**kwargs) + self.additional_copy_options = kwargs.get('additional_copy_options', None) + self.additional_format_options = kwargs.get('additional_format_options', None) + self.type = 'SnowflakeImportCopyCommand' + + +class SnowflakeLinkedService(LinkedService): + """Snowflake linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string of snowflake. + Type: string, SecureString. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SnowflakeLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Snowflake' + + +class SnowflakeSink(CopySink): + """A copy activity snowflake sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param import_settings: Snowflake import settings. 
+ :type import_settings: + ~azure.mgmt.datafactory.models.SnowflakeImportCopyCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'import_settings': {'key': 'importSettings', 'type': 'SnowflakeImportCopyCommand'}, + } + + def __init__(self, **kwargs): + super(SnowflakeSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.import_settings = kwargs.get('import_settings', None) + self.type = 'SnowflakeSink' + + +class SnowflakeSource(CopySource): + """A copy activity snowflake source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Snowflake Sql query. Type: string (or Expression with + resultType string). + :type query: object + :param export_settings: Snowflake export settings. + :type export_settings: + ~azure.mgmt.datafactory.models.SnowflakeExportCopyCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'export_settings': {'key': 'exportSettings', 'type': 'SnowflakeExportCopyCommand'}, + } + + def __init__(self, **kwargs): + super(SnowflakeSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.export_settings = kwargs.get('export_settings', None) + self.type = 'SnowflakeSource' + + +class SparkLinkedService(LinkedService): + """Spark Server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
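
The Snowflake models follow the same linked service / dataset / source / sink shape, with COPY command options carried by the new export and import settings. A sketch with placeholder values; LinkedServiceReference is an existing model assumed here, and the connection string format is not prescribed by this diff:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        SnowflakeDataset,
        SnowflakeExportCopyCommand,
        SnowflakeImportCopyCommand,
        SnowflakeLinkedService,
        SnowflakeSink,
        SnowflakeSource,
    )

    linked_service = SnowflakeLinkedService(
        connection_string="<snowflake-connection-string>",  # placeholder
    )

    dataset = SnowflakeDataset(
        linked_service_name=LinkedServiceReference(reference_name="Snowflake"),
        snowflake_dataset_schema="PUBLIC",  # serialized as typeProperties.schema
        table="CUSTOMERS",
    )

    source = SnowflakeSource(
        query="SELECT * FROM PUBLIC.CUSTOMERS",
        export_settings=SnowflakeExportCopyCommand(
            additional_copy_options={"DATE_FORMAT": "MM/DD/YYYY"},
        ),
    )

    sink = SnowflakeSink(
        pre_copy_script="TRUNCATE TABLE PUBLIC.CUSTOMERS_STAGE",  # placeholder table
        import_settings=SnowflakeImportCopyCommand(
            additional_format_options={"FORCE": "TRUE"},
        ),
    )
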
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] @@ -28404,6 +29297,15 @@ class SqlDWSource(TabularSource): Type: object (or Expression with resultType object), itemType: StoredProcedureParameter. :type stored_procedure_parameters: object + :param partition_option: The partition mechanism that will be used for Sql + read in parallel. Possible values include: 'None', + 'PhysicalPartitionsOfTable', 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.SqlPartitionOption + :param partition_settings: The settings that will be leveraged for Sql + source partitioning. + :type partition_settings: + ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { @@ -28421,6 +29323,8 @@ class SqlDWSource(TabularSource): 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__(self, **kwargs): @@ -28428,6 +29332,8 @@ def __init__(self, **kwargs): self.sql_reader_query = kwargs.get('sql_reader_query', None) self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) self.type = 'SqlDWSource' @@ -28555,6 +29461,15 @@ class SqlMISource(TabularSource): ~azure.mgmt.datafactory.models.StoredProcedureParameter] :param produce_additional_types: Which additional types to produce. :type produce_additional_types: object + :param partition_option: The partition mechanism that will be used for Sql + read in parallel. Possible values include: 'None', + 'PhysicalPartitionsOfTable', 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.SqlPartitionOption + :param partition_settings: The settings that will be leveraged for Sql + source partitioning. 
+ :type partition_settings: + ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { @@ -28573,6 +29488,8 @@ class SqlMISource(TabularSource): 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__(self, **kwargs): @@ -28581,9 +29498,42 @@ def __init__(self, **kwargs): self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) self.produce_additional_types = kwargs.get('produce_additional_types', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) self.type = 'SqlMISource' +class SqlPartitionSettings(Model): + """The settings that will be leveraged for Sql source partitioning. + + :param partition_column_name: The name of the column in integer or + datetime type that will be used for proceeding partitioning. If not + specified, the primary key of the table is auto-detected and used as the + partition column. Type: string (or Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = kwargs.get('partition_column_name', None) + self.partition_upper_bound = kwargs.get('partition_upper_bound', None) + self.partition_lower_bound = kwargs.get('partition_lower_bound', None) + + class SqlServerLinkedService(LinkedService): """SQL Server linked service. @@ -28770,6 +29720,15 @@ class SqlServerSource(TabularSource): ~azure.mgmt.datafactory.models.StoredProcedureParameter] :param produce_additional_types: Which additional types to produce. :type produce_additional_types: object + :param partition_option: The partition mechanism that will be used for Sql + read in parallel. Possible values include: 'None', + 'PhysicalPartitionsOfTable', 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.SqlPartitionOption + :param partition_settings: The settings that will be leveraged for Sql + source partitioning. 
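
SqlPartitionSettings backs the new partitionOption on SqlSource, SqlServerSource, SqlMISource, and SqlDWSource for parallel reads. A minimal dynamic-range sketch; the column name and bounds are illustrative:

    from azure.mgmt.datafactory.models import SqlPartitionSettings, SqlServerSource

    source = SqlServerSource(
        sql_reader_query="SELECT * FROM dbo.Orders",
        partition_option="DynamicRange",
        partition_settings=SqlPartitionSettings(
            partition_column_name="OrderId",  # integer or datetime column
            partition_lower_bound="1",
            partition_upper_bound="1000000",
        ),
    )
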
+ :type partition_settings: + ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { @@ -28788,6 +29747,8 @@ class SqlServerSource(TabularSource): 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__(self, **kwargs): @@ -28796,6 +29757,8 @@ def __init__(self, **kwargs): self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) self.produce_additional_types = kwargs.get('produce_additional_types', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) self.type = 'SqlServerSource' @@ -29055,6 +30018,15 @@ class SqlSource(TabularSource): default value is ReadCommitted. Type: string (or Expression with resultType string). :type isolation_level: object + :param partition_option: The partition mechanism that will be used for Sql + read in parallel. Possible values include: 'None', + 'PhysicalPartitionsOfTable', 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.SqlPartitionOption + :param partition_settings: The settings that will be leveraged for Sql + source partitioning. + :type partition_settings: + ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { @@ -29073,6 +30045,8 @@ class SqlSource(TabularSource): 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'isolation_level': {'key': 'isolationLevel', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__(self, **kwargs): @@ -29081,6 +30055,8 @@ def __init__(self, **kwargs): self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) self.isolation_level = kwargs.get('isolation_level', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) self.type = 'SqlSource' @@ -29698,7 +30674,7 @@ class SSISPackageLocation(Model): with resultType string). :type package_path: object :param type: The type of SSIS package location. Possible values include: - 'SSISDB', 'File', 'InlinePackage' + 'SSISDB', 'File', 'InlinePackage', 'PackageStore' :type type: str or ~azure.mgmt.datafactory.models.SsisPackageLocationType :param package_password: Password of the package. :type package_password: ~azure.mgmt.datafactory.models.SecretBase @@ -29708,6 +30684,10 @@ class SSISPackageLocation(Model): :param configuration_path: The configuration file of the package execution. Type: string (or Expression with resultType string). :type configuration_path: object + :param configuration_access_credential: The configuration file access + credential. 
+ :type configuration_access_credential: + ~azure.mgmt.datafactory.models.SSISAccessCredential :param package_name: The package name. :type package_name: str :param package_content: The embedded package content. Type: string (or @@ -29727,6 +30707,7 @@ class SSISPackageLocation(Model): 'package_password': {'key': 'typeProperties.packagePassword', 'type': 'SecretBase'}, 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, 'configuration_path': {'key': 'typeProperties.configurationPath', 'type': 'object'}, + 'configuration_access_credential': {'key': 'typeProperties.configurationAccessCredential', 'type': 'SSISAccessCredential'}, 'package_name': {'key': 'typeProperties.packageName', 'type': 'str'}, 'package_content': {'key': 'typeProperties.packageContent', 'type': 'object'}, 'package_last_modified_date': {'key': 'typeProperties.packageLastModifiedDate', 'type': 'str'}, @@ -29740,6 +30721,7 @@ def __init__(self, **kwargs): self.package_password = kwargs.get('package_password', None) self.access_credential = kwargs.get('access_credential', None) self.configuration_path = kwargs.get('configuration_path', None) + self.configuration_access_credential = kwargs.get('configuration_access_credential', None) self.package_name = kwargs.get('package_name', None) self.package_content = kwargs.get('package_content', None) self.package_last_modified_date = kwargs.get('package_last_modified_date', None) @@ -32254,6 +33236,37 @@ def __init__(self, **kwargs): self.type = 'XeroSource' +class ZipDeflateReadSettings(CompressionReadSettings): + """The ZipDeflate compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param preserve_zip_file_name_as_folder: Preserve the zip file name as + folder path. Type: boolean (or Expression with resultType boolean). + :type preserve_zip_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_zip_file_name_as_folder': {'key': 'preserveZipFileNameAsFolder', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ZipDeflateReadSettings, self).__init__(**kwargs) + self.preserve_zip_file_name_as_folder = kwargs.get('preserve_zip_file_name_as_folder', None) + self.type = 'ZipDeflateReadSettings' + + class ZohoLinkedService(LinkedService): """Zoho server linked service. diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py index 170bbebf3deb..150f10a1ca0b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py @@ -330,24 +330,25 @@ class LinkedService(Model): resource. You probably want to use the sub-classes and not this class directly. 
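
With the new 'PackageStore' location type, a package can be referenced by its path inside a package store, and the configuration file can now carry its own access credential. A sketch; the SSISAccessCredential keyword arguments are assumed from the existing model, and all paths are hypothetical:

    from azure.mgmt.datafactory.models import (
        SecureString,
        SSISAccessCredential,
        SSISPackageLocation,
    )

    package_location = SSISPackageLocation(
        type="PackageStore",
        package_path="MyPackageStore/Folder/MyPackage.dtsx",  # hypothetical path
        configuration_path="\\\\fileshare\\configs\\package.dtsConfig",  # hypothetical
        configuration_access_credential=SSISAccessCredential(
            domain="CONTOSO",  # assumed keyword arguments
            user_name="ssisuser",
            password=SecureString(value="<password>"),
        ),
    )
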
Known - sub-classes are: AzureFunctionLinkedService, - AzureDataExplorerLinkedService, SapTableLinkedService, - GoogleAdWordsLinkedService, OracleServiceCloudLinkedService, - DynamicsAXLinkedService, ResponsysLinkedService, - AzureDatabricksLinkedService, AzureDataLakeAnalyticsLinkedService, - HDInsightOnDemandLinkedService, SalesforceMarketingCloudLinkedService, - NetezzaLinkedService, VerticaLinkedService, ZohoLinkedService, - XeroLinkedService, SquareLinkedService, SparkLinkedService, - ShopifyLinkedService, ServiceNowLinkedService, QuickBooksLinkedService, - PrestoLinkedService, PhoenixLinkedService, PaypalLinkedService, - MarketoLinkedService, AzureMariaDBLinkedService, MariaDBLinkedService, - MagentoLinkedService, JiraLinkedService, ImpalaLinkedService, - HubspotLinkedService, HiveLinkedService, HBaseLinkedService, - GreenplumLinkedService, GoogleBigQueryLinkedService, EloquaLinkedService, - DrillLinkedService, CouchbaseLinkedService, ConcurLinkedService, - AzurePostgreSqlLinkedService, AmazonMWSLinkedService, SapHanaLinkedService, - SapBWLinkedService, SftpServerLinkedService, FtpServerLinkedService, - HttpLinkedService, AzureSearchLinkedService, CustomDataSourceLinkedService, + sub-classes are: SharePointOnlineListLinkedService, SnowflakeLinkedService, + AzureFunctionLinkedService, AzureDataExplorerLinkedService, + SapTableLinkedService, GoogleAdWordsLinkedService, + OracleServiceCloudLinkedService, DynamicsAXLinkedService, + ResponsysLinkedService, AzureDatabricksLinkedService, + AzureDataLakeAnalyticsLinkedService, HDInsightOnDemandLinkedService, + SalesforceMarketingCloudLinkedService, NetezzaLinkedService, + VerticaLinkedService, ZohoLinkedService, XeroLinkedService, + SquareLinkedService, SparkLinkedService, ShopifyLinkedService, + ServiceNowLinkedService, QuickBooksLinkedService, PrestoLinkedService, + PhoenixLinkedService, PaypalLinkedService, MarketoLinkedService, + AzureMariaDBLinkedService, MariaDBLinkedService, MagentoLinkedService, + JiraLinkedService, ImpalaLinkedService, HubspotLinkedService, + HiveLinkedService, HBaseLinkedService, GreenplumLinkedService, + GoogleBigQueryLinkedService, EloquaLinkedService, DrillLinkedService, + CouchbaseLinkedService, ConcurLinkedService, AzurePostgreSqlLinkedService, + AmazonMWSLinkedService, SapHanaLinkedService, SapBWLinkedService, + SftpServerLinkedService, FtpServerLinkedService, HttpLinkedService, + AzureSearchLinkedService, CustomDataSourceLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, RestServiceLinkedService, SapOpenHubLinkedService, SapEccLinkedService, SapCloudForCustomerLinkedService, SalesforceServiceCloudLinkedService, @@ -402,7 +403,7 @@ class LinkedService(Model): } _subtype_map = { - 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 
'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureMLService': 'AzureMLServiceLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + 'type': {'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 
'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureMLService': 'AzureMLServiceLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} } def __init__(self, *, additional_properties=None, 
connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: @@ -514,7 +515,8 @@ class Dataset(Model): data stores, such as tables, files, folders, and documents. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: GoogleAdWordsObjectDataset, AzureDataExplorerTableDataset, + sub-classes are: SharePointOnlineListResourceDataset, SnowflakeDataset, + GoogleAdWordsObjectDataset, AzureDataExplorerTableDataset, OracleServiceCloudObjectDataset, DynamicsAXResourceDataset, ResponsysObjectDataset, SalesforceMarketingCloudObjectDataset, VerticaTableDataset, NetezzaTableDataset, ZohoObjectDataset, @@ -595,7 +597,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 
'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'Orc': 'OrcDataset', 'Json': 'JsonDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'SharePointOnlineListResource': 'SharePointOnlineListResourceDataset', 'SnowflakeTable': 'SnowflakeDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 
'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'Orc': 'OrcDataset', 'Json': 'JsonDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: @@ -675,10 +677,11 @@ class CopySource(Model): """A copy activity source. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: HttpSource, AzureBlobFSSource, AzureDataLakeStoreSource, - Office365Source, CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, - WebSource, OracleSource, AzureDataExplorerSource, HdfsSource, - FileSystemSource, RestSource, SalesforceServiceCloudSource, ODataSource, + sub-classes are: SharePointOnlineListSource, SnowflakeSource, HttpSource, + AzureBlobFSSource, AzureDataLakeStoreSource, Office365Source, + CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, WebSource, + OracleSource, AzureDataExplorerSource, HdfsSource, FileSystemSource, + RestSource, SalesforceServiceCloudSource, ODataSource, MicrosoftAccessSource, RelationalSource, CommonDataServiceForAppsSource, DynamicsCrmSource, DynamicsSource, CosmosDbSqlApiSource, DocumentDbCollectionSource, BlobSource, TabularSource, BinarySource, @@ -717,7 +720,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'ODataSource': 'ODataSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'TabularSource': 'TabularSource', 
'BinarySource': 'BinarySource', 'OrcSource': 'OrcSource', 'JsonSource': 'JsonSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} + 'type': {'SharePointOnlineListSource': 'SharePointOnlineListSource', 'SnowflakeSource': 'SnowflakeSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'ODataSource': 'ODataSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'TabularSource': 'TabularSource', 'BinarySource': 'BinarySource', 'OrcSource': 'OrcSource', 'JsonSource': 'JsonSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: @@ -1395,6 +1398,10 @@ class AmazonS3ReadSettings(StoreReadSettings): :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition + discovery starts from. Type: string (or Expression with resultType + string). + :type partition_root_path: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). 
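A minimal sketch of the new partitionRootPath setting next to the existing enablePartitionDiscovery flag on AmazonS3ReadSettings; the same two settings are added to the other store read-settings types later in this diff. The bucket layout used here is illustrative:

from azure.mgmt.datafactory.models import AmazonS3ReadSettings

read_settings = AmazonS3ReadSettings(
    recursive=True,
    wildcard_folder_path="raw/year=*/month=*",
    wildcard_file_name="*.csv",
    enable_partition_discovery=True,
    # New: where partition discovery starts, so 'year' and 'month' are
    # parsed as partition columns relative to this root.
    partition_root_path="raw",
)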
:type modified_datetime_start: object @@ -1417,11 +1424,12 @@ class AmazonS3ReadSettings(StoreReadSettings): 'prefix': {'key': 'prefix', 'type': 'object'}, 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, file_list_path=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, file_list_path=None, enable_partition_discovery: bool=None, partition_root_path=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: super(AmazonS3ReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -1429,6 +1437,7 @@ def __init__(self, *, additional_properties=None, max_concurrent_connections=Non self.prefix = prefix self.file_list_path = file_list_path self.enable_partition_discovery = enable_partition_discovery + self.partition_root_path = partition_root_path self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end self.type = 'AmazonS3ReadSettings' @@ -1689,7 +1698,7 @@ class CopySink(Model): SalesforceSink, AzureDataExplorerSink, CommonDataServiceForAppsSink, DynamicsCrmSink, DynamicsSink, MicrosoftAccessSink, InformixSink, OdbcSink, AzureSearchIndexSink, AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, - SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, + SnowflakeSink, SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, CosmosDbSqlApiSink, DocumentDbCollectionSink, FileSystemSink, BlobSink, BinarySink, ParquetSink, AvroSink, AzureTableSink, AzureQueueSink, SapCloudForCustomerSink, AzureMySqlSink, AzurePostgreSqlSink, OrcSink, @@ -1737,7 +1746,7 @@ class CopySink(Model): } _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 
'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'OrcSink': 'OrcSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'OrcSink': 'OrcSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: @@ -2310,6 +2319,10 @@ class AzureBlobFSReadSettings(StoreReadSettings): :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition + discovery starts from. Type: string (or Expression with resultType + string). + :type partition_root_path: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). 
:type modified_datetime_start: object @@ -2331,17 +2344,19 @@ class AzureBlobFSReadSettings(StoreReadSettings): 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, file_list_path=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, file_list_path=None, enable_partition_discovery: bool=None, partition_root_path=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: super(AzureBlobFSReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name self.file_list_path = file_list_path self.enable_partition_discovery = enable_partition_discovery + self.partition_root_path = partition_root_path self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end self.type = 'AzureBlobFSReadSettings' @@ -2705,6 +2720,10 @@ class AzureBlobStorageReadSettings(StoreReadSettings): :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition + discovery starts from. Type: string (or Expression with resultType + string). + :type partition_root_path: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). 
:type modified_datetime_start: object @@ -2727,11 +2746,12 @@ class AzureBlobStorageReadSettings(StoreReadSettings): 'prefix': {'key': 'prefix', 'type': 'object'}, 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, file_list_path=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, file_list_path=None, enable_partition_discovery: bool=None, partition_root_path=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -2739,6 +2759,7 @@ def __init__(self, *, additional_properties=None, max_concurrent_connections=Non self.prefix = prefix self.file_list_path = file_list_path self.enable_partition_discovery = enable_partition_discovery + self.partition_root_path = partition_root_path self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end self.type = 'AzureBlobStorageReadSettings' @@ -3613,6 +3634,10 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition + discovery starts from. Type: string (or Expression with resultType + string). + :type partition_root_path: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). 
:type modified_datetime_start: object @@ -3634,17 +3659,19 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, file_list_path=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, file_list_path=None, enable_partition_discovery: bool=None, partition_root_path=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: super(AzureDataLakeStoreReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name self.file_list_path = file_list_path self.enable_partition_discovery = enable_partition_discovery + self.partition_root_path = partition_root_path self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end self.type = 'AzureDataLakeStoreReadSettings' @@ -3815,14 +3842,34 @@ class AzureFileStorageLinkedService(LinkedService): :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str - :param host: Required. Host name of the server. Type: string (or - Expression with resultType string). + :param host: Host name of the server. Type: string (or Expression with + resultType string). :type host: object :param user_id: User ID to logon the server. Type: string (or Expression with resultType string). :type user_id: object :param password: Password to logon the server. :type password: ~azure.mgmt.datafactory.models.SecretBase + :param connection_string: The connection string. It is mutually exclusive + with sasUri property. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param account_key: The Azure key vault secret reference of accountKey in + connection string. + :type account_key: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param sas_uri: SAS URI of the Azure File resource. It is mutually + exclusive with connectionString property. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type sas_uri: object + :param sas_token: The Azure key vault secret reference of sasToken in sas + uri. + :type sas_token: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param file_share: The azure file share name. It is required when auth + with accountKey/sasToken. Type: string (or Expression with resultType + string). + :type file_share: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
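Since host is no longer required on AzureFileStorageLinkedService, the linked service can now be defined with the new connectionString/accountKey (or sasUri/sasToken) properties instead; a minimal sketch, with placeholder account, vault, and secret names:

from azure.mgmt.datafactory.models import (
    AzureFileStorageLinkedService,
    AzureKeyVaultSecretReference,
    LinkedServiceReference,
)

linked_service = AzureFileStorageLinkedService(
    # Connection-string auth; mutually exclusive with sas_uri.
    connection_string="DefaultEndpointsProtocol=https;AccountName=<account>;EndpointSuffix=core.windows.net;",
    # Account key pulled from Key Vault rather than stored inline.
    account_key=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name="MyKeyVaultLinkedService"),
        secret_name="file-storage-account-key",
    ),
    # Required when authenticating with accountKey/sasToken.
    file_share="myshare",
)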
@@ -3831,7 +3878,6 @@ class AzureFileStorageLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'host': {'required': True}, } _attribute_map = { @@ -3844,14 +3890,24 @@ class AzureFileStorageLinkedService(LinkedService): 'host': {'key': 'typeProperties.host', 'type': 'object'}, 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, + 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, + 'file_share': {'key': 'typeProperties.fileShare', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_id=None, password=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, host=None, user_id=None, password=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, file_share=None, encrypted_credential=None, **kwargs) -> None: super(AzureFileStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.host = host self.user_id = user_id self.password = password + self.connection_string = connection_string + self.account_key = account_key + self.sas_uri = sas_uri + self.sas_token = sas_token + self.file_share = file_share self.encrypted_credential = encrypted_credential self.type = 'AzureFileStorage' @@ -3914,6 +3970,9 @@ class AzureFileStorageReadSettings(StoreReadSettings): :param wildcard_file_name: Azure File Storage wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param prefix: The prefix filter for the Azure File name starting from + root path. Type: string (or Expression with resultType string). + :type prefix: object :param file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). @@ -3921,6 +3980,10 @@ class AzureFileStorageReadSettings(StoreReadSettings): :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition + discovery starts from. Type: string (or Expression with resultType + string). + :type partition_root_path: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). 
:type modified_datetime_start: object @@ -3940,19 +4003,23 @@ class AzureFileStorageReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, file_list_path=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, file_list_path=None, enable_partition_discovery: bool=None, partition_root_path=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: super(AzureFileStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name + self.prefix = prefix self.file_list_path = file_list_path self.enable_partition_discovery = enable_partition_discovery + self.partition_root_path = partition_root_path self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end self.type = 'AzureFileStorageReadSettings' @@ -5980,6 +6047,15 @@ class AzureSqlSource(TabularSource): ~azure.mgmt.datafactory.models.StoredProcedureParameter] :param produce_additional_types: Which additional types to produce. :type produce_additional_types: object + :param partition_option: The partition mechanism that will be used for Sql + read in parallel. Possible values include: 'None', + 'PhysicalPartitionsOfTable', 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.SqlPartitionOption + :param partition_settings: The settings that will be leveraged for Sql + source partitioning. 
+ :type partition_settings: + ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { @@ -5998,14 +6074,18 @@ class AzureSqlSource(TabularSource): 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, partition_option=None, partition_settings=None, **kwargs) -> None: super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters self.produce_additional_types = produce_additional_types + self.partition_option = partition_option + self.partition_settings = partition_settings self.type = 'AzureSqlSource' @@ -6478,6 +6558,72 @@ def __init__(self, *, linked_service_name, location, additional_properties=None, self.type = 'Binary' +class FormatReadSettings(Model): + """Format read settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: JsonReadSettings, DelimitedTextReadSettings, + BinaryReadSettings + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'JsonReadSettings': 'JsonReadSettings', 'DelimitedTextReadSettings': 'DelimitedTextReadSettings', 'BinaryReadSettings': 'BinaryReadSettings'} + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(FormatReadSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = None + + +class BinaryReadSettings(FormatReadSettings): + """Binary read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. 
+ :type type: str + :param compression_properties: Compression settings. + :type compression_properties: + ~azure.mgmt.datafactory.models.CompressionReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'compression_properties': {'key': 'compressionProperties', 'type': 'CompressionReadSettings'}, + } + + def __init__(self, *, additional_properties=None, compression_properties=None, **kwargs) -> None: + super(BinaryReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.compression_properties = compression_properties + self.type = 'BinaryReadSettings' + + class BinarySink(CopySink): """A copy activity Binary sink. @@ -6554,6 +6700,8 @@ class BinarySource(CopySource): :type type: str :param store_settings: Binary store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :param format_settings: Binary format settings. + :type format_settings: ~azure.mgmt.datafactory.models.BinaryReadSettings """ _validation = { @@ -6567,11 +6715,13 @@ class BinarySource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'BinaryReadSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: super(BinarySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings + self.format_settings = format_settings self.type = 'BinarySource' @@ -7682,6 +7832,40 @@ def __init__(self, *, component_name: str, license_key=None, **kwargs) -> None: self.type = 'ComponentSetup' +class CompressionReadSettings(Model): + """Compression read settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ZipDeflateReadSettings + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(CompressionReadSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = None + + class ConcurLinkedService(LinkedService): """Concur Service linked service. @@ -10503,40 +10687,6 @@ def __init__(self, *, linked_service_name, location, additional_properties=None, self.type = 'DelimitedText' -class FormatReadSettings(Model): - """Format read settings. 
- - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DelimitedTextReadSettings - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'DelimitedTextReadSettings': 'DelimitedTextReadSettings'} - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(FormatReadSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = None - - class DelimitedTextReadSettings(FormatReadSettings): """Delimited text read settings. @@ -10551,6 +10701,9 @@ class DelimitedTextReadSettings(FormatReadSettings): when reading data from input files. Type: integer (or Expression with resultType integer). :type skip_line_count: object + :param compression_properties: Compression settings. + :type compression_properties: + ~azure.mgmt.datafactory.models.CompressionReadSettings """ _validation = { @@ -10561,11 +10714,13 @@ class DelimitedTextReadSettings(FormatReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, + 'compression_properties': {'key': 'compressionProperties', 'type': 'CompressionReadSettings'}, } - def __init__(self, *, additional_properties=None, skip_line_count=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, skip_line_count=None, compression_properties=None, **kwargs) -> None: super(DelimitedTextReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) self.skip_line_count = skip_line_count + self.compression_properties = compression_properties self.type = 'DelimitedTextReadSettings' @@ -11379,6 +11534,12 @@ class DynamicsAXSource(TabularSource): :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP + response. It is the timeout to get a response, not the timeout to read + response data. Default value: 00:05:00. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
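Tying together the new CompressionReadSettings hierarchy introduced in this diff (BinaryReadSettings, ZipDeflateReadSettings, and the compressionProperties slot on DelimitedTextReadSettings), a minimal sketch of a Binary source that unzips while reading; the store settings shown are illustrative:

from azure.mgmt.datafactory.models import (
    BinarySource,
    BinaryReadSettings,
    ZipDeflateReadSettings,
    AzureBlobStorageReadSettings,
)

source = BinarySource(
    store_settings=AzureBlobStorageReadSettings(recursive=True),
    # New formatSettings/compressionProperties pair: decompress zip archives
    # during the copy and keep each zip file name as a folder in the output.
    format_settings=BinaryReadSettings(
        compression_properties=ZipDeflateReadSettings(
            preserve_zip_file_name_as_folder=True,
        ),
    ),
)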
+ :type http_request_timeout: object """ _validation = { @@ -11394,11 +11555,13 @@ class DynamicsAXSource(TabularSource): 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, http_request_timeout=None, **kwargs) -> None: super(DynamicsAXSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query + self.http_request_timeout = http_request_timeout self.type = 'DynamicsAXSource' @@ -12520,6 +12683,40 @@ def __init__(self, *, name: str, package_location, connect_via, additional_prope self.type = 'ExecuteSSISPackage' +class ExportSettings(Model): + """Export command settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SnowflakeExportCopyCommand + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand'} + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(ExportSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = None + + class ExposureControlRequest(Model): """The exposure control request. @@ -13068,12 +13265,20 @@ class FileServerReadSettings(StoreReadSettings): :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition + discovery starts from. Type: string (or Expression with resultType + string). + :type partition_root_path: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). :type modified_datetime_start: object :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). :type modified_datetime_end: object + :param file_filter: Specify a filter to be used to select a subset of + files in the folderPath rather than all files. Type: string (or Expression + with resultType string). 
+ :type file_filter: object """ _validation = { @@ -13089,19 +13294,23 @@ class FileServerReadSettings(StoreReadSettings): 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + 'file_filter': {'key': 'fileFilter', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, file_list_path=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, file_list_path=None, enable_partition_discovery: bool=None, partition_root_path=None, modified_datetime_start=None, modified_datetime_end=None, file_filter=None, **kwargs) -> None: super(FileServerReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name self.file_list_path = file_list_path self.enable_partition_discovery = enable_partition_discovery + self.partition_root_path = partition_root_path self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end + self.file_filter = file_filter self.type = 'FileServerReadSettings' @@ -13468,6 +13677,13 @@ class FtpReadSettings(StoreReadSettings): :param wildcard_file_name: Ftp wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition + discovery starts from. Type: string (or Expression with resultType + string). + :type partition_root_path: object :param file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). 
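A minimal sketch of the new fileFilter property on FileServerReadSettings, combined with the partition-discovery settings added above; the path and pattern are illustrative:

from azure.mgmt.datafactory.models import FileServerReadSettings

read_settings = FileServerReadSettings(
    recursive=True,
    # New: narrow the copy to a subset of files under folderPath.
    file_filter="*.parquet",
    enable_partition_discovery=True,
    partition_root_path="data",
)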
@@ -13488,15 +13704,19 @@ class FtpReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, file_list_path=None, use_binary_transfer: bool=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, partition_root_path=None, file_list_path=None, use_binary_transfer: bool=None, **kwargs) -> None: super(FtpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.partition_root_path = partition_root_path self.file_list_path = file_list_path self.use_binary_transfer = use_binary_transfer self.type = 'FtpReadSettings' @@ -14367,6 +14587,10 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition + discovery starts from. Type: string (or Expression with resultType + string). + :type partition_root_path: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). 
:type modified_datetime_start: object @@ -14389,11 +14613,12 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): 'prefix': {'key': 'prefix', 'type': 'object'}, 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, file_list_path=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, file_list_path=None, enable_partition_discovery: bool=None, partition_root_path=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: super(GoogleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -14401,6 +14626,7 @@ def __init__(self, *, additional_properties=None, max_concurrent_connections=Non self.prefix = prefix self.file_list_path = file_list_path self.enable_partition_discovery = enable_partition_discovery + self.partition_root_path = partition_root_path self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end self.type = 'GoogleCloudStorageReadSettings' @@ -14941,6 +15167,10 @@ class HdfsReadSettings(StoreReadSettings): :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition + discovery starts from. Type: string (or Expression with resultType + string). + :type partition_root_path: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). 
:type modified_datetime_start: object @@ -14964,18 +15194,20 @@ class HdfsReadSettings(StoreReadSettings): 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, file_list_path=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, distcp_settings=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, file_list_path=None, enable_partition_discovery: bool=None, partition_root_path=None, modified_datetime_start=None, modified_datetime_end=None, distcp_settings=None, **kwargs) -> None: super(HdfsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name self.file_list_path = file_list_path self.enable_partition_discovery = enable_partition_discovery + self.partition_root_path = partition_root_path self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end self.distcp_settings = distcp_settings @@ -16254,6 +16486,13 @@ class HttpReadSettings(StoreReadSettings): :param request_timeout: Specifies the timeout for a HTTP client to get HTTP response from HTTP server. :type request_timeout: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition + discovery starts from. Type: string (or Expression with resultType + string). 
+ :type partition_root_path: object """ _validation = { @@ -16268,14 +16507,18 @@ class HttpReadSettings(StoreReadSettings): 'request_body': {'key': 'requestBody', 'type': 'object'}, 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, 'request_timeout': {'key': 'requestTimeout', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, request_method=None, request_body=None, additional_headers=None, request_timeout=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, request_method=None, request_body=None, additional_headers=None, request_timeout=None, enable_partition_discovery: bool=None, partition_root_path=None, **kwargs) -> None: super(HttpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.request_method = request_method self.request_body = request_body self.additional_headers = additional_headers self.request_timeout = request_timeout + self.enable_partition_discovery = enable_partition_discovery + self.partition_root_path = partition_root_path self.type = 'HttpReadSettings' @@ -16857,6 +17100,40 @@ def __init__(self, *, additional_properties=None, source_retry_count=None, sourc self.type = 'ImpalaSource' +class ImportSettings(Model): + """Import command settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SnowflakeImportCopyCommand + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(ImportSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = None + + class InformixLinkedService(LinkedService): """Informix linked service. @@ -17660,6 +17937,8 @@ class IntegrationRuntimeSsisProperties(Model): properties for a SSIS integration runtime. :type express_custom_setup_properties: list[~azure.mgmt.datafactory.models.CustomSetupBase] + :param package_stores: Package stores for the SSIS Integration Runtime. 
+ :type package_stores: list[~azure.mgmt.datafactory.models.PackageStore] """ _attribute_map = { @@ -17670,9 +17949,10 @@ class IntegrationRuntimeSsisProperties(Model): 'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'}, 'edition': {'key': 'edition', 'type': 'str'}, 'express_custom_setup_properties': {'key': 'expressCustomSetupProperties', 'type': '[CustomSetupBase]'}, + 'package_stores': {'key': 'packageStores', 'type': '[PackageStore]'}, } - def __init__(self, *, additional_properties=None, catalog_info=None, license_type=None, custom_setup_script_properties=None, data_proxy_properties=None, edition=None, express_custom_setup_properties=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, catalog_info=None, license_type=None, custom_setup_script_properties=None, data_proxy_properties=None, edition=None, express_custom_setup_properties=None, package_stores=None, **kwargs) -> None: super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs) self.additional_properties = additional_properties self.catalog_info = catalog_info @@ -17681,6 +17961,7 @@ def __init__(self, *, additional_properties=None, catalog_info=None, license_typ self.data_proxy_properties = data_proxy_properties self.edition = edition self.express_custom_setup_properties = express_custom_setup_properties + self.package_stores = package_stores class IntegrationRuntimeStatus(Model): @@ -18169,23 +18450,54 @@ def __init__(self, *, additional_properties=None, serializer=None, deserializer= self.type = 'JsonFormat' -class JsonSink(CopySink): - """A copy activity Json sink. +class JsonReadSettings(FormatReadSettings): + """Json read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). + :param type: Required. Constant filled by server. + :type type: str + :param compression_properties: Compression settings. + :type compression_properties: + ~azure.mgmt.datafactory.models.CompressionReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'compression_properties': {'key': 'compressionProperties', 'type': 'CompressionReadSettings'}, + } + + def __init__(self, *, additional_properties=None, compression_properties=None, **kwargs) -> None: + super(JsonReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.compression_properties = compression_properties + self.type = 'JsonReadSettings' + + +class JsonSink(CopySink): + """A copy activity Json sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. 
Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). :type sink_retry_count: object :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: @@ -18249,6 +18561,8 @@ class JsonSource(CopySource): :type type: str :param store_settings: Json store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :param format_settings: Json format settings. + :type format_settings: ~azure.mgmt.datafactory.models.JsonReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). @@ -18267,12 +18581,14 @@ class JsonSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'JsonReadSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, additional_columns=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, additional_columns=None, **kwargs) -> None: super(JsonSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings + self.format_settings = format_settings self.additional_columns = additional_columns self.type = 'JsonSource' @@ -20801,6 +21117,12 @@ class ODataSource(CopySource): :param query: OData query. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP + response. It is the timeout to get a response, not the timeout to read + response data. Default value: 00:05:00. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). 
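Reviewer note: JsonSource now accepts format_settings alongside store_settings. A minimal sketch of the source half of a copy activity, assuming plain (uncompressed) JSON files on a file server; compression_properties would carry a CompressionReadSettings subtype (e.g. ZipDeflateReadSettings) when the inputs are zipped:

    from azure.mgmt.datafactory.models import (
        FileServerReadSettings,
        JsonReadSettings,
        JsonSource,
    )

    # Store settings pick the files; format settings describe how to parse them.
    source = JsonSource(
        store_settings=FileServerReadSettings(recursive=True, wildcard_file_name='*.json'),
        format_settings=JsonReadSettings(),
    )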
@@ -20819,12 +21141,14 @@ class ODataSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, additional_columns=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, http_request_timeout=None, additional_columns=None, **kwargs) -> None: super(ODataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query + self.http_request_timeout = http_request_timeout self.additional_columns = additional_columns self.type = 'ODataSource' @@ -22181,6 +22505,35 @@ def __init__(self, *, additional_properties=None, source_retry_count=None, sourc self.type = 'OrcSource' +class PackageStore(Model): + """Package store for the SSIS integration runtime. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. The name of the package store + :type name: str + :param package_store_linked_service: Required. The package store linked + service reference. + :type package_store_linked_service: + ~azure.mgmt.datafactory.models.EntityReference + """ + + _validation = { + 'name': {'required': True}, + 'package_store_linked_service': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'package_store_linked_service': {'key': 'packageStoreLinkedService', 'type': 'EntityReference'}, + } + + def __init__(self, *, name: str, package_store_linked_service, **kwargs) -> None: + super(PackageStore, self).__init__(**kwargs) + self.name = name + self.package_store_linked_service = package_store_linked_service + + class ParameterSpecification(Model): """Definition of a single parameter for an entity. @@ -25787,6 +26140,12 @@ class SapCloudForCustomerSink(CopySink): 'Insert'. Possible values include: 'Insert', 'Update' :type write_behavior: str or ~azure.mgmt.datafactory.models.SapCloudForCustomerSinkWriteBehavior + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP + response. It is the timeout to get a response, not the timeout to read + response data. Default value: 00:05:00. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type http_request_timeout: object """ _validation = { @@ -25802,11 +26161,13 @@ class SapCloudForCustomerSink(CopySink): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, http_request_timeout=None, **kwargs) -> None: super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.write_behavior = write_behavior + self.http_request_timeout = http_request_timeout self.type = 'SapCloudForCustomerSink' @@ -25843,6 +26204,12 @@ class SapCloudForCustomerSource(TabularSource): :param query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP + response. It is the timeout to get a response, not the timeout to read + response data. Default value: 00:05:00. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object """ _validation = { @@ -25858,11 +26225,13 @@ class SapCloudForCustomerSource(TabularSource): 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, http_request_timeout=None, **kwargs) -> None: super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query + self.http_request_timeout = http_request_timeout self.type = 'SapCloudForCustomerSource' @@ -26024,6 +26393,12 @@ class SapEccSource(TabularSource): :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP + response. It is the timeout to get a response, not the timeout to read + response data. Default value: 00:05:00. 
Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object """ _validation = { @@ -26039,11 +26414,13 @@ class SapEccSource(TabularSource): 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, http_request_timeout=None, **kwargs) -> None: super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query + self.http_request_timeout = http_request_timeout self.type = 'SapEccSource' @@ -26071,8 +26448,8 @@ class SapHanaLinkedService(LinkedService): :param connection_string: SAP HANA ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object - :param server: Required. Host name of the SAP HANA server. Type: string - (or Expression with resultType string). + :param server: Host name of the SAP HANA server. Type: string (or + Expression with resultType string). :type server: object :param authentication_type: The authentication type to be used to connect to the SAP HANA server. Possible values include: 'Basic', 'Windows' @@ -26091,7 +26468,6 @@ class SapHanaLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'server': {'required': True}, } _attribute_map = { @@ -26109,7 +26485,7 @@ class SapHanaLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, *, server, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, server=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: super(SapHanaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.connection_string = connection_string self.server = server @@ -27630,6 +28006,13 @@ class SftpReadSettings(StoreReadSettings): :param wildcard_file_name: Sftp wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition + discovery starts from. Type: string (or Expression with resultType + string). 
+ :type partition_root_path: object :param file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). @@ -27653,16 +28036,20 @@ class SftpReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, file_list_path=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, partition_root_path=None, file_list_path=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: super(SftpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.partition_root_path = partition_root_path self.file_list_path = file_list_path self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end @@ -27822,6 +28209,195 @@ def __init__(self, *, additional_properties=None, max_concurrent_connections=Non self.type = 'SftpWriteSettings' +class SharePointOnlineListLinkedService(LinkedService): + """SharePoint Online List linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param site_url: Required. The URL of the SharePoint Online site. For + example, https://contoso.sharepoint.com/sites/siteName. Type: string (or + Expression with resultType string). + :type site_url: object + :param tenant_id: Required. The tenant ID under which your application + resides. You can find it from Azure portal Active Directory overview page. + Type: string (or Expression with resultType string). + :type tenant_id: object + :param service_principal_id: Required. The application (client) ID of your + application registered in Azure Active Directory. 
Make sure to grant + SharePoint site permission to this application. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. The client secret of your + application registered in Azure Active Directory. Type: string (or + Expression with resultType string). + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'site_url': {'required': True}, + 'tenant_id': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'site_url': {'key': 'typeProperties.siteUrl', 'type': 'object'}, + 'tenant_id': {'key': 'typeProperties.tenantId', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, site_url, tenant_id, service_principal_id, service_principal_key, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, encrypted_credential=None, **kwargs) -> None: + super(SharePointOnlineListLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.site_url = site_url + self.tenant_id = tenant_id + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.encrypted_credential = encrypted_credential + self.type = 'SharePointOnlineList' + + +class SharePointOnlineListResourceDataset(Dataset): + """The sharepoint online list resource dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. 
+ :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param list_name: The name of the SharePoint Online list. Type: string (or + Expression with resultType string). + :type list_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'list_name': {'key': 'typeProperties.listName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, list_name=None, **kwargs) -> None: + super(SharePointOnlineListResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.list_name = list_name + self.type = 'SharePointOnlineListResource' + + +class SharePointOnlineListSource(CopySource): + """A copy activity source for sharePoint online list source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: The OData query to filter the data in SharePoint Online + list. For example, "$top=1". Type: string (or Expression with resultType + string). + :type query: object + :param http_request_timeout: The wait time to get a response from + SharePoint Online. Default value is 5 minutes (00:05:00). Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type http_request_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, http_request_timeout=None, **kwargs) -> None: + super(SharePointOnlineListSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.http_request_timeout = http_request_timeout + self.type = 'SharePointOnlineListSource' + + class ShopifyLinkedService(LinkedService): """Shopify Service linked service. @@ -28035,22 +28611,339 @@ def __init__(self, *, file_missing=None, data_inconsistency=None, **kwargs) -> N self.data_inconsistency = data_inconsistency -class SparkLinkedService(LinkedService): - """Spark Server linked service. +class SnowflakeDataset(Dataset): + """The snowflake dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Dataset description. :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param snowflake_dataset_schema: The schema name of the Snowflake + database. Type: string (or Expression with resultType string). + :type snowflake_dataset_schema: object + :param table: The table name of the Snowflake database. Type: string (or + Expression with resultType string). 
+ :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'snowflake_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, snowflake_dataset_schema=None, table=None, **kwargs) -> None: + super(SnowflakeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.snowflake_dataset_schema = snowflake_dataset_schema + self.table = table + self.type = 'SnowflakeTable' + + +class SnowflakeExportCopyCommand(ExportSettings): + """Snowflake export command settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param additional_copy_options: Additional copy options directly passed to + snowflake Copy Command. Type: key value pairs (value should be string + type) (or Expression with resultType object). Example: + "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": + "'HH24:MI:SS.FF'" } + :type additional_copy_options: dict[str, object] + :param additional_format_options: Additional format options directly + passed to snowflake Copy Command. Type: key value pairs (value should be + string type) (or Expression with resultType object). Example: + "additionalFormatOptions": { "OVERWRITE": "TRUE", "MAX_FILE_SIZE": + "'FALSE'" } + :type additional_format_options: dict[str, object] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'additional_copy_options': {'key': 'additionalCopyOptions', 'type': '{object}'}, + 'additional_format_options': {'key': 'additionalFormatOptions', 'type': '{object}'}, + } + + def __init__(self, *, additional_properties=None, additional_copy_options=None, additional_format_options=None, **kwargs) -> None: + super(SnowflakeExportCopyCommand, self).__init__(additional_properties=additional_properties, **kwargs) + self.additional_copy_options = additional_copy_options + self.additional_format_options = additional_format_options + self.type = 'SnowflakeExportCopyCommand' + + +class SnowflakeImportCopyCommand(ImportSettings): + """Snowflake import command settings. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param additional_copy_options: Additional copy options directly passed to + snowflake Copy Command. Type: key value pairs (value should be string + type) (or Expression with resultType object). Example: + "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": + "'HH24:MI:SS.FF'" } + :type additional_copy_options: dict[str, object] + :param additional_format_options: Additional format options directly + passed to snowflake Copy Command. Type: key value pairs (value should be + string type) (or Expression with resultType object). Example: + "additionalFormatOptions": { "FORCE": "TRUE", "LOAD_UNCERTAIN_FILES": + "'FALSE'" } + :type additional_format_options: dict[str, object] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'additional_copy_options': {'key': 'additionalCopyOptions', 'type': '{object}'}, + 'additional_format_options': {'key': 'additionalFormatOptions', 'type': '{object}'}, + } + + def __init__(self, *, additional_properties=None, additional_copy_options=None, additional_format_options=None, **kwargs) -> None: + super(SnowflakeImportCopyCommand, self).__init__(additional_properties=additional_properties, **kwargs) + self.additional_copy_options = additional_copy_options + self.additional_format_options = additional_format_options + self.type = 'SnowflakeImportCopyCommand' + + +class SnowflakeLinkedService(LinkedService): + """Snowflake linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string of snowflake. + Type: string, SecureString. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(SnowflakeLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'Snowflake' + + +class SnowflakeSink(CopySink): + """A copy activity snowflake sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param import_settings: Snowflake import settings. 
+ :type import_settings: + ~azure.mgmt.datafactory.models.SnowflakeImportCopyCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'import_settings': {'key': 'importSettings', 'type': 'SnowflakeImportCopyCommand'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, import_settings=None, **kwargs) -> None: + super(SnowflakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.import_settings = import_settings + self.type = 'SnowflakeSink' + + +class SnowflakeSource(CopySource): + """A copy activity snowflake source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Snowflake Sql query. Type: string (or Expression with + resultType string). + :type query: object + :param export_settings: Snowflake export settings. 
+ :type export_settings: + ~azure.mgmt.datafactory.models.SnowflakeExportCopyCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'export_settings': {'key': 'exportSettings', 'type': 'SnowflakeExportCopyCommand'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, export_settings=None, **kwargs) -> None: + super(SnowflakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.export_settings = export_settings + self.type = 'SnowflakeSource' + + +class SparkLinkedService(LinkedService): + """Spark Server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] @@ -28404,6 +29297,15 @@ class SqlDWSource(TabularSource): Type: object (or Expression with resultType object), itemType: StoredProcedureParameter. :type stored_procedure_parameters: object + :param partition_option: The partition mechanism that will be used for Sql + read in parallel. Possible values include: 'None', + 'PhysicalPartitionsOfTable', 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.SqlPartitionOption + :param partition_settings: The settings that will be leveraged for Sql + source partitioning. 
+ :type partition_settings: + ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { @@ -28421,13 +29323,17 @@ class SqlDWSource(TabularSource): 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, partition_option=None, partition_settings=None, **kwargs) -> None: super(SqlDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters + self.partition_option = partition_option + self.partition_settings = partition_settings self.type = 'SqlDWSource' @@ -28555,6 +29461,15 @@ class SqlMISource(TabularSource): ~azure.mgmt.datafactory.models.StoredProcedureParameter] :param produce_additional_types: Which additional types to produce. :type produce_additional_types: object + :param partition_option: The partition mechanism that will be used for Sql + read in parallel. Possible values include: 'None', + 'PhysicalPartitionsOfTable', 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.SqlPartitionOption + :param partition_settings: The settings that will be leveraged for Sql + source partitioning. 
+ :type partition_settings: + ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { @@ -28573,17 +29488,52 @@ class SqlMISource(TabularSource): 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, partition_option=None, partition_settings=None, **kwargs) -> None: super(SqlMISource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters self.produce_additional_types = produce_additional_types + self.partition_option = partition_option + self.partition_settings = partition_settings self.type = 'SqlMISource' +class SqlPartitionSettings(Model): + """The settings that will be leveraged for Sql source partitioning. + + :param partition_column_name: The name of the column in integer or + datetime type that will be used for proceeding partitioning. If not + specified, the primary key of the table is auto-detected and used as the + partition column. Type: string (or Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None: + super(SqlPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound + + class SqlServerLinkedService(LinkedService): """SQL Server linked service. 
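Reviewer note: the new partition_option / partition_settings pair added to the SQL sources above drives parallel reads. A minimal sketch using SqlDWSource with dynamic-range partitioning; the query, column name and bounds are illustrative and would typically be supplied via pipeline parameters:

    from azure.mgmt.datafactory.models import SqlDWSource, SqlPartitionSettings

    # Split the read into ranges over SaleId between the given bounds.
    source = SqlDWSource(
        sql_reader_query='SELECT * FROM dbo.FactSales',
        partition_option='DynamicRange',
        partition_settings=SqlPartitionSettings(
            partition_column_name='SaleId',
            partition_lower_bound='1',
            partition_upper_bound='1000000',
        ),
    )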
@@ -28770,6 +29720,15 @@ class SqlServerSource(TabularSource):
      ~azure.mgmt.datafactory.models.StoredProcedureParameter]
     :param produce_additional_types: Which additional types to produce.
     :type produce_additional_types: object
+    :param partition_option: The partition mechanism that will be used for Sql
+     read in parallel. Possible values include: 'None',
+     'PhysicalPartitionsOfTable', 'DynamicRange'
+    :type partition_option: str or
+     ~azure.mgmt.datafactory.models.SqlPartitionOption
+    :param partition_settings: The settings that will be leveraged for Sql
+     source partitioning.
+    :type partition_settings:
+     ~azure.mgmt.datafactory.models.SqlPartitionSettings
     """

     _validation = {
@@ -28788,14 +29747,18 @@ class SqlServerSource(TabularSource):
         'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'},
         'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
         'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'},
+        'partition_option': {'key': 'partitionOption', 'type': 'str'},
+        'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'},
     }

-    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None:
+    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, partition_option=None, partition_settings=None, **kwargs) -> None:
         super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
         self.sql_reader_query = sql_reader_query
         self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name
         self.stored_procedure_parameters = stored_procedure_parameters
         self.produce_additional_types = produce_additional_types
+        self.partition_option = partition_option
+        self.partition_settings = partition_settings
         self.type = 'SqlServerSource'


@@ -29055,6 +30018,15 @@ class SqlSource(TabularSource):
      default value is ReadCommitted. Type: string (or Expression with
      resultType string).
     :type isolation_level: object
+    :param partition_option: The partition mechanism that will be used for Sql
+     read in parallel. Possible values include: 'None',
+     'PhysicalPartitionsOfTable', 'DynamicRange'
+    :type partition_option: str or
+     ~azure.mgmt.datafactory.models.SqlPartitionOption
+    :param partition_settings: The settings that will be leveraged for Sql
+     source partitioning.
+    :type partition_settings:
+     ~azure.mgmt.datafactory.models.SqlPartitionSettings
     """

     _validation = {
@@ -29073,14 +30045,18 @@ class SqlSource(TabularSource):
         'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'},
         'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
         'isolation_level': {'key': 'isolationLevel', 'type': 'object'},
+        'partition_option': {'key': 'partitionOption', 'type': 'str'},
+        'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'},
     }

-    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, isolation_level=None, **kwargs) -> None:
+    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, isolation_level=None, partition_option=None, partition_settings=None, **kwargs) -> None:
         super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
         self.sql_reader_query = sql_reader_query
         self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name
         self.stored_procedure_parameters = stored_procedure_parameters
         self.isolation_level = isolation_level
+        self.partition_option = partition_option
+        self.partition_settings = partition_settings
         self.type = 'SqlSource'


@@ -29698,7 +30674,7 @@ class SSISPackageLocation(Model):
      with resultType string).
     :type package_path: object
     :param type: The type of SSIS package location. Possible values include:
-     'SSISDB', 'File', 'InlinePackage'
+     'SSISDB', 'File', 'InlinePackage', 'PackageStore'
     :type type: str or ~azure.mgmt.datafactory.models.SsisPackageLocationType
     :param package_password: Password of the package.
     :type package_password: ~azure.mgmt.datafactory.models.SecretBase
@@ -29708,6 +30684,10 @@
     :param configuration_path: The configuration file of the package
      execution. Type: string (or Expression with resultType string).
     :type configuration_path: object
+    :param configuration_access_credential: The configuration file access
+     credential.
+    :type configuration_access_credential:
+     ~azure.mgmt.datafactory.models.SSISAccessCredential
     :param package_name: The package name.
     :type package_name: str
     :param package_content: The embedded package content. Type: string (or
@@ -29727,19 +30707,21 @@ class SSISPackageLocation(Model):
         'package_password': {'key': 'typeProperties.packagePassword', 'type': 'SecretBase'},
         'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'},
         'configuration_path': {'key': 'typeProperties.configurationPath', 'type': 'object'},
+        'configuration_access_credential': {'key': 'typeProperties.configurationAccessCredential', 'type': 'SSISAccessCredential'},
         'package_name': {'key': 'typeProperties.packageName', 'type': 'str'},
         'package_content': {'key': 'typeProperties.packageContent', 'type': 'object'},
         'package_last_modified_date': {'key': 'typeProperties.packageLastModifiedDate', 'type': 'str'},
         'child_packages': {'key': 'typeProperties.childPackages', 'type': '[SSISChildPackage]'},
     }

-    def __init__(self, *, package_path=None, type=None, package_password=None, access_credential=None, configuration_path=None, package_name: str=None, package_content=None, package_last_modified_date: str=None, child_packages=None, **kwargs) -> None:
+    def __init__(self, *, package_path=None, type=None, package_password=None, access_credential=None, configuration_path=None, configuration_access_credential=None, package_name: str=None, package_content=None, package_last_modified_date: str=None, child_packages=None, **kwargs) -> None:
         super(SSISPackageLocation, self).__init__(**kwargs)
         self.package_path = package_path
         self.type = type
         self.package_password = package_password
         self.access_credential = access_credential
         self.configuration_path = configuration_path
+        self.configuration_access_credential = configuration_access_credential
         self.package_name = package_name
         self.package_content = package_content
         self.package_last_modified_date = package_last_modified_date
@@ -32254,6 +33236,37 @@ def __init__(self, *, additional_properties=None, source_retry_count=None, sourc
         self.type = 'XeroSource'


+class ZipDeflateReadSettings(CompressionReadSettings):
+    """The ZipDeflate compression read settings.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param preserve_zip_file_name_as_folder: Preserve the zip file name as
+     folder path. Type: boolean (or Expression with resultType boolean).
+    :type preserve_zip_file_name_as_folder: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'preserve_zip_file_name_as_folder': {'key': 'preserveZipFileNameAsFolder', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, preserve_zip_file_name_as_folder=None, **kwargs) -> None:
+        super(ZipDeflateReadSettings, self).__init__(additional_properties=additional_properties, **kwargs)
+        self.preserve_zip_file_name_as_folder = preserve_zip_file_name_as_folder
+        self.type = 'ZipDeflateReadSettings'
+
+
 class ZohoLinkedService(LinkedService):
     """Zoho server linked service.
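Two of the additions above sit naturally together in a usage sketch: the new `'PackageStore'` location type plus `configuration_access_credential` on `SSISPackageLocation`, and the new `ZipDeflateReadSettings`. The store path, user name and secret below are placeholders, and the `SSISAccessCredential(domain=..., user_name=..., password=...)` shape is assumed from elsewhere in this package rather than shown in this diff:

```python
from azure.mgmt.datafactory.models import (
    SecureString,
    SSISAccessCredential,
    SSISPackageLocation,
    ZipDeflateReadSettings,
)

# An SSIS package resolved from a package store on the Azure-SSIS integration
# runtime; the package configuration file gets its own access credential.
package_location = SSISPackageLocation(
    type='PackageStore',
    package_path='MyPackageStore/Folder/MyPackage.dtsx',
    configuration_path='MyPackageStore/Folder/MyPackage.dtsConfig',
    # Field names for SSISAccessCredential are assumed, not taken from this diff.
    configuration_access_credential=SSISAccessCredential(
        domain='contoso',
        user_name='ssis-reader',
        password=SecureString(value='<secret>'),
    ),
)

# Zip/Deflate read settings: keep the zip file name as an extra folder level
# when the copy expands the archive.
zip_settings = ZipDeflateReadSettings(preserve_zip_file_name_as_folder=True)
```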
diff --git a/sdk/datafactory/azure-mgmt-datafactory/setup.py b/sdk/datafactory/azure-mgmt-datafactory/setup.py
index cce4f9d8657c..3b3fd5c68d7e 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/setup.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/setup.py
@@ -36,7 +36,9 @@
     pass

 # Version extraction inspired from 'requests'
-with open(os.path.join(package_folder_path, 'version.py'), 'r') as fd:
+with open(os.path.join(package_folder_path, 'version.py')
+          if os.path.exists(os.path.join(package_folder_path, 'version.py'))
+          else os.path.join(package_folder_path, '_version.py'), 'r') as fd:
     version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]',
                         fd.read(), re.MULTILINE).group(1)
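The setup.py hunk above makes version discovery tolerant of packages that ship `_version.py` instead of `version.py`. An equivalent, slightly more readable restatement of that logic (illustrative only, not the code in the repo):

```python
import os
import re


def read_version(package_folder_path):
    """Return VERSION from version.py if present, otherwise from _version.py."""
    version_file = os.path.join(package_folder_path, 'version.py')
    if not os.path.exists(version_file):
        version_file = os.path.join(package_folder_path, '_version.py')
    with open(version_file, 'r') as fd:
        return re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]',
                         fd.read(), re.MULTILINE).group(1)
```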