From 2e08b0944b38f398b26f9921ff490ca8b94a0afe Mon Sep 17 00:00:00 2001 From: SDK Automation Date: Tue, 8 Sep 2020 07:00:11 +0000 Subject: [PATCH] Generated from 1e218c46fde5a28eeb43e7b6ea1d80fb96edc3ef add maxrowperfile/filenameprefix --- .../azure/mgmt/datafactory/models/__init__.py | 18 ++ .../azure/mgmt/datafactory/models/_models.py | 246 ++++++++++++++++- .../mgmt/datafactory/models/_models_py3.py | 254 +++++++++++++++++- .../azure-mgmt-datafactory/setup.py | 2 +- 4 files changed, 503 insertions(+), 17 deletions(-) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index 95dc22d97aad..62401004a327 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -181,6 +181,8 @@ from ._models_py3 import DatasetReference from ._models_py3 import DatasetResource from ._models_py3 import DatasetStorageFormat + from ._models_py3 import DatasetTarCompression + from ._models_py3 import DatasetTarGZipCompression from ._models_py3 import DatasetZipDeflateCompression from ._models_py3 import Db2LinkedService from ._models_py3 import Db2Source @@ -411,12 +413,14 @@ from ._models_py3 import OrcFormat from ._models_py3 import OrcSink from ._models_py3 import OrcSource + from ._models_py3 import OrcWriteSettings from ._models_py3 import PackageStore from ._models_py3 import ParameterSpecification from ._models_py3 import ParquetDataset from ._models_py3 import ParquetFormat from ._models_py3 import ParquetSink from ._models_py3 import ParquetSource + from ._models_py3 import ParquetWriteSettings from ._models_py3 import PaypalLinkedService from ._models_py3 import PaypalObjectDataset from ._models_py3 import PaypalSource @@ -567,6 +571,8 @@ from ._models_py3 import SybaseSource from ._models_py3 import SybaseTableDataset from ._models_py3 import TabularSource + from ._models_py3 import TarGZipReadSettings + from ._models_py3 import TarReadSettings from ._models_py3 import TeradataLinkedService from ._models_py3 import TeradataPartitionSettings from ._models_py3 import TeradataSource @@ -788,6 +794,8 @@ from ._models import DatasetReference from ._models import DatasetResource from ._models import DatasetStorageFormat + from ._models import DatasetTarCompression + from ._models import DatasetTarGZipCompression from ._models import DatasetZipDeflateCompression from ._models import Db2LinkedService from ._models import Db2Source @@ -1018,12 +1026,14 @@ from ._models import OrcFormat from ._models import OrcSink from ._models import OrcSource + from ._models import OrcWriteSettings from ._models import PackageStore from ._models import ParameterSpecification from ._models import ParquetDataset from ._models import ParquetFormat from ._models import ParquetSink from ._models import ParquetSource + from ._models import ParquetWriteSettings from ._models import PaypalLinkedService from ._models import PaypalObjectDataset from ._models import PaypalSource @@ -1174,6 +1184,8 @@ from ._models import SybaseSource from ._models import SybaseTableDataset from ._models import TabularSource + from ._models import TarGZipReadSettings + from ._models import TarReadSettings from ._models import TeradataLinkedService from ._models import TeradataPartitionSettings from ._models import TeradataSource @@ -1487,6 +1499,8 @@ 'DatasetReference', 'DatasetResource', 
'DatasetStorageFormat', + 'DatasetTarCompression', + 'DatasetTarGZipCompression', 'DatasetZipDeflateCompression', 'Db2LinkedService', 'Db2Source', @@ -1717,12 +1731,14 @@ 'OrcFormat', 'OrcSink', 'OrcSource', + 'OrcWriteSettings', 'PackageStore', 'ParameterSpecification', 'ParquetDataset', 'ParquetFormat', 'ParquetSink', 'ParquetSource', + 'ParquetWriteSettings', 'PaypalLinkedService', 'PaypalObjectDataset', 'PaypalSource', @@ -1873,6 +1889,8 @@ 'SybaseSource', 'SybaseTableDataset', 'TabularSource', + 'TarGZipReadSettings', + 'TarReadSettings', 'TeradataLinkedService', 'TeradataPartitionSettings', 'TeradataSource', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py index d712ea8c161c..b252a73bce47 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py @@ -1880,7 +1880,7 @@ class FormatWriteSettings(Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: JsonWriteSettings, DelimitedTextWriteSettings, - AvroWriteSettings + OrcWriteSettings, AvroWriteSettings, ParquetWriteSettings All required parameters must be populated in order to send to Azure. @@ -1901,7 +1901,7 @@ class FormatWriteSettings(Model): } _subtype_map = { - 'type': {'JsonWriteSettings': 'JsonWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'AvroWriteSettings': 'AvroWriteSettings'} + 'type': {'JsonWriteSettings': 'JsonWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'OrcWriteSettings': 'OrcWriteSettings', 'AvroWriteSettings': 'AvroWriteSettings', 'ParquetWriteSettings': 'ParquetWriteSettings'} } def __init__(self, **kwargs): @@ -1925,6 +1925,15 @@ class AvroWriteSettings(FormatWriteSettings): :type record_name: str :param record_namespace: Record namespace in the write result. :type record_namespace: str + :param max_rows_per_file: Limit the written file's row count to be smaller + than or equal to the specified count. Type: integer (or Expression with + resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + _. when copy from non-file based + store without partitionOptions. Type: string (or Expression with + resultType string). + :type file_name_prefix: object """ _validation = { @@ -1936,12 +1945,16 @@ class AvroWriteSettings(FormatWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'record_name': {'key': 'recordName', 'type': 'str'}, 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, } def __init__(self, **kwargs): super(AvroWriteSettings, self).__init__(**kwargs) self.record_name = kwargs.get('record_name', None) self.record_namespace = kwargs.get('record_namespace', None) + self.max_rows_per_file = kwargs.get('max_rows_per_file', None) + self.file_name_prefix = kwargs.get('file_name_prefix', None) self.type = 'AvroWriteSettings' @@ -7998,7 +8011,8 @@ class CompressionReadSettings(Model): """Compression read settings. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: ZipDeflateReadSettings + sub-classes are: TarGZipReadSettings, TarReadSettings, + ZipDeflateReadSettings All required parameters must be populated in order to send to Azure. @@ -8019,7 +8033,7 @@ class CompressionReadSettings(Model): } _subtype_map = { - 'type': {'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} + 'type': {'TarGZipReadSettings': 'TarGZipReadSettings', 'TarReadSettings': 'TarReadSettings', 'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} } def __init__(self, **kwargs): @@ -10220,7 +10234,8 @@ class DatasetCompression(Model): """The compression method used on a dataset. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DatasetZipDeflateCompression, DatasetDeflateCompression, + sub-classes are: DatasetTarGZipCompression, DatasetTarCompression, + DatasetZipDeflateCompression, DatasetDeflateCompression, DatasetGZipCompression, DatasetBZip2Compression All required parameters must be populated in order to send to Azure. @@ -10242,7 +10257,7 @@ class DatasetCompression(Model): } _subtype_map = { - 'type': {'ZipDeflate': 'DatasetZipDeflateCompression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'BZip2': 'DatasetBZip2Compression'} + 'type': {'TarGZip': 'DatasetTarGZipCompression', 'Tar': 'DatasetTarCompression', 'ZipDeflate': 'DatasetZipDeflateCompression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'BZip2': 'DatasetBZip2Compression'} } def __init__(self, **kwargs): @@ -10456,6 +10471,62 @@ def __init__(self, **kwargs): self.properties = kwargs.get('properties', None) +class DatasetTarCompression(DatasetCompression): + """The Tar archive method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(DatasetTarCompression, self).__init__(**kwargs) + self.type = 'Tar' + + +class DatasetTarGZipCompression(DatasetCompression): + """The TarGZip compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param level: The TarGZip compression level. + :type level: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DatasetTarGZipCompression, self).__init__(**kwargs) + self.level = kwargs.get('level', None) + self.type = 'TarGZip' + + class DatasetZipDeflateCompression(DatasetCompression): """The ZipDeflate compression method used on a dataset. @@ -11081,6 +11152,15 @@ class DelimitedTextWriteSettings(FormatWriteSettings): :param file_extension: Required. The file extension used to create the files. Type: string (or Expression with resultType string). 
:type file_extension: object + :param max_rows_per_file: Limit the written file's row count to be smaller + than or equal to the specified count. Type: integer (or Expression with + resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + _. when copy from non-file based + store without partitionOptions. Type: string (or Expression with + resultType string). + :type file_name_prefix: object """ _validation = { @@ -11093,12 +11173,16 @@ class DelimitedTextWriteSettings(FormatWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, } def __init__(self, **kwargs): super(DelimitedTextWriteSettings, self).__init__(**kwargs) self.quote_all_text = kwargs.get('quote_all_text', None) self.file_extension = kwargs.get('file_extension', None) + self.max_rows_per_file = kwargs.get('max_rows_per_file', None) + self.file_name_prefix = kwargs.get('file_name_prefix', None) self.type = 'DelimitedTextWriteSettings' @@ -23093,6 +23177,8 @@ class OrcSink(CopySink): :type type: str :param store_settings: ORC store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :param format_settings: ORC format settings. + :type format_settings: ~azure.mgmt.datafactory.models.OrcWriteSettings """ _validation = { @@ -23108,11 +23194,13 @@ class OrcSink(CopySink): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'OrcWriteSettings'}, } def __init__(self, **kwargs): super(OrcSink, self).__init__(**kwargs) self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) self.type = 'OrcSink' @@ -23167,6 +23255,45 @@ def __init__(self, **kwargs): self.type = 'OrcSource' +class OrcWriteSettings(FormatWriteSettings): + """Orc write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param max_rows_per_file: Limit the written file's row count to be smaller + than or equal to the specified count. Type: integer (or Expression with + resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + _. when copy from non-file based + store without partitionOptions. Type: string (or Expression with + resultType string). 
+ :type file_name_prefix: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OrcWriteSettings, self).__init__(**kwargs) + self.max_rows_per_file = kwargs.get('max_rows_per_file', None) + self.file_name_prefix = kwargs.get('file_name_prefix', None) + self.type = 'OrcWriteSettings' + + class PackageStore(Model): """Package store for the SSIS integration runtime. @@ -23351,6 +23478,8 @@ class ParquetSink(CopySink): :type type: str :param store_settings: Parquet store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :param format_settings: Parquet format settings. + :type format_settings: ~azure.mgmt.datafactory.models.ParquetWriteSettings """ _validation = { @@ -23366,11 +23495,13 @@ class ParquetSink(CopySink): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'ParquetWriteSettings'}, } def __init__(self, **kwargs): super(ParquetSink, self).__init__(**kwargs) self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) self.type = 'ParquetSink' @@ -23425,6 +23556,45 @@ def __init__(self, **kwargs): self.type = 'ParquetSource' +class ParquetWriteSettings(FormatWriteSettings): + """Parquet write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param max_rows_per_file: Limit the written file's row count to be smaller + than or equal to the specified count. Type: integer (or Expression with + resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + _. when copy from non-file based + store without partitionOptions. Type: string (or Expression with + resultType string). + :type file_name_prefix: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ParquetWriteSettings, self).__init__(**kwargs) + self.max_rows_per_file = kwargs.get('max_rows_per_file', None) + self.file_name_prefix = kwargs.get('file_name_prefix', None) + self.type = 'ParquetWriteSettings' + + class PaypalLinkedService(LinkedService): """Paypal Service linked service. @@ -32041,6 +32211,70 @@ def __init__(self, **kwargs): self.type = 'SybaseTable' +class TarGZipReadSettings(CompressionReadSettings): + """The TarGZip compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. 
Constant filled by server. + :type type: str + :param preserve_compression_file_name_as_folder: Preserve the compression + file name as folder path. Type: boolean (or Expression with resultType + boolean). + :type preserve_compression_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(TarGZipReadSettings, self).__init__(**kwargs) + self.preserve_compression_file_name_as_folder = kwargs.get('preserve_compression_file_name_as_folder', None) + self.type = 'TarGZipReadSettings' + + +class TarReadSettings(CompressionReadSettings): + """The Tar compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param preserve_compression_file_name_as_folder: Preserve the compression + file name as folder path. Type: boolean (or Expression with resultType + boolean). + :type preserve_compression_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(TarReadSettings, self).__init__(**kwargs) + self.preserve_compression_file_name_as_folder = kwargs.get('preserve_compression_file_name_as_folder', None) + self.type = 'TarReadSettings' + + class TeradataLinkedService(LinkedService): """Linked service for Teradata data source. diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py index 8723c634a3e4..c996a26c169e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py @@ -1880,7 +1880,7 @@ class FormatWriteSettings(Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: JsonWriteSettings, DelimitedTextWriteSettings, - AvroWriteSettings + OrcWriteSettings, AvroWriteSettings, ParquetWriteSettings All required parameters must be populated in order to send to Azure. @@ -1901,7 +1901,7 @@ class FormatWriteSettings(Model): } _subtype_map = { - 'type': {'JsonWriteSettings': 'JsonWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'AvroWriteSettings': 'AvroWriteSettings'} + 'type': {'JsonWriteSettings': 'JsonWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'OrcWriteSettings': 'OrcWriteSettings', 'AvroWriteSettings': 'AvroWriteSettings', 'ParquetWriteSettings': 'ParquetWriteSettings'} } def __init__(self, *, additional_properties=None, **kwargs) -> None: @@ -1925,6 +1925,15 @@ class AvroWriteSettings(FormatWriteSettings): :type record_name: str :param record_namespace: Record namespace in the write result. 
:type record_namespace: str + :param max_rows_per_file: Limit the written file's row count to be smaller + than or equal to the specified count. Type: integer (or Expression with + resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + _. when copy from non-file based + store without partitionOptions. Type: string (or Expression with + resultType string). + :type file_name_prefix: object """ _validation = { @@ -1936,12 +1945,16 @@ class AvroWriteSettings(FormatWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'record_name': {'key': 'recordName', 'type': 'str'}, 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, record_name: str=None, record_namespace: str=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, record_name: str=None, record_namespace: str=None, max_rows_per_file=None, file_name_prefix=None, **kwargs) -> None: super(AvroWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) self.record_name = record_name self.record_namespace = record_namespace + self.max_rows_per_file = max_rows_per_file + self.file_name_prefix = file_name_prefix self.type = 'AvroWriteSettings' @@ -7998,7 +8011,8 @@ class CompressionReadSettings(Model): """Compression read settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ZipDeflateReadSettings + sub-classes are: TarGZipReadSettings, TarReadSettings, + ZipDeflateReadSettings All required parameters must be populated in order to send to Azure. @@ -8019,7 +8033,7 @@ class CompressionReadSettings(Model): } _subtype_map = { - 'type': {'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} + 'type': {'TarGZipReadSettings': 'TarGZipReadSettings', 'TarReadSettings': 'TarReadSettings', 'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} } def __init__(self, *, additional_properties=None, **kwargs) -> None: @@ -10220,7 +10234,8 @@ class DatasetCompression(Model): """The compression method used on a dataset. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DatasetZipDeflateCompression, DatasetDeflateCompression, + sub-classes are: DatasetTarGZipCompression, DatasetTarCompression, + DatasetZipDeflateCompression, DatasetDeflateCompression, DatasetGZipCompression, DatasetBZip2Compression All required parameters must be populated in order to send to Azure. @@ -10242,7 +10257,7 @@ class DatasetCompression(Model): } _subtype_map = { - 'type': {'ZipDeflate': 'DatasetZipDeflateCompression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'BZip2': 'DatasetBZip2Compression'} + 'type': {'TarGZip': 'DatasetTarGZipCompression', 'Tar': 'DatasetTarCompression', 'ZipDeflate': 'DatasetZipDeflateCompression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'BZip2': 'DatasetBZip2Compression'} } def __init__(self, *, additional_properties=None, **kwargs) -> None: @@ -10456,6 +10471,62 @@ def __init__(self, *, properties, **kwargs) -> None: self.properties = properties +class DatasetTarCompression(DatasetCompression): + """The Tar archive method used on a dataset. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(DatasetTarCompression, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'Tar' + + +class DatasetTarGZipCompression(DatasetCompression): + """The TarGZip compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param level: The TarGZip compression level. + :type level: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: + super(DatasetTarGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs) + self.level = level + self.type = 'TarGZip' + + class DatasetZipDeflateCompression(DatasetCompression): """The ZipDeflate compression method used on a dataset. @@ -11081,6 +11152,15 @@ class DelimitedTextWriteSettings(FormatWriteSettings): :param file_extension: Required. The file extension used to create the files. Type: string (or Expression with resultType string). :type file_extension: object + :param max_rows_per_file: Limit the written file's row count to be smaller + than or equal to the specified count. Type: integer (or Expression with + resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + _. when copy from non-file based + store without partitionOptions. Type: string (or Expression with + resultType string). + :type file_name_prefix: object """ _validation = { @@ -11093,12 +11173,16 @@ class DelimitedTextWriteSettings(FormatWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, } - def __init__(self, *, file_extension, additional_properties=None, quote_all_text=None, **kwargs) -> None: + def __init__(self, *, file_extension, additional_properties=None, quote_all_text=None, max_rows_per_file=None, file_name_prefix=None, **kwargs) -> None: super(DelimitedTextWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) self.quote_all_text = quote_all_text self.file_extension = file_extension + self.max_rows_per_file = max_rows_per_file + self.file_name_prefix = file_name_prefix self.type = 'DelimitedTextWriteSettings' @@ -23093,6 +23177,8 @@ class OrcSink(CopySink): :type type: str :param store_settings: ORC store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :param format_settings: ORC format settings. 
+ :type format_settings: ~azure.mgmt.datafactory.models.OrcWriteSettings """ _validation = { @@ -23108,11 +23194,13 @@ class OrcSink(CopySink): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'OrcWriteSettings'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: super(OrcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings + self.format_settings = format_settings self.type = 'OrcSink' @@ -23167,6 +23255,45 @@ def __init__(self, *, additional_properties=None, source_retry_count=None, sourc self.type = 'OrcSource' +class OrcWriteSettings(FormatWriteSettings): + """Orc write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param max_rows_per_file: Limit the written file's row count to be smaller + than or equal to the specified count. Type: integer (or Expression with + resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + _. when copy from non-file based + store without partitionOptions. Type: string (or Expression with + resultType string). + :type file_name_prefix: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, max_rows_per_file=None, file_name_prefix=None, **kwargs) -> None: + super(OrcWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.max_rows_per_file = max_rows_per_file + self.file_name_prefix = file_name_prefix + self.type = 'OrcWriteSettings' + + class PackageStore(Model): """Package store for the SSIS integration runtime. @@ -23351,6 +23478,8 @@ class ParquetSink(CopySink): :type type: str :param store_settings: Parquet store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :param format_settings: Parquet format settings. 
+ :type format_settings: ~azure.mgmt.datafactory.models.ParquetWriteSettings """ _validation = { @@ -23366,11 +23495,13 @@ class ParquetSink(CopySink): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'ParquetWriteSettings'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings + self.format_settings = format_settings self.type = 'ParquetSink' @@ -23425,6 +23556,45 @@ def __init__(self, *, additional_properties=None, source_retry_count=None, sourc self.type = 'ParquetSource' +class ParquetWriteSettings(FormatWriteSettings): + """Parquet write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param max_rows_per_file: Limit the written file's row count to be smaller + than or equal to the specified count. Type: integer (or Expression with + resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + _. when copy from non-file based + store without partitionOptions. Type: string (or Expression with + resultType string). + :type file_name_prefix: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, max_rows_per_file=None, file_name_prefix=None, **kwargs) -> None: + super(ParquetWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.max_rows_per_file = max_rows_per_file + self.file_name_prefix = file_name_prefix + self.type = 'ParquetWriteSettings' + + class PaypalLinkedService(LinkedService): """Paypal Service linked service. @@ -32041,6 +32211,70 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'SybaseTable' +class TarGZipReadSettings(CompressionReadSettings): + """The TarGZip compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. 
+ :type type: str + :param preserve_compression_file_name_as_folder: Preserve the compression + file name as folder path. Type: boolean (or Expression with resultType + boolean). + :type preserve_compression_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, preserve_compression_file_name_as_folder=None, **kwargs) -> None: + super(TarGZipReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.preserve_compression_file_name_as_folder = preserve_compression_file_name_as_folder + self.type = 'TarGZipReadSettings' + + +class TarReadSettings(CompressionReadSettings): + """The Tar compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param preserve_compression_file_name_as_folder: Preserve the compression + file name as folder path. Type: boolean (or Expression with resultType + boolean). + :type preserve_compression_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, preserve_compression_file_name_as_folder=None, **kwargs) -> None: + super(TarReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.preserve_compression_file_name_as_folder = preserve_compression_file_name_as_folder + self.type = 'TarReadSettings' + + class TeradataLinkedService(LinkedService): """Linked service for Teradata data source. diff --git a/sdk/datafactory/azure-mgmt-datafactory/setup.py b/sdk/datafactory/azure-mgmt-datafactory/setup.py index 2287f0c260af..3b3fd5c68d7e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/setup.py +++ b/sdk/datafactory/azure-mgmt-datafactory/setup.py @@ -36,7 +36,7 @@ pass # Version extraction inspired from 'requests' -with open(os.path.join(package_folder_path, 'version.py') +with open(os.path.join(package_folder_path, 'version.py') if os.path.exists(os.path.join(package_folder_path, 'version.py')) else os.path.join(package_folder_path, '_version.py'), 'r') as fd: version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]',
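
Usage note (not part of the generated patch): the sketch below illustrates how the models introduced in this diff might be used once the SDK is regenerated. It is a minimal example under stated assumptions — the storage settings, values, and the suggestion of where the compression objects get attached (dataset `compression`, a read settings' `compression_properties`) are illustrative, not taken from this patch.

```python
# Illustrative sketch only; assumes azure-mgmt-datafactory with the models
# added in this patch. Values and wiring are hypothetical placeholders.
from azure.mgmt.datafactory.models import (
    AzureBlobFSWriteSettings,
    DatasetTarGZipCompression,
    ParquetSink,
    ParquetWriteSettings,
    TarReadSettings,
)

# Write side: the new ParquetWriteSettings caps the row count per written file
# and sets a file name prefix; per the docstrings above, the prefix applies
# when copying from a non-file-based store without partitionOptions.
parquet_sink = ParquetSink(
    store_settings=AzureBlobFSWriteSettings(),
    format_settings=ParquetWriteSettings(
        max_rows_per_file=1000000,         # integer or ADF expression object
        file_name_prefix="exported_rows",  # string or ADF expression object
    ),
)

# Read side: Tar/TarGZip archives can now be declared as dataset compression
# and unpacked through the new CompressionReadSettings subclasses.
tar_gzip_compression = DatasetTarGZipCompression(level="Optimal")
tar_read_settings = TarReadSettings(
    preserve_compression_file_name_as_folder=False,
)

# These objects would typically be attached to a dataset's compression and to a
# copy source's format read settings before creating the pipeline with
# DataFactoryManagementClient; that wiring is omitted here.
```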