From ec8191250c5d885ce7942fbfe2e4a3e164fd5759 Mon Sep 17 00:00:00 2001 From: xiafu Date: Tue, 26 May 2020 00:31:36 -0700 Subject: [PATCH 1/6] [DataLake][SetExpiry]Set Expiry of DataLake File --- .../azure/storage/blob/_blob_client.py | 5 +- .../_blob_operations_async.py | 2 +- .../_generated/operations/_blob_operations.py | 2 +- .../storage/blob/aio/_blob_client_async.py | 5 +- .../_data_lake_directory_client.py | 3 +- .../filedatalake/_data_lake_file_client.py | 18 +- .../azure/storage/filedatalake/_models.py | 92 ++++---- .../storage/filedatalake/_path_client.py | 1 - .../aio/_data_lake_directory_client_async.py | 3 +- .../aio/_data_lake_file_client_async.py | 18 +- .../filedatalake/aio/_path_client_async.py | 1 - .../recordings/test_file.test_set_expiry.yaml | 208 ++++++++++++++++++ ...test_file_async.test_set_expiry_async.yaml | 140 ++++++++++++ .../tests/test_file.py | 18 ++ .../tests/test_file_async.py | 22 ++ 15 files changed, 469 insertions(+), 69 deletions(-) create mode 100644 sdk/storage/azure-storage-file-datalake/tests/recordings/test_file.test_set_expiry.yaml create mode 100644 sdk/storage/azure-storage-file-datalake/tests/recordings/test_file_async.test_set_expiry_async.yaml diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py index 85837199921b..394b24df90a7 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py @@ -1007,13 +1007,14 @@ def get_blob_properties(self, **kwargs): snapshot=self.snapshot, lease_access_conditions=access_conditions, modified_access_conditions=mod_conditions, - cls=deserialize_blob_properties, + cls=kwargs.pop('cls', None) or deserialize_blob_properties, cpk_info=cpk_info, **kwargs) except StorageErrorException as error: process_storage_error(error) blob_props.name = self.blob_name - blob_props.container = self.container_name + if 
isinstance(blob_props, BlobProperties): + blob_props.container = self.container_name return blob_props # type: ignore def _set_http_headers_options(self, content_settings=None, **kwargs): diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations_async/_blob_operations_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations_async/_blob_operations_async.py index 21750b333fcd..54d6dab2a31b 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations_async/_blob_operations_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations_async/_blob_operations_async.py @@ -1119,7 +1119,7 @@ async def set_expiry(self, expiry_options, timeout=None, request_id=None, expire header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str') header_parameters['x-ms-expiry-option'] = self._serialize.header("expiry_options", expiry_options, 'str') if expires_on is not None: - header_parameters['x-ms-expiry-time'] = self._serialize.header("expires_on", expires_on, 'str') + header_parameters['x-ms-expiry-time'] = self._serialize.header("expires_on", expires_on, 'rfc-1123') # Construct and send request request = self._client.put(url, query_parameters, header_parameters) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py index 66b079abbd07..394a519856a6 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py @@ -1118,7 +1118,7 @@ def set_expiry(self, expiry_options, timeout=None, request_id=None, expires_on=N header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str') 
header_parameters['x-ms-expiry-option'] = self._serialize.header("expiry_options", expiry_options, 'str') if expires_on is not None: - header_parameters['x-ms-expiry-time'] = self._serialize.header("expires_on", expires_on, 'str') + header_parameters['x-ms-expiry-time'] = self._serialize.header("expires_on", expires_on, 'rfc-1123') # Construct and send request request = self._client.put(url, query_parameters, header_parameters) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py index d88075ae87b9..36774d79deea 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py @@ -534,13 +534,14 @@ async def get_blob_properties(self, **kwargs): snapshot=self.snapshot, lease_access_conditions=access_conditions, modified_access_conditions=mod_conditions, - cls=deserialize_blob_properties, + cls=kwargs.pop('cls', None) or deserialize_blob_properties, cpk_info=cpk_info, **kwargs) except StorageErrorException as error: process_storage_error(error) blob_props.name = self.blob_name - blob_props.container = self.container_name + if isinstance(blob_props, BlobProperties): + blob_props.container = self.container_name return blob_props # type: ignore @distributed_trace_async diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_directory_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_directory_client.py index 90c525bedf99..184579cee26b 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_directory_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_directory_client.py @@ -230,8 +230,7 @@ def get_directory_properties(self, **kwargs): :dedent: 4 :caption: Getting the properties for a file/directory. 
""" - blob_properties = self._get_path_properties(**kwargs) - return DirectoryProperties._from_blob_properties(blob_properties) # pylint: disable=protected-access + return self._get_path_properties(cls=DirectoryProperties._deserialize_dir_properties, **kwargs) # pylint: disable=protected-access def rename_directory(self, new_name, # type: str **kwargs): diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py index db076b81d6b3..b76c1634eb23 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py @@ -240,8 +240,22 @@ def get_file_properties(self, **kwargs): :dedent: 4 :caption: Getting the properties for a file. """ - blob_properties = self._get_path_properties(**kwargs) - return FileProperties._from_blob_properties(blob_properties) # pylint: disable=protected-access + return self._get_path_properties(cls=FileProperties._deserialize_file_properties, **kwargs) # pylint: disable=protected-access + + def set_file_expiry(self, expiry_options, expires_on=None, **kwargs): + # type: (**Any) -> None + """Sets the time a file will expire and be deleted. + + :param str expiry_options: + Required. Indicates mode of the expiry time. + Possible values include: 'NeverExpire', 'RelativeToCreation', 'RelativeToNow', 'Absolute' + :param datetime expires_on: + The time to set the file to expiry + :keyword int timeout: + The timeout parameter is expressed in seconds. 
+ :rtype: None + """ + return self._blob_client._client.blob.set_expiry(expiry_options, expires_on=expires_on, **kwargs) # pylint: disable=protected-access def _upload_options( # pylint:disable=too-many-statements self, data, # type: Union[Iterable[AnyStr], IO[AnyStr]] diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_models.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_models.py index 406eedceac74..1b32d27002bf 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_models.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_models.py @@ -19,7 +19,7 @@ from azure.storage.blob import DelimitedJsonDialect as BlobDelimitedJSON from azure.storage.blob._generated.models import StorageErrorException from azure.storage.blob._models import ContainerPropertiesPaged -from ._deserialize import return_headers_and_deserialized_path_list +from ._deserialize import return_headers_and_deserialized_path_list, deserialize_metadata from ._generated.models import Path from ._shared.models import DictMixin from ._shared.response_handlers import process_storage_error @@ -131,33 +131,24 @@ class DirectoryProperties(DictMixin): :var ~azure.storage.filedatalake.ContentSettings content_settings: """ def __init__(self, **kwargs): - super(DirectoryProperties, self).__init__( - **kwargs - ) - self.name = None - self.etag = None + self.name = kwargs.get('name') + self.etag = kwargs.get('ETag') self.deleted = None - self.metadata = None - self.lease = None - self.last_modified = None - self.creation_time = None + self.metadata = kwargs.get('metadata') + self.lease = LeaseProperties(**kwargs) + self.last_modified = kwargs.get('Last-Modified') + self.creation_time = kwargs.get('x-ms-creation-time') self.deleted_time = None self.remaining_retention_days = None @classmethod - def _from_blob_properties(cls, blob_properties): - directory_props = DirectoryProperties() - directory_props.name = 
blob_properties.name - directory_props.etag = blob_properties.etag - directory_props.deleted = blob_properties.deleted - directory_props.metadata = blob_properties.metadata - directory_props.lease = blob_properties.lease - directory_props.lease.__class__ = LeaseProperties - directory_props.last_modified = blob_properties.last_modified - directory_props.creation_time = blob_properties.creation_time - directory_props.deleted_time = blob_properties.deleted_time - directory_props.remaining_retention_days = blob_properties.remaining_retention_days - return directory_props + def _deserialize_dir_properties(cls, response, obj, headers): + metadata = deserialize_metadata(response, obj, headers) + dir_properties = cls( + metadata=metadata, + **headers + ) + return dir_properties class FileProperties(DictMixin): @@ -179,37 +170,32 @@ class FileProperties(DictMixin): :var ~azure.storage.filedatalake.ContentSettings content_settings: """ def __init__(self, **kwargs): - super(FileProperties, self).__init__( - **kwargs - ) - self.name = None - self.etag = None + self.name = kwargs.get('name') + self.etag = kwargs.get('ETag') self.deleted = None - self.metadata = None - self.lease = None - self.last_modified = None - self.creation_time = None - self.size = None + self.metadata = kwargs.get('metadata') + self.lease = LeaseProperties(**kwargs) + self.last_modified = kwargs.get('Last-Modified') + self.creation_time = kwargs.get('x-ms-creation-time') + self.size = kwargs.get('Content-Length') self.deleted_time = None + self.expiry_time = kwargs.get("x-ms-expiry-time") self.remaining_retention_days = None - self.content_settings = None + self.content_settings = ContentSettings(**kwargs) @classmethod - def _from_blob_properties(cls, blob_properties): - file_props = FileProperties() - file_props.name = blob_properties.name - file_props.etag = blob_properties.etag - file_props.deleted = blob_properties.deleted - file_props.metadata = blob_properties.metadata - file_props.lease = 
blob_properties.lease - file_props.lease.__class__ = LeaseProperties - file_props.last_modified = blob_properties.last_modified - file_props.creation_time = blob_properties.creation_time - file_props.size = blob_properties.size - file_props.deleted_time = blob_properties.deleted_time - file_props.remaining_retention_days = blob_properties.remaining_retention_days - file_props.content_settings = blob_properties.content_settings - return file_props + def _deserialize_file_properties(cls, response, obj, headers): + metadata = deserialize_metadata(response, obj, headers) + file_properties = cls( + metadata=metadata, + **headers + ) + if 'Content-Range' in headers: + if 'x-ms-blob-content-md5' in headers: + file_properties.content_settings.content_md5 = headers['x-ms-blob-content-md5'] + else: + file_properties.content_settings.content_md5 = None + return file_properties class PathProperties(object): @@ -328,10 +314,10 @@ class LeaseProperties(BlobLeaseProperties): :ivar str duration: When a file is leased, specifies whether the lease is of infinite or fixed duration. """ - def __init__(self): - self.status = None - self.state = None - self.duration = None + def __init__(self, **kwargs): + super(LeaseProperties, self).__init__( + **kwargs + ) class ContentSettings(BlobContentSettings): diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py index 20ae77bd5f63..e79400fc4f1b 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py @@ -526,7 +526,6 @@ def _get_path_properties(self, **kwargs): :caption: Getting the properties for a file/directory. 
""" path_properties = self._blob_client.get_blob_properties(**kwargs) - path_properties.__class__ = DirectoryProperties return path_properties def set_metadata(self, metadata, # type: Dict[str, str] diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py index 8d4eb3ebbe83..b9313dc2de96 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py @@ -200,8 +200,7 @@ async def get_directory_properties(self, **kwargs): :dedent: 4 :caption: Getting the properties for a file/directory. """ - blob_properties = await self._get_path_properties(**kwargs) - return DirectoryProperties._from_blob_properties(blob_properties) # pylint: disable=protected-access + return await self._get_path_properties(cls=DirectoryProperties._deserialize_dir_properties, **kwargs) # pylint: disable=protected-access async def rename_directory(self, new_name, # type: str **kwargs): diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py index e74cc13442ad..c389805bd333 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py @@ -202,8 +202,22 @@ async def get_file_properties(self, **kwargs): :dedent: 4 :caption: Getting the properties for a file. 
""" - blob_properties = await self._get_path_properties(**kwargs) - return FileProperties._from_blob_properties(blob_properties) # pylint: disable=protected-access + return await self._get_path_properties(cls=FileProperties._deserialize_file_properties, **kwargs) # pylint: disable=protected-access + + async def set_file_expiry(self, expiry_options, expires_on=None, **kwargs): + # type: (**Any) -> None + """Sets the time a file will expire and be deleted. + + :param str expiry_options: + Required. Indicates mode of the expiry time. + Possible values include: 'NeverExpire', 'RelativeToCreation', 'RelativeToNow', 'Absolute' + :param datetime expires_on: + The time to set the file to expiry + :keyword int timeout: + The timeout parameter is expressed in seconds. + :rtype: None + """ + return await self._blob_client._client.blob.set_expiry(expiry_options, expires_on=expires_on, **kwargs) # pylint: disable=protected-access async def upload_data(self, data, # type: Union[AnyStr, Iterable[AnyStr], IO[AnyStr]] length=None, # type: Optional[int] diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.py index 688f2d4c3ffd..935330690145 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.py @@ -358,7 +358,6 @@ async def _get_path_properties(self, **kwargs): :rtype: DirectoryProperties or FileProperties """ path_properties = await self._blob_client.get_blob_properties(**kwargs) - path_properties.__class__ = DirectoryProperties return path_properties async def set_metadata(self, metadata, # type: Dict[str, str] diff --git a/sdk/storage/azure-storage-file-datalake/tests/recordings/test_file.test_set_expiry.yaml 
b/sdk/storage/azure-storage-file-datalake/tests/recordings/test_file.test_set_expiry.yaml new file mode 100644 index 000000000000..f21edffa71c6 --- /dev/null +++ b/sdk/storage/azure-storage-file-datalake/tests/recordings/test_file.test_set_expiry.yaml @@ -0,0 +1,208 @@ +interactions: +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + User-Agent: + - azsdk-python-storage-dfs/12.0.2 Python/3.7.3 (Windows-10-10.0.18362-SP0) + x-ms-client-request-id: + - 08a85f0a-9f22-11ea-b31d-001a7dda7113 + x-ms-date: + - Tue, 26 May 2020 07:25:11 GMT + x-ms-properties: + - '' + x-ms-version: + - '2019-12-12' + method: PUT + uri: https://storagename.dfs.core.windows.net/filesystem84ed0a59/directory84ed0a59?resource=directory + response: + body: + string: '' + headers: + Content-Length: + - '0' + Date: + - Tue, 26 May 2020 07:25:11 GMT + ETag: + - '"0x8D80145ED25E619"' + Last-Modified: + - Tue, 26 May 2020 07:25:11 GMT + Server: + - Windows-Azure-HDFS/1.0 Microsoft-HTTPAPI/2.0 + x-ms-request-id: + - 9388e921-901f-0066-392e-330280000000 + x-ms-version: + - '2019-12-12' + status: + code: 201 + message: Created +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + User-Agent: + - azsdk-python-storage-dfs/12.0.2 Python/3.7.3 (Windows-10-10.0.18362-SP0) + x-ms-client-request-id: + - 08fa93dc-9f22-11ea-bf47-001a7dda7113 + x-ms-content-disposition: + - inline + x-ms-content-language: + - spanish + x-ms-date: + - Tue, 26 May 2020 07:25:11 GMT + x-ms-properties: + - hello=d29ybGQ=,number=NDI= + x-ms-version: + - '2019-12-12' + method: PUT + uri: https://storagename.dfs.core.windows.net/filesystem84ed0a59/directory84ed0a59%2Fnewfile?resource=file + response: + body: + string: '' + headers: + Content-Length: + - '0' + Date: + - Tue, 26 May 2020 07:25:11 GMT + ETag: + - '"0x8D80145ED335B0A"' + 
Last-Modified: + - Tue, 26 May 2020 07:25:11 GMT + Server: + - Windows-Azure-HDFS/1.0 Microsoft-HTTPAPI/2.0 + x-ms-request-id: + - 9388e922-901f-0066-3a2e-330280000000 + x-ms-version: + - '2019-12-12' + status: + code: 201 + message: Created +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + User-Agent: + - azsdk-python-storage-dfs/12.0.2 Python/3.7.3 (Windows-10-10.0.18362-SP0) + x-ms-client-request-id: + - 09086e8c-9f22-11ea-bd32-001a7dda7113 + x-ms-date: + - Tue, 26 May 2020 07:25:11 GMT + x-ms-expiry-option: + - Absolute + x-ms-expiry-time: + - Tue, 26 May 2020 08:25:11 GMT + x-ms-version: + - '2019-12-12' + method: PUT + uri: https://storagename.blob.core.windows.net/filesystem84ed0a59/directory84ed0a59/newfile?comp=expiry + response: + body: + string: '' + headers: + Content-Length: + - '0' + Date: + - Tue, 26 May 2020 07:25:12 GMT + ETag: + - '"0x8D80145ED335B0A"' + Last-Modified: + - Tue, 26 May 2020 07:25:11 GMT + Server: + - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 + x-ms-request-id: + - 8528af15-701e-009a-2a2e-33d379000000 + x-ms-version: + - '2019-12-12' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - azsdk-python-storage-dfs/12.0.2 Python/3.7.3 (Windows-10-10.0.18362-SP0) + x-ms-client-request-id: + - 098639e8-9f22-11ea-8208-001a7dda7113 + x-ms-date: + - Tue, 26 May 2020 07:25:12 GMT + x-ms-version: + - '2019-12-12' + method: HEAD + uri: https://storagename.blob.core.windows.net/filesystem84ed0a59/directory84ed0a59/newfile + response: + body: + string: '' + headers: + Accept-Ranges: + - bytes + Content-Disposition: + - inline + Content-Language: + - spanish + Content-Length: + - '0' + Content-Type: + - application/octet-stream + Date: + - Tue, 26 May 2020 07:25:12 GMT + ETag: + - '"0x8D80145ED335B0A"' + Last-Modified: + - 
Tue, 26 May 2020 07:25:11 GMT + Server: + - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 + x-ms-access-tier: + - Hot + x-ms-access-tier-inferred: + - 'true' + x-ms-blob-type: + - BlockBlob + x-ms-creation-time: + - Tue, 26 May 2020 07:25:11 GMT + x-ms-expiry-time: + - Tue, 26 May 2020 08:25:11 GMT + x-ms-lease-state: + - available + x-ms-lease-status: + - unlocked + x-ms-meta-hello: + - world + x-ms-meta-number: + - '42' + x-ms-request-id: + - 8528af6d-701e-009a-792e-33d379000000 + x-ms-server-encrypted: + - 'true' + x-ms-version: + - '2019-12-12' + status: + code: 200 + message: OK +version: 1 diff --git a/sdk/storage/azure-storage-file-datalake/tests/recordings/test_file_async.test_set_expiry_async.yaml b/sdk/storage/azure-storage-file-datalake/tests/recordings/test_file_async.test_set_expiry_async.yaml new file mode 100644 index 000000000000..eb65df676e12 --- /dev/null +++ b/sdk/storage/azure-storage-file-datalake/tests/recordings/test_file_async.test_set_expiry_async.yaml @@ -0,0 +1,140 @@ +interactions: +- request: + body: null + headers: + User-Agent: + - azsdk-python-storage-dfs/12.0.2 Python/3.7.3 (Windows-10-10.0.18362-SP0) + x-ms-client-request-id: + - 107e1ee6-9f22-11ea-b27b-001a7dda7113 + x-ms-date: + - Tue, 26 May 2020 07:25:24 GMT + x-ms-properties: + - '' + x-ms-version: + - '2019-12-12' + method: PUT + uri: https://storagename.dfs.core.windows.net/filesystem217a0f53/directory217a0f53?resource=directory + response: + body: + string: '' + headers: + Content-Length: '0' + Date: Tue, 26 May 2020 07:25:23 GMT + Etag: '"0x8D80145F4DF209F"' + Last-Modified: Tue, 26 May 2020 07:25:24 GMT + Server: Windows-Azure-HDFS/1.0 Microsoft-HTTPAPI/2.0 + x-ms-request-id: c7f29fe2-401f-0028-752e-332c08000000 + x-ms-version: '2019-12-12' + status: + code: 201 + message: Created + url: https://emilyhnseuap.dfs.core.windows.net/filesystem217a0f53/directory217a0f53?resource=directory +- request: + body: null + headers: + User-Agent: + - azsdk-python-storage-dfs/12.0.2 
Python/3.7.3 (Windows-10-10.0.18362-SP0) + x-ms-client-request-id: + - 10b47fba-9f22-11ea-993d-001a7dda7113 + x-ms-content-disposition: + - inline + x-ms-content-language: + - spanish + x-ms-date: + - Tue, 26 May 2020 07:25:24 GMT + x-ms-properties: + - hello=d29ybGQ=,number=NDI= + x-ms-version: + - '2019-12-12' + method: PUT + uri: https://storagename.dfs.core.windows.net/filesystem217a0f53/directory217a0f53%2Fnewfile?resource=file + response: + body: + string: '' + headers: + Content-Length: '0' + Date: Tue, 26 May 2020 07:25:24 GMT + Etag: '"0x8D80145F4ECE3A1"' + Last-Modified: Tue, 26 May 2020 07:25:24 GMT + Server: Windows-Azure-HDFS/1.0 Microsoft-HTTPAPI/2.0 + x-ms-request-id: c7f29fe3-401f-0028-762e-332c08000000 + x-ms-version: '2019-12-12' + status: + code: 201 + message: Created + url: https://emilyhnseuap.dfs.core.windows.net/filesystem217a0f53/directory217a0f53%2Fnewfile?resource=file +- request: + body: null + headers: + User-Agent: + - azsdk-python-storage-dfs/12.0.2 Python/3.7.3 (Windows-10-10.0.18362-SP0) + x-ms-client-request-id: + - 10c24164-9f22-11ea-8bbd-001a7dda7113 + x-ms-date: + - Tue, 26 May 2020 07:25:24 GMT + x-ms-expiry-option: + - Absolute + x-ms-expiry-time: + - Tue, 26 May 2020 08:25:24 GMT + x-ms-version: + - '2019-12-12' + method: PUT + uri: https://storagename.blob.core.windows.net/filesystem217a0f53/directory217a0f53/newfile?comp=expiry + response: + body: + string: '' + headers: + Content-Length: '0' + Date: Tue, 26 May 2020 07:25:25 GMT + Etag: '"0x8D80145F4ECE3A1"' + Last-Modified: Tue, 26 May 2020 07:25:24 GMT + Server: Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 + x-ms-request-id: 5cbafaac-901e-0059-6d2e-33ca23000000 + x-ms-version: '2019-12-12' + status: + code: 200 + message: OK + url: https://emilyhnseuap.blob.core.windows.net/filesystem217a0f53/directory217a0f53/newfile?comp=expiry +- request: + body: null + headers: + User-Agent: + - azsdk-python-storage-dfs/12.0.2 Python/3.7.3 (Windows-10-10.0.18362-SP0) + 
x-ms-client-request-id: + - 113b38ca-9f22-11ea-9baf-001a7dda7113 + x-ms-date: + - Tue, 26 May 2020 07:25:25 GMT + x-ms-version: + - '2019-12-12' + method: HEAD + uri: https://storagename.blob.core.windows.net/filesystem217a0f53/directory217a0f53/newfile + response: + body: + string: '' + headers: + Accept-Ranges: bytes + Content-Disposition: inline + Content-Language: spanish + Content-Length: '0' + Content-Type: application/octet-stream + Date: Tue, 26 May 2020 07:25:25 GMT + Etag: '"0x8D80145F4ECE3A1"' + Last-Modified: Tue, 26 May 2020 07:25:24 GMT + Server: Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 + x-ms-access-tier: Hot + x-ms-access-tier-inferred: 'true' + x-ms-blob-type: BlockBlob + x-ms-creation-time: Tue, 26 May 2020 07:25:24 GMT + x-ms-expiry-time: Tue, 26 May 2020 08:25:24 GMT + x-ms-lease-state: available + x-ms-lease-status: unlocked + x-ms-meta-hello: world + x-ms-meta-number: '42' + x-ms-request-id: 5cbafb24-901e-0059-502e-33ca23000000 + x-ms-server-encrypted: 'true' + x-ms-version: '2019-12-12' + status: + code: 200 + message: OK + url: https://emilyhnseuap.blob.core.windows.net/filesystem217a0f53/directory217a0f53/newfile +version: 1 diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_file.py b/sdk/storage/azure-storage-file-datalake/tests/test_file.py index 8b966724ce76..393e0d0565fd 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_file.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_file.py @@ -557,6 +557,24 @@ def test_get_properties(self): self.assertEqual(properties.metadata['hello'], metadata['hello']) self.assertEqual(properties.content_settings.content_language, content_settings.content_language) + @record + def test_set_expiry(self): + # Arrange + directory_client = self._create_directory_and_return_client() + + metadata = {'hello': 'world', 'number': '42'} + content_settings = ContentSettings( + content_language='spanish', + content_disposition='inline') + expires_on = datetime.utcnow() + 
timedelta(hours=1) + file_client = directory_client.create_file("newfile", metadata=metadata, content_settings=content_settings) + file_client.set_file_expiry("Absolute", expires_on=expires_on) + properties = file_client.get_file_properties() + + # Assert + self.assertTrue(properties) + self.assertIsNotNone(properties.expiry_time) + @record def test_rename_file_with_non_used_name(self): file_client = self._create_file_and_return_client() diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_file_async.py b/sdk/storage/azure-storage-file-datalake/tests/test_file_async.py index 8bd1ef819a14..759d996fadd2 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_file_async.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_file_async.py @@ -676,6 +676,28 @@ def test_get_properties_async(self): loop = asyncio.get_event_loop() loop.run_until_complete(self._test_get_properties()) + async def _test_set_expiry(self): + # Arrange + directory_client = await self._create_directory_and_return_client() + + metadata = {'hello': 'world', 'number': '42'} + content_settings = ContentSettings( + content_language='spanish', + content_disposition='inline') + expires_on = datetime.utcnow() + timedelta(hours=1) + file_client = await directory_client.create_file("newfile", metadata=metadata, content_settings=content_settings) + await file_client.set_file_expiry("Absolute", expires_on=expires_on) + properties = await file_client.get_file_properties() + + # Assert + self.assertTrue(properties) + self.assertIsNotNone(properties.expiry_time) + + @record + def test_set_expiry_async(self): + loop = asyncio.get_event_loop() + loop.run_until_complete(self._test_set_expiry()) + async def _test_rename_file_with_non_used_name(self): file_client = await self._create_file_and_return_client() data_bytes = b"abc" From 9bc70d4c6947d8b465b65eed04396c54f039e498 Mon Sep 17 00:00:00 2001 From: xiafu Date: Wed, 27 May 2020 15:47:02 -0700 Subject: [PATCH 2/6] address comments --- 
.../storage/filedatalake/_data_lake_file_client.py | 11 +++++++---- .../filedatalake/aio/_data_lake_file_client_async.py | 11 +++++++---- 2 files changed, 14 insertions(+), 8 deletions(-) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py index b76c1634eb23..735fab1ad852 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py @@ -242,15 +242,18 @@ def get_file_properties(self, **kwargs): """ return self._get_path_properties(cls=FileProperties._deserialize_file_properties, **kwargs) # pylint: disable=protected-access - def set_file_expiry(self, expiry_options, expires_on=None, **kwargs): - # type: (**Any) -> None + def set_file_expiry(self, expiry_options, # type: str + expires_on=None, # type: Optional[Union[datetime, int]] + **kwargs): + # type: (str, Optional[Union[datetime, int]], **Any) -> None """Sets the time a file will expire and be deleted. :param str expiry_options: Required. Indicates mode of the expiry time. Possible values include: 'NeverExpire', 'RelativeToCreation', 'RelativeToNow', 'Absolute' - :param datetime expires_on: - The time to set the file to expiry + :param datetime or int expires_on: + The time to set the file to expiry. + When expiry_options is RelativeTo*, expires_on should be an int in milliseconds :keyword int timeout: The timeout parameter is expressed in seconds. 
:rtype: None diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py index c389805bd333..33d92ab5cf49 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py @@ -204,15 +204,18 @@ async def get_file_properties(self, **kwargs): """ return await self._get_path_properties(cls=FileProperties._deserialize_file_properties, **kwargs) # pylint: disable=protected-access - async def set_file_expiry(self, expiry_options, expires_on=None, **kwargs): - # type: (**Any) -> None + async def set_file_expiry(self, expiry_options, # type: str + expires_on=None, # type: Optional[Union[datetime, int]] + **kwargs): + # type: (str, Optional[Union[datetime, int]], **Any) -> None """Sets the time a file will expire and be deleted. :param str expiry_options: Required. Indicates mode of the expiry time. Possible values include: 'NeverExpire', 'RelativeToCreation', 'RelativeToNow', 'Absolute' - :param datetime expires_on: - The time to set the file to expiry + :param datetime or int expires_on: + The time to set the file to expiry. + When expiry_options is RelativeTo*, expires_on should be an int in milliseconds :keyword int timeout: The timeout parameter is expressed in seconds. 
:rtype: None From c6a8d7d56808a11e821c069cb2f8ee434fc45603 Mon Sep 17 00:00:00 2001 From: xiafu Date: Fri, 11 Sep 2020 01:10:42 -0700 Subject: [PATCH 3/6] use datalake set_expiry operation --- .../azure/storage/blob/_blob_client.py | 2 +- .../azure/storage/filedatalake/__init__.py | 3 +- .../_data_lake_directory_client.py | 3 +- .../filedatalake/_data_lake_file_client.py | 7 +- .../storage/filedatalake/_deserialize.py | 40 +++++++++ .../azure/storage/filedatalake/_download.py | 5 +- .../filedatalake/_file_system_client.py | 3 +- .../filedatalake/_list_paths_helper.py | 68 ++++++++++++++ .../azure/storage/filedatalake/_models.py | 90 ------------------- .../storage/filedatalake/_path_client.py | 3 + .../aio/_data_lake_directory_client_async.py | 3 +- .../aio/_data_lake_file_client_async.py | 4 +- .../filedatalake/aio/_download_async.py | 5 +- 13 files changed, 129 insertions(+), 107 deletions(-) create mode 100644 sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_list_paths_helper.py diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py index 394b24df90a7..6cbc05a1dbe9 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py @@ -577,7 +577,7 @@ def _download_blob_options(self, offset=None, length=None, **kwargs): 'lease_access_conditions': access_conditions, 'modified_access_conditions': mod_conditions, 'cpk_info': cpk_info, - 'cls': deserialize_blob_stream, + 'cls': kwargs.pop('cls', None) or deserialize_blob_stream, 'max_concurrency':kwargs.pop('max_concurrency', 1), 'encoding': kwargs.pop('encoding', None), 'timeout': kwargs.pop('timeout', None), diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/__init__.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/__init__.py index a86368c72c85..ca22597427ad 100644 --- 
a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/__init__.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/__init__.py @@ -18,7 +18,6 @@ DirectoryProperties, FileProperties, PathProperties, - PathPropertiesPaged, LeaseProperties, ContentSettings, AccountSasPermissions, @@ -32,6 +31,7 @@ DelimitedJsonDialect, DataLakeFileQueryError ) +from ._list_paths_helper import PathPropertiesPaged from ._shared_access_signature import generate_account_sas, generate_file_system_sas, generate_directory_sas, \ generate_file_sas @@ -60,7 +60,6 @@ 'DirectoryProperties', 'FileProperties', 'PathProperties', - 'PathPropertiesPaged', 'LeaseProperties', 'ContentSettings', 'AccountSasPermissions', diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_directory_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_directory_client.py index 184579cee26b..beb73248dd06 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_directory_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_directory_client.py @@ -3,6 +3,7 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- +from ._deserialize import deserialize_dir_properties from ._shared.base_client import parse_connection_str from ._data_lake_file_client import DataLakeFileClient from ._models import DirectoryProperties @@ -230,7 +231,7 @@ def get_directory_properties(self, **kwargs): :dedent: 4 :caption: Getting the properties for a file/directory. 
""" - return self._get_path_properties(cls=DirectoryProperties._deserialize_dir_properties, **kwargs) # pylint: disable=protected-access + return self._get_path_properties(cls=deserialize_dir_properties, **kwargs) # pylint: disable=protected-access def rename_directory(self, new_name, # type: str **kwargs): diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py index 735fab1ad852..b349bbb7b34d 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py @@ -16,7 +16,7 @@ from ._download import StorageStreamDownloader from ._path_client import PathClient from ._serialize import get_mod_conditions, get_path_http_headers, get_access_conditions, add_metadata_headers -from ._deserialize import process_storage_error +from ._deserialize import process_storage_error, deserialize_file_properties from ._models import FileProperties, DataLakeFileQueryError @@ -240,7 +240,7 @@ def get_file_properties(self, **kwargs): :dedent: 4 :caption: Getting the properties for a file. """ - return self._get_path_properties(cls=FileProperties._deserialize_file_properties, **kwargs) # pylint: disable=protected-access + return self._get_path_properties(cls=deserialize_file_properties, **kwargs) # pylint: disable=protected-access def set_file_expiry(self, expiry_options, # type: str expires_on=None, # type: Optional[Union[datetime, int]] @@ -258,7 +258,8 @@ def set_file_expiry(self, expiry_options, # type: str The timeout parameter is expressed in seconds. 
:rtype: None """ - return self._blob_client._client.blob.set_expiry(expiry_options, expires_on=expires_on, **kwargs) # pylint: disable=protected-access + return self._datalake_client_for_blob_operation.path\ + .set_expiry(expiry_options, expires_on=expires_on, **kwargs) # pylint: disable=protected-access def _upload_options( # pylint:disable=too-many-statements self, data, # type: Union[Iterable[AnyStr], IO[AnyStr]] diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_deserialize.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_deserialize.py index 9d0881a7229e..f54a82bd0d67 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_deserialize.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_deserialize.py @@ -12,6 +12,7 @@ from azure.core.pipeline.policies import ContentDecodePolicy from azure.core.exceptions import HttpResponseError, DecodeError, ResourceModifiedError, ClientAuthenticationError, \ ResourceNotFoundError, ResourceExistsError +from ._models import FileProperties, DirectoryProperties, LeaseProperties from ._shared.models import StorageErrorCode if TYPE_CHECKING: @@ -20,6 +21,45 @@ _LOGGER = logging.getLogger(__name__) +def deserialize_dir_properties(response, obj, headers): + metadata = deserialize_metadata(response, obj, headers) + dir_properties = DirectoryProperties( + metadata=metadata, + **headers + ) + return dir_properties + + +def deserialize_file_properties(response, obj, headers): + metadata = deserialize_metadata(response, obj, headers) + file_properties = FileProperties( + metadata=metadata, + **headers + ) + if 'Content-Range' in headers: + if 'x-ms-blob-content-md5' in headers: + file_properties.content_settings.content_md5 = headers['x-ms-blob-content-md5'] + else: + file_properties.content_settings.content_md5 = None + return file_properties + + +def from_blob_properties(blob_properties): + file_props = FileProperties() + 
file_props.name = blob_properties.name + file_props.etag = blob_properties.etag + file_props.deleted = blob_properties.deleted + file_props.metadata = blob_properties.metadata + file_props.lease = blob_properties.lease + file_props.lease.__class__ = LeaseProperties + file_props.last_modified = blob_properties.last_modified + file_props.creation_time = blob_properties.creation_time + file_props.size = blob_properties.size + file_props.deleted_time = blob_properties.deleted_time + file_props.remaining_retention_days = blob_properties.remaining_retention_days + file_props.content_settings = blob_properties.content_settings + return file_props + def normalize_headers(headers): normalized = {} for key, value in headers.items(): diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_download.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_download.py index 181b503d8c4a..e4efd8c23dba 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_download.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_download.py @@ -3,8 +3,7 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- - -from ._models import FileProperties +from ._deserialize import from_blob_properties class StorageStreamDownloader(object): @@ -23,7 +22,7 @@ class StorageStreamDownloader(object): def __init__(self, downloader): self._downloader = downloader self.name = self._downloader.name - self.properties = FileProperties._from_blob_properties(self._downloader.properties) # pylint: disable=protected-access + self.properties = from_blob_properties(self._downloader.properties) # pylint: disable=protected-access self.size = self._downloader.size def __len__(self): diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client.py index c29ae03ab2b0..5a8221b99dd5 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client.py @@ -16,7 +16,8 @@ from azure.storage.blob import ContainerClient from ._shared.base_client import StorageAccountHostsMixin, parse_query, parse_connection_str from ._serialize import convert_dfs_url_to_blob_url -from ._models import LocationMode, FileSystemProperties, PathPropertiesPaged, PublicAccess +from ._models import LocationMode, FileSystemProperties, PublicAccess +from ._list_paths_helper import PathPropertiesPaged from ._data_lake_file_client import DataLakeFileClient from ._data_lake_directory_client import DataLakeDirectoryClient from ._data_lake_lease import DataLakeLeaseClient diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_list_paths_helper.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_list_paths_helper.py new file mode 100644 index 000000000000..d29477b29c6c --- /dev/null +++ 
b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_list_paths_helper.py @@ -0,0 +1,68 @@ +from azure.core.paging import PageIterator +from ._generated.models import StorageErrorException +from ._models import PathProperties +from ._deserialize import return_headers_and_deserialized_path_list +from ._generated.models import Path +from ._shared.response_handlers import process_storage_error + + +class PathPropertiesPaged(PageIterator): + """An Iterable of Path properties. + + :ivar str path: Filters the results to return only paths under the specified path. + :ivar int results_per_page: The maximum number of results retrieved per API call. + :ivar str continuation_token: The continuation token to retrieve the next page of results. + :ivar list(~azure.storage.filedatalake.PathProperties) current_page: The current page of listed results. + + :param callable command: Function to retrieve the next page of items. + :param str path: Filters the results to return only paths under the specified path. + :param int max_results: The maximum number of paths to retrieve per + call. + :param str continuation_token: An opaque continuation token. 
+ """ + def __init__( + self, command, + recursive, + path=None, + max_results=None, + continuation_token=None, + upn=None): + super(PathPropertiesPaged, self).__init__( + get_next=self._get_next_cb, + extract_data=self._extract_data_cb, + continuation_token=continuation_token or "" + ) + self._command = command + self.recursive = recursive + self.results_per_page = max_results + self.path = path + self.upn = upn + self.current_page = None + self.path_list = None + + def _get_next_cb(self, continuation_token): + try: + return self._command( + self.recursive, + continuation=continuation_token or None, + path=self.path, + max_results=self.results_per_page, + upn=self.upn, + cls=return_headers_and_deserialized_path_list) + except StorageErrorException as error: + process_storage_error(error) + + def _extract_data_cb(self, get_next_return): + self.path_list, self._response = get_next_return + self.current_page = [self._build_item(item) for item in self.path_list] + + return self._response['continuation'] or None, self.current_page + + @staticmethod + def _build_item(item): + if isinstance(item, PathProperties): + return item + if isinstance(item, Path): + path = PathProperties._from_generated(item) # pylint: disable=protected-access + return path + return item \ No newline at end of file diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_models.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_models.py index 1b32d27002bf..6dc9200f51e6 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_models.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_models.py @@ -7,7 +7,6 @@ # pylint: disable=super-init-not-called, too-many-lines from enum import Enum -from azure.core.paging import PageIterator from azure.storage.blob import LeaseProperties as BlobLeaseProperties from azure.storage.blob import AccountSasPermissions as BlobAccountSasPermissions from azure.storage.blob 
import ResourceTypes as BlobResourceTypes @@ -17,12 +16,8 @@ from azure.storage.blob import AccessPolicy as BlobAccessPolicy from azure.storage.blob import DelimitedTextDialect as BlobDelimitedTextDialect from azure.storage.blob import DelimitedJsonDialect as BlobDelimitedJSON -from azure.storage.blob._generated.models import StorageErrorException from azure.storage.blob._models import ContainerPropertiesPaged -from ._deserialize import return_headers_and_deserialized_path_list, deserialize_metadata -from ._generated.models import Path from ._shared.models import DictMixin -from ._shared.response_handlers import process_storage_error class FileSystemProperties(object): @@ -141,15 +136,6 @@ def __init__(self, **kwargs): self.deleted_time = None self.remaining_retention_days = None - @classmethod - def _deserialize_dir_properties(cls, response, obj, headers): - metadata = deserialize_metadata(response, obj, headers) - dir_properties = cls( - metadata=metadata, - **headers - ) - return dir_properties - class FileProperties(DictMixin): """ @@ -183,20 +169,6 @@ def __init__(self, **kwargs): self.remaining_retention_days = None self.content_settings = ContentSettings(**kwargs) - @classmethod - def _deserialize_file_properties(cls, response, obj, headers): - metadata = deserialize_metadata(response, obj, headers) - file_properties = cls( - metadata=metadata, - **headers - ) - if 'Content-Range' in headers: - if 'x-ms-blob-content-md5' in headers: - file_properties.content_settings.content_md5 = headers['x-ms-blob-content-md5'] - else: - file_properties.content_settings.content_md5 = None - return file_properties - class PathProperties(object): """Path properties listed by get_paths api. @@ -242,68 +214,6 @@ def _from_generated(cls, generated): return path_prop -class PathPropertiesPaged(PageIterator): - """An Iterable of Path properties. - - :ivar str path: Filters the results to return only paths under the specified path. 
- :ivar int results_per_page: The maximum number of results retrieved per API call. - :ivar str continuation_token: The continuation token to retrieve the next page of results. - :ivar list(~azure.storage.filedatalake.PathProperties) current_page: The current page of listed results. - - :param callable command: Function to retrieve the next page of items. - :param str path: Filters the results to return only paths under the specified path. - :param int max_results: The maximum number of psths to retrieve per - call. - :param str continuation_token: An opaque continuation token. - """ - def __init__( - self, command, - recursive, - path=None, - max_results=None, - continuation_token=None, - upn=None): - super(PathPropertiesPaged, self).__init__( - get_next=self._get_next_cb, - extract_data=self._extract_data_cb, - continuation_token=continuation_token or "" - ) - self._command = command - self.recursive = recursive - self.results_per_page = max_results - self.path = path - self.upn = upn - self.current_page = None - self.path_list = None - - def _get_next_cb(self, continuation_token): - try: - return self._command( - self.recursive, - continuation=continuation_token or None, - path=self.path, - max_results=self.results_per_page, - upn=self.upn, - cls=return_headers_and_deserialized_path_list) - except StorageErrorException as error: - process_storage_error(error) - - def _extract_data_cb(self, get_next_return): - self.path_list, self._response = get_next_return - self.current_page = [self._build_item(item) for item in self.path_list] - - return self._response['continuation'] or None, self.current_page - - @staticmethod - def _build_item(item): - if isinstance(item, PathProperties): - return item - if isinstance(item, Path): - path = PathProperties._from_generated(item) # pylint: disable=protected-access - return path - return item - - class LeaseProperties(BlobLeaseProperties): """DataLake Lease Properties. 
diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py index e79400fc4f1b..50a975fbeaa2 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py @@ -79,6 +79,9 @@ def __init__( # ADLS doesn't support secondary endpoint, make sure it's empty self._hosts[LocationMode.SECONDARY] = "" self._client = DataLakeStorageClient(self.url, file_system_name, path_name, pipeline=self._pipeline) + self._datalake_client_for_blob_operation = DataLakeStorageClient(self._blob_client.url, + file_system_name, path_name, + pipeline=self._pipeline) def __exit__(self, *args): self._blob_client.close() diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py index b9313dc2de96..3c4a8546f3a9 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py @@ -8,6 +8,7 @@ from ._data_lake_file_client_async import DataLakeFileClient from .._data_lake_directory_client import DataLakeDirectoryClient as DataLakeDirectoryClientBase from .._models import DirectoryProperties +from .._deserialize import deserialize_dir_properties from ._path_client_async import PathClient @@ -200,7 +201,7 @@ async def get_directory_properties(self, **kwargs): :dedent: 4 :caption: Getting the properties for a file/directory. 
""" - return await self._get_path_properties(cls=DirectoryProperties._deserialize_dir_properties, **kwargs) # pylint: disable=protected-access + return await self._get_path_properties(cls=deserialize_dir_properties, **kwargs) # pylint: disable=protected-access async def rename_directory(self, new_name, # type: str **kwargs): diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py index 33d92ab5cf49..7f6125d49f86 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py @@ -8,7 +8,7 @@ from ._download_async import StorageStreamDownloader from ._path_client_async import PathClient from .._data_lake_file_client import DataLakeFileClient as DataLakeFileClientBase -from .._deserialize import process_storage_error +from .._deserialize import process_storage_error, deserialize_file_properties from .._generated.models import StorageErrorException from .._models import FileProperties from ..aio._upload_helper import upload_datalake_file @@ -202,7 +202,7 @@ async def get_file_properties(self, **kwargs): :dedent: 4 :caption: Getting the properties for a file. 
""" - return await self._get_path_properties(cls=FileProperties._deserialize_file_properties, **kwargs) # pylint: disable=protected-access + return await self._get_path_properties(cls=deserialize_file_properties, **kwargs) # pylint: disable=protected-access async def set_file_expiry(self, expiry_options, # type: str expires_on=None, # type: Optional[Union[datetime, int]] diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_download_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_download_async.py index 2fda96f2b6fd..ea27438b19da 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_download_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_download_async.py @@ -3,8 +3,7 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- - -from .._models import FileProperties +from .._deserialize import from_blob_properties class StorageStreamDownloader(object): @@ -23,7 +22,7 @@ class StorageStreamDownloader(object): def __init__(self, downloader): self._downloader = downloader self.name = self._downloader.name - self.properties = FileProperties._from_blob_properties(self._downloader.properties) # pylint: disable=protected-access + self.properties = from_blob_properties(self._downloader.properties) # pylint: disable=protected-access self.size = self._downloader.size def __len__(self): From 99d2007479a80a513475f842ec2dc3bba4d764d4 Mon Sep 17 00:00:00 2001 From: xiafu Date: Wed, 23 Sep 2020 16:40:12 -0700 Subject: [PATCH 4/6] add serialize rfc1123 and fix pylint --- .../azure/storage/filedatalake/__init__.py | 2 +- .../storage/filedatalake/_data_lake_file_client.py | 10 ++++++++-- .../azure/storage/filedatalake/_list_paths_helper.py | 7 ++++++- .../azure/storage/filedatalake/_serialize.py | 8 ++++++++ 
.../filedatalake/aio/_data_lake_file_client_async.py | 5 +++++ 5 files changed, 28 insertions(+), 4 deletions(-) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/__init__.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/__init__.py index 8299c39f2c4b..c2650dcd855d 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/__init__.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/__init__.py @@ -35,7 +35,7 @@ AccessControlChangeFailure, AccessControlChanges, ) -from ._list_paths_helper import PathPropertiesPaged + from ._shared_access_signature import generate_account_sas, generate_file_system_sas, generate_directory_sas, \ generate_file_sas diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py index f33620491e08..3ca5c1916f4e 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py @@ -21,7 +21,8 @@ from ._generated.models import StorageErrorException from ._download import StorageStreamDownloader from ._path_client import PathClient -from ._serialize import get_mod_conditions, get_path_http_headers, get_access_conditions, add_metadata_headers +from ._serialize import get_mod_conditions, get_path_http_headers, get_access_conditions, add_metadata_headers, \ + convert_datetime_to_rfc1123 from ._deserialize import process_storage_error, deserialize_file_properties from ._models import FileProperties, DataLakeFileQueryError @@ -259,11 +260,16 @@ def set_file_expiry(self, expiry_options, # type: str Possible values include: 'NeverExpire', 'RelativeToCreation', 'RelativeToNow', 'Absolute' :param datetime or int expires_on: The time to set the file to expiry. 
- When expiry_options is RelativeTo*, expires_on should be an int in milliseconds + When expiry_options is RelativeTo*, expires_on should be an int in milliseconds. + If the type of expires_on is datetime, it should be in UTC time. :keyword int timeout: The timeout parameter is expressed in seconds. :rtype: None """ + try: + expires_on = convert_datetime_to_rfc1123(expires_on) + except AttributeError: + expires_on = str(expires_on) return self._datalake_client_for_blob_operation.path\ .set_expiry(expiry_options, expires_on=expires_on, **kwargs) # pylint: disable=protected-access diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_list_paths_helper.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_list_paths_helper.py index d29477b29c6c..1e4b19e2767a 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_list_paths_helper.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_list_paths_helper.py @@ -1,3 +1,8 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- from azure.core.paging import PageIterator from ._generated.models import StorageErrorException from ._models import PathProperties @@ -65,4 +70,4 @@ def _build_item(item): if isinstance(item, Path): path = PathProperties._from_generated(item) # pylint: disable=protected-access return path - return item \ No newline at end of file + return item diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_serialize.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_serialize.py index a75979f07799..9d700bfb029f 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_serialize.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_serialize.py @@ -13,6 +13,14 @@ def convert_dfs_url_to_blob_url(dfs_account_url): return dfs_account_url.replace('.dfs.', '.blob.', 1) +def convert_datetime_to_rfc1123(date): + weekday = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"][date.weekday()] + month = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", + "Oct", "Nov", "Dec"][date.month - 1] + return "%s, %02d %s %04d %02d:%02d:%02d GMT" % (weekday, date.day, month, + date.year, date.hour, date.minute, date.second) + + def add_metadata_headers(metadata=None): # type: (Optional[Dict[str, str]]) -> str headers = list() diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py index e4edad02a474..abb28ee4ca4e 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py @@ -13,6 +13,7 @@ from ._download_async import StorageStreamDownloader from ._path_client_async import PathClient from 
.._data_lake_file_client import DataLakeFileClient as DataLakeFileClientBase +from .._serialize import convert_datetime_to_rfc1123 from .._deserialize import process_storage_error, deserialize_file_properties from .._generated.models import StorageErrorException from .._models import FileProperties @@ -225,6 +226,10 @@ async def set_file_expiry(self, expiry_options, # type: str The timeout parameter is expressed in seconds. :rtype: None """ + try: + expires_on = convert_datetime_to_rfc1123(expires_on) + except AttributeError: + expires_on = str(expires_on) return await self._blob_client._client.blob.set_expiry(expiry_options, expires_on=expires_on, **kwargs) # pylint: disable=protected-access async def upload_data(self, data, # type: Union[AnyStr, Iterable[AnyStr], IO[AnyStr]] From ae9cab548193cc7215eb7b9a429dbc09bc3e7f58 Mon Sep 17 00:00:00 2001 From: xiafu Date: Wed, 23 Sep 2020 18:45:28 -0700 Subject: [PATCH 5/6] fix pylint --- .../storage/filedatalake/aio/_data_lake_file_client_async.py | 3 ++- .../azure/storage/filedatalake/aio/_path_client_async.py | 3 +++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py index abb28ee4ca4e..a8caf16eac72 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py @@ -230,7 +230,8 @@ async def set_file_expiry(self, expiry_options, # type: str expires_on = convert_datetime_to_rfc1123(expires_on) except AttributeError: expires_on = str(expires_on) - return await self._blob_client._client.blob.set_expiry(expiry_options, expires_on=expires_on, **kwargs) # pylint: disable=protected-access + return await 
self._datalake_client_for_blob_operation.path.set_expiry(expiry_options, expires_on=expires_on, + **kwargs) # pylint: disable=protected-access async def upload_data(self, data, # type: Union[AnyStr, Iterable[AnyStr], IO[AnyStr]] length=None, # type: Optional[int] diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.py index 65a0cd63c412..215207bba63f 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.py @@ -45,6 +45,9 @@ def __init__( **kwargs) self._client = DataLakeStorageClient(self.url, file_system_name, path_name, pipeline=self._pipeline) + self._datalake_client_for_blob_operation = DataLakeStorageClient(self._blob_client.url, + file_system_name, path_name, + pipeline=self._pipeline) self._loop = kwargs.get('loop', None) async def __aexit__(self, *args): From beee98aa589fee7acdce26ef80f87497c9194d8f Mon Sep 17 00:00:00 2001 From: xiafu Date: Tue, 29 Sep 2020 11:01:35 -0700 Subject: [PATCH 6/6] remove return type --- .../azure/storage/filedatalake/_data_lake_file_client.py | 4 ++-- .../storage/filedatalake/aio/_data_lake_file_client_async.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py index 3ca5c1916f4e..b4a9b5481f5f 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py @@ -270,8 +270,8 @@ def set_file_expiry(self, expiry_options, # type: str expires_on = convert_datetime_to_rfc1123(expires_on) except 
AttributeError: expires_on = str(expires_on) - return self._datalake_client_for_blob_operation.path\ - .set_expiry(expiry_options, expires_on=expires_on, **kwargs) # pylint: disable=protected-access + self._datalake_client_for_blob_operation.path \ + .set_expiry(expiry_options, expires_on=expires_on, **kwargs) # pylint: disable=protected-access def _upload_options( # pylint:disable=too-many-statements self, data, # type: Union[Iterable[AnyStr], IO[AnyStr]] diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py index a8caf16eac72..d075575dc8c6 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py @@ -230,8 +230,8 @@ async def set_file_expiry(self, expiry_options, # type: str expires_on = convert_datetime_to_rfc1123(expires_on) except AttributeError: expires_on = str(expires_on) - return await self._datalake_client_for_blob_operation.path.set_expiry(expiry_options, expires_on=expires_on, - **kwargs) # pylint: disable=protected-access + await self._datalake_client_for_blob_operation.path.set_expiry(expiry_options, expires_on=expires_on, + **kwargs) # pylint: disable=protected-access async def upload_data(self, data, # type: Union[AnyStr, Iterable[AnyStr], IO[AnyStr]] length=None, # type: Optional[int]