From 268ef0ce499b76355a7b938f808575505bbdf0a7 Mon Sep 17 00:00:00 2001 From: Changlong Liu Date: Fri, 18 Sep 2020 14:16:47 +0800 Subject: [PATCH 1/2] generate --- .../azure-mgmt-streamanalytics/CHANGELOG.md | 5 + .../azure-mgmt-streamanalytics/MANIFEST.in | 5 + .../azure-mgmt-streamanalytics/README.md | 21 + .../azure/mgmt/streamanalytics/__init__.py | 19 + .../mgmt/streamanalytics/_configuration.py | 70 + .../azure/mgmt/streamanalytics/_metadata.json | 68 + .../_stream_analytics_management_client.py | 110 + .../azure/mgmt/streamanalytics/_version.py | 9 + .../mgmt/streamanalytics/aio/__init__.py | 10 + .../streamanalytics/aio/_configuration.py | 66 + .../_stream_analytics_management_client.py | 104 + .../aio/operations/__init__.py | 29 + .../aio/operations/_clusters_operations.py | 692 +++ .../aio/operations/_functions_operations.py | 630 ++ .../aio/operations/_inputs_operations.py | 553 ++ .../aio/operations/_operations.py | 104 + .../aio/operations/_outputs_operations.py | 555 ++ .../_private_endpoints_operations.py | 380 ++ .../operations/_streaming_jobs_operations.py | 805 +++ .../operations/_subscriptions_operations.py | 97 + .../operations/_transformations_operations.py | 283 + .../mgmt/streamanalytics/models/__init__.py | 383 ++ .../mgmt/streamanalytics/models/_models.py | 4712 +++++++++++++++ .../streamanalytics/models/_models_py3.py | 5166 +++++++++++++++++ ...tream_analytics_management_client_enums.py | 148 + .../streamanalytics/operations/__init__.py | 29 + .../operations/_clusters_operations.py | 706 +++ .../operations/_functions_operations.py | 642 ++ .../operations/_inputs_operations.py | 564 ++ .../streamanalytics/operations/_operations.py | 109 + .../operations/_outputs_operations.py | 566 ++ .../_private_endpoints_operations.py | 389 ++ .../operations/_streaming_jobs_operations.py | 821 +++ .../operations/_subscriptions_operations.py | 102 + .../operations/_transformations_operations.py | 290 + .../azure/mgmt/streamanalytics/py.typed | 1 + .../dev_requirements.txt | 1 + .../sdk_packaging.toml | 9 + .../azure-mgmt-streamanalytics/setup.cfg | 2 + .../azure-mgmt-streamanalytics/setup.py | 90 + 40 files changed, 19345 insertions(+) create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/CHANGELOG.md create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/MANIFEST.in create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/README.md create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/__init__.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_configuration.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_metadata.json create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_stream_analytics_management_client.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_version.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/__init__.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_configuration.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_stream_analytics_management_client.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/__init__.py create mode 100644 
sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_clusters_operations.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_functions_operations.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_inputs_operations.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_operations.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_outputs_operations.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_private_endpoints_operations.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_streaming_jobs_operations.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_subscriptions_operations.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_transformations_operations.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/__init__.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models_py3.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_stream_analytics_management_client_enums.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/__init__.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_clusters_operations.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_functions_operations.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_inputs_operations.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_operations.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_outputs_operations.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_private_endpoints_operations.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_streaming_jobs_operations.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_subscriptions_operations.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_transformations_operations.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/py.typed create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/dev_requirements.txt create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/sdk_packaging.toml create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/setup.cfg create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/setup.py diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/CHANGELOG.md b/sdk/streamanalytics/azure-mgmt-streamanalytics/CHANGELOG.md new file mode 100644 index 
000000000000..15d67b7d6167 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/CHANGELOG.md @@ -0,0 +1,5 @@ +# Release History + +## 1.0.0rc1 (2020-09-18) + + - Initial Release diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/MANIFEST.in b/sdk/streamanalytics/azure-mgmt-streamanalytics/MANIFEST.in new file mode 100644 index 000000000000..a3cb07df8765 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/MANIFEST.in @@ -0,0 +1,5 @@ +recursive-include tests *.py *.yaml +include *.md +include azure/__init__.py +include azure/mgmt/__init__.py + diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/README.md b/sdk/streamanalytics/azure-mgmt-streamanalytics/README.md new file mode 100644 index 000000000000..af30d19931aa --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/README.md @@ -0,0 +1,21 @@ +# Microsoft Azure SDK for Python + +This is the Microsoft Azure Stream Analytics Management Client Library. +This package has been tested with Python 2.7, 3.5, 3.6, 3.7 and 3.8. +For a more complete view of Azure libraries, see the [azure sdk python release](https://aka.ms/azsdk/python/all). + + +# Usage + +For code examples, see [Stream Analytics Management](https://docs.microsoft.com/python/api/overview/azure/?view=azure-python-preview) +on docs.microsoft.com. + + +# Provide Feedback + +If you encounter any bugs or have suggestions, please file an issue in the +[Issues](https://github.com/Azure/azure-sdk-for-python/issues) +section of the project. + + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-python%2Fazure-mgmt-streamanalytics%2FREADME.png) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/__init__.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/__init__.py new file mode 100644 index 000000000000..83390f96aa83 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/__init__.py @@ -0,0 +1,19 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._stream_analytics_management_client import StreamAnalyticsManagementClient +from ._version import VERSION + +__version__ = VERSION +__all__ = ['StreamAnalyticsManagementClient'] + +try: + from ._patch import patch_sdk # type: ignore + patch_sdk() +except ImportError: + pass diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_configuration.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_configuration.py new file mode 100644 index 000000000000..1034fe20616d --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_configuration.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import TYPE_CHECKING + +from azure.core.configuration import Configuration +from azure.core.pipeline import policies +from azure.mgmt.core.policies import ARMHttpLoggingPolicy + +from ._version import VERSION + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any + + from azure.core.credentials import TokenCredential + + +class StreamAnalyticsManagementClientConfiguration(Configuration): + """Configuration for StreamAnalyticsManagementClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials.TokenCredential + :param subscription_id: The ID of the target subscription. + :type subscription_id: str + """ + + def __init__( + self, + credential, # type: "TokenCredential" + subscription_id, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + if subscription_id is None: + raise ValueError("Parameter 'subscription_id' must not be None.") + super(StreamAnalyticsManagementClientConfiguration, self).__init__(**kwargs) + + self.credential = credential + self.subscription_id = subscription_id + self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) + kwargs.setdefault('sdk_moniker', 'mgmt-streamanalytics/{}'.format(VERSION)) + self._configure(**kwargs) + + def _configure( + self, + **kwargs # type: Any + ): + # type: (...) -> None + self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get('authentication_policy') + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_metadata.json b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_metadata.json new file mode 100644 index 000000000000..5ff7c85f8f06 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_metadata.json @@ -0,0 +1,68 @@ +{ + "chosen_version": "", + "total_api_version_list": ["2017-04-01-preview", "2020-03-01-preview"], + "client": { + "name": "StreamAnalyticsManagementClient", + "filename": "_stream_analytics_management_client", + "description": "Stream Analytics Client.", + "base_url": "\u0027https://management.azure.com\u0027", + "custom_base_url": null, + "azure_arm": true, + "has_lro_operations": true + }, + "global_parameters": { + 
"sync_method": { + "credential": { + "method_signature": "credential, # type: \"TokenCredential\"", + "description": "Credential needed for the client to connect to Azure.", + "docstring_type": "~azure.core.credentials.TokenCredential", + "required": true + }, + "subscription_id": { + "method_signature": "subscription_id, # type: str", + "description": "The ID of the target subscription.", + "docstring_type": "str", + "required": true + } + }, + "async_method": { + "credential": { + "method_signature": "credential, # type: \"AsyncTokenCredential\"", + "description": "Credential needed for the client to connect to Azure.", + "docstring_type": "~azure.core.credentials_async.AsyncTokenCredential", + "required": true + }, + "subscription_id": { + "method_signature": "subscription_id, # type: str", + "description": "The ID of the target subscription.", + "docstring_type": "str", + "required": true + } + }, + "constant": { + }, + "call": "credential, subscription_id" + }, + "config": { + "credential": true, + "credential_scopes": ["https://management.azure.com/.default"], + "credential_default_policy_type": "BearerTokenCredentialPolicy", + "credential_default_policy_type_has_async_version": true, + "credential_key_header_name": null + }, + "operation_groups": { + "functions": "FunctionsOperations", + "inputs": "InputsOperations", + "outputs": "OutputsOperations", + "streaming_jobs": "StreamingJobsOperations", + "subscriptions": "SubscriptionsOperations", + "transformations": "TransformationsOperations", + "operations": "Operations", + "clusters": "ClustersOperations", + "private_endpoints": "PrivateEndpointsOperations" + }, + "operation_mixins": { + }, + "sync_imports": "None", + "async_imports": "None" +} \ No newline at end of file diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_stream_analytics_management_client.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_stream_analytics_management_client.py new file mode 100644 index 000000000000..7322c3fb1613 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_stream_analytics_management_client.py @@ -0,0 +1,110 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import TYPE_CHECKING + +from azure.mgmt.core import ARMPipelineClient +from msrest import Deserializer, Serializer + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Optional + + from azure.core.credentials import TokenCredential + +from ._configuration import StreamAnalyticsManagementClientConfiguration +from .operations import FunctionsOperations +from .operations import InputsOperations +from .operations import OutputsOperations +from .operations import StreamingJobsOperations +from .operations import SubscriptionsOperations +from .operations import TransformationsOperations +from .operations import Operations +from .operations import ClustersOperations +from .operations import PrivateEndpointsOperations +from . 
import models + + +class StreamAnalyticsManagementClient(object): + """Stream Analytics Client. + + :ivar functions: FunctionsOperations operations + :vartype functions: stream_analytics_management_client.operations.FunctionsOperations + :ivar inputs: InputsOperations operations + :vartype inputs: stream_analytics_management_client.operations.InputsOperations + :ivar outputs: OutputsOperations operations + :vartype outputs: stream_analytics_management_client.operations.OutputsOperations + :ivar streaming_jobs: StreamingJobsOperations operations + :vartype streaming_jobs: stream_analytics_management_client.operations.StreamingJobsOperations + :ivar subscriptions: SubscriptionsOperations operations + :vartype subscriptions: stream_analytics_management_client.operations.SubscriptionsOperations + :ivar transformations: TransformationsOperations operations + :vartype transformations: stream_analytics_management_client.operations.TransformationsOperations + :ivar operations: Operations operations + :vartype operations: stream_analytics_management_client.operations.Operations + :ivar clusters: ClustersOperations operations + :vartype clusters: stream_analytics_management_client.operations.ClustersOperations + :ivar private_endpoints: PrivateEndpointsOperations operations + :vartype private_endpoints: stream_analytics_management_client.operations.PrivateEndpointsOperations + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials.TokenCredential + :param subscription_id: The ID of the target subscription. + :type subscription_id: str + :param str base_url: Service URL + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + """ + + def __init__( + self, + credential, # type: "TokenCredential" + subscription_id, # type: str + base_url=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) 
-> None + if not base_url: + base_url = 'https://management.azure.com' + self._config = StreamAnalyticsManagementClientConfiguration(credential, subscription_id, **kwargs) + self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._serialize.client_side_validation = False + self._deserialize = Deserializer(client_models) + + self.functions = FunctionsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.inputs = InputsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.outputs = OutputsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.streaming_jobs = StreamingJobsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.subscriptions = SubscriptionsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.transformations = TransformationsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.operations = Operations( + self._client, self._config, self._serialize, self._deserialize) + self.clusters = ClustersOperations( + self._client, self._config, self._serialize, self._deserialize) + self.private_endpoints = PrivateEndpointsOperations( + self._client, self._config, self._serialize, self._deserialize) + + def close(self): + # type: () -> None + self._client.close() + + def __enter__(self): + # type: () -> StreamAnalyticsManagementClient + self._client.__enter__() + return self + + def __exit__(self, *exc_details): + # type: (Any) -> None + self._client.__exit__(*exc_details) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_version.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_version.py new file mode 100644 index 000000000000..3b909b5c8886 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_version.py @@ -0,0 +1,9 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +VERSION = "1.0.0rc1" diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/__init__.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/__init__.py new file mode 100644 index 000000000000..372ab5c830f2 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/__init__.py @@ -0,0 +1,10 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from ._stream_analytics_management_client import StreamAnalyticsManagementClient +__all__ = ['StreamAnalyticsManagementClient'] diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_configuration.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_configuration.py new file mode 100644 index 000000000000..444e27397d52 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_configuration.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING + +from azure.core.configuration import Configuration +from azure.core.pipeline import policies +from azure.mgmt.core.policies import ARMHttpLoggingPolicy + +from .._version import VERSION + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential + + +class StreamAnalyticsManagementClientConfiguration(Configuration): + """Configuration for StreamAnalyticsManagementClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param subscription_id: The ID of the target subscription. 
+ :type subscription_id: str + """ + + def __init__( + self, + credential: "AsyncTokenCredential", + subscription_id: str, + **kwargs: Any + ) -> None: + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + if subscription_id is None: + raise ValueError("Parameter 'subscription_id' must not be None.") + super(StreamAnalyticsManagementClientConfiguration, self).__init__(**kwargs) + + self.credential = credential + self.subscription_id = subscription_id + self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) + kwargs.setdefault('sdk_moniker', 'mgmt-streamanalytics/{}'.format(VERSION)) + self._configure(**kwargs) + + def _configure( + self, + **kwargs: Any + ) -> None: + self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get('authentication_policy') + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_stream_analytics_management_client.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_stream_analytics_management_client.py new file mode 100644 index 000000000000..99472b9429a0 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_stream_analytics_management_client.py @@ -0,0 +1,104 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, Optional, TYPE_CHECKING + +from azure.mgmt.core import AsyncARMPipelineClient +from msrest import Deserializer, Serializer + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential + +from ._configuration import StreamAnalyticsManagementClientConfiguration +from .operations import FunctionsOperations +from .operations import InputsOperations +from .operations import OutputsOperations +from .operations import StreamingJobsOperations +from .operations import SubscriptionsOperations +from .operations import TransformationsOperations +from .operations import Operations +from .operations import ClustersOperations +from .operations import PrivateEndpointsOperations +from .. 
import models + + +class StreamAnalyticsManagementClient(object): + """Stream Analytics Client. + + :ivar functions: FunctionsOperations operations + :vartype functions: stream_analytics_management_client.aio.operations.FunctionsOperations + :ivar inputs: InputsOperations operations + :vartype inputs: stream_analytics_management_client.aio.operations.InputsOperations + :ivar outputs: OutputsOperations operations + :vartype outputs: stream_analytics_management_client.aio.operations.OutputsOperations + :ivar streaming_jobs: StreamingJobsOperations operations + :vartype streaming_jobs: stream_analytics_management_client.aio.operations.StreamingJobsOperations + :ivar subscriptions: SubscriptionsOperations operations + :vartype subscriptions: stream_analytics_management_client.aio.operations.SubscriptionsOperations + :ivar transformations: TransformationsOperations operations + :vartype transformations: stream_analytics_management_client.aio.operations.TransformationsOperations + :ivar operations: Operations operations + :vartype operations: stream_analytics_management_client.aio.operations.Operations + :ivar clusters: ClustersOperations operations + :vartype clusters: stream_analytics_management_client.aio.operations.ClustersOperations + :ivar private_endpoints: PrivateEndpointsOperations operations + :vartype private_endpoints: stream_analytics_management_client.aio.operations.PrivateEndpointsOperations + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param subscription_id: The ID of the target subscription. + :type subscription_id: str + :param str base_url: Service URL + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
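+
+    .. admonition:: Example
+
+        A minimal usage sketch. ``DefaultAzureCredential`` comes from the separate
+        ``azure-identity`` package and is only one possible credential; the
+        subscription, resource group, and cluster names below are placeholders::
+
+            from azure.identity.aio import DefaultAzureCredential
+            from azure.mgmt.streamanalytics.aio import StreamAnalyticsManagementClient
+
+            async def main():
+                # "async with" ensures the underlying pipeline is closed on exit.
+                async with StreamAnalyticsManagementClient(
+                    credential=DefaultAzureCredential(),
+                    subscription_id="<subscription-id>",
+                ) as client:
+                    cluster = await client.clusters.get(
+                        resource_group_name="<resource-group>",
+                        cluster_name="<cluster-name>",
+                    )
+                    print(cluster.name)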
+ """ + + def __init__( + self, + credential: "AsyncTokenCredential", + subscription_id: str, + base_url: Optional[str] = None, + **kwargs: Any + ) -> None: + if not base_url: + base_url = 'https://management.azure.com' + self._config = StreamAnalyticsManagementClientConfiguration(credential, subscription_id, **kwargs) + self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._serialize.client_side_validation = False + self._deserialize = Deserializer(client_models) + + self.functions = FunctionsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.inputs = InputsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.outputs = OutputsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.streaming_jobs = StreamingJobsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.subscriptions = SubscriptionsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.transformations = TransformationsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.operations = Operations( + self._client, self._config, self._serialize, self._deserialize) + self.clusters = ClustersOperations( + self._client, self._config, self._serialize, self._deserialize) + self.private_endpoints = PrivateEndpointsOperations( + self._client, self._config, self._serialize, self._deserialize) + + async def close(self) -> None: + await self._client.close() + + async def __aenter__(self) -> "StreamAnalyticsManagementClient": + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details) -> None: + await self._client.__aexit__(*exc_details) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/__init__.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/__init__.py new file mode 100644 index 000000000000..a247559efb05 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/__init__.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from ._functions_operations import FunctionsOperations +from ._inputs_operations import InputsOperations +from ._outputs_operations import OutputsOperations +from ._streaming_jobs_operations import StreamingJobsOperations +from ._subscriptions_operations import SubscriptionsOperations +from ._transformations_operations import TransformationsOperations +from ._operations import Operations +from ._clusters_operations import ClustersOperations +from ._private_endpoints_operations import PrivateEndpointsOperations + +__all__ = [ + 'FunctionsOperations', + 'InputsOperations', + 'OutputsOperations', + 'StreamingJobsOperations', + 'SubscriptionsOperations', + 'TransformationsOperations', + 'Operations', + 'ClustersOperations', + 'PrivateEndpointsOperations', +] diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_clusters_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_clusters_operations.py new file mode 100644 index 000000000000..29088ce6710f --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_clusters_operations.py @@ -0,0 +1,692 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class ClustersOperations: + """ClustersOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~stream_analytics_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. 
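+
+    A short illustrative sketch of a long-running operation, assuming ``client`` is
+    an already-constructed async ``StreamAnalyticsManagementClient`` and the
+    resource names are placeholders::
+
+        # begin_delete returns an AsyncLROPoller; result() waits for completion.
+        poller = await client.clusters.begin_delete(
+            resource_group_name="<resource-group>",
+            cluster_name="<cluster-name>",
+        )
+        await poller.result()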
+ """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def _create_or_update_initial( + self, + resource_group_name: str, + cluster_name: str, + cluster: "models.Cluster", + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + **kwargs + ) -> "models.Cluster": + cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-03-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._create_or_update_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(cluster, 'Cluster') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('Cluster', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('Cluster', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}'} # type: ignore + + async def begin_create_or_update( + self, + resource_group_name: str, + cluster_name: str, + cluster: "models.Cluster", + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + **kwargs + ) -> AsyncLROPoller["models.Cluster"]: + """Creates a Stream Analytics Cluster or replaces an already existing cluster. + + :param resource_group_name: The name of the resource group. 
The name is case insensitive. + :type resource_group_name: str + :param cluster_name: The name of the cluster. + :type cluster_name: str + :param cluster: The definition of the cluster that will be used to create a new cluster or + replace the existing one. + :type cluster: ~stream_analytics_management_client.models.Cluster + :param if_match: The ETag of the resource. Omit this value to always overwrite the current + resource. Specify the last-seen ETag value to prevent accidentally overwriting concurrent + changes. + :type if_match: str + :param if_none_match: Set to '*' to allow a new resource to be created, but to prevent updating + an existing resource. Other values will result in a 412 Precondition Failed response. + :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either Cluster or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~stream_analytics_management_client.models.Cluster] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + cluster_name=cluster_name, + cluster=cluster, + if_match=if_match, + if_none_match=if_none_match, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('Cluster', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}'} # type: ignore + + async def _update_initial( + self, + resource_group_name: str, + cluster_name: str, + cluster: "models.Cluster", + if_match: Optional[str] = None, + **kwargs + ) -> Optional["models.Cluster"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Cluster"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-03-01-preview" + content_type = kwargs.pop("content_type", "application/json") + 
accept = "application/json" + + # Construct URL + url = self._update_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(cluster, 'Cluster') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('Cluster', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}'} # type: ignore + + async def begin_update( + self, + resource_group_name: str, + cluster_name: str, + cluster: "models.Cluster", + if_match: Optional[str] = None, + **kwargs + ) -> AsyncLROPoller["models.Cluster"]: + """Updates an existing cluster. This can be used to partially update (ie. update one or two + properties) a cluster without affecting the rest of the cluster definition. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_name: The name of the cluster. + :type cluster_name: str + :param cluster: The properties specified here will overwrite the corresponding properties in + the existing cluster (ie. Those properties will be updated). + :type cluster: ~stream_analytics_management_client.models.Cluster + :param if_match: The ETag of the resource. Omit this value to always overwrite the current + record set. Specify the last-seen ETag value to prevent accidentally overwriting concurrent + changes. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
+ :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either Cluster or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~stream_analytics_management_client.models.Cluster] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._update_initial( + resource_group_name=resource_group_name, + cluster_name=cluster_name, + cluster=cluster, + if_match=if_match, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('Cluster', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + cluster_name: str, + **kwargs + ) -> "models.Cluster": + """Gets information about the specified cluster. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_name: The name of the cluster. 
+ :type cluster_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Cluster, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.Cluster + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-03-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Cluster', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}'} # type: ignore + + async def _delete_initial( + self, + resource_group_name: str, + cluster_name: str, + **kwargs + ) -> None: + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-03-01-preview" + accept = "application/json" + + # Construct URL + url = self._delete_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await 
self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}'} # type: ignore + + async def begin_delete( + self, + resource_group_name: str, + cluster_name: str, + **kwargs + ) -> AsyncLROPoller[None]: + """Deletes the specified cluster. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_name: The name of the cluster. + :type cluster_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + cluster_name=cluster_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}'} # type: ignore + + def list_by_subscription( + self, + **kwargs + ) -> AsyncIterable["models.ClusterListResult"]: + """Lists all of the clusters in the given subscription. 
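+
+        The clusters are returned as an ``AsyncItemPaged``; no request is sent until
+        iteration begins, and subsequent pages are fetched lazily. For example (a
+        sketch, with ``client`` assumed to be an existing async client instance)::
+
+            async for cluster in client.clusters.list_by_subscription():
+                # Each item is a deserialized Cluster model.
+                print(cluster.id)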
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ClusterListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.ClusterListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ClusterListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-03-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_subscription.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('ClusterListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.Error, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/clusters'} # type: ignore + + def list_by_resource_group( + self, + resource_group_name: str, + **kwargs + ) -> AsyncIterable["models.ClusterListResult"]: + """Lists all of the clusters in the given resource group. + + :param resource_group_name: The name of the resource group. The name is case insensitive. 
+ :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ClusterListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.ClusterListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ClusterListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-03-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('ClusterListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.Error, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters'} # type: ignore + + def list_streaming_jobs( + self, + resource_group_name: str, + cluster_name: str, + **kwargs + ) -> AsyncIterable["models.ClusterJobListResult"]: + """Lists all of the streaming jobs in the given cluster. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_name: The name of the cluster. 
+ :type cluster_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ClusterJobListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.ClusterJobListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ClusterJobListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-03-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_streaming_jobs.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('ClusterJobListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.Error, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_streaming_jobs.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/listStreamingJobs'} # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_functions_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_functions_operations.py new file mode 100644 index 000000000000..1d04fcab6693 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_functions_operations.py @@ -0,0 +1,630 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class FunctionsOperations: + """FunctionsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~stream_analytics_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def create_or_replace( + self, + resource_group_name: str, + job_name: str, + function_name: str, + function: "models.Function", + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + **kwargs + ) -> "models.Function": + """Creates a function or replaces an already existing function under an existing streaming job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param function_name: The name of the function. + :type function_name: str + :param function: The definition of the function that will be used to create a new function or + replace the existing one under the streaming job. + :type function: ~stream_analytics_management_client.models.Function + :param if_match: The ETag of the function. Omit this value to always overwrite the current + function. Specify the last-seen ETag value to prevent accidentally overwriting concurrent + changes. + :type if_match: str + :param if_none_match: Set to '*' to allow a new function to be created, but to prevent updating + an existing function. Other values will result in a 412 Pre-condition Failed response. 
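As a sketch of the optimistic-concurrency flow these two headers enable (the client and the function_definition object are assumed placeholders; the cls callback signature mirrors the generated code below, which passes the pipeline response, the deserialized model, and the response headers):

    def with_headers(pipeline_response, deserialized, headers):
        # Return the model together with the response headers so the ETag is visible.
        return deserialized, headers

    # Create only if absent: If-None-Match '*' yields 412 when the function already exists.
    function, headers = await client.functions.create_or_replace(
        "<resource-group>", "<job>", "<function>", function_definition,
        if_none_match="*", cls=with_headers)

    # Replace only if unchanged since we last saw it.
    await client.functions.create_or_replace(
        "<resource-group>", "<job>", "<function>", function_definition,
        if_match=headers["ETag"])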
+ :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Function, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.Function + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Function"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_replace.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'functionName': self._serialize.url("function_name", function_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(function, 'Function') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Function', pipeline_response) + + if response.status_code == 201: + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Function', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + create_or_replace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}'} # type: ignore + + async def update( + self, + resource_group_name: str, + job_name: str, + function_name: str, + function: "models.Function", + if_match: Optional[str] = None, + **kwargs + ) -> "models.Function": + """Updates an existing function under an existing streaming job. This can be used to partially + update (i.e.
update one or two properties) a function without affecting the rest of the job or + function definition. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param function_name: The name of the function. + :type function_name: str + :param function: A function object. The properties specified here will overwrite the + corresponding properties in the existing function (i.e. those properties will be updated). Any + properties that are set to null here will mean that the corresponding property in the existing + function will remain the same and not change as a result of this PATCH operation. + :type function: ~stream_analytics_management_client.models.Function + :param if_match: The ETag of the function. Omit this value to always overwrite the current + function. Specify the last-seen ETag value to prevent accidentally overwriting concurrent + changes. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Function, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.Function + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Function"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'functionName': self._serialize.url("function_name", function_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(function, 'Function') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Function', pipeline_response) + + if cls: + return
cls(pipeline_response, deserialized, response_headers) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + job_name: str, + function_name: str, + **kwargs + ) -> None: + """Deletes a function from the streaming job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param function_name: The name of the function. + :type function_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'functionName': self._serialize.url("function_name", function_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + job_name: str, + function_name: str, + **kwargs + ) -> "models.Function": + """Gets details about the specified function. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param function_name: The name of the function. 
+ :type function_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Function, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.Function + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Function"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'functionName': self._serialize.url("function_name", function_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Function', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}'} # type: ignore + + def list_by_streaming_job( + self, + resource_group_name: str, + job_name: str, + select: Optional[str] = None, + **kwargs + ) -> AsyncIterable["models.FunctionListResult"]: + """Lists all of the functions under the specified streaming job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param select: The $select OData query parameter. This is a comma-separated list of structural + properties to include in the response, or "\ *" to include all properties. By default, all + properties are returned except diagnostics. Currently only accepts '*\ ' as a valid value. 
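For example (client assumed; per the note above, '*' is currently the only accepted value):

    # Expand the listing to include all properties, diagnostics included.
    async for function in client.functions.list_by_streaming_job(
            "<resource-group>", "<job>", select="*"):
        print(function.name)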
+ :type select: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either FunctionListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.FunctionListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.FunctionListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_streaming_job.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('FunctionListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_streaming_job.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions'} # type: ignore + + async def _test_initial( + self, + resource_group_name: str, + job_name: str, + function_name: str, + function: Optional["models.Function"] = None, + **kwargs + ) -> Optional["models.ResourceTestStatus"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ResourceTestStatus"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._test_initial.metadata['url'] # type: ignore + 
path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'functionName': self._serialize.url("function_name", function_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if function is not None: + body_content = self._serialize.body(function, 'Function') + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('ResourceTestStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _test_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/test'} # type: ignore + + async def begin_test( + self, + resource_group_name: str, + job_name: str, + function_name: str, + function: Optional["models.Function"] = None, + **kwargs + ) -> AsyncLROPoller["models.ResourceTestStatus"]: + """Tests if the information provided for a function is valid. This can range from testing the + connection to the underlying web service behind the function or making sure the function code + provided is syntactically correct. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param function_name: The name of the function. + :type function_name: str + :param function: If the function specified does not already exist, this parameter must contain + the full function definition intended to be tested. If the function specified already exists, + this parameter can be left null to test the existing function as is or if specified, the + properties specified will overwrite the corresponding properties in the existing function + (exactly like a PATCH operation) and the resulting function will be tested. + :type function: ~stream_analytics_management_client.models.Function + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
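A short sketch of these polling knobs (client assumed; the saved token lets a poller be resumed later, for example from another process):

    poller = await client.functions.begin_test(
        "<resource-group>", "<job>", "<function>", polling_interval=10)
    token = poller.continuation_token()  # persist this to resume later
    resumed = await client.functions.begin_test(
        "<resource-group>", "<job>", "<function>", continuation_token=token)
    test_status = await resumed.result()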
+ :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ResourceTestStatus or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~stream_analytics_management_client.models.ResourceTestStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.ResourceTestStatus"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._test_initial( + resource_group_name=resource_group_name, + job_name=job_name, + function_name=function_name, + function=function, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('ResourceTestStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_test.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/test'} # type: ignore + + async def retrieve_default_definition( + self, + resource_group_name: str, + job_name: str, + function_name: str, + function_retrieve_default_definition_parameters: Optional["models.FunctionRetrieveDefaultDefinitionParameters"] = None, + **kwargs + ) -> "models.Function": + """Retrieves the default definition of a function based on the parameters specified. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param function_name: The name of the function. + :type function_name: str + :param function_retrieve_default_definition_parameters: Parameters used to specify the type of + function to retrieve the default definition for. 
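For instance, assuming the JavaScript variant of this parameters model shipped in this package's models (the class name and its keyword arguments below are an assumption, as the model is not shown here):

    # Assumed: models.JavaScriptFunctionRetrieveDefaultDefinitionParameters is one
    # concrete subtype of the abstract parameters model.
    params = models.JavaScriptFunctionRetrieveDefaultDefinitionParameters(
        script="function (a, b) { return a + b; }",  # inline JavaScript UDF body
        udf_type="Scalar",
    )
    default_function = await client.functions.retrieve_default_definition(
        "<resource-group>", "<job>", "<function>",
        function_retrieve_default_definition_parameters=params)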
+ :type function_retrieve_default_definition_parameters: ~stream_analytics_management_client.models.FunctionRetrieveDefaultDefinitionParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Function, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.Function + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Function"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.retrieve_default_definition.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'functionName': self._serialize.url("function_name", function_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if function_retrieve_default_definition_parameters is not None: + body_content = self._serialize.body(function_retrieve_default_definition_parameters, 'FunctionRetrieveDefaultDefinitionParameters') + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Function', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + retrieve_default_definition.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/RetrieveDefaultDefinition'} # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_inputs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_inputs_operations.py new file mode 100644 index 000000000000..f7451e9e8aab --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_inputs_operations.py @@ -0,0 +1,553 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class InputsOperations: + """InputsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~stream_analytics_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def create_or_replace( + self, + resource_group_name: str, + job_name: str, + input_name: str, + input: "models.Input", + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + **kwargs + ) -> "models.Input": + """Creates an input or replaces an already existing input under an existing streaming job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param input_name: The name of the input. + :type input_name: str + :param input: The definition of the input that will be used to create a new input or replace + the existing one under the streaming job. + :type input: ~stream_analytics_management_client.models.Input + :param if_match: The ETag of the input. Omit this value to always overwrite the current input. + Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. + :type if_match: str + :param if_none_match: Set to '*' to allow a new input to be created, but to prevent updating an + existing input. Other values will result in a 412 Pre-condition Failed response. 
+ :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Input, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.Input + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Input"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_replace.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'inputName': self._serialize.url("input_name", input_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(input, 'Input') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Input', pipeline_response) + + if response.status_code == 201: + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Input', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + create_or_replace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}'} # type: ignore + + async def update( + self, + resource_group_name: str, + job_name: str, + input_name: str, + input: "models.Input", + if_match: Optional[str] = None, + **kwargs + ) -> "models.Input": + """Updates an existing input under an existing streaming job. This can be used to partially update + (i.e. update one or two properties) an input without affecting the rest of the job or input + definition.
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param input_name: The name of the input. + :type input_name: str + :param input: An Input object. The properties specified here will overwrite the corresponding + properties in the existing input (i.e. those properties will be updated). Any properties that + are set to null here will mean that the corresponding property in the existing input will + remain the same and not change as a result of this PATCH operation. + :type input: ~stream_analytics_management_client.models.Input + :param if_match: The ETag of the input. Omit this value to always overwrite the current input. + Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Input, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.Input + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Input"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'inputName': self._serialize.url("input_name", input_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(input, 'Input') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Input', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + update.metadata = {'url':
'/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + job_name: str, + input_name: str, + **kwargs + ) -> None: + """Deletes an input from the streaming job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param input_name: The name of the input. + :type input_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'inputName': self._serialize.url("input_name", input_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + job_name: str, + input_name: str, + **kwargs + ) -> "models.Input": + """Gets details about the specified input. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param input_name: The name of the input. 
+ :type input_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Input, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.Input + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Input"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'inputName': self._serialize.url("input_name", input_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Input', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}'} # type: ignore + + def list_by_streaming_job( + self, + resource_group_name: str, + job_name: str, + select: Optional[str] = None, + **kwargs + ) -> AsyncIterable["models.InputListResult"]: + """Lists all of the inputs under the specified streaming job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param select: The $select OData query parameter. This is a comma-separated list of structural + properties to include in the response, or "\ *" to include all properties. By default, all + properties are returned except diagnostics. Currently only accepts '*\ ' as a valid value. 
+ :type select: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either InputListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.InputListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.InputListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_streaming_job.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('InputListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_streaming_job.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs'} # type: ignore + + async def _test_initial( + self, + resource_group_name: str, + job_name: str, + input_name: str, + input: Optional["models.Input"] = None, + **kwargs + ) -> Optional["models.ResourceTestStatus"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ResourceTestStatus"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._test_initial.metadata['url'] # type: ignore + path_format_arguments = { + 
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'inputName': self._serialize.url("input_name", input_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if input is not None: + body_content = self._serialize.body(input, 'Input') + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('ResourceTestStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _test_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}/test'} # type: ignore + + async def begin_test( + self, + resource_group_name: str, + job_name: str, + input_name: str, + input: Optional["models.Input"] = None, + **kwargs + ) -> AsyncLROPoller["models.ResourceTestStatus"]: + """Tests whether an input’s datasource is reachable and usable by the Azure Stream Analytics + service. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param input_name: The name of the input. + :type input_name: str + :param input: If the input specified does not already exist, this parameter must contain the + full input definition intended to be tested. If the input specified already exists, this + parameter can be left null to test the existing input as is or if specified, the properties + specified will overwrite the corresponding properties in the existing input (exactly like a + PATCH operation) and the resulting input will be tested. + :type input: ~stream_analytics_management_client.models.Input + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
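A minimal sketch (client assumed; leaving input unset tests the stored definition as-is, and it is an assumption here that ResourceTestStatus exposes status and error fields mirroring the service payload):

    poller = await client.inputs.begin_test("<resource-group>", "<job>", "<input>")
    test_status = await poller.result()
    if test_status.error:
        print(test_status.status, test_status.error)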
+ :return: An instance of AsyncLROPoller that returns either ResourceTestStatus or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~stream_analytics_management_client.models.ResourceTestStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.ResourceTestStatus"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._test_initial( + resource_group_name=resource_group_name, + job_name=job_name, + input_name=input_name, + input=input, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('ResourceTestStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_test.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}/test'} # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_operations.py new file mode 100644 index 000000000000..6e073694446f --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_operations.py @@ -0,0 +1,104 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class Operations: + """Operations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. 
+ + :ivar models: Alias to model classes used in this operation group. + :type models: ~stream_analytics_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs + ) -> AsyncIterable["models.OperationListResult"]: + """Lists all of the available Stream Analytics related operations. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either OperationListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.OperationListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('OperationListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/providers/Microsoft.StreamAnalytics/operations'} # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_outputs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_outputs_operations.py new file mode 100644 index 000000000000..a9d08028e8aa --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_outputs_operations.py @@ -0,0 +1,555 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class OutputsOperations: + """OutputsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~stream_analytics_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def create_or_replace( + self, + resource_group_name: str, + job_name: str, + output_name: str, + output: "models.Output", + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + **kwargs + ) -> "models.Output": + """Creates an output or replaces an already existing output under an existing streaming job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param output_name: The name of the output. + :type output_name: str + :param output: The definition of the output that will be used to create a new output or replace + the existing one under the streaming job. + :type output: ~stream_analytics_management_client.models.Output + :param if_match: The ETag of the output. Omit this value to always overwrite the current + output. Specify the last-seen ETag value to prevent accidentally overwriting concurrent + changes. + :type if_match: str + :param if_none_match: Set to '*' to allow a new output to be created, but to prevent updating + an existing output. Other values will result in a 412 Pre-condition Failed response. 
+ :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Output, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.Output + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Output"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_replace.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'outputName': self._serialize.url("output_name", output_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(output, 'Output') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Output', pipeline_response) + + if response.status_code == 201: + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Output', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + create_or_replace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}'} # type: ignore + + async def update( + self, + resource_group_name: str, + job_name: str, + output_name: str, + output: "models.Output", + if_match: Optional[str] = None, + **kwargs + ) -> "models.Output": + """Updates an existing output under an existing streaming job. This can be used to partially + update (ie. 
update one or two properties) an output without affecting the rest of the job or + output definition. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param output_name: The name of the output. + :type output_name: str + :param output: An Output object. The properties specified here will overwrite the corresponding + properties in the existing output (ie. Those properties will be updated). Any properties that + are set to null here will mean that the corresponding property in the existing output will + remain the same and not change as a result of this PATCH operation. + :type output: ~stream_analytics_management_client.models.Output + :param if_match: The ETag of the output. Omit this value to always overwrite the current + output. Specify the last-seen ETag value to prevent accidentally overwriting concurrent + changes. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Output, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.Output + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Output"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'outputName': self._serialize.url("output_name", output_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(output, 'Output') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Output', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized,
response_headers) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + job_name: str, + output_name: str, + **kwargs + ) -> None: + """Deletes an output from the streaming job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param output_name: The name of the output. + :type output_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'outputName': self._serialize.url("output_name", output_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + job_name: str, + output_name: str, + **kwargs + ) -> "models.Output": + """Gets details about the specified output. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param output_name: The name of the output. 
+ :type output_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Output, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.Output + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Output"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'outputName': self._serialize.url("output_name", output_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Output', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}'} # type: ignore + + def list_by_streaming_job( + self, + resource_group_name: str, + job_name: str, + select: Optional[str] = None, + **kwargs + ) -> AsyncIterable["models.OutputListResult"]: + """Lists all of the outputs under the specified streaming job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param select: The $select OData query parameter. This is a comma-separated list of structural + properties to include in the response, or "\ *" to include all properties. By default, all + properties are returned except diagnostics. Currently only accepts '*\ ' as a valid value. 
+ :type select: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either OutputListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.OutputListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.OutputListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_streaming_job.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('OutputListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_streaming_job.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs'} # type: ignore + + async def _test_initial( + self, + resource_group_name: str, + job_name: str, + output_name: str, + output: Optional["models.Output"] = None, + **kwargs + ) -> Optional["models.ResourceTestStatus"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ResourceTestStatus"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._test_initial.metadata['url'] # type: ignore + path_format_arguments = { + 
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'outputName': self._serialize.url("output_name", output_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if output is not None: + body_content = self._serialize.body(output, 'Output') + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('ResourceTestStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _test_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}/test'} # type: ignore + + async def begin_test( + self, + resource_group_name: str, + job_name: str, + output_name: str, + output: Optional["models.Output"] = None, + **kwargs + ) -> AsyncLROPoller["models.ResourceTestStatus"]: + """Tests whether an output’s datasource is reachable and usable by the Azure Stream Analytics + service. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param output_name: The name of the output. + :type output_name: str + :param output: If the output specified does not already exist, this parameter must contain the + full output definition intended to be tested. If the output specified already exists, this + parameter can be left null to test the existing output as is or if specified, the properties + specified will overwrite the corresponding properties in the existing output (exactly like a + PATCH operation) and the resulting output will be tested. + :type output: ~stream_analytics_management_client.models.Output + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
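+
+        .. admonition:: Example (illustrative, not part of the generated surface)
+
+            A minimal sketch, assuming an authenticated async
+            ``StreamAnalyticsManagementClient`` named ``client``; names are
+            hypothetical. Omitting ``output`` tests the output as currently
+            defined on the job; passing a full ``Output`` definition tests it
+            before it is created.
+
+            .. code-block:: python
+
+                poller = await client.outputs.begin_test(
+                    resource_group_name="my-rg",
+                    job_name="my-job",
+                    output_name="my-output",
+                )
+                status = await poller.result()
+                print(status.status)  # test outcome, e.g. "TestSucceeded" or "TestFailed"
+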
+ :return: An instance of AsyncLROPoller that returns either ResourceTestStatus or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~stream_analytics_management_client.models.ResourceTestStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.ResourceTestStatus"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._test_initial( + resource_group_name=resource_group_name, + job_name=job_name, + output_name=output_name, + output=output, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('ResourceTestStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_test.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}/test'} # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_private_endpoints_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_private_endpoints_operations.py new file mode 100644 index 000000000000..ba2975090858 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_private_endpoints_operations.py @@ -0,0 +1,380 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... 
import models + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + class PrivateEndpointsOperations: + """PrivateEndpointsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~stream_analytics_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def create_or_update( + self, + resource_group_name: str, + cluster_name: str, + private_endpoint_name: str, + private_endpoint: "models.PrivateEndpoint", + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + **kwargs + ) -> "models.PrivateEndpoint": + """Creates a Stream Analytics Private Endpoint or replaces an already existing Private Endpoint. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_name: The name of the cluster. + :type cluster_name: str + :param private_endpoint_name: The name of the private endpoint. + :type private_endpoint_name: str + :param private_endpoint: The definition of the private endpoint that will be used to create a + new private endpoint or replace the existing one. + :type private_endpoint: ~stream_analytics_management_client.models.PrivateEndpoint + :param if_match: The ETag of the resource. Omit this value to always overwrite the current + record set. Specify the last-seen ETag value to prevent accidentally overwriting concurrent + changes. + :type if_match: str + :param if_none_match: Set to '*' to allow a new resource to be created, but to prevent updating + an existing record set. Other values will result in a 412 Pre-condition Failed response.
+ :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpoint, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.PrivateEndpoint + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpoint"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-03-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + 'privateEndpointName': self._serialize.url("private_endpoint_name", private_endpoint_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(private_endpoint, 'PrivateEndpoint') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('PrivateEndpoint', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('PrivateEndpoint', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + cluster_name: str, + private_endpoint_name: str, + **kwargs + ) -> "models.PrivateEndpoint": + """Gets information about the specified Private Endpoint. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_name: The name of the cluster. 
+ :type cluster_name: str + :param private_endpoint_name: The name of the private endpoint. + :type private_endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpoint, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.PrivateEndpoint + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpoint"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-03-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + 'privateEndpointName': self._serialize.url("private_endpoint_name", private_endpoint_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PrivateEndpoint', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}'} # type: ignore + + async def _delete_initial( + self, + resource_group_name: str, + cluster_name: str, + private_endpoint_name: str, + **kwargs + ) -> None: + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-03-01-preview" + accept = "application/json" + + # Construct URL + url = self._delete_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + 'privateEndpointName': self._serialize.url("private_endpoint_name", private_endpoint_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + 
query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}'} # type: ignore + + async def begin_delete( + self, + resource_group_name: str, + cluster_name: str, + private_endpoint_name: str, + **kwargs + ) -> AsyncLROPoller[None]: + """Delete the specified private endpoint. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_name: The name of the cluster. + :type cluster_name: str + :param private_endpoint_name: The name of the private endpoint. + :type private_endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
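+
+        .. admonition:: Example (illustrative, not part of the generated surface)
+
+            A minimal sketch, assuming an authenticated async
+            ``StreamAnalyticsManagementClient`` named ``client``; names are
+            hypothetical.
+
+            .. code-block:: python
+
+                poller = await client.private_endpoints.begin_delete(
+                    resource_group_name="my-rg",
+                    cluster_name="my-cluster",
+                    private_endpoint_name="my-endpoint",
+                )
+                await poller.result()  # resolves to None once the deletion completes
+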
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + cluster_name=cluster_name, + private_endpoint_name=private_endpoint_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}'} # type: ignore + + def list_by_cluster( + self, + resource_group_name: str, + cluster_name: str, + **kwargs + ) -> AsyncIterable["models.PrivateEndpointListResult"]: + """Lists the private endpoints in the cluster. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_name: The name of the cluster. 
+ :type cluster_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either PrivateEndpointListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.PrivateEndpointListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-03-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_cluster.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('PrivateEndpointListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.Error, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_cluster.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints'} # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_streaming_jobs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_streaming_jobs_operations.py new file mode 100644 index 000000000000..d6cdd13560d3 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_streaming_jobs_operations.py @@ -0,0 +1,805 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class StreamingJobsOperations: + """StreamingJobsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~stream_analytics_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def _create_or_replace_initial( + self, + resource_group_name: str, + job_name: str, + streaming_job: "models.StreamingJob", + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + **kwargs + ) -> "models.StreamingJob": + cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJob"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._create_or_replace_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", 
content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(streaming_job, 'StreamingJob') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('StreamingJob', pipeline_response) + + if response.status_code == 201: + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('StreamingJob', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + _create_or_replace_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}'} # type: ignore + + async def begin_create_or_replace( + self, + resource_group_name: str, + job_name: str, + streaming_job: "models.StreamingJob", + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + **kwargs + ) -> AsyncLROPoller["models.StreamingJob"]: + """Creates a streaming job or replaces an already existing streaming job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param streaming_job: The definition of the streaming job that will be used to create a new + streaming job or replace the existing one. + :type streaming_job: ~stream_analytics_management_client.models.StreamingJob + :param if_match: The ETag of the streaming job. Omit this value to always overwrite the current + record set. Specify the last-seen ETag value to prevent accidentally overwriting concurrent + changes. + :type if_match: str + :param if_none_match: Set to '*' to allow a new streaming job to be created, but to prevent + updating an existing record set. Other values will result in a 412 Pre-condition Failed + response. + :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
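+
+        .. admonition:: Example (illustrative, not part of the generated surface)
+
+            A minimal sketch, assuming an authenticated async
+            ``StreamAnalyticsManagementClient`` named ``client`` and the
+            ``models`` namespace from this package; the names and the bare job
+            definition are hypothetical.
+
+            .. code-block:: python
+
+                job_definition = models.StreamingJob(location="West US")  # populate sku, inputs, etc. as needed
+                poller = await client.streaming_jobs.begin_create_or_replace(
+                    resource_group_name="my-rg",
+                    job_name="my-job",
+                    streaming_job=job_definition,
+                    if_none_match="*",  # fail with 412 instead of replacing an existing job
+                )
+                job = await poller.result()  # the created StreamingJob
+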
+ :return: An instance of AsyncLROPoller that returns either StreamingJob or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~stream_analytics_management_client.models.StreamingJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJob"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_replace_initial( + resource_group_name=resource_group_name, + job_name=job_name, + streaming_job=streaming_job, + if_match=if_match, + if_none_match=if_none_match, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + response_headers = {} + response = pipeline_response.http_response + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('StreamingJob', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + return deserialized + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_replace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}'} # type: ignore + + async def update( + self, + resource_group_name: str, + job_name: str, + streaming_job: "models.StreamingJob", + if_match: Optional[str] = None, + **kwargs + ) -> "models.StreamingJob": + """Updates an existing streaming job. This can be used to partially update (i.e. update one or two + properties) a streaming job without affecting the rest of the job definition. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param streaming_job: A streaming job object. The properties specified here will overwrite the + corresponding properties in the existing streaming job (i.e. those properties will be updated). + Any properties that are set to null here will mean that the corresponding property in the + existing streaming job will remain the same and not change as a result of this PATCH operation. + :type streaming_job: ~stream_analytics_management_client.models.StreamingJob + :param if_match: The ETag of the streaming job. Omit this value to always overwrite the current + streaming job. Specify the last-seen ETag value to prevent accidentally overwriting concurrent + changes.
+ :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StreamingJob, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.StreamingJob + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJob"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(streaming_job, 'StreamingJob') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('StreamingJob', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}'} # type: ignore + + async def _delete_initial( + self, + resource_group_name: str, + job_name: str, + **kwargs + ) -> None: + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + + # Construct URL + url = self._delete_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + url = 
self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}'} # type: ignore + + async def begin_delete( + self, + resource_group_name: str, + job_name: str, + **kwargs + ) -> AsyncLROPoller[None]: + """Deletes a streaming job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + job_name=job_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + job_name: str, + expand: Optional[str] = None, + **kwargs + ) -> "models.StreamingJob": + """Gets details about the specified streaming job. 
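+
+        Example (illustrative sketch only, not AutoRest output; assumes the async client is
+        bound to ``client`` with this group exposed as ``client.streaming_jobs``; the
+        resource names are hypothetical)::
+
+            job = await client.streaming_jobs.get(
+                "example-rg",
+                "example-job",
+                expand="inputs,transformation,outputs,functions",  # include nested definitions
+            )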
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param expand: The $expand OData query parameter. This is a comma-separated list of additional + streaming job properties to include in the response, beyond the default set returned when this + parameter is absent. The default set is all streaming job properties other than 'inputs', + 'transformation', 'outputs', and 'functions'. + :type expand: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StreamingJob, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.StreamingJob + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJob"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if expand is not None: + query_parameters['$expand'] = self._serialize.query("expand", expand, 'str') + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('StreamingJob', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}'} # type: ignore + + def list_by_resource_group( + self, + resource_group_name: str, + expand: Optional[str] = None, + **kwargs + ) -> AsyncIterable["models.StreamingJobListResult"]: + """Lists all of the streaming jobs in the specified resource group. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param expand: The $expand OData query parameter. This is a comma-separated list of additional + streaming job properties to include in the response, beyond the default set returned when this + parameter is absent. 
The default set is all streaming job properties other than 'inputs', + 'transformation', 'outputs', and 'functions'. + :type expand: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either StreamingJobListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.StreamingJobListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJobListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if expand is not None: + query_parameters['$expand'] = self._serialize.query("expand", expand, 'str') + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('StreamingJobListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs'} # type: ignore + + def list( + self, + expand: Optional[str] = None, + **kwargs + ) -> AsyncIterable["models.StreamingJobListResult"]: + """Lists all of the streaming jobs in the given subscription. + + :param expand: The $expand OData query parameter. This is a comma-separated list of additional + streaming job properties to include in the response, beyond the default set returned when this + parameter is absent. The default set is all streaming job properties other than 'inputs', + 'transformation', 'outputs', and 'functions'. 
+ :type expand: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either StreamingJobListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.StreamingJobListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJobListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if expand is not None: + query_parameters['$expand'] = self._serialize.query("expand", expand, 'str') + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('StreamingJobListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/streamingjobs'} # type: ignore + + async def _start_initial( + self, + resource_group_name: str, + job_name: str, + start_job_parameters: Optional["models.StartStreamingJobParameters"] = None, + **kwargs + ) -> None: + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self._start_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + url = 
self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if start_job_parameters is not None: + body_content = self._serialize.body(start_job_parameters, 'StartStreamingJobParameters') + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/start'} # type: ignore + + async def begin_start( + self, + resource_group_name: str, + job_name: str, + start_job_parameters: Optional["models.StartStreamingJobParameters"] = None, + **kwargs + ) -> AsyncLROPoller[None]: + """Starts a streaming job. Once a job is started it will start processing input events and produce + output. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param start_job_parameters: Parameters applicable to a start streaming job operation. + :type start_job_parameters: ~stream_analytics_management_client.models.StartStreamingJobParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
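+
+        Example (illustrative sketch only, not AutoRest output; ``StartStreamingJobParameters``
+        is this package's model and ``JobStartTime`` is one of its ``OutputStartMode`` values;
+        the client binding and resource names are hypothetical)::
+
+            from azure.mgmt.streamanalytics.models import StartStreamingJobParameters
+
+            poller = await client.streaming_jobs.begin_start(
+                "example-rg",
+                "example-job",
+                start_job_parameters=StartStreamingJobParameters(output_start_mode="JobStartTime"),
+            )
+            await poller.result()  # returns once the service reports the job as started
+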
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._start_initial( + resource_group_name=resource_group_name, + job_name=job_name, + start_job_parameters=start_job_parameters, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/start'} # type: ignore + + async def _stop_initial( + self, + resource_group_name: str, + job_name: str, + **kwargs + ) -> None: + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + + # Construct URL + url = self._stop_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/stop'} # type: ignore + + async def begin_stop( + self, + resource_group_name: str, + job_name: str, + **kwargs + ) -> AsyncLROPoller[None]: + """Stops a running streaming job. 
This will cause a running streaming job to stop processing input + events and producing output. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._stop_initial( + resource_group_name=resource_group_name, + job_name=job_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/stop'} # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_subscriptions_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_subscriptions_operations.py new file mode 100644 index 000000000000..7a94e1139245 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_subscriptions_operations.py @@ -0,0 +1,97 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class SubscriptionsOperations: + """SubscriptionsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~stream_analytics_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def list_quotas( + self, + location: str, + **kwargs + ) -> "models.SubscriptionQuotasListResult": + """Retrieves the subscription's current quota information in a particular region. + + :param location: The region in which to retrieve the subscription's quota information. You can + find out which regions Azure Stream Analytics is supported in here: + https://azure.microsoft.com/en-us/regions/. 
+ :type location: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SubscriptionQuotasListResult, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.SubscriptionQuotasListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.SubscriptionQuotasListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + accept = "application/json" + + # Construct URL + url = self.list_quotas.metadata['url'] # type: ignore + path_format_arguments = { + 'location': self._serialize.url("location", location, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('SubscriptionQuotasListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_quotas.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/quotas'} # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_transformations_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_transformations_operations.py new file mode 100644 index 000000000000..066f372f8baa --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_transformations_operations.py @@ -0,0 +1,283 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... 
import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class TransformationsOperations: + """TransformationsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~stream_analytics_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def create_or_replace( + self, + resource_group_name: str, + job_name: str, + transformation_name: str, + transformation: "models.Transformation", + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + **kwargs + ) -> "models.Transformation": + """Creates a transformation or replaces an already existing transformation under an existing + streaming job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param transformation_name: The name of the transformation. + :type transformation_name: str + :param transformation: The definition of the transformation that will be used to create a new + transformation or replace the existing one under the streaming job. + :type transformation: ~stream_analytics_management_client.models.Transformation + :param if_match: The ETag of the transformation. Omit this value to always overwrite the + current transformation. Specify the last-seen ETag value to prevent accidentally overwriting + concurrent changes. + :type if_match: str + :param if_none_match: Set to '*' to allow a new transformation to be created, but to prevent + updating an existing transformation. Other values will result in a 412 Pre-condition Failed + response. 
+ :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Transformation, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.Transformation + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Transformation"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_replace.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'transformationName': self._serialize.url("transformation_name", transformation_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(transformation, 'Transformation') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Transformation', pipeline_response) + + if response.status_code == 201: + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Transformation', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + create_or_replace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}'} # type: ignore + + async def update( + self, + resource_group_name: str, + job_name: str, + transformation_name: str, + transformation: "models.Transformation", + if_match: Optional[str] = None, + **kwargs + ) -> "models.Transformation": + """Updates an existing transformation under an existing 
streaming job. This can be used to + partially update (i.e. update one or two properties) a transformation without affecting the rest of + the job or transformation definition. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param transformation_name: The name of the transformation. + :type transformation_name: str + :param transformation: A Transformation object. The properties specified here will overwrite + the corresponding properties in the existing transformation (i.e. those properties will be + updated). Any properties that are set to null here will mean that the corresponding property in + the existing transformation will remain the same and not change as a result of this PATCH + operation. + :type transformation: ~stream_analytics_management_client.models.Transformation + :param if_match: The ETag of the transformation. Omit this value to always overwrite the + current transformation. Specify the last-seen ETag value to prevent accidentally overwriting + concurrent changes. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Transformation, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.Transformation + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Transformation"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'transformationName': self._serialize.url("transformation_name", transformation_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(transformation, 'Transformation') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + +
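+        # NOTE (hand-written annotation, not AutoRest output): a 200 response carries the
+        # transformation's new ETag; it is surfaced via response_headers below so a custom
+        # `cls` callback can capture it for later If-Match concurrency checks.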
response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Transformation', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + job_name: str, + transformation_name: str, + **kwargs + ) -> "models.Transformation": + """Gets details about the specified transformation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param transformation_name: The name of the transformation. + :type transformation_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Transformation, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.Transformation + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Transformation"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'transformationName': self._serialize.url("transformation_name", transformation_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Transformation', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}'} # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/__init__.py 
b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/__init__.py new file mode 100644 index 000000000000..bdaa063b8a5d --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/__init__.py @@ -0,0 +1,383 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +try: + from ._models_py3 import AggregateFunctionProperties + from ._models_py3 import AvroSerialization + from ._models_py3 import AzureDataLakeStoreOutputDataSource + from ._models_py3 import AzureDataLakeStoreOutputDataSourceProperties + from ._models_py3 import AzureFunctionOutputDataSource + from ._models_py3 import AzureMachineLearningServiceFunctionBinding + from ._models_py3 import AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters + from ._models_py3 import AzureMachineLearningServiceInputColumn + from ._models_py3 import AzureMachineLearningServiceInputs + from ._models_py3 import AzureMachineLearningServiceOutputColumn + from ._models_py3 import AzureMachineLearningStudioFunctionBinding + from ._models_py3 import AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters + from ._models_py3 import AzureMachineLearningStudioInputColumn + from ._models_py3 import AzureMachineLearningStudioInputs + from ._models_py3 import AzureMachineLearningStudioOutputColumn + from ._models_py3 import AzureSqlDatabaseDataSourceProperties + from ._models_py3 import AzureSqlDatabaseOutputDataSource + from ._models_py3 import AzureSqlDatabaseOutputDataSourceProperties + from ._models_py3 import AzureSqlReferenceInputDataSource + from ._models_py3 import AzureSqlReferenceInputDataSourceProperties + from ._models_py3 import AzureSynapseDataSourceProperties + from ._models_py3 import AzureSynapseOutputDataSource + from ._models_py3 import AzureSynapseOutputDataSourceProperties + from ._models_py3 import AzureTableOutputDataSource + from ._models_py3 import BlobDataSourceProperties + from ._models_py3 import BlobOutputDataSource + from ._models_py3 import BlobOutputDataSourceProperties + from ._models_py3 import BlobReferenceInputDataSource + from ._models_py3 import BlobReferenceInputDataSourceProperties + from ._models_py3 import BlobStreamInputDataSource + from ._models_py3 import BlobStreamInputDataSourceProperties + from ._models_py3 import CSharpFunctionBinding + from ._models_py3 import CSharpFunctionRetrieveDefaultDefinitionParameters + from ._models_py3 import Cluster + from ._models_py3 import ClusterInfo + from ._models_py3 import ClusterJob + from ._models_py3 import ClusterJobListResult + from ._models_py3 import ClusterListResult + from ._models_py3 import ClusterProperties + from ._models_py3 import ClusterSku + from ._models_py3 import Compression + from ._models_py3 import CsvSerialization + from ._models_py3 import CustomClrSerialization + from ._models_py3 import DiagnosticCondition + from ._models_py3 import Diagnostics + from ._models_py3 import DocumentDbOutputDataSource + from ._models_py3 import Error + from ._models_py3 import ErrorAutoGenerated + from ._models_py3 import ErrorDetails + from ._models_py3 
import ErrorResponse + from ._models_py3 import EventHubDataSourceProperties + from ._models_py3 import EventHubOutputDataSource + from ._models_py3 import EventHubOutputDataSourceProperties + from ._models_py3 import EventHubStreamInputDataSource + from ._models_py3 import EventHubStreamInputDataSourceProperties + from ._models_py3 import EventHubV2OutputDataSource + from ._models_py3 import EventHubV2StreamInputDataSource + from ._models_py3 import External + from ._models_py3 import Function + from ._models_py3 import FunctionBinding + from ._models_py3 import FunctionInput + from ._models_py3 import FunctionListResult + from ._models_py3 import FunctionOutput + from ._models_py3 import FunctionProperties + from ._models_py3 import FunctionRetrieveDefaultDefinitionParameters + from ._models_py3 import Identity + from ._models_py3 import Input + from ._models_py3 import InputListResult + from ._models_py3 import InputProperties + from ._models_py3 import IoTHubStreamInputDataSource + from ._models_py3 import JavaScriptFunctionBinding + from ._models_py3 import JavaScriptFunctionRetrieveDefaultDefinitionParameters + from ._models_py3 import JobStorageAccount + from ._models_py3 import JsonSerialization + from ._models_py3 import OAuthBasedDataSourceProperties + from ._models_py3 import Operation + from ._models_py3 import OperationDisplay + from ._models_py3 import OperationListResult + from ._models_py3 import Output + from ._models_py3 import OutputDataSource + from ._models_py3 import OutputListResult + from ._models_py3 import ParquetSerialization + from ._models_py3 import PowerBIOutputDataSource + from ._models_py3 import PowerBIOutputDataSourceProperties + from ._models_py3 import PrivateEndpoint + from ._models_py3 import PrivateEndpointListResult + from ._models_py3 import PrivateEndpointProperties + from ._models_py3 import PrivateLinkConnectionState + from ._models_py3 import PrivateLinkServiceConnection + from ._models_py3 import ProxyResource + from ._models_py3 import ReferenceInputDataSource + from ._models_py3 import ReferenceInputProperties + from ._models_py3 import Resource + from ._models_py3 import ResourceTestStatus + from ._models_py3 import ScalarFunctionProperties + from ._models_py3 import Serialization + from ._models_py3 import ServiceBusDataSourceProperties + from ._models_py3 import ServiceBusQueueOutputDataSource + from ._models_py3 import ServiceBusQueueOutputDataSourceProperties + from ._models_py3 import ServiceBusTopicOutputDataSource + from ._models_py3 import ServiceBusTopicOutputDataSourceProperties + from ._models_py3 import StartStreamingJobParameters + from ._models_py3 import StorageAccount + from ._models_py3 import StreamInputDataSource + from ._models_py3 import StreamInputProperties + from ._models_py3 import StreamingJob + from ._models_py3 import StreamingJobListResult + from ._models_py3 import StreamingJobSku + from ._models_py3 import SubResource + from ._models_py3 import SubscriptionQuota + from ._models_py3 import SubscriptionQuotasListResult + from ._models_py3 import TrackedResource + from ._models_py3 import Transformation +except (SyntaxError, ImportError): + from ._models import AggregateFunctionProperties # type: ignore + from ._models import AvroSerialization # type: ignore + from ._models import AzureDataLakeStoreOutputDataSource # type: ignore + from ._models import AzureDataLakeStoreOutputDataSourceProperties # type: ignore + from ._models import AzureFunctionOutputDataSource # type: ignore + from ._models import 
AzureMachineLearningServiceFunctionBinding # type: ignore + from ._models import AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters # type: ignore + from ._models import AzureMachineLearningServiceInputColumn # type: ignore + from ._models import AzureMachineLearningServiceInputs # type: ignore + from ._models import AzureMachineLearningServiceOutputColumn # type: ignore + from ._models import AzureMachineLearningStudioFunctionBinding # type: ignore + from ._models import AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters # type: ignore + from ._models import AzureMachineLearningStudioInputColumn # type: ignore + from ._models import AzureMachineLearningStudioInputs # type: ignore + from ._models import AzureMachineLearningStudioOutputColumn # type: ignore + from ._models import AzureSqlDatabaseDataSourceProperties # type: ignore + from ._models import AzureSqlDatabaseOutputDataSource # type: ignore + from ._models import AzureSqlDatabaseOutputDataSourceProperties # type: ignore + from ._models import AzureSqlReferenceInputDataSource # type: ignore + from ._models import AzureSqlReferenceInputDataSourceProperties # type: ignore + from ._models import AzureSynapseDataSourceProperties # type: ignore + from ._models import AzureSynapseOutputDataSource # type: ignore + from ._models import AzureSynapseOutputDataSourceProperties # type: ignore + from ._models import AzureTableOutputDataSource # type: ignore + from ._models import BlobDataSourceProperties # type: ignore + from ._models import BlobOutputDataSource # type: ignore + from ._models import BlobOutputDataSourceProperties # type: ignore + from ._models import BlobReferenceInputDataSource # type: ignore + from ._models import BlobReferenceInputDataSourceProperties # type: ignore + from ._models import BlobStreamInputDataSource # type: ignore + from ._models import BlobStreamInputDataSourceProperties # type: ignore + from ._models import CSharpFunctionBinding # type: ignore + from ._models import CSharpFunctionRetrieveDefaultDefinitionParameters # type: ignore + from ._models import Cluster # type: ignore + from ._models import ClusterInfo # type: ignore + from ._models import ClusterJob # type: ignore + from ._models import ClusterJobListResult # type: ignore + from ._models import ClusterListResult # type: ignore + from ._models import ClusterProperties # type: ignore + from ._models import ClusterSku # type: ignore + from ._models import Compression # type: ignore + from ._models import CsvSerialization # type: ignore + from ._models import CustomClrSerialization # type: ignore + from ._models import DiagnosticCondition # type: ignore + from ._models import Diagnostics # type: ignore + from ._models import DocumentDbOutputDataSource # type: ignore + from ._models import Error # type: ignore + from ._models import ErrorAutoGenerated # type: ignore + from ._models import ErrorDetails # type: ignore + from ._models import ErrorResponse # type: ignore + from ._models import EventHubDataSourceProperties # type: ignore + from ._models import EventHubOutputDataSource # type: ignore + from ._models import EventHubOutputDataSourceProperties # type: ignore + from ._models import EventHubStreamInputDataSource # type: ignore + from ._models import EventHubStreamInputDataSourceProperties # type: ignore + from ._models import EventHubV2OutputDataSource # type: ignore + from ._models import EventHubV2StreamInputDataSource # type: ignore + from ._models import External # type: ignore + from ._models import 
Function # type: ignore + from ._models import FunctionBinding # type: ignore + from ._models import FunctionInput # type: ignore + from ._models import FunctionListResult # type: ignore + from ._models import FunctionOutput # type: ignore + from ._models import FunctionProperties # type: ignore + from ._models import FunctionRetrieveDefaultDefinitionParameters # type: ignore + from ._models import Identity # type: ignore + from ._models import Input # type: ignore + from ._models import InputListResult # type: ignore + from ._models import InputProperties # type: ignore + from ._models import IoTHubStreamInputDataSource # type: ignore + from ._models import JavaScriptFunctionBinding # type: ignore + from ._models import JavaScriptFunctionRetrieveDefaultDefinitionParameters # type: ignore + from ._models import JobStorageAccount # type: ignore + from ._models import JsonSerialization # type: ignore + from ._models import OAuthBasedDataSourceProperties # type: ignore + from ._models import Operation # type: ignore + from ._models import OperationDisplay # type: ignore + from ._models import OperationListResult # type: ignore + from ._models import Output # type: ignore + from ._models import OutputDataSource # type: ignore + from ._models import OutputListResult # type: ignore + from ._models import ParquetSerialization # type: ignore + from ._models import PowerBIOutputDataSource # type: ignore + from ._models import PowerBIOutputDataSourceProperties # type: ignore + from ._models import PrivateEndpoint # type: ignore + from ._models import PrivateEndpointListResult # type: ignore + from ._models import PrivateEndpointProperties # type: ignore + from ._models import PrivateLinkConnectionState # type: ignore + from ._models import PrivateLinkServiceConnection # type: ignore + from ._models import ProxyResource # type: ignore + from ._models import ReferenceInputDataSource # type: ignore + from ._models import ReferenceInputProperties # type: ignore + from ._models import Resource # type: ignore + from ._models import ResourceTestStatus # type: ignore + from ._models import ScalarFunctionProperties # type: ignore + from ._models import Serialization # type: ignore + from ._models import ServiceBusDataSourceProperties # type: ignore + from ._models import ServiceBusQueueOutputDataSource # type: ignore + from ._models import ServiceBusQueueOutputDataSourceProperties # type: ignore + from ._models import ServiceBusTopicOutputDataSource # type: ignore + from ._models import ServiceBusTopicOutputDataSourceProperties # type: ignore + from ._models import StartStreamingJobParameters # type: ignore + from ._models import StorageAccount # type: ignore + from ._models import StreamInputDataSource # type: ignore + from ._models import StreamInputProperties # type: ignore + from ._models import StreamingJob # type: ignore + from ._models import StreamingJobListResult # type: ignore + from ._models import StreamingJobSku # type: ignore + from ._models import SubResource # type: ignore + from ._models import SubscriptionQuota # type: ignore + from ._models import SubscriptionQuotasListResult # type: ignore + from ._models import TrackedResource # type: ignore + from ._models import Transformation # type: ignore + +from ._stream_analytics_management_client_enums import ( + AuthenticationMode, + ClusterProvisioningState, + ClusterSkuName, + CompatibilityLevel, + ContentStoragePolicy, + Encoding, + EventSerializationType, + EventsOutOfOrderPolicy, + JobState, + JobType, + JsonOutputSerializationFormat, + 
OutputErrorPolicy, + OutputStartMode, + StreamingJobSkuName, +) + +__all__ = [ + 'AggregateFunctionProperties', + 'AvroSerialization', + 'AzureDataLakeStoreOutputDataSource', + 'AzureDataLakeStoreOutputDataSourceProperties', + 'AzureFunctionOutputDataSource', + 'AzureMachineLearningServiceFunctionBinding', + 'AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters', + 'AzureMachineLearningServiceInputColumn', + 'AzureMachineLearningServiceInputs', + 'AzureMachineLearningServiceOutputColumn', + 'AzureMachineLearningStudioFunctionBinding', + 'AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters', + 'AzureMachineLearningStudioInputColumn', + 'AzureMachineLearningStudioInputs', + 'AzureMachineLearningStudioOutputColumn', + 'AzureSqlDatabaseDataSourceProperties', + 'AzureSqlDatabaseOutputDataSource', + 'AzureSqlDatabaseOutputDataSourceProperties', + 'AzureSqlReferenceInputDataSource', + 'AzureSqlReferenceInputDataSourceProperties', + 'AzureSynapseDataSourceProperties', + 'AzureSynapseOutputDataSource', + 'AzureSynapseOutputDataSourceProperties', + 'AzureTableOutputDataSource', + 'BlobDataSourceProperties', + 'BlobOutputDataSource', + 'BlobOutputDataSourceProperties', + 'BlobReferenceInputDataSource', + 'BlobReferenceInputDataSourceProperties', + 'BlobStreamInputDataSource', + 'BlobStreamInputDataSourceProperties', + 'CSharpFunctionBinding', + 'CSharpFunctionRetrieveDefaultDefinitionParameters', + 'Cluster', + 'ClusterInfo', + 'ClusterJob', + 'ClusterJobListResult', + 'ClusterListResult', + 'ClusterProperties', + 'ClusterSku', + 'Compression', + 'CsvSerialization', + 'CustomClrSerialization', + 'DiagnosticCondition', + 'Diagnostics', + 'DocumentDbOutputDataSource', + 'Error', + 'ErrorAutoGenerated', + 'ErrorDetails', + 'ErrorResponse', + 'EventHubDataSourceProperties', + 'EventHubOutputDataSource', + 'EventHubOutputDataSourceProperties', + 'EventHubStreamInputDataSource', + 'EventHubStreamInputDataSourceProperties', + 'EventHubV2OutputDataSource', + 'EventHubV2StreamInputDataSource', + 'External', + 'Function', + 'FunctionBinding', + 'FunctionInput', + 'FunctionListResult', + 'FunctionOutput', + 'FunctionProperties', + 'FunctionRetrieveDefaultDefinitionParameters', + 'Identity', + 'Input', + 'InputListResult', + 'InputProperties', + 'IoTHubStreamInputDataSource', + 'JavaScriptFunctionBinding', + 'JavaScriptFunctionRetrieveDefaultDefinitionParameters', + 'JobStorageAccount', + 'JsonSerialization', + 'OAuthBasedDataSourceProperties', + 'Operation', + 'OperationDisplay', + 'OperationListResult', + 'Output', + 'OutputDataSource', + 'OutputListResult', + 'ParquetSerialization', + 'PowerBIOutputDataSource', + 'PowerBIOutputDataSourceProperties', + 'PrivateEndpoint', + 'PrivateEndpointListResult', + 'PrivateEndpointProperties', + 'PrivateLinkConnectionState', + 'PrivateLinkServiceConnection', + 'ProxyResource', + 'ReferenceInputDataSource', + 'ReferenceInputProperties', + 'Resource', + 'ResourceTestStatus', + 'ScalarFunctionProperties', + 'Serialization', + 'ServiceBusDataSourceProperties', + 'ServiceBusQueueOutputDataSource', + 'ServiceBusQueueOutputDataSourceProperties', + 'ServiceBusTopicOutputDataSource', + 'ServiceBusTopicOutputDataSourceProperties', + 'StartStreamingJobParameters', + 'StorageAccount', + 'StreamInputDataSource', + 'StreamInputProperties', + 'StreamingJob', + 'StreamingJobListResult', + 'StreamingJobSku', + 'SubResource', + 'SubscriptionQuota', + 'SubscriptionQuotasListResult', + 'TrackedResource', + 'Transformation', + 'AuthenticationMode', + 
'ClusterProvisioningState', + 'ClusterSkuName', + 'CompatibilityLevel', + 'ContentStoragePolicy', + 'Encoding', + 'EventSerializationType', + 'EventsOutOfOrderPolicy', + 'JobState', + 'JobType', + 'JsonOutputSerializationFormat', + 'OutputErrorPolicy', + 'OutputStartMode', + 'StreamingJobSkuName', +] diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models.py new file mode 100644 index 000000000000..100ff571855a --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models.py @@ -0,0 +1,4712 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.core.exceptions import HttpResponseError +import msrest.serialization + + +class FunctionProperties(msrest.serialization.Model): + """The properties that are associated with a function. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AggregateFunctionProperties, ScalarFunctionProperties. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of function.Constant filled by server. + :type type: str + :ivar etag: The current entity tag for the function. This is an opaque string. You can use it + to detect whether the resource has changed between requests. You can also use it in the If- + Match or If-None-Match headers for write operations for optimistic concurrency. + :vartype etag: str + :param inputs: + :type inputs: list[~stream_analytics_management_client.models.FunctionInput] + :param output: Describes the output of a function. + :type output: ~stream_analytics_management_client.models.FunctionOutput + :param binding: The physical binding of the function. For example, in the Azure Machine + Learning web service’s case, this describes the endpoint. + :type binding: ~stream_analytics_management_client.models.FunctionBinding + """ + + _validation = { + 'type': {'required': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'inputs': {'key': 'properties.inputs', 'type': '[FunctionInput]'}, + 'output': {'key': 'properties.output', 'type': 'FunctionOutput'}, + 'binding': {'key': 'properties.binding', 'type': 'FunctionBinding'}, + } + + _subtype_map = { + 'type': {'Aggregate': 'AggregateFunctionProperties', 'Scalar': 'ScalarFunctionProperties'} + } + + def __init__( + self, + **kwargs + ): + super(FunctionProperties, self).__init__(**kwargs) + self.type = None # type: Optional[str] + self.etag = None + self.inputs = kwargs.get('inputs', None) + self.output = kwargs.get('output', None) + self.binding = kwargs.get('binding', None) + + +class AggregateFunctionProperties(FunctionProperties): + """The properties that are associated with an aggregate function. 
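+
+    Example (illustrative sketch only; the script body below is a placeholder)::
+
+        from azure.mgmt.streamanalytics.models import (
+            AggregateFunctionProperties,
+            JavaScriptFunctionBinding,
+        )
+
+        # The 'type' discriminator is fixed to 'Aggregate' by __init__, so
+        # only the binding (and optionally inputs/output) is supplied here.
+        props = AggregateFunctionProperties(
+            binding=JavaScriptFunctionBinding(
+                script='function main(values) { return values.length; }',
+            ),
+        )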
+ + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of function.Constant filled by server. + :type type: str + :ivar etag: The current entity tag for the function. This is an opaque string. You can use it + to detect whether the resource has changed between requests. You can also use it in the If- + Match or If-None-Match headers for write operations for optimistic concurrency. + :vartype etag: str + :param inputs: + :type inputs: list[~stream_analytics_management_client.models.FunctionInput] + :param output: Describes the output of a function. + :type output: ~stream_analytics_management_client.models.FunctionOutput + :param binding: The physical binding of the function. For example, in the Azure Machine + Learning web service’s case, this describes the endpoint. + :type binding: ~stream_analytics_management_client.models.FunctionBinding + """ + + _validation = { + 'type': {'required': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'inputs': {'key': 'properties.inputs', 'type': '[FunctionInput]'}, + 'output': {'key': 'properties.output', 'type': 'FunctionOutput'}, + 'binding': {'key': 'properties.binding', 'type': 'FunctionBinding'}, + } + + def __init__( + self, + **kwargs + ): + super(AggregateFunctionProperties, self).__init__(**kwargs) + self.type = 'Aggregate' # type: str + + +class Serialization(msrest.serialization.Model): + """Describes how data from an input is serialized or how data is serialized when written to an output. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AvroSerialization, CsvSerialization, CustomClrSerialization, JsonSerialization, ParquetSerialization. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of serialization that the input or output uses. + Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: + "Csv", "Avro", "Json", "CustomClr", "Parquet". + :type type: str or ~stream_analytics_management_client.models.EventSerializationType + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Avro': 'AvroSerialization', 'Csv': 'CsvSerialization', 'CustomClr': 'CustomClrSerialization', 'Json': 'JsonSerialization', 'Parquet': 'ParquetSerialization'} + } + + def __init__( + self, + **kwargs + ): + super(Serialization, self).__init__(**kwargs) + self.type = None # type: Optional[str] + + +class AvroSerialization(Serialization): + """Describes how data from an input is serialized or how data is serialized when written to an output in Avro format. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of serialization that the input or output uses. + Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: + "Csv", "Avro", "Json", "CustomClr", "Parquet". + :type type: str or ~stream_analytics_management_client.models.EventSerializationType + :param properties: The properties that are associated with the Avro serialization type. + Required on PUT (CreateOrReplace) requests. 
+ :type properties: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AvroSerialization, self).__init__(**kwargs) + self.type = 'Avro' # type: str + self.properties = kwargs.get('properties', None) + + +class OutputDataSource(msrest.serialization.Model): + """Describes the data source that output will be written to. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureFunctionOutputDataSource, AzureDataLakeStoreOutputDataSource, EventHubV2OutputDataSource, EventHubOutputDataSource, ServiceBusQueueOutputDataSource, ServiceBusTopicOutputDataSource, AzureSynapseOutputDataSource, AzureSqlDatabaseOutputDataSource, BlobOutputDataSource, DocumentDbOutputDataSource, AzureTableOutputDataSource, PowerBIOutputDataSource. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of data source output will be written to. Required on + PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Microsoft.AzureFunction': 'AzureFunctionOutputDataSource', 'Microsoft.DataLake/Accounts': 'AzureDataLakeStoreOutputDataSource', 'Microsoft.EventHub/EventHub': 'EventHubV2OutputDataSource', 'Microsoft.ServiceBus/EventHub': 'EventHubOutputDataSource', 'Microsoft.ServiceBus/Queue': 'ServiceBusQueueOutputDataSource', 'Microsoft.ServiceBus/Topic': 'ServiceBusTopicOutputDataSource', 'Microsoft.Sql/Server/DataWarehouse': 'AzureSynapseOutputDataSource', 'Microsoft.Sql/Server/Database': 'AzureSqlDatabaseOutputDataSource', 'Microsoft.Storage/Blob': 'BlobOutputDataSource', 'Microsoft.Storage/DocumentDB': 'DocumentDbOutputDataSource', 'Microsoft.Storage/Table': 'AzureTableOutputDataSource', 'PowerBI': 'PowerBIOutputDataSource'} + } + + def __init__( + self, + **kwargs + ): + super(OutputDataSource, self).__init__(**kwargs) + self.type = None # type: Optional[str] + + +class AzureDataLakeStoreOutputDataSource(OutputDataSource): + """Describes an Azure Data Lake Store output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of data source output will be written to. Required on + PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param refresh_token: A refresh token that can be used to obtain a valid access token that can + then be used to authenticate with the data source. A valid refresh token is currently only + obtainable via the Azure Portal. It is recommended to put a dummy string value here when + creating the data source and then going to the Azure Portal to authenticate the data source + which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) + requests. + :type refresh_token: str + :param token_user_principal_name: The user principal name (UPN) of the user that was used to + obtain the refresh token. Use this property to help remember which user was used to obtain the + refresh token. + :type token_user_principal_name: str + :param token_user_display_name: The user display name of the user that was used to obtain the + refresh token. 
Use this property to help remember which user was used to obtain the refresh + token. + :type token_user_display_name: str + :param account_name: The name of the Azure Data Lake Store account. Required on PUT + (CreateOrReplace) requests. + :type account_name: str + :param tenant_id: The tenant id of the user used to obtain the refresh token. Required on PUT + (CreateOrReplace) requests. + :type tenant_id: str + :param file_path_prefix: The location of the file to which the output should be written to. + Required on PUT (CreateOrReplace) requests. + :type file_path_prefix: str + :param date_format: The date format. Wherever {date} appears in filePathPrefix, the value of + this property is used as the date format instead. + :type date_format: str + :param time_format: The time format. Wherever {time} appears in filePathPrefix, the value of + this property is used as the time format instead. + :type time_format: str + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'refresh_token': {'key': 'properties.refreshToken', 'type': 'str'}, + 'token_user_principal_name': {'key': 'properties.tokenUserPrincipalName', 'type': 'str'}, + 'token_user_display_name': {'key': 'properties.tokenUserDisplayName', 'type': 'str'}, + 'account_name': {'key': 'properties.accountName', 'type': 'str'}, + 'tenant_id': {'key': 'properties.tenantId', 'type': 'str'}, + 'file_path_prefix': {'key': 'properties.filePathPrefix', 'type': 'str'}, + 'date_format': {'key': 'properties.dateFormat', 'type': 'str'}, + 'time_format': {'key': 'properties.timeFormat', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDataLakeStoreOutputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.DataLake/Accounts' # type: str + self.refresh_token = kwargs.get('refresh_token', None) + self.token_user_principal_name = kwargs.get('token_user_principal_name', None) + self.token_user_display_name = kwargs.get('token_user_display_name', None) + self.account_name = kwargs.get('account_name', None) + self.tenant_id = kwargs.get('tenant_id', None) + self.file_path_prefix = kwargs.get('file_path_prefix', None) + self.date_format = kwargs.get('date_format', None) + self.time_format = kwargs.get('time_format', None) + self.authentication_mode = kwargs.get('authentication_mode', None) + + +class OAuthBasedDataSourceProperties(msrest.serialization.Model): + """The properties that are associated with data sources that use OAuth as their authentication model. + + :param refresh_token: A refresh token that can be used to obtain a valid access token that can + then be used to authenticate with the data source. A valid refresh token is currently only + obtainable via the Azure Portal. It is recommended to put a dummy string value here when + creating the data source and then going to the Azure Portal to authenticate the data source + which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) + requests. + :type refresh_token: str + :param token_user_principal_name: The user principal name (UPN) of the user that was used to + obtain the refresh token. 
Use this property to help remember which user was used to obtain the + refresh token. + :type token_user_principal_name: str + :param token_user_display_name: The user display name of the user that was used to obtain the + refresh token. Use this property to help remember which user was used to obtain the refresh + token. + :type token_user_display_name: str + """ + + _attribute_map = { + 'refresh_token': {'key': 'refreshToken', 'type': 'str'}, + 'token_user_principal_name': {'key': 'tokenUserPrincipalName', 'type': 'str'}, + 'token_user_display_name': {'key': 'tokenUserDisplayName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(OAuthBasedDataSourceProperties, self).__init__(**kwargs) + self.refresh_token = kwargs.get('refresh_token', None) + self.token_user_principal_name = kwargs.get('token_user_principal_name', None) + self.token_user_display_name = kwargs.get('token_user_display_name', None) + + +class AzureDataLakeStoreOutputDataSourceProperties(OAuthBasedDataSourceProperties): + """The properties that are associated with an Azure Data Lake Store. + + :param refresh_token: A refresh token that can be used to obtain a valid access token that can + then be used to authenticate with the data source. A valid refresh token is currently only + obtainable via the Azure Portal. It is recommended to put a dummy string value here when + creating the data source and then going to the Azure Portal to authenticate the data source + which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) + requests. + :type refresh_token: str + :param token_user_principal_name: The user principal name (UPN) of the user that was used to + obtain the refresh token. Use this property to help remember which user was used to obtain the + refresh token. + :type token_user_principal_name: str + :param token_user_display_name: The user display name of the user that was used to obtain the + refresh token. Use this property to help remember which user was used to obtain the refresh + token. + :type token_user_display_name: str + :param account_name: The name of the Azure Data Lake Store account. Required on PUT + (CreateOrReplace) requests. + :type account_name: str + :param tenant_id: The tenant id of the user used to obtain the refresh token. Required on PUT + (CreateOrReplace) requests. + :type tenant_id: str + :param file_path_prefix: The location of the file to which the output should be written to. + Required on PUT (CreateOrReplace) requests. + :type file_path_prefix: str + :param date_format: The date format. Wherever {date} appears in filePathPrefix, the value of + this property is used as the date format instead. + :type date_format: str + :param time_format: The time format. Wherever {time} appears in filePathPrefix, the value of + this property is used as the time format instead. + :type time_format: str + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". 
+ :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + """ + + _attribute_map = { + 'refresh_token': {'key': 'refreshToken', 'type': 'str'}, + 'token_user_principal_name': {'key': 'tokenUserPrincipalName', 'type': 'str'}, + 'token_user_display_name': {'key': 'tokenUserDisplayName', 'type': 'str'}, + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'file_path_prefix': {'key': 'filePathPrefix', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'str'}, + 'time_format': {'key': 'timeFormat', 'type': 'str'}, + 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDataLakeStoreOutputDataSourceProperties, self).__init__(**kwargs) + self.account_name = kwargs.get('account_name', None) + self.tenant_id = kwargs.get('tenant_id', None) + self.file_path_prefix = kwargs.get('file_path_prefix', None) + self.date_format = kwargs.get('date_format', None) + self.time_format = kwargs.get('time_format', None) + self.authentication_mode = kwargs.get('authentication_mode', None) + + +class AzureFunctionOutputDataSource(OutputDataSource): + """Defines the metadata of AzureFunctionOutputDataSource. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of data source output will be written to. Required on + PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param function_app_name: The name of your Azure Functions app. + :type function_app_name: str + :param function_name: The name of the function in your Azure Functions app. + :type function_name: str + :param api_key: If you want to use an Azure Function from another subscription, you can do so + by providing the key to access your function. + :type api_key: str + :param max_batch_size: A property that lets you set the maximum size for each output batch + that's sent to your Azure function. The input unit is in bytes. By default, this value is + 262,144 bytes (256 KB). + :type max_batch_size: float + :param max_batch_count: A property that lets you specify the maximum number of events in each + batch that's sent to Azure Functions. The default value is 100. + :type max_batch_count: float + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'function_app_name': {'key': 'properties.functionAppName', 'type': 'str'}, + 'function_name': {'key': 'properties.functionName', 'type': 'str'}, + 'api_key': {'key': 'properties.apiKey', 'type': 'str'}, + 'max_batch_size': {'key': 'properties.maxBatchSize', 'type': 'float'}, + 'max_batch_count': {'key': 'properties.maxBatchCount', 'type': 'float'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureFunctionOutputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.AzureFunction' # type: str + self.function_app_name = kwargs.get('function_app_name', None) + self.function_name = kwargs.get('function_name', None) + self.api_key = kwargs.get('api_key', None) + self.max_batch_size = kwargs.get('max_batch_size', None) + self.max_batch_count = kwargs.get('max_batch_count', None) + + +class FunctionBinding(msrest.serialization.Model): + """The physical binding of the function. For example, in the Azure Machine Learning web service’s case, this describes the endpoint. + + You probably want to use the sub-classes and not this class directly. 
Known + sub-classes are: AzureMachineLearningStudioFunctionBinding, AzureMachineLearningServiceFunctionBinding, CSharpFunctionBinding, JavaScriptFunctionBinding. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the function binding type.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Microsoft.MachineLearning/WebService': 'AzureMachineLearningStudioFunctionBinding', 'Microsoft.MachineLearningServices': 'AzureMachineLearningServiceFunctionBinding', 'Microsoft.StreamAnalytics/CLRUdf': 'CSharpFunctionBinding', 'Microsoft.StreamAnalytics/JavascriptUdf': 'JavaScriptFunctionBinding'} + } + + def __init__( + self, + **kwargs + ): + super(FunctionBinding, self).__init__(**kwargs) + self.type = None # type: Optional[str] + + +class AzureMachineLearningServiceFunctionBinding(FunctionBinding): + """The binding to an Azure Machine Learning web service. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the function binding type.Constant filled by server. + :type type: str + :param endpoint: The Request-Response execute endpoint of the Azure Machine Learning web + service. + :type endpoint: str + :param api_key: The API key used to authenticate with Request-Response endpoint. + :type api_key: str + :param inputs: The inputs for the Azure Machine Learning web service endpoint. + :type inputs: + list[~stream_analytics_management_client.models.AzureMachineLearningServiceInputColumn] + :param outputs: A list of outputs from the Azure Machine Learning web service endpoint + execution. + :type outputs: + list[~stream_analytics_management_client.models.AzureMachineLearningServiceOutputColumn] + :param batch_size: Number between 1 and 10000 describing maximum number of rows for every Azure + ML RRS execute request. Default is 1000. + :type batch_size: int + :param number_of_parallel_requests: The number of parallel requests that will be sent per + partition of your job to the machine learning service. Default is 1. + :type number_of_parallel_requests: int + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'properties.endpoint', 'type': 'str'}, + 'api_key': {'key': 'properties.apiKey', 'type': 'str'}, + 'inputs': {'key': 'properties.inputs', 'type': '[AzureMachineLearningServiceInputColumn]'}, + 'outputs': {'key': 'properties.outputs', 'type': '[AzureMachineLearningServiceOutputColumn]'}, + 'batch_size': {'key': 'properties.batchSize', 'type': 'int'}, + 'number_of_parallel_requests': {'key': 'properties.numberOfParallelRequests', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureMachineLearningServiceFunctionBinding, self).__init__(**kwargs) + self.type = 'Microsoft.MachineLearningServices' # type: str + self.endpoint = kwargs.get('endpoint', None) + self.api_key = kwargs.get('api_key', None) + self.inputs = kwargs.get('inputs', None) + self.outputs = kwargs.get('outputs', None) + self.batch_size = kwargs.get('batch_size', None) + self.number_of_parallel_requests = kwargs.get('number_of_parallel_requests', None) + + +class FunctionRetrieveDefaultDefinitionParameters(msrest.serialization.Model): + """Parameters used to specify the type of function to retrieve the default definition for. 
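+
+    Example (illustrative sketch only; the client and resource names are
+    placeholders)::
+
+        from azure.mgmt.streamanalytics.models import (
+            JavaScriptFunctionRetrieveDefaultDefinitionParameters,
+        )
+
+        params = JavaScriptFunctionRetrieveDefaultDefinitionParameters(
+            script='function main(x) { return x; }',
+        )
+        # 'client' is assumed to be a configured
+        # StreamAnalyticsManagementClient instance.
+        function = client.functions.retrieve_default_definition(
+            'my-resource-group', 'my-job', 'my-function',
+            function_retrieve_default_definition_parameters=params,
+        )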
+ + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters, AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters, CSharpFunctionRetrieveDefaultDefinitionParameters, JavaScriptFunctionRetrieveDefaultDefinitionParameters. + + All required parameters must be populated in order to send to Azure. + + :param binding_type: Required. Indicates the function binding type.Constant filled by server. + :type binding_type: str + """ + + _validation = { + 'binding_type': {'required': True}, + } + + _attribute_map = { + 'binding_type': {'key': 'bindingType', 'type': 'str'}, + } + + _subtype_map = { + 'binding_type': {'Microsoft.MachineLearning/WebService': 'AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters', 'Microsoft.MachineLearningServices': 'AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters', 'Microsoft.StreamAnalytics/CLRUdf': 'CSharpFunctionRetrieveDefaultDefinitionParameters', 'Microsoft.StreamAnalytics/JavascriptUdf': 'JavaScriptFunctionRetrieveDefaultDefinitionParameters'} + } + + def __init__( + self, + **kwargs + ): + super(FunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) + self.binding_type = None # type: Optional[str] + + +class AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): + """The parameters needed to retrieve the default function definition for an Azure Machine Learning web service function. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param binding_type: Required. Indicates the function binding type.Constant filled by server. + :type binding_type: str + :param execute_endpoint: The Request-Response execute endpoint of the Azure Machine Learning + web service. + :type execute_endpoint: str + :ivar udf_type: The function type. Default value: "Scalar". + :vartype udf_type: str + """ + + _validation = { + 'binding_type': {'required': True}, + 'udf_type': {'constant': True}, + } + + _attribute_map = { + 'binding_type': {'key': 'bindingType', 'type': 'str'}, + 'execute_endpoint': {'key': 'bindingRetrievalProperties.executeEndpoint', 'type': 'str'}, + 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'str'}, + } + + udf_type = "Scalar" + + def __init__( + self, + **kwargs + ): + super(AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) + self.binding_type = 'Microsoft.MachineLearningServices' # type: str + self.execute_endpoint = kwargs.get('execute_endpoint', None) + + +class AzureMachineLearningServiceInputColumn(msrest.serialization.Model): + """Describes an input column for the Azure Machine Learning web service endpoint. + + :param name: The name of the input column. + :type name: str + :param data_type: The (Azure Machine Learning supported) data type of the input column. + :type data_type: str + :param map_to: The zero based index of the function parameter this input maps to. 
+ :type map_to: int + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'map_to': {'key': 'mapTo', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureMachineLearningServiceInputColumn, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.data_type = kwargs.get('data_type', None) + self.map_to = kwargs.get('map_to', None) + + +class AzureMachineLearningServiceInputs(msrest.serialization.Model): + """The inputs for the Azure Machine Learning web service endpoint. + + :param name: The name of the input. This is the name provided while authoring the endpoint. + :type name: str + :param column_names: A list of input columns for the Azure Machine Learning web service + endpoint. + :type column_names: + list[~stream_analytics_management_client.models.AzureMachineLearningServiceInputColumn] + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'column_names': {'key': 'columnNames', 'type': '[AzureMachineLearningServiceInputColumn]'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureMachineLearningServiceInputs, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.column_names = kwargs.get('column_names', None) + + +class AzureMachineLearningServiceOutputColumn(msrest.serialization.Model): + """Describes an output column for the Azure Machine Learning web service endpoint. + + :param name: The name of the output column. + :type name: str + :param data_type: The (Azure Machine Learning supported) data type of the output column. + :type data_type: str + :param map_to: The zero based index of the function parameter this input maps to. + :type map_to: int + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'map_to': {'key': 'mapTo', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureMachineLearningServiceOutputColumn, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.data_type = kwargs.get('data_type', None) + self.map_to = kwargs.get('map_to', None) + + +class AzureMachineLearningStudioFunctionBinding(FunctionBinding): + """The binding to an Azure Machine Learning Studio. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the function binding type.Constant filled by server. + :type type: str + :param endpoint: The Request-Response execute endpoint of the Azure Machine Learning Studio. + Find out more here: https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning- + consume-web-services#request-response-service-rrs. + :type endpoint: str + :param api_key: The API key used to authenticate with Request-Response endpoint. + :type api_key: str + :param inputs: The inputs for the Azure Machine Learning Studio endpoint. + :type inputs: ~stream_analytics_management_client.models.AzureMachineLearningStudioInputs + :param outputs: A list of outputs from the Azure Machine Learning Studio endpoint execution. + :type outputs: + list[~stream_analytics_management_client.models.AzureMachineLearningStudioOutputColumn] + :param batch_size: Number between 1 and 10000 describing maximum number of rows for every Azure + ML RRS execute request. Default is 1000. 
+ :type batch_size: int + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'properties.endpoint', 'type': 'str'}, + 'api_key': {'key': 'properties.apiKey', 'type': 'str'}, + 'inputs': {'key': 'properties.inputs', 'type': 'AzureMachineLearningStudioInputs'}, + 'outputs': {'key': 'properties.outputs', 'type': '[AzureMachineLearningStudioOutputColumn]'}, + 'batch_size': {'key': 'properties.batchSize', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureMachineLearningStudioFunctionBinding, self).__init__(**kwargs) + self.type = 'Microsoft.MachineLearning/WebService' # type: str + self.endpoint = kwargs.get('endpoint', None) + self.api_key = kwargs.get('api_key', None) + self.inputs = kwargs.get('inputs', None) + self.outputs = kwargs.get('outputs', None) + self.batch_size = kwargs.get('batch_size', None) + + +class AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): + """The parameters needed to retrieve the default function definition for an Azure Machine Learning Studio function. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param binding_type: Required. Indicates the function binding type.Constant filled by server. + :type binding_type: str + :param execute_endpoint: The Request-Response execute endpoint of the Azure Machine Learning + Studio. Find out more here: https://docs.microsoft.com/en-us/azure/machine-learning/machine- + learning-consume-web-services#request-response-service-rrs. + :type execute_endpoint: str + :ivar udf_type: The function type. Default value: "Scalar". + :vartype udf_type: str + """ + + _validation = { + 'binding_type': {'required': True}, + 'udf_type': {'constant': True}, + } + + _attribute_map = { + 'binding_type': {'key': 'bindingType', 'type': 'str'}, + 'execute_endpoint': {'key': 'bindingRetrievalProperties.executeEndpoint', 'type': 'str'}, + 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'str'}, + } + + udf_type = "Scalar" + + def __init__( + self, + **kwargs + ): + super(AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) + self.binding_type = 'Microsoft.MachineLearning/WebService' # type: str + self.execute_endpoint = kwargs.get('execute_endpoint', None) + + +class AzureMachineLearningStudioInputColumn(msrest.serialization.Model): + """Describes an input column for the Azure Machine Learning Studio endpoint. + + :param name: The name of the input column. + :type name: str + :param data_type: The (Azure Machine Learning supported) data type of the input column. A list + of valid Azure Machine Learning data types are described at https://msdn.microsoft.com/en- + us/library/azure/dn905923.aspx . + :type data_type: str + :param map_to: The zero based index of the function parameter this input maps to. 
+ :type map_to: int + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'map_to': {'key': 'mapTo', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureMachineLearningStudioInputColumn, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.data_type = kwargs.get('data_type', None) + self.map_to = kwargs.get('map_to', None) + + +class AzureMachineLearningStudioInputs(msrest.serialization.Model): + """The inputs for the Azure Machine Learning Studio endpoint. + + :param name: The name of the input. This is the name provided while authoring the endpoint. + :type name: str + :param column_names: A list of input columns for the Azure Machine Learning Studio endpoint. + :type column_names: + list[~stream_analytics_management_client.models.AzureMachineLearningStudioInputColumn] + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'column_names': {'key': 'columnNames', 'type': '[AzureMachineLearningStudioInputColumn]'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureMachineLearningStudioInputs, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.column_names = kwargs.get('column_names', None) + + +class AzureMachineLearningStudioOutputColumn(msrest.serialization.Model): + """Describes an output column for the Azure Machine Learning Studio endpoint. + + :param name: The name of the output column. + :type name: str + :param data_type: The (Azure Machine Learning supported) data type of the output column. A list + of valid Azure Machine Learning data types is described at https://msdn.microsoft.com/en-us/library/azure/dn905923.aspx. + :type data_type: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureMachineLearningStudioOutputColumn, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.data_type = kwargs.get('data_type', None) + + +class AzureSqlDatabaseDataSourceProperties(msrest.serialization.Model): + """The properties that are associated with an Azure SQL database data source. + + :param server: The name of the SQL server containing the Azure SQL database. Required on PUT + (CreateOrReplace) requests. + :type server: str + :param database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) + requests. + :type database: str + :param user: The user name that will be used to connect to the Azure SQL database. Required on + PUT (CreateOrReplace) requests. + :type user: str + :param password: The password that will be used to connect to the Azure SQL database. Required + on PUT (CreateOrReplace) requests. + :type password: str + :param table: The name of the table in the Azure SQL database. Required on PUT + (CreateOrReplace) requests. + :type table: str + :param max_batch_count: Max Batch count for write to SQL database, the default value is 10,000. + Optional on PUT requests. + :type max_batch_count: float + :param max_writer_count: Max Writer count, currently only 1 (single writer) and 0 (based on + query partition) are available. Optional on PUT requests. + :type max_writer_count: float + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". 
+ :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + """ + + _attribute_map = { + 'server': {'key': 'server', 'type': 'str'}, + 'database': {'key': 'database', 'type': 'str'}, + 'user': {'key': 'user', 'type': 'str'}, + 'password': {'key': 'password', 'type': 'str'}, + 'table': {'key': 'table', 'type': 'str'}, + 'max_batch_count': {'key': 'maxBatchCount', 'type': 'float'}, + 'max_writer_count': {'key': 'maxWriterCount', 'type': 'float'}, + 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureSqlDatabaseDataSourceProperties, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.database = kwargs.get('database', None) + self.user = kwargs.get('user', None) + self.password = kwargs.get('password', None) + self.table = kwargs.get('table', None) + self.max_batch_count = kwargs.get('max_batch_count', None) + self.max_writer_count = kwargs.get('max_writer_count', None) + self.authentication_mode = kwargs.get('authentication_mode', None) + + +class AzureSqlDatabaseOutputDataSource(OutputDataSource): + """Describes an Azure SQL database output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of data source output will be written to. Required on + PUT (CreateOrReplace) requests. Constant filled by server. + :type type: str + :param server: The name of the SQL server containing the Azure SQL database. Required on PUT + (CreateOrReplace) requests. + :type server: str + :param database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) + requests. + :type database: str + :param user: The user name that will be used to connect to the Azure SQL database. Required on + PUT (CreateOrReplace) requests. + :type user: str + :param password: The password that will be used to connect to the Azure SQL database. Required + on PUT (CreateOrReplace) requests. + :type password: str + :param table: The name of the table in the Azure SQL database. Required on PUT + (CreateOrReplace) requests. + :type table: str + :param max_batch_count: Max Batch count for write to SQL database, the default value is 10,000. + Optional on PUT requests. + :type max_batch_count: float + :param max_writer_count: Max Writer count, currently only 1 (single writer) and 0 (based on + query partition) are available. Optional on PUT requests. + :type max_writer_count: float + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". 
+ :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'properties.server', 'type': 'str'}, + 'database': {'key': 'properties.database', 'type': 'str'}, + 'user': {'key': 'properties.user', 'type': 'str'}, + 'password': {'key': 'properties.password', 'type': 'str'}, + 'table': {'key': 'properties.table', 'type': 'str'}, + 'max_batch_count': {'key': 'properties.maxBatchCount', 'type': 'float'}, + 'max_writer_count': {'key': 'properties.maxWriterCount', 'type': 'float'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureSqlDatabaseOutputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.Sql/Server/Database' # type: str + self.server = kwargs.get('server', None) + self.database = kwargs.get('database', None) + self.user = kwargs.get('user', None) + self.password = kwargs.get('password', None) + self.table = kwargs.get('table', None) + self.max_batch_count = kwargs.get('max_batch_count', None) + self.max_writer_count = kwargs.get('max_writer_count', None) + self.authentication_mode = kwargs.get('authentication_mode', None) + + +class AzureSqlDatabaseOutputDataSourceProperties(AzureSqlDatabaseDataSourceProperties): + """The properties that are associated with an Azure SQL database output. + + :param server: The name of the SQL server containing the Azure SQL database. Required on PUT + (CreateOrReplace) requests. + :type server: str + :param database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) + requests. + :type database: str + :param user: The user name that will be used to connect to the Azure SQL database. Required on + PUT (CreateOrReplace) requests. + :type user: str + :param password: The password that will be used to connect to the Azure SQL database. Required + on PUT (CreateOrReplace) requests. + :type password: str + :param table: The name of the table in the Azure SQL database. Required on PUT + (CreateOrReplace) requests. + :type table: str + :param max_batch_count: Max Batch count for write to SQL database, the default value is 10,000. + Optional on PUT requests. + :type max_batch_count: float + :param max_writer_count: Max Writer count, currently only 1 (single writer) and 0 (based on + query partition) are available. Optional on PUT requests. + :type max_writer_count: float + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + """ + + _attribute_map = { + 'server': {'key': 'server', 'type': 'str'}, + 'database': {'key': 'database', 'type': 'str'}, + 'user': {'key': 'user', 'type': 'str'}, + 'password': {'key': 'password', 'type': 'str'}, + 'table': {'key': 'table', 'type': 'str'}, + 'max_batch_count': {'key': 'maxBatchCount', 'type': 'float'}, + 'max_writer_count': {'key': 'maxWriterCount', 'type': 'float'}, + 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureSqlDatabaseOutputDataSourceProperties, self).__init__(**kwargs) + + +class ReferenceInputDataSource(msrest.serialization.Model): + """Describes an input data source that contains reference data. + + You probably want to use the sub-classes and not this class directly. 
Known + sub-classes are: AzureSqlReferenceInputDataSource, BlobReferenceInputDataSource. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of input data source containing reference data. + Required on PUT (CreateOrReplace) requests. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Microsoft.Sql/Server/Database': 'AzureSqlReferenceInputDataSource', 'Microsoft.Storage/Blob': 'BlobReferenceInputDataSource'} + } + + def __init__( + self, + **kwargs + ): + super(ReferenceInputDataSource, self).__init__(**kwargs) + self.type = None # type: Optional[str] + + +class AzureSqlReferenceInputDataSource(ReferenceInputDataSource): + """Describes an Azure SQL database reference input data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of input data source containing reference data. + Required on PUT (CreateOrReplace) requests. Constant filled by server. + :type type: str + :param properties: + :type properties: + ~stream_analytics_management_client.models.AzureSqlReferenceInputDataSourceProperties + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'AzureSqlReferenceInputDataSourceProperties'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureSqlReferenceInputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.Sql/Server/Database' # type: str + self.properties = kwargs.get('properties', None) + + +class AzureSqlReferenceInputDataSourceProperties(msrest.serialization.Model): + """AzureSqlReferenceInputDataSourceProperties. + + :param server: This element is associated with the datasource element. This is the name of the + server that contains the database that will be written to. + :type server: str + :param database: This element is associated with the datasource element. This is the name of + the database that output will be written to. + :type database: str + :param user: This element is associated with the datasource element. This is the user name that + will be used to connect to the SQL Database instance. + :type user: str + :param password: This element is associated with the datasource element. This is the password + that will be used to connect to the SQL Database instance. + :type password: str + :param table: This element is associated with the datasource element. The name of the table in + the Azure SQL database. + :type table: str + :param refresh_type: This element is associated with the datasource element. This element is of + enum type. It indicates which data refresh option to + use: Static, RefreshPeriodicallyWithFull, or RefreshPeriodicallyWithDelta. + :type refresh_type: str + :param refresh_rate: This element is associated with the datasource element. This indicates how + frequently the data will be fetched from the database. It is of DateTime format. + :type refresh_rate: str + :param full_snapshot_query: This element is associated with the datasource element. This query + is used to fetch data from the SQL database. + :type full_snapshot_query: str + :param delta_snapshot_query: This element is associated with the datasource element. This query + is used to fetch incremental changes from the SQL database. 
To use this option, we recommend + using temporal tables in Azure SQL Database. + :type delta_snapshot_query: str + """ + + _attribute_map = { + 'server': {'key': 'server', 'type': 'str'}, + 'database': {'key': 'database', 'type': 'str'}, + 'user': {'key': 'user', 'type': 'str'}, + 'password': {'key': 'password', 'type': 'str'}, + 'table': {'key': 'table', 'type': 'str'}, + 'refresh_type': {'key': 'refreshType', 'type': 'str'}, + 'refresh_rate': {'key': 'refreshRate', 'type': 'str'}, + 'full_snapshot_query': {'key': 'fullSnapshotQuery', 'type': 'str'}, + 'delta_snapshot_query': {'key': 'deltaSnapshotQuery', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureSqlReferenceInputDataSourceProperties, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.database = kwargs.get('database', None) + self.user = kwargs.get('user', None) + self.password = kwargs.get('password', None) + self.table = kwargs.get('table', None) + self.refresh_type = kwargs.get('refresh_type', None) + self.refresh_rate = kwargs.get('refresh_rate', None) + self.full_snapshot_query = kwargs.get('full_snapshot_query', None) + self.delta_snapshot_query = kwargs.get('delta_snapshot_query', None) + + +class AzureSynapseDataSourceProperties(msrest.serialization.Model): + """The properties that are associated with an Azure SQL database data source. + + :param server: The name of the SQL server containing the Azure SQL database. Required on PUT + (CreateOrReplace) requests. + :type server: str + :param database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) + requests. + :type database: str + :param table: The name of the table in the Azure SQL database. Required on PUT + (CreateOrReplace) requests. + :type table: str + :param user: The user name that will be used to connect to the Azure SQL database. Required on + PUT (CreateOrReplace) requests. + :type user: str + :param password: The password that will be used to connect to the Azure SQL database. Required + on PUT (CreateOrReplace) requests. + :type password: str + """ + + _attribute_map = { + 'server': {'key': 'server', 'type': 'str'}, + 'database': {'key': 'database', 'type': 'str'}, + 'table': {'key': 'table', 'type': 'str'}, + 'user': {'key': 'user', 'type': 'str'}, + 'password': {'key': 'password', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureSynapseDataSourceProperties, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.database = kwargs.get('database', None) + self.table = kwargs.get('table', None) + self.user = kwargs.get('user', None) + self.password = kwargs.get('password', None) + + +class AzureSynapseOutputDataSource(OutputDataSource): + """Describes an Azure Synapse output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of data source output will be written to. Required on + PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param server: The name of the SQL server containing the Azure SQL database. Required on PUT + (CreateOrReplace) requests. + :type server: str + :param database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) + requests. + :type database: str + :param table: The name of the table in the Azure SQL database. Required on PUT + (CreateOrReplace) requests. + :type table: str + :param user: The user name that will be used to connect to the Azure SQL database. 
Required on + PUT (CreateOrReplace) requests. + :type user: str + :param password: The password that will be used to connect to the Azure SQL database. Required + on PUT (CreateOrReplace) requests. + :type password: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'properties.server', 'type': 'str'}, + 'database': {'key': 'properties.database', 'type': 'str'}, + 'table': {'key': 'properties.table', 'type': 'str'}, + 'user': {'key': 'properties.user', 'type': 'str'}, + 'password': {'key': 'properties.password', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureSynapseOutputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.Sql/Server/DataWarehouse' # type: str + self.server = kwargs.get('server', None) + self.database = kwargs.get('database', None) + self.table = kwargs.get('table', None) + self.user = kwargs.get('user', None) + self.password = kwargs.get('password', None) + + +class AzureSynapseOutputDataSourceProperties(AzureSynapseDataSourceProperties): + """The properties that are associated with an Azure Synapse output. + + :param server: The name of the SQL server containing the Azure SQL database. Required on PUT + (CreateOrReplace) requests. + :type server: str + :param database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) + requests. + :type database: str + :param table: The name of the table in the Azure SQL database. Required on PUT + (CreateOrReplace) requests. + :type table: str + :param user: The user name that will be used to connect to the Azure SQL database. Required on + PUT (CreateOrReplace) requests. + :type user: str + :param password: The password that will be used to connect to the Azure SQL database. Required + on PUT (CreateOrReplace) requests. + :type password: str + """ + + _attribute_map = { + 'server': {'key': 'server', 'type': 'str'}, + 'database': {'key': 'database', 'type': 'str'}, + 'table': {'key': 'table', 'type': 'str'}, + 'user': {'key': 'user', 'type': 'str'}, + 'password': {'key': 'password', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureSynapseOutputDataSourceProperties, self).__init__(**kwargs) + + +class AzureTableOutputDataSource(OutputDataSource): + """Describes an Azure Table output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of data source output will be written to. Required on + PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param account_name: The name of the Azure Storage account. Required on PUT (CreateOrReplace) + requests. + :type account_name: str + :param account_key: The account key for the Azure Storage account. Required on PUT + (CreateOrReplace) requests. + :type account_key: str + :param table: The name of the Azure Table. Required on PUT (CreateOrReplace) requests. + :type table: str + :param partition_key: This element indicates the name of a column from the SELECT statement in + the query that will be used as the partition key for the Azure Table. Required on PUT + (CreateOrReplace) requests. + :type partition_key: str + :param row_key: This element indicates the name of a column from the SELECT statement in the + query that will be used as the row key for the Azure Table. Required on PUT (CreateOrReplace) + requests. 
+ :type row_key: str + :param columns_to_remove: If specified, each item in the array is the name of a column to + remove (if present) from output event entities. + :type columns_to_remove: list[str] + :param batch_size: The number of rows to write to the Azure Table at a time. + :type batch_size: int + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'account_name': {'key': 'properties.accountName', 'type': 'str'}, + 'account_key': {'key': 'properties.accountKey', 'type': 'str'}, + 'table': {'key': 'properties.table', 'type': 'str'}, + 'partition_key': {'key': 'properties.partitionKey', 'type': 'str'}, + 'row_key': {'key': 'properties.rowKey', 'type': 'str'}, + 'columns_to_remove': {'key': 'properties.columnsToRemove', 'type': '[str]'}, + 'batch_size': {'key': 'properties.batchSize', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureTableOutputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.Storage/Table' # type: str + self.account_name = kwargs.get('account_name', None) + self.account_key = kwargs.get('account_key', None) + self.table = kwargs.get('table', None) + self.partition_key = kwargs.get('partition_key', None) + self.row_key = kwargs.get('row_key', None) + self.columns_to_remove = kwargs.get('columns_to_remove', None) + self.batch_size = kwargs.get('batch_size', None) + + +class BlobDataSourceProperties(msrest.serialization.Model): + """The properties that are associated with a blob data source. + + :param storage_accounts: A list of one or more Azure Storage accounts. Required on PUT + (CreateOrReplace) requests. + :type storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] + :param container: The name of a container within the associated Storage account. This container + contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) + requests. + :type container: str + :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern + against which blob names will be matched to determine whether or not they should be included as + input or output to the job. See https://docs.microsoft.com/en- + us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- + us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and + example. + :type path_pattern: str + :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this + property is used as the date format instead. + :type date_format: str + :param time_format: The time format. Wherever {time} appears in pathPattern, the value of this + property is used as the time format instead. 
+ :type time_format: str + """ + + _attribute_map = { + 'storage_accounts': {'key': 'storageAccounts', 'type': '[StorageAccount]'}, + 'container': {'key': 'container', 'type': 'str'}, + 'path_pattern': {'key': 'pathPattern', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'str'}, + 'time_format': {'key': 'timeFormat', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(BlobDataSourceProperties, self).__init__(**kwargs) + self.storage_accounts = kwargs.get('storage_accounts', None) + self.container = kwargs.get('container', None) + self.path_pattern = kwargs.get('path_pattern', None) + self.date_format = kwargs.get('date_format', None) + self.time_format = kwargs.get('time_format', None) + + +class BlobOutputDataSource(OutputDataSource): + """Describes a blob output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of data source output will be written to. Required on + PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param storage_accounts: A list of one or more Azure Storage accounts. Required on PUT + (CreateOrReplace) requests. + :type storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] + :param container: The name of a container within the associated Storage account. This container + contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) + requests. + :type container: str + :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern + against which blob names will be matched to determine whether or not they should be included as + input or output to the job. See https://docs.microsoft.com/en- + us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- + us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and + example. + :type path_pattern: str + :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this + property is used as the date format instead. + :type date_format: str + :param time_format: The time format. Wherever {time} appears in pathPattern, the value of this + property is used as the time format instead. + :type time_format: str + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". 
+ :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'storage_accounts': {'key': 'properties.storageAccounts', 'type': '[StorageAccount]'}, + 'container': {'key': 'properties.container', 'type': 'str'}, + 'path_pattern': {'key': 'properties.pathPattern', 'type': 'str'}, + 'date_format': {'key': 'properties.dateFormat', 'type': 'str'}, + 'time_format': {'key': 'properties.timeFormat', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(BlobOutputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.Storage/Blob' # type: str + self.storage_accounts = kwargs.get('storage_accounts', None) + self.container = kwargs.get('container', None) + self.path_pattern = kwargs.get('path_pattern', None) + self.date_format = kwargs.get('date_format', None) + self.time_format = kwargs.get('time_format', None) + self.authentication_mode = kwargs.get('authentication_mode', None) + + +class BlobOutputDataSourceProperties(BlobDataSourceProperties): + """The properties that are associated with a blob output. + + :param storage_accounts: A list of one or more Azure Storage accounts. Required on PUT + (CreateOrReplace) requests. + :type storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] + :param container: The name of a container within the associated Storage account. This container + contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) + requests. + :type container: str + :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern + against which blob names will be matched to determine whether or not they should be included as + input or output to the job. See https://docs.microsoft.com/en- + us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- + us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and + example. + :type path_pattern: str + :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this + property is used as the date format instead. + :type date_format: str + :param time_format: The time format. Wherever {time} appears in pathPattern, the value of this + property is used as the time format instead. + :type time_format: str + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + """ + + _attribute_map = { + 'storage_accounts': {'key': 'storageAccounts', 'type': '[StorageAccount]'}, + 'container': {'key': 'container', 'type': 'str'}, + 'path_pattern': {'key': 'pathPattern', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'str'}, + 'time_format': {'key': 'timeFormat', 'type': 'str'}, + 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(BlobOutputDataSourceProperties, self).__init__(**kwargs) + self.authentication_mode = kwargs.get('authentication_mode', None) + + +class BlobReferenceInputDataSource(ReferenceInputDataSource): + """Describes a blob input data source that contains reference data. + + All required parameters must be populated in order to send to Azure. 
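+
+    Example (an illustrative sketch, not generated code; the account, container, and
+    path values are placeholders)::
+
+        reference_input = BlobReferenceInputDataSource(
+            storage_accounts=[StorageAccount(account_name='examplestorage', account_key='<account-key>')],
+            container='reference-data',
+            path_pattern='products/{date}/{time}/product-list.csv',
+            date_format='yyyy/MM/dd',
+            time_format='HH',
+        )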
+ + :param type: Required. Indicates the type of input data source containing reference data. + Required on PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param storage_accounts: A list of one or more Azure Storage accounts. Required on PUT + (CreateOrReplace) requests. + :type storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] + :param container: The name of a container within the associated Storage account. This container + contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) + requests. + :type container: str + :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern + against which blob names will be matched to determine whether or not they should be included as + input or output to the job. See https://docs.microsoft.com/en- + us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- + us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and + example. + :type path_pattern: str + :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this + property is used as the date format instead. + :type date_format: str + :param time_format: The time format. Wherever {time} appears in pathPattern, the value of this + property is used as the time format instead. + :type time_format: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'storage_accounts': {'key': 'properties.storageAccounts', 'type': '[StorageAccount]'}, + 'container': {'key': 'properties.container', 'type': 'str'}, + 'path_pattern': {'key': 'properties.pathPattern', 'type': 'str'}, + 'date_format': {'key': 'properties.dateFormat', 'type': 'str'}, + 'time_format': {'key': 'properties.timeFormat', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(BlobReferenceInputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.Storage/Blob' # type: str + self.storage_accounts = kwargs.get('storage_accounts', None) + self.container = kwargs.get('container', None) + self.path_pattern = kwargs.get('path_pattern', None) + self.date_format = kwargs.get('date_format', None) + self.time_format = kwargs.get('time_format', None) + + +class BlobReferenceInputDataSourceProperties(BlobDataSourceProperties): + """The properties that are associated with a blob input containing reference data. + + :param storage_accounts: A list of one or more Azure Storage accounts. Required on PUT + (CreateOrReplace) requests. + :type storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] + :param container: The name of a container within the associated Storage account. This container + contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) + requests. + :type container: str + :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern + against which blob names will be matched to determine whether or not they should be included as + input or output to the job. See https://docs.microsoft.com/en- + us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- + us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and + example. + :type path_pattern: str + :param date_format: The date format. 
Wherever {date} appears in pathPattern, the value of this + property is used as the date format instead. + :type date_format: str + :param time_format: The time format. Wherever {time} appears in pathPattern, the value of this + property is used as the time format instead. + :type time_format: str + """ + + _attribute_map = { + 'storage_accounts': {'key': 'storageAccounts', 'type': '[StorageAccount]'}, + 'container': {'key': 'container', 'type': 'str'}, + 'path_pattern': {'key': 'pathPattern', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'str'}, + 'time_format': {'key': 'timeFormat', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(BlobReferenceInputDataSourceProperties, self).__init__(**kwargs) + + +class StreamInputDataSource(msrest.serialization.Model): + """Describes an input data source that contains stream data. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: IoTHubStreamInputDataSource, EventHubV2StreamInputDataSource, EventHubStreamInputDataSource, BlobStreamInputDataSource. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of input data source containing stream data. Required + on PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Microsoft.Devices/IotHubs': 'IoTHubStreamInputDataSource', 'Microsoft.EventHub/EventHub': 'EventHubV2StreamInputDataSource', 'Microsoft.ServiceBus/EventHub': 'EventHubStreamInputDataSource', 'Microsoft.Storage/Blob': 'BlobStreamInputDataSource'} + } + + def __init__( + self, + **kwargs + ): + super(StreamInputDataSource, self).__init__(**kwargs) + self.type = None # type: Optional[str] + + +class BlobStreamInputDataSource(StreamInputDataSource): + """Describes a blob input data source that contains stream data. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of input data source containing stream data. Required + on PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param storage_accounts: A list of one or more Azure Storage accounts. Required on PUT + (CreateOrReplace) requests. + :type storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] + :param container: The name of a container within the associated Storage account. This container + contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) + requests. + :type container: str + :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern + against which blob names will be matched to determine whether or not they should be included as + input or output to the job. See https://docs.microsoft.com/en- + us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- + us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and + example. + :type path_pattern: str + :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this + property is used as the date format instead. + :type date_format: str + :param time_format: The time format. Wherever {time} appears in pathPattern, the value of this + property is used as the time format instead. 
+ :type time_format: str + :param source_partition_count: The partition count of the blob input data source. Range 1 - + 256. + :type source_partition_count: int + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'storage_accounts': {'key': 'properties.storageAccounts', 'type': '[StorageAccount]'}, + 'container': {'key': 'properties.container', 'type': 'str'}, + 'path_pattern': {'key': 'properties.pathPattern', 'type': 'str'}, + 'date_format': {'key': 'properties.dateFormat', 'type': 'str'}, + 'time_format': {'key': 'properties.timeFormat', 'type': 'str'}, + 'source_partition_count': {'key': 'properties.sourcePartitionCount', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(BlobStreamInputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.Storage/Blob' # type: str + self.storage_accounts = kwargs.get('storage_accounts', None) + self.container = kwargs.get('container', None) + self.path_pattern = kwargs.get('path_pattern', None) + self.date_format = kwargs.get('date_format', None) + self.time_format = kwargs.get('time_format', None) + self.source_partition_count = kwargs.get('source_partition_count', None) + + +class BlobStreamInputDataSourceProperties(BlobDataSourceProperties): + """The properties that are associated with a blob input containing stream data. + + :param storage_accounts: A list of one or more Azure Storage accounts. Required on PUT + (CreateOrReplace) requests. + :type storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] + :param container: The name of a container within the associated Storage account. This container + contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) + requests. + :type container: str + :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern + against which blob names will be matched to determine whether or not they should be included as + input or output to the job. See https://docs.microsoft.com/en- + us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- + us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and + example. + :type path_pattern: str + :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this + property is used as the date format instead. + :type date_format: str + :param time_format: The time format. Wherever {time} appears in pathPattern, the value of this + property is used as the time format instead. + :type time_format: str + :param source_partition_count: The partition count of the blob input data source. Range 1 - + 256. + :type source_partition_count: int + """ + + _attribute_map = { + 'storage_accounts': {'key': 'storageAccounts', 'type': '[StorageAccount]'}, + 'container': {'key': 'container', 'type': 'str'}, + 'path_pattern': {'key': 'pathPattern', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'str'}, + 'time_format': {'key': 'timeFormat', 'type': 'str'}, + 'source_partition_count': {'key': 'sourcePartitionCount', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(BlobStreamInputDataSourceProperties, self).__init__(**kwargs) + self.source_partition_count = kwargs.get('source_partition_count', None) + + +class Resource(msrest.serialization.Model): + """Resource. + + Variables are only populated by the server, and will be ignored when sending a request. 
+
+    :ivar id: Fully qualified resource Id for the resource. e.g.
+     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+    :vartype id: str
+    :ivar name: The name of the resource.
+    :vartype name: str
+    :ivar type: The type of the resource. e.g. Microsoft.Compute/virtualMachines or
+     Microsoft.Storage/storageAccounts.
+    :vartype type: str
+    """
+
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(Resource, self).__init__(**kwargs)
+        self.id = None
+        self.name = None
+        self.type = None
+
+
+class TrackedResource(Resource):
+    """The resource model definition for an ARM tracked top-level resource.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: Fully qualified resource Id for the resource. e.g.
+     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+    :vartype id: str
+    :ivar name: The name of the resource.
+    :vartype name: str
+    :ivar type: The type of the resource. e.g. Microsoft.Compute/virtualMachines or
+     Microsoft.Storage/storageAccounts.
+    :vartype type: str
+    :param tags: Resource tags.
+    :type tags: dict[str, str]
+    :param location: The geo-location where the resource lives.
+    :type location: str
+    """
+
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'location': {'key': 'location', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(TrackedResource, self).__init__(**kwargs)
+        self.tags = kwargs.get('tags', None)
+        self.location = kwargs.get('location', None)
+
+
+class Cluster(TrackedResource):
+    """A Stream Analytics Cluster object.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: Fully qualified resource Id for the resource. e.g.
+     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+    :vartype id: str
+    :ivar name: The name of the resource.
+    :vartype name: str
+    :ivar type: The type of the resource. e.g. Microsoft.Compute/virtualMachines or
+     Microsoft.Storage/storageAccounts.
+    :vartype type: str
+    :param tags: Resource tags.
+    :type tags: dict[str, str]
+    :param location: The geo-location where the resource lives.
+    :type location: str
+    :param sku: The SKU of the cluster. This determines the size/capacity of the cluster. Required
+     on PUT (CreateOrUpdate) requests.
+    :type sku: ~stream_analytics_management_client.models.ClusterSku
+    :ivar etag: The current entity tag for the cluster. This is an opaque string. You can use it
+     to detect whether the resource has changed between requests. You can also use it in the
+     If-Match or If-None-Match headers for write operations for optimistic concurrency.
+    :vartype etag: str
+    :param properties: The properties associated with a Stream Analytics cluster.
+    :type properties: ~stream_analytics_management_client.models.ClusterProperties
+    """
+
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'etag': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'location': {'key': 'location', 'type': 'str'},
+        'sku': {'key': 'sku', 'type': 'ClusterSku'},
+        'etag': {'key': 'etag', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': 'ClusterProperties'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(Cluster, self).__init__(**kwargs)
+        self.sku = kwargs.get('sku', None)
+        self.etag = None
+        self.properties = kwargs.get('properties', None)
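+
+
+# Illustrative usage sketch (comment only, not part of the generated code): a minimal
+# cluster definition. Per the ClusterSku docs below, capacity must be a multiple of 36
+# between 36 and 216; the location and tag values here are placeholders.
+#
+#     cluster = Cluster(
+#         location='westus',
+#         tags={'environment': 'test'},
+#         sku=ClusterSku(name='Default', capacity=36),
+#     )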
+
+
+class ClusterInfo(msrest.serialization.Model):
+    """The properties associated with a Stream Analytics cluster.
+
+    :param id: The resource ID of the cluster.
+    :type id: str
+    """
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ClusterInfo, self).__init__(**kwargs)
+        self.id = kwargs.get('id', None)
+
+
+class ClusterJob(msrest.serialization.Model):
+    """A streaming job.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: Resource ID of the streaming job.
+    :vartype id: str
+    :ivar streaming_units: The number of streaming units that are used by the streaming job.
+    :vartype streaming_units: int
+    :ivar job_state: The current execution state of the streaming job. Possible values include:
+     "Created", "Starting", "Running", "Stopping", "Stopped", "Deleting", "Failed", "Degraded",
+     "Restarting", "Scaling".
+    :vartype job_state: str or ~stream_analytics_management_client.models.JobState
+    """
+
+    _validation = {
+        'id': {'readonly': True},
+        'streaming_units': {'readonly': True},
+        'job_state': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'streaming_units': {'key': 'streamingUnits', 'type': 'int'},
+        'job_state': {'key': 'jobState', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ClusterJob, self).__init__(**kwargs)
+        self.id = None
+        self.streaming_units = None
+        self.job_state = None
+
+
+class ClusterJobListResult(msrest.serialization.Model):
+    """A list of streaming jobs. Populated by a List operation.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar value: A list of streaming jobs.
+    :vartype value: list[~stream_analytics_management_client.models.ClusterJob]
+    :ivar next_link: The URL to fetch the next set of streaming jobs.
+    :vartype next_link: str
+    """
+
+    _validation = {
+        'value': {'readonly': True},
+        'next_link': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[ClusterJob]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ClusterJobListResult, self).__init__(**kwargs)
+        self.value = None
+        self.next_link = None
+
+
+class ClusterListResult(msrest.serialization.Model):
+    """A list of clusters populated by a 'list' operation.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar value: A list of clusters.
+    :vartype value: list[~stream_analytics_management_client.models.Cluster]
+    :ivar next_link: The URL to fetch the next set of clusters.
+ :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Cluster]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ClusterListResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class ClusterProperties(msrest.serialization.Model): + """The properties associated with a Stream Analytics cluster. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar created_date: The date this cluster was created. + :vartype created_date: ~datetime.datetime + :ivar cluster_id: Unique identifier for the cluster. + :vartype cluster_id: str + :ivar provisioning_state: The status of the cluster provisioning. The three terminal states + are: Succeeded, Failed and Canceled. Possible values include: "Succeeded", "Failed", + "Canceled", "InProgress". + :vartype provisioning_state: str or + ~stream_analytics_management_client.models.ClusterProvisioningState + :ivar capacity_allocated: Represents the number of streaming units currently being used on the + cluster. + :vartype capacity_allocated: int + :ivar capacity_assigned: Represents the sum of the SUs of all streaming jobs associated with + the cluster. If all of the jobs were running, this would be the capacity allocated. + :vartype capacity_assigned: int + """ + + _validation = { + 'created_date': {'readonly': True}, + 'cluster_id': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'capacity_allocated': {'readonly': True}, + 'capacity_assigned': {'readonly': True}, + } + + _attribute_map = { + 'created_date': {'key': 'createdDate', 'type': 'iso-8601'}, + 'cluster_id': {'key': 'clusterId', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'capacity_allocated': {'key': 'capacityAllocated', 'type': 'int'}, + 'capacity_assigned': {'key': 'capacityAssigned', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(ClusterProperties, self).__init__(**kwargs) + self.created_date = None + self.cluster_id = None + self.provisioning_state = None + self.capacity_allocated = None + self.capacity_assigned = None + + +class ClusterSku(msrest.serialization.Model): + """The SKU of the cluster. This determines the size/capacity of the cluster. Required on PUT (CreateOrUpdate) requests. + + :param name: Specifies the SKU name of the cluster. Required on PUT (CreateOrUpdate) requests. + Possible values include: "Default". + :type name: str or ~stream_analytics_management_client.models.ClusterSkuName + :param capacity: Denotes the number of streaming units the cluster can support. Valid values + for this property are multiples of 36 with a minimum value of 36 and maximum value of 216. + Required on PUT (CreateOrUpdate) requests. + :type capacity: int + """ + + _validation = { + 'capacity': {'maximum': 216, 'minimum': 36}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'capacity': {'key': 'capacity', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(ClusterSku, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.capacity = kwargs.get('capacity', None) + + +class Compression(msrest.serialization.Model): + """Describes how input data is compressed. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. 
+    :type type: str
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(Compression, self).__init__(**kwargs)
+        self.type = kwargs['type']
+
+
+class CSharpFunctionBinding(FunctionBinding):
+    """The binding to a CSharp function.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param type: Required. Indicates the function binding type. Constant filled by server.
+    :type type: str
+    :param script: The CSharp code containing a single function definition.
+    :type script: str
+    :param dll_path: The path of the assembly (DLL) that contains the function.
+    :type dll_path: str
+    :param class_property: The name of the class that contains the function.
+    :type class_property: str
+    :param method: The name of the method that implements the function.
+    :type method: str
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'script': {'key': 'properties.script', 'type': 'str'},
+        'dll_path': {'key': 'properties.dllPath', 'type': 'str'},
+        'class_property': {'key': 'properties.class', 'type': 'str'},
+        'method': {'key': 'properties.method', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(CSharpFunctionBinding, self).__init__(**kwargs)
+        self.type = 'Microsoft.StreamAnalytics/CLRUdf'  # type: str
+        self.script = kwargs.get('script', None)
+        self.dll_path = kwargs.get('dll_path', None)
+        self.class_property = kwargs.get('class_property', None)
+        self.method = kwargs.get('method', None)
+
+
+class CSharpFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters):
+    """The parameters needed to retrieve the default function definition for a CSharp function.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param binding_type: Required. Indicates the function binding type. Constant filled by server.
+    :type binding_type: str
+    :param script: The CSharp code containing a single function definition.
+    :type script: str
+    :ivar udf_type: The function type. Default value: "Scalar".
+    :vartype udf_type: str
+    """
+
+    _validation = {
+        'binding_type': {'required': True},
+        'udf_type': {'constant': True},
+    }
+
+    _attribute_map = {
+        'binding_type': {'key': 'bindingType', 'type': 'str'},
+        'script': {'key': 'bindingRetrievalProperties.script', 'type': 'str'},
+        'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'str'},
+    }
+
+    udf_type = "Scalar"
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(CSharpFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs)
+        self.binding_type = 'Microsoft.StreamAnalytics/CLRUdf'  # type: str
+        self.script = kwargs.get('script', None)
+
+
+class CsvSerialization(Serialization):
+    """Describes how data from an input is serialized or how data is serialized when written to an output in CSV format.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param type: Required. Indicates the type of serialization that the input or output uses.
+     Required on PUT (CreateOrReplace) requests. Constant filled by server. Possible values
+     include: "Csv", "Avro", "Json", "CustomClr", "Parquet".
+    :type type: str or ~stream_analytics_management_client.models.EventSerializationType
+    :param field_delimiter: Specifies the delimiter that will be used to separate comma-separated
+     value (CSV) records. See
+     https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or
+     https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a list
+     of supported values. Required on PUT (CreateOrReplace) requests.
+    :type field_delimiter: str
+    :param encoding: Specifies the encoding of the incoming data in the case of input and the
+     encoding of outgoing data in the case of output. Required on PUT (CreateOrReplace) requests.
+     Possible values include: "UTF8".
+    :type encoding: str or ~stream_analytics_management_client.models.Encoding
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'field_delimiter': {'key': 'properties.fieldDelimiter', 'type': 'str'},
+        'encoding': {'key': 'properties.encoding', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(CsvSerialization, self).__init__(**kwargs)
+        self.type = 'Csv'  # type: str
+        self.field_delimiter = kwargs.get('field_delimiter', None)
+        self.encoding = kwargs.get('encoding', None)
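+
+
+# Illustrative usage sketch (comment only, not part of the generated code): a CSV
+# serialization with a comma delimiter and UTF-8 encoding, usable on both inputs and
+# outputs.
+#
+#     csv_serialization = CsvSerialization(
+#         field_delimiter=',',
+#         encoding='UTF8',
+#     )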
+
+
+class CustomClrSerialization(Serialization):
+    """Describes how data from an input is serialized or how data is serialized when written to an output in custom format.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param type: Required. Indicates the type of serialization that the input or output uses.
+     Required on PUT (CreateOrReplace) requests. Constant filled by server. Possible values
+     include: "Csv", "Avro", "Json", "CustomClr", "Parquet".
+    :type type: str or ~stream_analytics_management_client.models.EventSerializationType
+    :param serialization_dll_path: The serialization library path.
+    :type serialization_dll_path: str
+    :param serialization_class_name: The serialization class name.
+    :type serialization_class_name: str
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'serialization_dll_path': {'key': 'properties.serializationDllPath', 'type': 'str'},
+        'serialization_class_name': {'key': 'properties.serializationClassName', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(CustomClrSerialization, self).__init__(**kwargs)
+        self.type = 'CustomClr'  # type: str
+        self.serialization_dll_path = kwargs.get('serialization_dll_path', None)
+        self.serialization_class_name = kwargs.get('serialization_class_name', None)
+
+
+class DiagnosticCondition(msrest.serialization.Model):
+    """A condition applicable to the resource, or to the job overall, that warrants customer attention.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar since: The UTC timestamp of when the condition started. Customers should be able to find
+     a corresponding event in the ops log around this time.
+    :vartype since: str
+    :ivar code: The opaque diagnostic code.
+    :vartype code: str
+    :ivar message: The human-readable message describing the condition in detail. Localized in the
+     Accept-Language of the client request.
+ :vartype message: str + """ + + _validation = { + 'since': {'readonly': True}, + 'code': {'readonly': True}, + 'message': {'readonly': True}, + } + + _attribute_map = { + 'since': {'key': 'since', 'type': 'str'}, + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DiagnosticCondition, self).__init__(**kwargs) + self.since = None + self.code = None + self.message = None + + +class Diagnostics(msrest.serialization.Model): + """Describes conditions applicable to the Input, Output, or the job overall, that warrant customer attention. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar conditions: A collection of zero or more conditions applicable to the resource, or to the + job overall, that warrant customer attention. + :vartype conditions: list[~stream_analytics_management_client.models.DiagnosticCondition] + """ + + _validation = { + 'conditions': {'readonly': True}, + } + + _attribute_map = { + 'conditions': {'key': 'conditions', 'type': '[DiagnosticCondition]'}, + } + + def __init__( + self, + **kwargs + ): + super(Diagnostics, self).__init__(**kwargs) + self.conditions = None + + +class DocumentDbOutputDataSource(OutputDataSource): + """Describes a DocumentDB output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of data source output will be written to. Required on + PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param account_id: The DocumentDB account name or ID. Required on PUT (CreateOrReplace) + requests. + :type account_id: str + :param account_key: The account key for the DocumentDB account. Required on PUT + (CreateOrReplace) requests. + :type account_key: str + :param database: The name of the DocumentDB database. Required on PUT (CreateOrReplace) + requests. + :type database: str + :param collection_name_pattern: The collection name pattern for the collections to be used. The + collection name format can be constructed using the optional {partition} token, where + partitions start from 0. See the DocumentDB section of https://docs.microsoft.com/en- + us/rest/api/streamanalytics/stream-analytics-output for more information. Required on PUT + (CreateOrReplace) requests. + :type collection_name_pattern: str + :param partition_key: The name of the field in output events used to specify the key for + partitioning output across collections. If 'collectionNamePattern' contains the {partition} + token, this property is required to be specified. + :type partition_key: str + :param document_id: The name of the field in output events used to specify the primary key + which insert or update operations are based on. 
+ :type document_id: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'account_id': {'key': 'properties.accountId', 'type': 'str'}, + 'account_key': {'key': 'properties.accountKey', 'type': 'str'}, + 'database': {'key': 'properties.database', 'type': 'str'}, + 'collection_name_pattern': {'key': 'properties.collectionNamePattern', 'type': 'str'}, + 'partition_key': {'key': 'properties.partitionKey', 'type': 'str'}, + 'document_id': {'key': 'properties.documentId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DocumentDbOutputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.Storage/DocumentDB' # type: str + self.account_id = kwargs.get('account_id', None) + self.account_key = kwargs.get('account_key', None) + self.database = kwargs.get('database', None) + self.collection_name_pattern = kwargs.get('collection_name_pattern', None) + self.partition_key = kwargs.get('partition_key', None) + self.document_id = kwargs.get('document_id', None) + + +class Error(msrest.serialization.Model): + """Common error representation. + + :param error: Error definition properties. + :type error: ~stream_analytics_management_client.models.ErrorAutoGenerated + """ + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorAutoGenerated'}, + } + + def __init__( + self, + **kwargs + ): + super(Error, self).__init__(**kwargs) + self.error = kwargs.get('error', None) + + +class ErrorAutoGenerated(msrest.serialization.Model): + """Error definition properties. + + :param code: Error code. + :type code: str + :param message: Error message. + :type message: str + :param target: Error target. + :type target: str + :param details: Error details. + :type details: list[~stream_analytics_management_client.models.ErrorDetails] + """ + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[ErrorDetails]'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorAutoGenerated, self).__init__(**kwargs) + self.code = kwargs.get('code', None) + self.message = kwargs.get('message', None) + self.target = kwargs.get('target', None) + self.details = kwargs.get('details', None) + + +class ErrorDetails(msrest.serialization.Model): + """Common error details representation. + + :param code: Error code. + :type code: str + :param target: Error target. + :type target: str + :param message: Error message. + :type message: str + """ + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorDetails, self).__init__(**kwargs) + self.code = kwargs.get('code', None) + self.target = kwargs.get('target', None) + self.message = kwargs.get('message', None) + + +class ErrorResponse(msrest.serialization.Model): + """Describes the error that occurred. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar code: Error code associated with the error that occurred. + :vartype code: str + :ivar message: Describes the error in detail. 
+ :vartype message: str + """ + + _validation = { + 'code': {'readonly': True}, + 'message': {'readonly': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorResponse, self).__init__(**kwargs) + self.code = None + self.message = None + + +class ServiceBusDataSourceProperties(msrest.serialization.Model): + """The common properties that are associated with Service Bus data sources (Queues, Topics, Event Hubs, etc.). + + :param service_bus_namespace: The namespace that is associated with the desired Event Hub, + Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus + Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the specified shared access + policy. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + """ + + _attribute_map = { + 'service_bus_namespace': {'key': 'serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ServiceBusDataSourceProperties, self).__init__(**kwargs) + self.service_bus_namespace = kwargs.get('service_bus_namespace', None) + self.shared_access_policy_name = kwargs.get('shared_access_policy_name', None) + self.shared_access_policy_key = kwargs.get('shared_access_policy_key', None) + self.authentication_mode = kwargs.get('authentication_mode', None) + + +class EventHubDataSourceProperties(ServiceBusDataSourceProperties): + """The common properties that are associated with Event Hub data sources. + + :param service_bus_namespace: The namespace that is associated with the desired Event Hub, + Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus + Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the specified shared access + policy. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + :param event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. 
+ :type event_hub_name: str + """ + + _attribute_map = { + 'service_bus_namespace': {'key': 'serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, + 'event_hub_name': {'key': 'eventHubName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(EventHubDataSourceProperties, self).__init__(**kwargs) + self.event_hub_name = kwargs.get('event_hub_name', None) + + +class EventHubOutputDataSource(OutputDataSource): + """Describes an Event Hub output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of data source output will be written to. Required on + PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param service_bus_namespace: The namespace that is associated with the desired Event Hub, + Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus + Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the specified shared access + policy. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + :param event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. + :type event_hub_name: str + :param partition_key: The key/column that is used to determine to which partition to send event + data. 
+ :type partition_key: str + :param property_columns: + :type property_columns: list[str] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + 'event_hub_name': {'key': 'properties.eventHubName', 'type': 'str'}, + 'partition_key': {'key': 'properties.partitionKey', 'type': 'str'}, + 'property_columns': {'key': 'properties.propertyColumns', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(EventHubOutputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.ServiceBus/EventHub' # type: str + self.service_bus_namespace = kwargs.get('service_bus_namespace', None) + self.shared_access_policy_name = kwargs.get('shared_access_policy_name', None) + self.shared_access_policy_key = kwargs.get('shared_access_policy_key', None) + self.authentication_mode = kwargs.get('authentication_mode', None) + self.event_hub_name = kwargs.get('event_hub_name', None) + self.partition_key = kwargs.get('partition_key', None) + self.property_columns = kwargs.get('property_columns', None) + + +class EventHubOutputDataSourceProperties(EventHubDataSourceProperties): + """The properties that are associated with an Event Hub output. + + :param service_bus_namespace: The namespace that is associated with the desired Event Hub, + Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus + Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the specified shared access + policy. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + :param event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. + :type event_hub_name: str + :param partition_key: The key/column that is used to determine to which partition to send event + data. 
+ :type partition_key: str + :param property_columns: + :type property_columns: list[str] + """ + + _attribute_map = { + 'service_bus_namespace': {'key': 'serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, + 'event_hub_name': {'key': 'eventHubName', 'type': 'str'}, + 'partition_key': {'key': 'partitionKey', 'type': 'str'}, + 'property_columns': {'key': 'propertyColumns', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(EventHubOutputDataSourceProperties, self).__init__(**kwargs) + self.partition_key = kwargs.get('partition_key', None) + self.property_columns = kwargs.get('property_columns', None) + + +class EventHubStreamInputDataSource(StreamInputDataSource): + """Describes an Event Hub input data source that contains stream data. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of input data source containing stream data. Required + on PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param service_bus_namespace: The namespace that is associated with the desired Event Hub, + Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus + Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the specified shared access + policy. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + :param event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. + :type event_hub_name: str + :param consumer_group_name: The name of an Event Hub Consumer Group that should be used to read + events from the Event Hub. Specifying distinct consumer group names for multiple inputs allows + each of those inputs to receive the same events from the Event Hub. If not specified, the input + uses the Event Hub’s default consumer group. 
+ :type consumer_group_name: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + 'event_hub_name': {'key': 'properties.eventHubName', 'type': 'str'}, + 'consumer_group_name': {'key': 'properties.consumerGroupName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(EventHubStreamInputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.ServiceBus/EventHub' # type: str + self.service_bus_namespace = kwargs.get('service_bus_namespace', None) + self.shared_access_policy_name = kwargs.get('shared_access_policy_name', None) + self.shared_access_policy_key = kwargs.get('shared_access_policy_key', None) + self.authentication_mode = kwargs.get('authentication_mode', None) + self.event_hub_name = kwargs.get('event_hub_name', None) + self.consumer_group_name = kwargs.get('consumer_group_name', None) + + +class EventHubStreamInputDataSourceProperties(EventHubDataSourceProperties): + """The properties that are associated with a Event Hub input containing stream data. + + :param service_bus_namespace: The namespace that is associated with the desired Event Hub, + Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus + Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the specified shared access + policy. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + :param event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. + :type event_hub_name: str + :param consumer_group_name: The name of an Event Hub Consumer Group that should be used to read + events from the Event Hub. Specifying distinct consumer group names for multiple inputs allows + each of those inputs to receive the same events from the Event Hub. If not specified, the input + uses the Event Hub’s default consumer group. + :type consumer_group_name: str + """ + + _attribute_map = { + 'service_bus_namespace': {'key': 'serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, + 'event_hub_name': {'key': 'eventHubName', 'type': 'str'}, + 'consumer_group_name': {'key': 'consumerGroupName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(EventHubStreamInputDataSourceProperties, self).__init__(**kwargs) + self.consumer_group_name = kwargs.get('consumer_group_name', None) + + +class EventHubV2OutputDataSource(OutputDataSource): + """Describes an Event Hub output data source. 
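+
+    Example (an illustrative sketch, not generated code; the namespace, policy, and
+    hub names are placeholders)::
+
+        eventhub_output = EventHubV2OutputDataSource(
+            service_bus_namespace='example-namespace',
+            shared_access_policy_name='RootManageSharedAccessKey',
+            shared_access_policy_key='<policy-key>',
+            event_hub_name='output-hub',
+            partition_key='deviceId',
+        )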
+ + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of data source output will be written to. Required on + PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param service_bus_namespace: The namespace that is associated with the desired Event Hub, + Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus + Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the specified shared access + policy. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + :param event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. + :type event_hub_name: str + :param partition_key: The key/column that is used to determine to which partition to send event + data. + :type partition_key: str + :param property_columns: + :type property_columns: list[str] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + 'event_hub_name': {'key': 'properties.eventHubName', 'type': 'str'}, + 'partition_key': {'key': 'properties.partitionKey', 'type': 'str'}, + 'property_columns': {'key': 'properties.propertyColumns', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(EventHubV2OutputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.EventHub/EventHub' # type: str + self.service_bus_namespace = kwargs.get('service_bus_namespace', None) + self.shared_access_policy_name = kwargs.get('shared_access_policy_name', None) + self.shared_access_policy_key = kwargs.get('shared_access_policy_key', None) + self.authentication_mode = kwargs.get('authentication_mode', None) + self.event_hub_name = kwargs.get('event_hub_name', None) + self.partition_key = kwargs.get('partition_key', None) + self.property_columns = kwargs.get('property_columns', None) + + +class EventHubV2StreamInputDataSource(StreamInputDataSource): + """Describes an Event Hub input data source that contains stream data. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of input data source containing stream data. Required + on PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param service_bus_namespace: The namespace that is associated with the desired Event Hub, + Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus + Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. 
+ :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the specified shared access + policy. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + :param event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. + :type event_hub_name: str + :param consumer_group_name: The name of an Event Hub Consumer Group that should be used to read + events from the Event Hub. Specifying distinct consumer group names for multiple inputs allows + each of those inputs to receive the same events from the Event Hub. If not specified, the input + uses the Event Hub’s default consumer group. + :type consumer_group_name: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + 'event_hub_name': {'key': 'properties.eventHubName', 'type': 'str'}, + 'consumer_group_name': {'key': 'properties.consumerGroupName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(EventHubV2StreamInputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.EventHub/EventHub' # type: str + self.service_bus_namespace = kwargs.get('service_bus_namespace', None) + self.shared_access_policy_name = kwargs.get('shared_access_policy_name', None) + self.shared_access_policy_key = kwargs.get('shared_access_policy_key', None) + self.authentication_mode = kwargs.get('authentication_mode', None) + self.event_hub_name = kwargs.get('event_hub_name', None) + self.consumer_group_name = kwargs.get('consumer_group_name', None) + + +class External(msrest.serialization.Model): + """The storage account where the custom code artifacts are located. + + :param storage_account: The properties that are associated with an Azure Storage account. + :type storage_account: ~stream_analytics_management_client.models.StorageAccount + :param container: + :type container: str + :param path: + :type path: str + """ + + _attribute_map = { + 'storage_account': {'key': 'storageAccount', 'type': 'StorageAccount'}, + 'container': {'key': 'container', 'type': 'str'}, + 'path': {'key': 'path', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(External, self).__init__(**kwargs) + self.storage_account = kwargs.get('storage_account', None) + self.container = kwargs.get('container', None) + self.path = kwargs.get('path', None) + + +class SubResource(msrest.serialization.Model): + """The base sub-resource model definition. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource Id. + :vartype id: str + :param name: Resource name. + :type name: str + :ivar type: Resource type. 
+ :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SubResource, self).__init__(**kwargs) + self.id = None + self.name = kwargs.get('name', None) + self.type = None + + +class Function(SubResource): + """A function object, containing all information associated with the named function. All functions are contained under a streaming job. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource Id. + :vartype id: str + :param name: Resource name. + :type name: str + :ivar type: Resource type. + :vartype type: str + :param properties: The properties that are associated with a function. + :type properties: ~stream_analytics_management_client.models.FunctionProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'FunctionProperties'}, + } + + def __init__( + self, + **kwargs + ): + super(Function, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + + +class FunctionInput(msrest.serialization.Model): + """Describes one input parameter of a function. + + :param data_type: The (Azure Stream Analytics supported) data type of the function input + parameter. A list of valid Azure Stream Analytics data types are described at + https://msdn.microsoft.com/en-us/library/azure/dn835065.aspx. + :type data_type: str + :param is_configuration_parameter: A flag indicating if the parameter is a configuration + parameter. True if this input parameter is expected to be a constant. Default is false. + :type is_configuration_parameter: bool + """ + + _attribute_map = { + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'is_configuration_parameter': {'key': 'isConfigurationParameter', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(FunctionInput, self).__init__(**kwargs) + self.data_type = kwargs.get('data_type', None) + self.is_configuration_parameter = kwargs.get('is_configuration_parameter', None) + + +class FunctionListResult(msrest.serialization.Model): + """Object containing a list of functions under a streaming job. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: A list of functions under a streaming job. Populated by a 'List' operation. + :vartype value: list[~stream_analytics_management_client.models.Function] + :ivar next_link: The link (url) to the next page of results. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Function]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(FunctionListResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class FunctionOutput(msrest.serialization.Model): + """Describes the output of a function. + + :param data_type: The (Azure Stream Analytics supported) data type of the function output. 
A + list of valid Azure Stream Analytics data types are described at https://msdn.microsoft.com/en- + us/library/azure/dn835065.aspx. + :type data_type: str + """ + + _attribute_map = { + 'data_type': {'key': 'dataType', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(FunctionOutput, self).__init__(**kwargs) + self.data_type = kwargs.get('data_type', None) + + +class Identity(msrest.serialization.Model): + """Describes how identity is verified. + + :param tenant_id: + :type tenant_id: str + :param principal_id: + :type principal_id: str + :param type: + :type type: str + """ + + _attribute_map = { + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Identity, self).__init__(**kwargs) + self.tenant_id = kwargs.get('tenant_id', None) + self.principal_id = kwargs.get('principal_id', None) + self.type = kwargs.get('type', None) + + +class Input(SubResource): + """An input object, containing all information associated with the named input. All inputs are contained under a streaming job. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource Id. + :vartype id: str + :param name: Resource name. + :type name: str + :ivar type: Resource type. + :vartype type: str + :param properties: The properties that are associated with an input. Required on PUT + (CreateOrReplace) requests. + :type properties: ~stream_analytics_management_client.models.InputProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'InputProperties'}, + } + + def __init__( + self, + **kwargs + ): + super(Input, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + + +class InputListResult(msrest.serialization.Model): + """Object containing a list of inputs under a streaming job. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: A list of inputs under a streaming job. Populated by a 'List' operation. + :vartype value: list[~stream_analytics_management_client.models.Input] + :ivar next_link: The link (url) to the next page of results. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Input]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(InputListResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class InputProperties(msrest.serialization.Model): + """The properties that are associated with an input. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ReferenceInputProperties, StreamInputProperties. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates whether the input is a source of reference data or stream + data. Required on PUT (CreateOrReplace) requests.Constant filled by server. 
+ :type type: str + :param serialization: Describes how data from an input is serialized or how data is serialized + when written to an output. Required on PUT (CreateOrReplace) requests. + :type serialization: ~stream_analytics_management_client.models.Serialization + :ivar diagnostics: Describes conditions applicable to the Input, Output, or the job overall, + that warrant customer attention. + :vartype diagnostics: ~stream_analytics_management_client.models.Diagnostics + :ivar etag: The current entity tag for the input. This is an opaque string. You can use it to + detect whether the resource has changed between requests. You can also use it in the If-Match + or If-None-Match headers for write operations for optimistic concurrency. + :vartype etag: str + :param compression: Describes how input data is compressed. + :type compression: ~stream_analytics_management_client.models.Compression + :param partition_key: Describes a key in the input data which is used for + partitioning the input data. + :type partition_key: str + """ + + _validation = { + 'type': {'required': True}, + 'diagnostics': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'serialization': {'key': 'serialization', 'type': 'Serialization'}, + 'diagnostics': {'key': 'diagnostics', 'type': 'Diagnostics'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'compression': {'key': 'compression', 'type': 'Compression'}, + 'partition_key': {'key': 'partitionKey', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Reference': 'ReferenceInputProperties', 'Stream': 'StreamInputProperties'} + } + + def __init__( + self, + **kwargs + ): + super(InputProperties, self).__init__(**kwargs) + self.type = None # type: Optional[str] + self.serialization = kwargs.get('serialization', None) + self.diagnostics = None + self.etag = None + self.compression = kwargs.get('compression', None) + self.partition_key = kwargs.get('partition_key', None) + + +class IoTHubStreamInputDataSource(StreamInputDataSource): + """Describes an IoT Hub input data source that contains stream data. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of input data source containing stream data. Required + on PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param iot_hub_namespace: The name or the URI of the IoT Hub. Required on PUT (CreateOrReplace) + requests. + :type iot_hub_namespace: str + :param shared_access_policy_name: The shared access policy name for the IoT Hub. This policy + must contain at least the Service connect permission. Required on PUT (CreateOrReplace) + requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the specified shared access + policy. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_key: str + :param consumer_group_name: The name of an IoT Hub Consumer Group that should be used to read + events from the IoT Hub. If not specified, the input uses the IoT Hub’s default consumer group. + :type consumer_group_name: str + :param endpoint: The IoT Hub endpoint to connect to (i.e. messages/events, + messages/operationsMonitoringEvents, etc.).
+ :type endpoint: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'iot_hub_namespace': {'key': 'properties.iotHubNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'}, + 'consumer_group_name': {'key': 'properties.consumerGroupName', 'type': 'str'}, + 'endpoint': {'key': 'properties.endpoint', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(IoTHubStreamInputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.Devices/IotHubs' # type: str + self.iot_hub_namespace = kwargs.get('iot_hub_namespace', None) + self.shared_access_policy_name = kwargs.get('shared_access_policy_name', None) + self.shared_access_policy_key = kwargs.get('shared_access_policy_key', None) + self.consumer_group_name = kwargs.get('consumer_group_name', None) + self.endpoint = kwargs.get('endpoint', None) + + +class JavaScriptFunctionBinding(FunctionBinding): + """The binding to a JavaScript function. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the function binding type.Constant filled by server. + :type type: str + :param script: The JavaScript code containing a single function definition. For example: + 'function (x, y) { return x + y; }'. + :type script: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'script': {'key': 'properties.script', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(JavaScriptFunctionBinding, self).__init__(**kwargs) + self.type = 'Microsoft.StreamAnalytics/JavascriptUdf' # type: str + self.script = kwargs.get('script', None) + + +class JavaScriptFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): + """The parameters needed to retrieve the default function definition for a JavaScript function. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param binding_type: Required. Indicates the function binding type.Constant filled by server. + :type binding_type: str + :param script: The JavaScript code containing a single function definition. For example: + 'function (x, y) { return x + y; }'. + :type script: str + :ivar udf_type: The function type. Default value: "Scalar". + :vartype udf_type: str + """ + + _validation = { + 'binding_type': {'required': True}, + 'udf_type': {'constant': True}, + } + + _attribute_map = { + 'binding_type': {'key': 'bindingType', 'type': 'str'}, + 'script': {'key': 'bindingRetrievalProperties.script', 'type': 'str'}, + 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'str'}, + } + + udf_type = "Scalar" + + def __init__( + self, + **kwargs + ): + super(JavaScriptFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) + self.binding_type = 'Microsoft.StreamAnalytics/JavascriptUdf' # type: str + self.script = kwargs.get('script', None) + + +class StorageAccount(msrest.serialization.Model): + """The properties that are associated with an Azure Storage account. + + :param account_name: The name of the Azure Storage account. Required on PUT (CreateOrReplace) + requests. + :type account_name: str + :param account_key: The account key for the Azure Storage account. 
Required on PUT + (CreateOrReplace) requests. + :type account_key: str + """ + + _attribute_map = { + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'account_key': {'key': 'accountKey', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(StorageAccount, self).__init__(**kwargs) + self.account_name = kwargs.get('account_name', None) + self.account_key = kwargs.get('account_key', None) + + +class JobStorageAccount(StorageAccount): + """The properties that are associated with an Azure Storage account with MSI. + + :param account_name: The name of the Azure Storage account. Required on PUT (CreateOrReplace) + requests. + :type account_name: str + :param account_key: The account key for the Azure Storage account. Required on PUT + (CreateOrReplace) requests. + :type account_key: str + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + """ + + _attribute_map = { + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'account_key': {'key': 'accountKey', 'type': 'str'}, + 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(JobStorageAccount, self).__init__(**kwargs) + self.authentication_mode = kwargs.get('authentication_mode', None) + + +class JsonSerialization(Serialization): + """Describes how data from an input is serialized or how data is serialized when written to an output in JSON format. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of serialization that the input or output uses. + Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: + "Csv", "Avro", "Json", "CustomClr", "Parquet". + :type type: str or ~stream_analytics_management_client.models.EventSerializationType + :param encoding: Specifies the encoding of the incoming data in the case of input and the + encoding of outgoing data in the case of output. Required on PUT (CreateOrReplace) requests. + Possible values include: "UTF8". + :type encoding: str or ~stream_analytics_management_client.models.Encoding + :param format: This property applies only to JSON serialization of outputs. It is not + applicable to inputs. This property specifies the format of the JSON the output will be written + in. The currently supported values are 'lineSeparated' indicating the output will be formatted + by having each JSON object separated by a new line and 'array' indicating the output will be + formatted as an array of JSON objects. Default value is 'lineSeparated' if left null. Possible + values include: "LineSeparated", "Array". + :type format: str or ~stream_analytics_management_client.models.JsonOutputSerializationFormat + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'encoding': {'key': 'properties.encoding', 'type': 'str'}, + 'format': {'key': 'properties.format', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(JsonSerialization, self).__init__(**kwargs) + self.type = 'Json' # type: str + self.encoding = kwargs.get('encoding', None) + self.format = kwargs.get('format', None) + + +class Operation(msrest.serialization.Model): + """A Stream Analytics REST API operation. + + Variables are only populated by the server, and will be ignored when sending a request.
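+ + Instances are read-only and are typically obtained by enumerating the provider's operations (a sketch; assumes ``client`` is an already-constructed ``StreamAnalyticsManagementClient``): + + .. code-block:: python + + for op in client.operations.list(): + print(op.name)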
+ + :ivar name: The name of the operation being performed on this particular object. + :vartype name: str + :ivar display: Contains the localized display information for this particular operation / + action. + :vartype display: ~stream_analytics_management_client.models.OperationDisplay + """ + + _validation = { + 'name': {'readonly': True}, + 'display': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display': {'key': 'display', 'type': 'OperationDisplay'}, + } + + def __init__( + self, + **kwargs + ): + super(Operation, self).__init__(**kwargs) + self.name = None + self.display = None + + +class OperationDisplay(msrest.serialization.Model): + """Contains the localized display information for this particular operation / action. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar provider: The localized friendly form of the resource provider name. + :vartype provider: str + :ivar resource: The localized friendly form of the resource type related to this + action/operation. + :vartype resource: str + :ivar operation: The localized friendly name for the operation. + :vartype operation: str + :ivar description: The localized friendly description for the operation. + :vartype description: str + """ + + _validation = { + 'provider': {'readonly': True}, + 'resource': {'readonly': True}, + 'operation': {'readonly': True}, + 'description': {'readonly': True}, + } + + _attribute_map = { + 'provider': {'key': 'provider', 'type': 'str'}, + 'resource': {'key': 'resource', 'type': 'str'}, + 'operation': {'key': 'operation', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(OperationDisplay, self).__init__(**kwargs) + self.provider = None + self.resource = None + self.operation = None + self.description = None + + +class OperationListResult(msrest.serialization.Model): + """Result of the request to list Stream Analytics operations. It contains a list of operations and a URL link to get the next set of results. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: List of Stream Analytics operations supported by the Microsoft.StreamAnalytics + resource provider. + :vartype value: list[~stream_analytics_management_client.models.Operation] + :ivar next_link: URL to get the next set of operation list results if there are any. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Operation]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(OperationListResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class Output(SubResource): + """An output object, containing all information associated with the named output. All outputs are contained under a streaming job. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource Id. + :vartype id: str + :param name: Resource name. + :type name: str + :ivar type: Resource type. + :vartype type: str + :param datasource: Describes the data source that output will be written to. Required on PUT + (CreateOrReplace) requests. 
+ :type datasource: ~stream_analytics_management_client.models.OutputDataSource + :param time_window: + :type time_window: str + :param size_window: + :type size_window: float + :param serialization: Describes how data from an input is serialized or how data is serialized + when written to an output. Required on PUT (CreateOrReplace) requests. + :type serialization: ~stream_analytics_management_client.models.Serialization + :ivar diagnostics: Describes conditions applicable to the Input, Output, or the job overall, + that warrant customer attention. + :vartype diagnostics: ~stream_analytics_management_client.models.Diagnostics + :ivar etag: The current entity tag for the output. This is an opaque string. You can use it to + detect whether the resource has changed between requests. You can also use it in the If-Match + or If-None-Match headers for write operations for optimistic concurrency. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'type': {'readonly': True}, + 'diagnostics': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'datasource': {'key': 'properties.datasource', 'type': 'OutputDataSource'}, + 'time_window': {'key': 'properties.timeWindow', 'type': 'str'}, + 'size_window': {'key': 'properties.sizeWindow', 'type': 'float'}, + 'serialization': {'key': 'properties.serialization', 'type': 'Serialization'}, + 'diagnostics': {'key': 'properties.diagnostics', 'type': 'Diagnostics'}, + 'etag': {'key': 'properties.etag', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Output, self).__init__(**kwargs) + self.datasource = kwargs.get('datasource', None) + self.time_window = kwargs.get('time_window', None) + self.size_window = kwargs.get('size_window', None) + self.serialization = kwargs.get('serialization', None) + self.diagnostics = None + self.etag = None + + +class OutputListResult(msrest.serialization.Model): + """Object containing a list of outputs under a streaming job. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: A list of outputs under a streaming job. Populated by a 'List' operation. + :vartype value: list[~stream_analytics_management_client.models.Output] + :ivar next_link: The link (url) to the next page of results. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Output]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(OutputListResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class ParquetSerialization(Serialization): + """Describes how data from an input is serialized or how data is serialized when written to an output in Parquet format. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of serialization that the input or output uses. + Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: + "Csv", "Avro", "Json", "CustomClr", "Parquet". + :type type: str or ~stream_analytics_management_client.models.EventSerializationType + :param properties: The properties that are associated with the Parquet serialization type. + Required on PUT (CreateOrReplace) requests. 
+ :type properties: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(ParquetSerialization, self).__init__(**kwargs) + self.type = 'Parquet' # type: str + self.properties = kwargs.get('properties', None) + + +class PowerBIOutputDataSource(OutputDataSource): + """Describes a Power BI output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of data source output will be written to. Required on + PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param refresh_token: A refresh token that can be used to obtain a valid access token that can + then be used to authenticate with the data source. A valid refresh token is currently only + obtainable via the Azure Portal. It is recommended to put a dummy string value here when + creating the data source and then go to the Azure Portal to authenticate the data source, + which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) + requests. + :type refresh_token: str + :param token_user_principal_name: The user principal name (UPN) of the user that was used to + obtain the refresh token. Use this property to help remember which user was used to obtain the + refresh token. + :type token_user_principal_name: str + :param token_user_display_name: The user display name of the user that was used to obtain the + refresh token. Use this property to help remember which user was used to obtain the refresh + token. + :type token_user_display_name: str + :param dataset: The name of the Power BI dataset. Required on PUT (CreateOrReplace) requests. + :type dataset: str + :param table: The name of the Power BI table under the specified dataset. Required on PUT + (CreateOrReplace) requests. + :type table: str + :param group_id: The ID of the Power BI group. + :type group_id: str + :param group_name: The name of the Power BI group. Use this property to help remember which + specific Power BI group id was used. + :type group_name: str + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString".
+ :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'refresh_token': {'key': 'properties.refreshToken', 'type': 'str'}, + 'token_user_principal_name': {'key': 'properties.tokenUserPrincipalName', 'type': 'str'}, + 'token_user_display_name': {'key': 'properties.tokenUserDisplayName', 'type': 'str'}, + 'dataset': {'key': 'properties.dataset', 'type': 'str'}, + 'table': {'key': 'properties.table', 'type': 'str'}, + 'group_id': {'key': 'properties.groupId', 'type': 'str'}, + 'group_name': {'key': 'properties.groupName', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PowerBIOutputDataSource, self).__init__(**kwargs) + self.type = 'PowerBI' # type: str + self.refresh_token = kwargs.get('refresh_token', None) + self.token_user_principal_name = kwargs.get('token_user_principal_name', None) + self.token_user_display_name = kwargs.get('token_user_display_name', None) + self.dataset = kwargs.get('dataset', None) + self.table = kwargs.get('table', None) + self.group_id = kwargs.get('group_id', None) + self.group_name = kwargs.get('group_name', None) + self.authentication_mode = kwargs.get('authentication_mode', None) + + +class PowerBIOutputDataSourceProperties(OAuthBasedDataSourceProperties): + """The properties that are associated with a Power BI output. + + :param refresh_token: A refresh token that can be used to obtain a valid access token that can + then be used to authenticate with the data source. A valid refresh token is currently only + obtainable via the Azure Portal. It is recommended to put a dummy string value here when + creating the data source and then go to the Azure Portal to authenticate the data source, + which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) + requests. + :type refresh_token: str + :param token_user_principal_name: The user principal name (UPN) of the user that was used to + obtain the refresh token. Use this property to help remember which user was used to obtain the + refresh token. + :type token_user_principal_name: str + :param token_user_display_name: The user display name of the user that was used to obtain the + refresh token. Use this property to help remember which user was used to obtain the refresh + token. + :type token_user_display_name: str + :param dataset: The name of the Power BI dataset. Required on PUT (CreateOrReplace) requests. + :type dataset: str + :param table: The name of the Power BI table under the specified dataset. Required on PUT + (CreateOrReplace) requests. + :type table: str + :param group_id: The ID of the Power BI group. + :type group_id: str + :param group_name: The name of the Power BI group. Use this property to help remember which + specific Power BI group id was used. + :type group_name: str + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString".
+ :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + """ + + _attribute_map = { + 'refresh_token': {'key': 'refreshToken', 'type': 'str'}, + 'token_user_principal_name': {'key': 'tokenUserPrincipalName', 'type': 'str'}, + 'token_user_display_name': {'key': 'tokenUserDisplayName', 'type': 'str'}, + 'dataset': {'key': 'dataset', 'type': 'str'}, + 'table': {'key': 'table', 'type': 'str'}, + 'group_id': {'key': 'groupId', 'type': 'str'}, + 'group_name': {'key': 'groupName', 'type': 'str'}, + 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PowerBIOutputDataSourceProperties, self).__init__(**kwargs) + self.dataset = kwargs.get('dataset', None) + self.table = kwargs.get('table', None) + self.group_id = kwargs.get('group_id', None) + self.group_name = kwargs.get('group_name', None) + self.authentication_mode = kwargs.get('authentication_mode', None) + + +class PrivateEndpoint(Resource): + """Complete information about the private endpoint. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :param properties: The properties associated with a private endpoint. + :type properties: ~stream_analytics_management_client.models.PrivateEndpointProperties + :ivar etag: Unique opaque string (generally a GUID) that represents the metadata state of the + resource (private endpoint) and changes whenever the resource is updated. Required on PUT + (CreateOrUpdate) requests. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'PrivateEndpointProperties'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateEndpoint, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + self.etag = None + + +class PrivateEndpointListResult(msrest.serialization.Model): + """A list of private endpoints. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: A list of private endpoints. + :vartype value: list[~stream_analytics_management_client.models.PrivateEndpoint] + :ivar next_link: The URL to fetch the next set of private endpoints. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PrivateEndpoint]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateEndpointListResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class PrivateEndpointProperties(msrest.serialization.Model): + """The properties associated with a private endpoint. 
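+ + Illustrative construction (a sketch; the resource ID and group ID below are hypothetical placeholders): + + .. code-block:: python + + props = PrivateEndpointProperties( + manual_private_link_service_connections=[ + PrivateLinkServiceConnection( + private_link_service_id='/subscriptions/.../namespaces/my-ns', + group_ids=['namespace'], + ) + ] + )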
+ + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar created_date: The date when this private endpoint was created. + :vartype created_date: str + :param manual_private_link_service_connections: A list of connections to the remote resource. + Immutable after it is set. + :type manual_private_link_service_connections: + list[~stream_analytics_management_client.models.PrivateLinkServiceConnection] + """ + + _validation = { + 'created_date': {'readonly': True}, + } + + _attribute_map = { + 'created_date': {'key': 'createdDate', 'type': 'str'}, + 'manual_private_link_service_connections': {'key': 'manualPrivateLinkServiceConnections', 'type': '[PrivateLinkServiceConnection]'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateEndpointProperties, self).__init__(**kwargs) + self.created_date = None + self.manual_private_link_service_connections = kwargs.get('manual_private_link_service_connections', None) + + +class PrivateLinkConnectionState(msrest.serialization.Model): + """A collection of read-only information about the state of the connection to the private remote resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar status: Indicates whether the connection has been Approved/Rejected/Removed by the owner + of the remote resource/service. + :vartype status: str + :ivar description: The reason for approval/rejection of the connection. + :vartype description: str + :ivar actions_required: A message indicating if changes on the service provider require any + updates on the consumer. + :vartype actions_required: str + """ + + _validation = { + 'status': {'readonly': True}, + 'description': {'readonly': True}, + 'actions_required': {'readonly': True}, + } + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateLinkConnectionState, self).__init__(**kwargs) + self.status = None + self.description = None + self.actions_required = None + + +class PrivateLinkServiceConnection(msrest.serialization.Model): + """A grouping of information about the connection to the remote resource. + + :param private_link_service_id: The resource id of the private link service. Required on PUT + (CreateOrUpdate) requests. + :type private_link_service_id: str + :param group_ids: The ID(s) of the group(s) obtained from the remote resource that this private + endpoint should connect to. Required on PUT (CreateOrUpdate) requests. + :type group_ids: list[str] + :param request_message: A message passed to the owner of the remote resource with this + connection request. Restricted to 140 chars. + :type request_message: str + :param private_link_service_connection_state: A collection of read-only information about the + state of the connection to the private remote resource. 
+ :type private_link_service_connection_state: + ~stream_analytics_management_client.models.PrivateLinkConnectionState + """ + + _attribute_map = { + 'private_link_service_id': {'key': 'properties.privateLinkServiceId', 'type': 'str'}, + 'group_ids': {'key': 'properties.groupIds', 'type': '[str]'}, + 'request_message': {'key': 'properties.requestMessage', 'type': 'str'}, + 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkConnectionState'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateLinkServiceConnection, self).__init__(**kwargs) + self.private_link_service_id = kwargs.get('private_link_service_id', None) + self.group_ids = kwargs.get('group_ids', None) + self.request_message = kwargs.get('request_message', None) + self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None) + + +class ProxyResource(Resource): + """The resource model definition for an ARM proxy resource. It will have everything other than required location and tags. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ProxyResource, self).__init__(**kwargs) + + +class ReferenceInputProperties(InputProperties): + """The properties that are associated with an input containing reference data. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates whether the input is a source of reference data or stream + data. Required on PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param serialization: Describes how data from an input is serialized or how data is serialized + when written to an output. Required on PUT (CreateOrReplace) requests. + :type serialization: ~stream_analytics_management_client.models.Serialization + :ivar diagnostics: Describes conditions applicable to the Input, Output, or the job overall, + that warrant customer attention. + :vartype diagnostics: ~stream_analytics_management_client.models.Diagnostics + :ivar etag: The current entity tag for the input. This is an opaque string. You can use it to + detect whether the resource has changed between requests. You can also use it in the If-Match + or If-None-Match headers for write operations for optimistic concurrency. + :vartype etag: str + :param compression: Describes how input data is compressed. + :type compression: ~stream_analytics_management_client.models.Compression + :param partition_key: Describes a key in the input data which is used for + partitioning the input data. + :type partition_key: str + :param datasource: Describes an input data source that contains reference data.
Required on PUT + (CreateOrReplace) requests. + :type datasource: ~stream_analytics_management_client.models.ReferenceInputDataSource + """ + + _validation = { + 'type': {'required': True}, + 'diagnostics': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'serialization': {'key': 'serialization', 'type': 'Serialization'}, + 'diagnostics': {'key': 'diagnostics', 'type': 'Diagnostics'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'compression': {'key': 'compression', 'type': 'Compression'}, + 'partition_key': {'key': 'partitionKey', 'type': 'str'}, + 'datasource': {'key': 'datasource', 'type': 'ReferenceInputDataSource'}, + } + + def __init__( + self, + **kwargs + ): + super(ReferenceInputProperties, self).__init__(**kwargs) + self.type = 'Reference' # type: str + self.datasource = kwargs.get('datasource', None) + + +class ResourceTestStatus(msrest.serialization.Model): + """Describes the status of the test operation along with error information, if applicable. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar status: The status of the test operation. + :vartype status: str + :ivar error: Describes the error that occurred. + :vartype error: ~stream_analytics_management_client.models.ErrorResponse + """ + + _validation = { + 'status': {'readonly': True}, + 'error': {'readonly': True}, + } + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'error': {'key': 'error', 'type': 'ErrorResponse'}, + } + + def __init__( + self, + **kwargs + ): + super(ResourceTestStatus, self).__init__(**kwargs) + self.status = None + self.error = None + + +class ScalarFunctionProperties(FunctionProperties): + """The properties that are associated with a scalar function. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of function.Constant filled by server. + :type type: str + :ivar etag: The current entity tag for the function. This is an opaque string. You can use it + to detect whether the resource has changed between requests. You can also use it in the If- + Match or If-None-Match headers for write operations for optimistic concurrency. + :vartype etag: str + :param inputs: + :type inputs: list[~stream_analytics_management_client.models.FunctionInput] + :param output: Describes the output of a function. + :type output: ~stream_analytics_management_client.models.FunctionOutput + :param binding: The physical binding of the function. For example, in the Azure Machine + Learning web service’s case, this describes the endpoint. + :type binding: ~stream_analytics_management_client.models.FunctionBinding + """ + + _validation = { + 'type': {'required': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'inputs': {'key': 'properties.inputs', 'type': '[FunctionInput]'}, + 'output': {'key': 'properties.output', 'type': 'FunctionOutput'}, + 'binding': {'key': 'properties.binding', 'type': 'FunctionBinding'}, + } + + def __init__( + self, + **kwargs + ): + super(ScalarFunctionProperties, self).__init__(**kwargs) + self.type = 'Scalar' # type: str + + +class ServiceBusQueueOutputDataSource(OutputDataSource): + """Describes a Service Bus Queue output data source. 
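+ + Illustrative construction (a sketch; the namespace, policy, and queue names below are hypothetical placeholders): + + .. code-block:: python + + datasource = ServiceBusQueueOutputDataSource( + service_bus_namespace='my-sb-namespace', + shared_access_policy_name='my-send-policy', + shared_access_policy_key='<policy-key>', + queue_name='my-queue', + property_columns=['EventId'], + )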
+ + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of data source output will be written to. Required on + PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param service_bus_namespace: The namespace that is associated with the desired Event Hub, + Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus + Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the specified shared access + policy. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + :param queue_name: The name of the Service Bus Queue. Required on PUT (CreateOrReplace) + requests. + :type queue_name: str + :param property_columns: A string array of the names of output columns to be attached to + Service Bus messages as custom properties. + :type property_columns: list[str] + :param system_property_columns: Dictionary of :code:`<string>`. + :type system_property_columns: dict[str, str] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + 'queue_name': {'key': 'properties.queueName', 'type': 'str'}, + 'property_columns': {'key': 'properties.propertyColumns', 'type': '[str]'}, + 'system_property_columns': {'key': 'properties.systemPropertyColumns', 'type': '{str}'}, + } + + def __init__( + self, + **kwargs + ): + super(ServiceBusQueueOutputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.ServiceBus/Queue' # type: str + self.service_bus_namespace = kwargs.get('service_bus_namespace', None) + self.shared_access_policy_name = kwargs.get('shared_access_policy_name', None) + self.shared_access_policy_key = kwargs.get('shared_access_policy_key', None) + self.authentication_mode = kwargs.get('authentication_mode', None) + self.queue_name = kwargs.get('queue_name', None) + self.property_columns = kwargs.get('property_columns', None) + self.system_property_columns = kwargs.get('system_property_columns', None) + + +class ServiceBusQueueOutputDataSourceProperties(ServiceBusDataSourceProperties): + """The properties that are associated with a Service Bus Queue output. + + :param service_bus_namespace: The namespace that is associated with the desired Event Hub, + Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus + Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the specified shared access + policy.
Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + :param queue_name: The name of the Service Bus Queue. Required on PUT (CreateOrReplace) + requests. + :type queue_name: str + :param property_columns: A string array of the names of output columns to be attached to + Service Bus messages as custom properties. + :type property_columns: list[str] + :param system_property_columns: Dictionary of :code:`<string>`. + :type system_property_columns: dict[str, str] + """ + + _attribute_map = { + 'service_bus_namespace': {'key': 'serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, + 'queue_name': {'key': 'queueName', 'type': 'str'}, + 'property_columns': {'key': 'propertyColumns', 'type': '[str]'}, + 'system_property_columns': {'key': 'systemPropertyColumns', 'type': '{str}'}, + } + + def __init__( + self, + **kwargs + ): + super(ServiceBusQueueOutputDataSourceProperties, self).__init__(**kwargs) + self.queue_name = kwargs.get('queue_name', None) + self.property_columns = kwargs.get('property_columns', None) + self.system_property_columns = kwargs.get('system_property_columns', None) + + +class ServiceBusTopicOutputDataSource(OutputDataSource): + """Describes a Service Bus Topic output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of data source output will be written to. Required on + PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param service_bus_namespace: The namespace that is associated with the desired Event Hub, + Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus + Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the specified shared access + policy. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + :param topic_name: The name of the Service Bus Topic. Required on PUT (CreateOrReplace) + requests. + :type topic_name: str + :param property_columns: A string array of the names of output columns to be attached to + Service Bus messages as custom properties. + :type property_columns: list[str] + :param system_property_columns: Dictionary of :code:`<string>`.
+ :type system_property_columns: dict[str, str] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + 'topic_name': {'key': 'properties.topicName', 'type': 'str'}, + 'property_columns': {'key': 'properties.propertyColumns', 'type': '[str]'}, + 'system_property_columns': {'key': 'properties.systemPropertyColumns', 'type': '{str}'}, + } + + def __init__( + self, + **kwargs + ): + super(ServiceBusTopicOutputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.ServiceBus/Topic' # type: str + self.service_bus_namespace = kwargs.get('service_bus_namespace', None) + self.shared_access_policy_name = kwargs.get('shared_access_policy_name', None) + self.shared_access_policy_key = kwargs.get('shared_access_policy_key', None) + self.authentication_mode = kwargs.get('authentication_mode', None) + self.topic_name = kwargs.get('topic_name', None) + self.property_columns = kwargs.get('property_columns', None) + self.system_property_columns = kwargs.get('system_property_columns', None) + + +class ServiceBusTopicOutputDataSourceProperties(ServiceBusDataSourceProperties): + """The properties that are associated with a Service Bus Topic output. + + :param service_bus_namespace: The namespace that is associated with the desired Event Hub, + Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus + Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the specified shared access + policy. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + :param topic_name: The name of the Service Bus Topic. Required on PUT (CreateOrReplace) + requests. + :type topic_name: str + :param property_columns: A string array of the names of output columns to be attached to + Service Bus messages as custom properties. + :type property_columns: list[str] + :param system_property_columns: Dictionary of :code:`<string>`.
+ :type system_property_columns: dict[str, str] + """ + + _attribute_map = { + 'service_bus_namespace': {'key': 'serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, + 'topic_name': {'key': 'topicName', 'type': 'str'}, + 'property_columns': {'key': 'propertyColumns', 'type': '[str]'}, + 'system_property_columns': {'key': 'systemPropertyColumns', 'type': '{str}'}, + } + + def __init__( + self, + **kwargs + ): + super(ServiceBusTopicOutputDataSourceProperties, self).__init__(**kwargs) + self.topic_name = kwargs.get('topic_name', None) + self.property_columns = kwargs.get('property_columns', None) + self.system_property_columns = kwargs.get('system_property_columns', None) + + +class StartStreamingJobParameters(msrest.serialization.Model): + """Parameters supplied to the Start Streaming Job operation. + + :param output_start_mode: Value may be JobStartTime, CustomTime, or LastOutputEventTime to + indicate whether the starting point of the output event stream should start whenever the job is + started, start at a custom user time stamp specified via the outputStartTime property, or start + from the last event output time. Possible values include: "JobStartTime", "CustomTime", + "LastOutputEventTime". + :type output_start_mode: str or ~stream_analytics_management_client.models.OutputStartMode + :param output_start_time: Value is either an ISO-8601 formatted time stamp that indicates the + starting point of the output event stream, or null to indicate that the output event stream + will start whenever the streaming job is started. This property must have a value if + outputStartMode is set to CustomTime. + :type output_start_time: ~datetime.datetime + """ + + _attribute_map = { + 'output_start_mode': {'key': 'outputStartMode', 'type': 'str'}, + 'output_start_time': {'key': 'outputStartTime', 'type': 'iso-8601'}, + } + + def __init__( + self, + **kwargs + ): + super(StartStreamingJobParameters, self).__init__(**kwargs) + self.output_start_mode = kwargs.get('output_start_mode', None) + self.output_start_time = kwargs.get('output_start_time', None) + + +class StreamingJob(TrackedResource): + """A streaming job object, containing all information associated with the named streaming job. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param location: The geo-location where the resource lives. + :type location: str + :param identity: Describes the system-assigned managed identity assigned to this job that can + be used to authenticate with inputs and outputs. + :type identity: ~stream_analytics_management_client.models.Identity + :param sku: Describes the SKU of the streaming job. Required on PUT (CreateOrReplace) requests. + :type sku: ~stream_analytics_management_client.models.StreamingJobSku + :ivar job_id: A GUID uniquely identifying the streaming job. 
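+
+ # -- Editor's sketch (not part of the generated module): the start parameters
+ # -- described above. With "CustomTime", outputStartTime must be supplied; the
+ # -- timestamp below is a placeholder.
+ import datetime
+ from azure.mgmt.streamanalytics.models import StartStreamingJobParameters
+
+ example_start_parameters = StartStreamingJobParameters(
+     output_start_mode="CustomTime",
+     output_start_time=datetime.datetime(2020, 9, 18, tzinfo=datetime.timezone.utc),
+ )
+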
This GUID is generated upon
+ creation of the streaming job.
+ :vartype job_id: str
+ :ivar provisioning_state: Describes the provisioning status of the streaming job.
+ :vartype provisioning_state: str
+ :ivar job_state: Describes the state of the streaming job.
+ :vartype job_state: str
+ :param job_type: Describes the type of the job. Valid modes are ``Cloud`` and ``Edge``. Possible
+ values include: "Cloud", "Edge".
+ :type job_type: str or ~stream_analytics_management_client.models.JobType
+ :param output_start_mode: This property should only be utilized when it is desired that the job
+ be started immediately upon creation. Value may be JobStartTime, CustomTime, or
+ LastOutputEventTime to indicate whether the starting point of the output event stream should
+ start whenever the job is started, start at a custom user time stamp specified via the
+ outputStartTime property, or start from the last event output time. Possible values include:
+ "JobStartTime", "CustomTime", "LastOutputEventTime".
+ :type output_start_mode: str or ~stream_analytics_management_client.models.OutputStartMode
+ :param output_start_time: Value is either an ISO-8601 formatted time stamp that indicates the
+ starting point of the output event stream, or null to indicate that the output event stream
+ will start whenever the streaming job is started. This property must have a value if
+ outputStartMode is set to CustomTime.
+ :type output_start_time: ~datetime.datetime
+ :ivar last_output_event_time: Value is either an ISO-8601 formatted timestamp indicating the
+ last output event time of the streaming job or null indicating that output has not yet been
+ produced. In case of multiple outputs or multiple streams, this shows the latest value in that
+ set.
+ :vartype last_output_event_time: ~datetime.datetime
+ :param events_out_of_order_policy: Indicates the policy to apply to events that arrive out of
+ order in the input event stream. Possible values include: "Adjust", "Drop".
+ :type events_out_of_order_policy: str or
+ ~stream_analytics_management_client.models.EventsOutOfOrderPolicy
+ :param output_error_policy: Indicates the policy to apply to events that arrive at the output
+ and cannot be written to the external storage due to being malformed (missing column values,
+ column values of wrong type or size). Possible values include: "Stop", "Drop".
+ :type output_error_policy: str or ~stream_analytics_management_client.models.OutputErrorPolicy
+ :param events_out_of_order_max_delay_in_seconds: The maximum tolerable delay in seconds where
+ out-of-order events can be adjusted to be back in order.
+ :type events_out_of_order_max_delay_in_seconds: int
+ :param events_late_arrival_max_delay_in_seconds: The maximum tolerable delay in seconds where
+ events arriving late could be included. The supported range is -1 to 1814399 (20.23:59:59
+ days); -1 is used to specify an indefinite wait. If the property is absent, it is interpreted
+ to have a value of -1.
+ :type events_late_arrival_max_delay_in_seconds: int
+ :param data_locale: The data locale of the stream analytics job. Value should be the name of a
+ supported .NET Culture from the set
+ https://msdn.microsoft.com/en-us/library/system.globalization.culturetypes(v=vs.110).aspx.
+ Defaults to 'en-US' if none specified.
+ :type data_locale: str
+ :param compatibility_level: Controls certain runtime behaviors of the streaming job. Possible
+ values include: "1.0".
+ :type compatibility_level: str or ~stream_analytics_management_client.models.CompatibilityLevel
+ :ivar created_date: Value is an ISO-8601 formatted UTC timestamp indicating when the streaming
+ job was created.
+ :vartype created_date: ~datetime.datetime
+ :param inputs: A list of one or more inputs to the streaming job. The name property for each
+ input is required when specifying this property in a PUT request. This property cannot be
+ modified via a PATCH operation. You must use the PATCH API available for the individual input.
+ :type inputs: list[~stream_analytics_management_client.models.Input]
+ :param transformation: Indicates the query and the number of streaming units to use for the
+ streaming job. The name property of the transformation is required when specifying this
+ property in a PUT request. This property cannot be modified via a PATCH operation. You must use
+ the PATCH API available for the individual transformation.
+ :type transformation: ~stream_analytics_management_client.models.Transformation
+ :param outputs: A list of one or more outputs for the streaming job. The name property for each
+ output is required when specifying this property in a PUT request. This property cannot be
+ modified via a PATCH operation. You must use the PATCH API available for the individual output.
+ :type outputs: list[~stream_analytics_management_client.models.Output]
+ :param functions: A list of one or more functions for the streaming job. The name property for
+ each function is required when specifying this property in a PUT request. This property cannot
+ be modified via a PATCH operation. You must use the PATCH API available for the individual
+ function.
+ :type functions: list[~stream_analytics_management_client.models.Function]
+ :ivar etag: The current entity tag for the streaming job. This is an opaque string. You can use
+ it to detect whether the resource has changed between requests. You can also use it in the
+ If-Match or If-None-Match headers for write operations for optimistic concurrency.
+ :vartype etag: str
+ :param job_storage_account: The properties that are associated with an Azure Storage account
+ with MSI.
+ :type job_storage_account: ~stream_analytics_management_client.models.JobStorageAccount
+ :ivar content_storage_policy: Valid values are JobStorageAccount and SystemAccount. If set to
+ JobStorageAccount, this requires the user to also specify the jobStorageAccount property.
+ Possible values include: "SystemAccount", "JobStorageAccount".
+ :vartype content_storage_policy: str or
+ ~stream_analytics_management_client.models.ContentStoragePolicy
+ :param externals: The storage account where the custom code artifacts are located.
+ :type externals: ~stream_analytics_management_client.models.External
+ :param cluster: The cluster which streaming jobs will run on.
+ :type cluster: ~stream_analytics_management_client.models.ClusterInfo + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'job_id': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'job_state': {'readonly': True}, + 'last_output_event_time': {'readonly': True}, + 'created_date': {'readonly': True}, + 'etag': {'readonly': True}, + 'content_storage_policy': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'Identity'}, + 'sku': {'key': 'properties.sku', 'type': 'StreamingJobSku'}, + 'job_id': {'key': 'properties.jobId', 'type': 'str'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'job_state': {'key': 'properties.jobState', 'type': 'str'}, + 'job_type': {'key': 'properties.jobType', 'type': 'str'}, + 'output_start_mode': {'key': 'properties.outputStartMode', 'type': 'str'}, + 'output_start_time': {'key': 'properties.outputStartTime', 'type': 'iso-8601'}, + 'last_output_event_time': {'key': 'properties.lastOutputEventTime', 'type': 'iso-8601'}, + 'events_out_of_order_policy': {'key': 'properties.eventsOutOfOrderPolicy', 'type': 'str'}, + 'output_error_policy': {'key': 'properties.outputErrorPolicy', 'type': 'str'}, + 'events_out_of_order_max_delay_in_seconds': {'key': 'properties.eventsOutOfOrderMaxDelayInSeconds', 'type': 'int'}, + 'events_late_arrival_max_delay_in_seconds': {'key': 'properties.eventsLateArrivalMaxDelayInSeconds', 'type': 'int'}, + 'data_locale': {'key': 'properties.dataLocale', 'type': 'str'}, + 'compatibility_level': {'key': 'properties.compatibilityLevel', 'type': 'str'}, + 'created_date': {'key': 'properties.createdDate', 'type': 'iso-8601'}, + 'inputs': {'key': 'properties.inputs', 'type': '[Input]'}, + 'transformation': {'key': 'properties.transformation', 'type': 'Transformation'}, + 'outputs': {'key': 'properties.outputs', 'type': '[Output]'}, + 'functions': {'key': 'properties.functions', 'type': '[Function]'}, + 'etag': {'key': 'properties.etag', 'type': 'str'}, + 'job_storage_account': {'key': 'properties.jobStorageAccount', 'type': 'JobStorageAccount'}, + 'content_storage_policy': {'key': 'properties.contentStoragePolicy', 'type': 'str'}, + 'externals': {'key': 'properties.externals', 'type': 'External'}, + 'cluster': {'key': 'properties.cluster', 'type': 'ClusterInfo'}, + } + + def __init__( + self, + **kwargs + ): + super(StreamingJob, self).__init__(**kwargs) + self.identity = kwargs.get('identity', None) + self.sku = kwargs.get('sku', None) + self.job_id = None + self.provisioning_state = None + self.job_state = None + self.job_type = kwargs.get('job_type', None) + self.output_start_mode = kwargs.get('output_start_mode', None) + self.output_start_time = kwargs.get('output_start_time', None) + self.last_output_event_time = None + self.events_out_of_order_policy = kwargs.get('events_out_of_order_policy', None) + self.output_error_policy = kwargs.get('output_error_policy', None) + self.events_out_of_order_max_delay_in_seconds = kwargs.get('events_out_of_order_max_delay_in_seconds', None) + self.events_late_arrival_max_delay_in_seconds = kwargs.get('events_late_arrival_max_delay_in_seconds', None) + self.data_locale = kwargs.get('data_locale', None) + self.compatibility_level = 
kwargs.get('compatibility_level', None) + self.created_date = None + self.inputs = kwargs.get('inputs', None) + self.transformation = kwargs.get('transformation', None) + self.outputs = kwargs.get('outputs', None) + self.functions = kwargs.get('functions', None) + self.etag = None + self.job_storage_account = kwargs.get('job_storage_account', None) + self.content_storage_policy = None + self.externals = kwargs.get('externals', None) + self.cluster = kwargs.get('cluster', None) + + +class StreamingJobListResult(msrest.serialization.Model): + """Object containing a list of streaming jobs. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: A list of streaming jobs. Populated by a 'List' operation. + :vartype value: list[~stream_analytics_management_client.models.StreamingJob] + :ivar next_link: The link (url) to the next page of results. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[StreamingJob]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(StreamingJobListResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class StreamingJobSku(msrest.serialization.Model): + """The properties that are associated with a SKU. + + :param name: The name of the SKU. Required on PUT (CreateOrReplace) requests. Possible values + include: "Standard". + :type name: str or ~stream_analytics_management_client.models.StreamingJobSkuName + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(StreamingJobSku, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + + +class StreamInputProperties(InputProperties): + """The properties that are associated with an input containing stream data. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates whether the input is a source of reference data or stream + data. Required on PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param serialization: Describes how data from an input is serialized or how data is serialized + when written to an output. Required on PUT (CreateOrReplace) requests. + :type serialization: ~stream_analytics_management_client.models.Serialization + :ivar diagnostics: Describes conditions applicable to the Input, Output, or the job overall, + that warrant customer attention. + :vartype diagnostics: ~stream_analytics_management_client.models.Diagnostics + :ivar etag: The current entity tag for the input. This is an opaque string. You can use it to + detect whether the resource has changed between requests. You can also use it in the If-Match + or If-None-Match headers for write operations for optimistic concurrency. + :vartype etag: str + :param compression: Describes how input data is compressed. + :type compression: ~stream_analytics_management_client.models.Compression + :param partition_key: partitionKey Describes a key in the input data which is used for + partitioning the input data. + :type partition_key: str + :param datasource: Describes an input data source that contains stream data. Required on PUT + (CreateOrReplace) requests. 
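+
+ # -- Editor's sketch (not part of the generated module): creating the streaming
+ # -- job described above through the management client. Assumes the
+ # -- azure-identity package for credentials; resource names are placeholders and
+ # -- the begin_* method name follows this client's long-running-operation
+ # -- convention.
+ from azure.identity import DefaultAzureCredential
+ from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient
+ from azure.mgmt.streamanalytics.models import StreamingJob, StreamingJobSku
+
+ client = StreamAnalyticsManagementClient(DefaultAzureCredential(), "<subscription-id>")
+ job = StreamingJob(
+     location="West US",
+     sku=StreamingJobSku(name="Standard"),
+     events_out_of_order_policy="Adjust",   # adjust out-of-order events back into order
+     output_error_policy="Drop",            # drop malformed output events
+     data_locale="en-US",
+     compatibility_level="1.0",
+ )
+ poller = client.streaming_jobs.begin_create_or_replace("<resource-group>", "<job-name>", job)
+ created_job = poller.result()  # blocks until provisioning completes
+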
+ :type datasource: ~stream_analytics_management_client.models.StreamInputDataSource + """ + + _validation = { + 'type': {'required': True}, + 'diagnostics': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'serialization': {'key': 'serialization', 'type': 'Serialization'}, + 'diagnostics': {'key': 'diagnostics', 'type': 'Diagnostics'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'compression': {'key': 'compression', 'type': 'Compression'}, + 'partition_key': {'key': 'partitionKey', 'type': 'str'}, + 'datasource': {'key': 'datasource', 'type': 'StreamInputDataSource'}, + } + + def __init__( + self, + **kwargs + ): + super(StreamInputProperties, self).__init__(**kwargs) + self.type = 'Stream' # type: str + self.datasource = kwargs.get('datasource', None) + + +class SubscriptionQuota(SubResource): + """Describes the current quota for the subscription. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource Id. + :vartype id: str + :param name: Resource name. + :type name: str + :ivar type: Resource type. + :vartype type: str + :ivar max_count: The max permitted usage of this resource. + :vartype max_count: int + :ivar current_count: The current usage of this resource. + :vartype current_count: int + """ + + _validation = { + 'id': {'readonly': True}, + 'type': {'readonly': True}, + 'max_count': {'readonly': True}, + 'current_count': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_count': {'key': 'properties.maxCount', 'type': 'int'}, + 'current_count': {'key': 'properties.currentCount', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(SubscriptionQuota, self).__init__(**kwargs) + self.max_count = None + self.current_count = None + + +class SubscriptionQuotasListResult(msrest.serialization.Model): + """Result of the GetQuotas operation. It contains a list of quotas for the subscription in a particular region. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: List of quotas for the subscription in a particular region. + :vartype value: list[~stream_analytics_management_client.models.SubscriptionQuota] + """ + + _validation = { + 'value': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[SubscriptionQuota]'}, + } + + def __init__( + self, + **kwargs + ): + super(SubscriptionQuotasListResult, self).__init__(**kwargs) + self.value = None + + +class Transformation(SubResource): + """A transformation object, containing all information associated with the named transformation. All transformations are contained under a streaming job. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource Id. + :vartype id: str + :param name: Resource name. + :type name: str + :ivar type: Resource type. + :vartype type: str + :param streaming_units: Specifies the number of streaming units that the streaming job uses. + :type streaming_units: int + :param query: Specifies the query that will be run in the streaming job. You can learn more + about the Stream Analytics Query Language (SAQL) here: + https://msdn.microsoft.com/library/azure/dn834998 . Required on PUT (CreateOrReplace) requests. + :type query: str + :ivar etag: The current entity tag for the transformation. 
This is an opaque string. You can + use it to detect whether the resource has changed between requests. You can also use it in the + If-Match or If-None-Match headers for write operations for optimistic concurrency. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'streaming_units': {'key': 'properties.streamingUnits', 'type': 'int'}, + 'query': {'key': 'properties.query', 'type': 'str'}, + 'etag': {'key': 'properties.etag', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Transformation, self).__init__(**kwargs) + self.streaming_units = kwargs.get('streaming_units', None) + self.query = kwargs.get('query', None) + self.etag = None diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models_py3.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models_py3.py new file mode 100644 index 000000000000..aa07f713986e --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models_py3.py @@ -0,0 +1,5166 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +import datetime +from typing import Dict, List, Optional, Union + +from azure.core.exceptions import HttpResponseError +import msrest.serialization + +from ._stream_analytics_management_client_enums import * + + +class FunctionProperties(msrest.serialization.Model): + """The properties that are associated with a function. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AggregateFunctionProperties, ScalarFunctionProperties. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of function.Constant filled by server. + :type type: str + :ivar etag: The current entity tag for the function. This is an opaque string. You can use it + to detect whether the resource has changed between requests. You can also use it in the If- + Match or If-None-Match headers for write operations for optimistic concurrency. + :vartype etag: str + :param inputs: + :type inputs: list[~stream_analytics_management_client.models.FunctionInput] + :param output: Describes the output of a function. + :type output: ~stream_analytics_management_client.models.FunctionOutput + :param binding: The physical binding of the function. For example, in the Azure Machine + Learning web service’s case, this describes the endpoint. 
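+
+ # -- Editor's sketch (not part of the generated module): a Transformation as
+ # -- documented above. The SAQL query is a trivial pass-through; input/output
+ # -- names are placeholders and must match the job's configured inputs/outputs.
+ from azure.mgmt.streamanalytics.models import Transformation
+
+ example_transformation = Transformation(
+     name="Transformation",                          # required when sent in a PUT request
+     streaming_units=6,
+     query="SELECT * INTO [my-output] FROM [my-input]",
+ )
+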
+ :type binding: ~stream_analytics_management_client.models.FunctionBinding + """ + + _validation = { + 'type': {'required': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'inputs': {'key': 'properties.inputs', 'type': '[FunctionInput]'}, + 'output': {'key': 'properties.output', 'type': 'FunctionOutput'}, + 'binding': {'key': 'properties.binding', 'type': 'FunctionBinding'}, + } + + _subtype_map = { + 'type': {'Aggregate': 'AggregateFunctionProperties', 'Scalar': 'ScalarFunctionProperties'} + } + + def __init__( + self, + *, + inputs: Optional[List["FunctionInput"]] = None, + output: Optional["FunctionOutput"] = None, + binding: Optional["FunctionBinding"] = None, + **kwargs + ): + super(FunctionProperties, self).__init__(**kwargs) + self.type = None # type: Optional[str] + self.etag = None + self.inputs = inputs + self.output = output + self.binding = binding + + +class AggregateFunctionProperties(FunctionProperties): + """The properties that are associated with an aggregate function. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of function.Constant filled by server. + :type type: str + :ivar etag: The current entity tag for the function. This is an opaque string. You can use it + to detect whether the resource has changed between requests. You can also use it in the If- + Match or If-None-Match headers for write operations for optimistic concurrency. + :vartype etag: str + :param inputs: + :type inputs: list[~stream_analytics_management_client.models.FunctionInput] + :param output: Describes the output of a function. + :type output: ~stream_analytics_management_client.models.FunctionOutput + :param binding: The physical binding of the function. For example, in the Azure Machine + Learning web service’s case, this describes the endpoint. + :type binding: ~stream_analytics_management_client.models.FunctionBinding + """ + + _validation = { + 'type': {'required': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'inputs': {'key': 'properties.inputs', 'type': '[FunctionInput]'}, + 'output': {'key': 'properties.output', 'type': 'FunctionOutput'}, + 'binding': {'key': 'properties.binding', 'type': 'FunctionBinding'}, + } + + def __init__( + self, + *, + inputs: Optional[List["FunctionInput"]] = None, + output: Optional["FunctionOutput"] = None, + binding: Optional["FunctionBinding"] = None, + **kwargs + ): + super(AggregateFunctionProperties, self).__init__(inputs=inputs, output=output, binding=binding, **kwargs) + self.type = 'Aggregate' # type: str + + +class Serialization(msrest.serialization.Model): + """Describes how data from an input is serialized or how data is serialized when written to an output. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AvroSerialization, CsvSerialization, CustomClrSerialization, JsonSerialization, ParquetSerialization. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of serialization that the input or output uses. + Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: + "Csv", "Avro", "Json", "CustomClr", "Parquet". 
+ :type type: str or ~stream_analytics_management_client.models.EventSerializationType + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Avro': 'AvroSerialization', 'Csv': 'CsvSerialization', 'CustomClr': 'CustomClrSerialization', 'Json': 'JsonSerialization', 'Parquet': 'ParquetSerialization'} + } + + def __init__( + self, + **kwargs + ): + super(Serialization, self).__init__(**kwargs) + self.type = None # type: Optional[str] + + +class AvroSerialization(Serialization): + """Describes how data from an input is serialized or how data is serialized when written to an output in Avro format. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of serialization that the input or output uses. + Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: + "Csv", "Avro", "Json", "CustomClr", "Parquet". + :type type: str or ~stream_analytics_management_client.models.EventSerializationType + :param properties: The properties that are associated with the Avro serialization type. + Required on PUT (CreateOrReplace) requests. + :type properties: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'object'}, + } + + def __init__( + self, + *, + properties: Optional[object] = None, + **kwargs + ): + super(AvroSerialization, self).__init__(**kwargs) + self.type = 'Avro' # type: str + self.properties = properties + + +class OutputDataSource(msrest.serialization.Model): + """Describes the data source that output will be written to. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureFunctionOutputDataSource, AzureDataLakeStoreOutputDataSource, EventHubV2OutputDataSource, EventHubOutputDataSource, ServiceBusQueueOutputDataSource, ServiceBusTopicOutputDataSource, AzureSynapseOutputDataSource, AzureSqlDatabaseOutputDataSource, BlobOutputDataSource, DocumentDbOutputDataSource, AzureTableOutputDataSource, PowerBIOutputDataSource. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of data source output will be written to. Required on + PUT (CreateOrReplace) requests.Constant filled by server. 
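+
+ # -- Editor's sketch (not part of the generated module): the polymorphic
+ # -- serialization model above. Constructing a subclass pins the 'type'
+ # -- discriminator ("Avro" here); on responses, msrest reads 'type' from the
+ # -- wire and dispatches through _subtype_map to the matching subclass.
+ from azure.mgmt.streamanalytics.models import AvroSerialization
+
+ example_serialization = AvroSerialization(properties={})  # Avro needs no extra settings
+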
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Microsoft.AzureFunction': 'AzureFunctionOutputDataSource', 'Microsoft.DataLake/Accounts': 'AzureDataLakeStoreOutputDataSource', 'Microsoft.EventHub/EventHub': 'EventHubV2OutputDataSource', 'Microsoft.ServiceBus/EventHub': 'EventHubOutputDataSource', 'Microsoft.ServiceBus/Queue': 'ServiceBusQueueOutputDataSource', 'Microsoft.ServiceBus/Topic': 'ServiceBusTopicOutputDataSource', 'Microsoft.Sql/Server/DataWarehouse': 'AzureSynapseOutputDataSource', 'Microsoft.Sql/Server/Database': 'AzureSqlDatabaseOutputDataSource', 'Microsoft.Storage/Blob': 'BlobOutputDataSource', 'Microsoft.Storage/DocumentDB': 'DocumentDbOutputDataSource', 'Microsoft.Storage/Table': 'AzureTableOutputDataSource', 'PowerBI': 'PowerBIOutputDataSource'} + } + + def __init__( + self, + **kwargs + ): + super(OutputDataSource, self).__init__(**kwargs) + self.type = None # type: Optional[str] + + +class AzureDataLakeStoreOutputDataSource(OutputDataSource): + """Describes an Azure Data Lake Store output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of data source output will be written to. Required on + PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param refresh_token: A refresh token that can be used to obtain a valid access token that can + then be used to authenticate with the data source. A valid refresh token is currently only + obtainable via the Azure Portal. It is recommended to put a dummy string value here when + creating the data source and then going to the Azure Portal to authenticate the data source + which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) + requests. + :type refresh_token: str + :param token_user_principal_name: The user principal name (UPN) of the user that was used to + obtain the refresh token. Use this property to help remember which user was used to obtain the + refresh token. + :type token_user_principal_name: str + :param token_user_display_name: The user display name of the user that was used to obtain the + refresh token. Use this property to help remember which user was used to obtain the refresh + token. + :type token_user_display_name: str + :param account_name: The name of the Azure Data Lake Store account. Required on PUT + (CreateOrReplace) requests. + :type account_name: str + :param tenant_id: The tenant id of the user used to obtain the refresh token. Required on PUT + (CreateOrReplace) requests. + :type tenant_id: str + :param file_path_prefix: The location of the file to which the output should be written to. + Required on PUT (CreateOrReplace) requests. + :type file_path_prefix: str + :param date_format: The date format. Wherever {date} appears in filePathPrefix, the value of + this property is used as the date format instead. + :type date_format: str + :param time_format: The time format. Wherever {time} appears in filePathPrefix, the value of + this property is used as the time format instead. + :type time_format: str + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". 
+ :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'refresh_token': {'key': 'properties.refreshToken', 'type': 'str'}, + 'token_user_principal_name': {'key': 'properties.tokenUserPrincipalName', 'type': 'str'}, + 'token_user_display_name': {'key': 'properties.tokenUserDisplayName', 'type': 'str'}, + 'account_name': {'key': 'properties.accountName', 'type': 'str'}, + 'tenant_id': {'key': 'properties.tenantId', 'type': 'str'}, + 'file_path_prefix': {'key': 'properties.filePathPrefix', 'type': 'str'}, + 'date_format': {'key': 'properties.dateFormat', 'type': 'str'}, + 'time_format': {'key': 'properties.timeFormat', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + } + + def __init__( + self, + *, + refresh_token: Optional[str] = None, + token_user_principal_name: Optional[str] = None, + token_user_display_name: Optional[str] = None, + account_name: Optional[str] = None, + tenant_id: Optional[str] = None, + file_path_prefix: Optional[str] = None, + date_format: Optional[str] = None, + time_format: Optional[str] = None, + authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, + **kwargs + ): + super(AzureDataLakeStoreOutputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.DataLake/Accounts' # type: str + self.refresh_token = refresh_token + self.token_user_principal_name = token_user_principal_name + self.token_user_display_name = token_user_display_name + self.account_name = account_name + self.tenant_id = tenant_id + self.file_path_prefix = file_path_prefix + self.date_format = date_format + self.time_format = time_format + self.authentication_mode = authentication_mode + + +class OAuthBasedDataSourceProperties(msrest.serialization.Model): + """The properties that are associated with data sources that use OAuth as their authentication model. + + :param refresh_token: A refresh token that can be used to obtain a valid access token that can + then be used to authenticate with the data source. A valid refresh token is currently only + obtainable via the Azure Portal. It is recommended to put a dummy string value here when + creating the data source and then going to the Azure Portal to authenticate the data source + which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) + requests. + :type refresh_token: str + :param token_user_principal_name: The user principal name (UPN) of the user that was used to + obtain the refresh token. Use this property to help remember which user was used to obtain the + refresh token. + :type token_user_principal_name: str + :param token_user_display_name: The user display name of the user that was used to obtain the + refresh token. Use this property to help remember which user was used to obtain the refresh + token. 
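+
+ # -- Editor's sketch (not part of the generated module): an Azure Data Lake
+ # -- Store output following the refresh-token workflow described above: send a
+ # -- dummy token on PUT, then re-authenticate in the Azure Portal to have the
+ # -- service store a valid one. Account and tenant values are placeholders.
+ from azure.mgmt.streamanalytics.models import AzureDataLakeStoreOutputDataSource
+
+ example_adls_output = AzureDataLakeStoreOutputDataSource(
+     account_name="myadlsaccount",
+     tenant_id="<tenant-guid>",
+     file_path_prefix="streaming/{date}/{time}",
+     date_format="yyyy/MM/dd",   # substituted for {date} in filePathPrefix
+     time_format="HH",           # substituted for {time} in filePathPrefix
+     refresh_token="dummy",      # placeholder, per the guidance above
+ )
+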
+ :type token_user_display_name: str + """ + + _attribute_map = { + 'refresh_token': {'key': 'refreshToken', 'type': 'str'}, + 'token_user_principal_name': {'key': 'tokenUserPrincipalName', 'type': 'str'}, + 'token_user_display_name': {'key': 'tokenUserDisplayName', 'type': 'str'}, + } + + def __init__( + self, + *, + refresh_token: Optional[str] = None, + token_user_principal_name: Optional[str] = None, + token_user_display_name: Optional[str] = None, + **kwargs + ): + super(OAuthBasedDataSourceProperties, self).__init__(**kwargs) + self.refresh_token = refresh_token + self.token_user_principal_name = token_user_principal_name + self.token_user_display_name = token_user_display_name + + +class AzureDataLakeStoreOutputDataSourceProperties(OAuthBasedDataSourceProperties): + """The properties that are associated with an Azure Data Lake Store. + + :param refresh_token: A refresh token that can be used to obtain a valid access token that can + then be used to authenticate with the data source. A valid refresh token is currently only + obtainable via the Azure Portal. It is recommended to put a dummy string value here when + creating the data source and then going to the Azure Portal to authenticate the data source + which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) + requests. + :type refresh_token: str + :param token_user_principal_name: The user principal name (UPN) of the user that was used to + obtain the refresh token. Use this property to help remember which user was used to obtain the + refresh token. + :type token_user_principal_name: str + :param token_user_display_name: The user display name of the user that was used to obtain the + refresh token. Use this property to help remember which user was used to obtain the refresh + token. + :type token_user_display_name: str + :param account_name: The name of the Azure Data Lake Store account. Required on PUT + (CreateOrReplace) requests. + :type account_name: str + :param tenant_id: The tenant id of the user used to obtain the refresh token. Required on PUT + (CreateOrReplace) requests. + :type tenant_id: str + :param file_path_prefix: The location of the file to which the output should be written to. + Required on PUT (CreateOrReplace) requests. + :type file_path_prefix: str + :param date_format: The date format. Wherever {date} appears in filePathPrefix, the value of + this property is used as the date format instead. + :type date_format: str + :param time_format: The time format. Wherever {time} appears in filePathPrefix, the value of + this property is used as the time format instead. + :type time_format: str + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". 
+ :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + """ + + _attribute_map = { + 'refresh_token': {'key': 'refreshToken', 'type': 'str'}, + 'token_user_principal_name': {'key': 'tokenUserPrincipalName', 'type': 'str'}, + 'token_user_display_name': {'key': 'tokenUserDisplayName', 'type': 'str'}, + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'file_path_prefix': {'key': 'filePathPrefix', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'str'}, + 'time_format': {'key': 'timeFormat', 'type': 'str'}, + 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, + } + + def __init__( + self, + *, + refresh_token: Optional[str] = None, + token_user_principal_name: Optional[str] = None, + token_user_display_name: Optional[str] = None, + account_name: Optional[str] = None, + tenant_id: Optional[str] = None, + file_path_prefix: Optional[str] = None, + date_format: Optional[str] = None, + time_format: Optional[str] = None, + authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, + **kwargs + ): + super(AzureDataLakeStoreOutputDataSourceProperties, self).__init__(refresh_token=refresh_token, token_user_principal_name=token_user_principal_name, token_user_display_name=token_user_display_name, **kwargs) + self.account_name = account_name + self.tenant_id = tenant_id + self.file_path_prefix = file_path_prefix + self.date_format = date_format + self.time_format = time_format + self.authentication_mode = authentication_mode + + +class AzureFunctionOutputDataSource(OutputDataSource): + """Defines the metadata of AzureFunctionOutputDataSource. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of data source output will be written to. Required on + PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param function_app_name: The name of your Azure Functions app. + :type function_app_name: str + :param function_name: The name of the function in your Azure Functions app. + :type function_name: str + :param api_key: If you want to use an Azure Function from another subscription, you can do so + by providing the key to access your function. + :type api_key: str + :param max_batch_size: A property that lets you set the maximum size for each output batch + that's sent to your Azure function. The input unit is in bytes. By default, this value is + 262,144 bytes (256 KB). + :type max_batch_size: float + :param max_batch_count: A property that lets you specify the maximum number of events in each + batch that's sent to Azure Functions. The default value is 100. 
+ :type max_batch_count: float + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'function_app_name': {'key': 'properties.functionAppName', 'type': 'str'}, + 'function_name': {'key': 'properties.functionName', 'type': 'str'}, + 'api_key': {'key': 'properties.apiKey', 'type': 'str'}, + 'max_batch_size': {'key': 'properties.maxBatchSize', 'type': 'float'}, + 'max_batch_count': {'key': 'properties.maxBatchCount', 'type': 'float'}, + } + + def __init__( + self, + *, + function_app_name: Optional[str] = None, + function_name: Optional[str] = None, + api_key: Optional[str] = None, + max_batch_size: Optional[float] = None, + max_batch_count: Optional[float] = None, + **kwargs + ): + super(AzureFunctionOutputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.AzureFunction' # type: str + self.function_app_name = function_app_name + self.function_name = function_name + self.api_key = api_key + self.max_batch_size = max_batch_size + self.max_batch_count = max_batch_count + + +class FunctionBinding(msrest.serialization.Model): + """The physical binding of the function. For example, in the Azure Machine Learning web service’s case, this describes the endpoint. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureMachineLearningStudioFunctionBinding, AzureMachineLearningServiceFunctionBinding, CSharpFunctionBinding, JavaScriptFunctionBinding. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the function binding type.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Microsoft.MachineLearning/WebService': 'AzureMachineLearningStudioFunctionBinding', 'Microsoft.MachineLearningServices': 'AzureMachineLearningServiceFunctionBinding', 'Microsoft.StreamAnalytics/CLRUdf': 'CSharpFunctionBinding', 'Microsoft.StreamAnalytics/JavascriptUdf': 'JavaScriptFunctionBinding'} + } + + def __init__( + self, + **kwargs + ): + super(FunctionBinding, self).__init__(**kwargs) + self.type = None # type: Optional[str] + + +class AzureMachineLearningServiceFunctionBinding(FunctionBinding): + """The binding to an Azure Machine Learning web service. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the function binding type.Constant filled by server. + :type type: str + :param endpoint: The Request-Response execute endpoint of the Azure Machine Learning web + service. + :type endpoint: str + :param api_key: The API key used to authenticate with Request-Response endpoint. + :type api_key: str + :param inputs: The inputs for the Azure Machine Learning web service endpoint. + :type inputs: + list[~stream_analytics_management_client.models.AzureMachineLearningServiceInputColumn] + :param outputs: A list of outputs from the Azure Machine Learning web service endpoint + execution. + :type outputs: + list[~stream_analytics_management_client.models.AzureMachineLearningServiceOutputColumn] + :param batch_size: Number between 1 and 10000 describing maximum number of rows for every Azure + ML RRS execute request. Default is 1000. + :type batch_size: int + :param number_of_parallel_requests: The number of parallel requests that will be sent per + partition of your job to the machine learning service. Default is 1. 
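+
+ # -- Editor's sketch (not part of the generated module): the Azure Functions
+ # -- output described above, restating the documented defaults explicitly. App,
+ # -- function, and key values are placeholders.
+ from azure.mgmt.streamanalytics.models import AzureFunctionOutputDataSource
+
+ example_function_output = AzureFunctionOutputDataSource(
+     function_app_name="my-function-app",
+     function_name="ProcessEvents",
+     api_key="<function-key>",   # only needed for a function in another subscription
+     max_batch_size=262144,      # bytes per batch (the documented 256 KB default)
+     max_batch_count=100,        # events per batch (the documented default)
+ )
+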
+ :type number_of_parallel_requests: int + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'properties.endpoint', 'type': 'str'}, + 'api_key': {'key': 'properties.apiKey', 'type': 'str'}, + 'inputs': {'key': 'properties.inputs', 'type': '[AzureMachineLearningServiceInputColumn]'}, + 'outputs': {'key': 'properties.outputs', 'type': '[AzureMachineLearningServiceOutputColumn]'}, + 'batch_size': {'key': 'properties.batchSize', 'type': 'int'}, + 'number_of_parallel_requests': {'key': 'properties.numberOfParallelRequests', 'type': 'int'}, + } + + def __init__( + self, + *, + endpoint: Optional[str] = None, + api_key: Optional[str] = None, + inputs: Optional[List["AzureMachineLearningServiceInputColumn"]] = None, + outputs: Optional[List["AzureMachineLearningServiceOutputColumn"]] = None, + batch_size: Optional[int] = None, + number_of_parallel_requests: Optional[int] = None, + **kwargs + ): + super(AzureMachineLearningServiceFunctionBinding, self).__init__(**kwargs) + self.type = 'Microsoft.MachineLearningServices' # type: str + self.endpoint = endpoint + self.api_key = api_key + self.inputs = inputs + self.outputs = outputs + self.batch_size = batch_size + self.number_of_parallel_requests = number_of_parallel_requests + + +class FunctionRetrieveDefaultDefinitionParameters(msrest.serialization.Model): + """Parameters used to specify the type of function to retrieve the default definition for. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters, AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters, CSharpFunctionRetrieveDefaultDefinitionParameters, JavaScriptFunctionRetrieveDefaultDefinitionParameters. + + All required parameters must be populated in order to send to Azure. + + :param binding_type: Required. Indicates the function binding type.Constant filled by server. + :type binding_type: str + """ + + _validation = { + 'binding_type': {'required': True}, + } + + _attribute_map = { + 'binding_type': {'key': 'bindingType', 'type': 'str'}, + } + + _subtype_map = { + 'binding_type': {'Microsoft.MachineLearning/WebService': 'AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters', 'Microsoft.MachineLearningServices': 'AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters', 'Microsoft.StreamAnalytics/CLRUdf': 'CSharpFunctionRetrieveDefaultDefinitionParameters', 'Microsoft.StreamAnalytics/JavascriptUdf': 'JavaScriptFunctionRetrieveDefaultDefinitionParameters'} + } + + def __init__( + self, + **kwargs + ): + super(FunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) + self.binding_type = None # type: Optional[str] + + +class AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): + """The parameters needed to retrieve the default function definition for an Azure Machine Learning web service function. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param binding_type: Required. Indicates the function binding type.Constant filled by server. + :type binding_type: str + :param execute_endpoint: The Request-Response execute endpoint of the Azure Machine Learning + web service. + :type execute_endpoint: str + :ivar udf_type: The function type. 
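+
+ # -- Editor's sketch (not part of the generated module): binding a function to
+ # -- an Azure Machine Learning Request-Response endpoint, as documented above.
+ # -- The endpoint, key, and column names are placeholders.
+ from azure.mgmt.streamanalytics.models import (
+     AzureMachineLearningServiceFunctionBinding,
+     AzureMachineLearningServiceInputColumn,
+     AzureMachineLearningServiceOutputColumn,
+ )
+
+ example_aml_binding = AzureMachineLearningServiceFunctionBinding(
+     endpoint="https://example.azureml.net/score",  # placeholder RRS endpoint
+     api_key="<api-key>",
+     inputs=[AzureMachineLearningServiceInputColumn(name="temperature", data_type="double", map_to=0)],
+     outputs=[AzureMachineLearningServiceOutputColumn(name="score", data_type="double", map_to=0)],
+     batch_size=1000,                 # documented default
+     number_of_parallel_requests=1,   # documented default
+ )
+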
Default value: "Scalar". + :vartype udf_type: str + """ + + _validation = { + 'binding_type': {'required': True}, + 'udf_type': {'constant': True}, + } + + _attribute_map = { + 'binding_type': {'key': 'bindingType', 'type': 'str'}, + 'execute_endpoint': {'key': 'bindingRetrievalProperties.executeEndpoint', 'type': 'str'}, + 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'str'}, + } + + udf_type = "Scalar" + + def __init__( + self, + *, + execute_endpoint: Optional[str] = None, + **kwargs + ): + super(AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) + self.binding_type = 'Microsoft.MachineLearningServices' # type: str + self.execute_endpoint = execute_endpoint + + +class AzureMachineLearningServiceInputColumn(msrest.serialization.Model): + """Describes an input column for the Azure Machine Learning web service endpoint. + + :param name: The name of the input column. + :type name: str + :param data_type: The (Azure Machine Learning supported) data type of the input column. + :type data_type: str + :param map_to: The zero based index of the function parameter this input maps to. + :type map_to: int + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'map_to': {'key': 'mapTo', 'type': 'int'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + data_type: Optional[str] = None, + map_to: Optional[int] = None, + **kwargs + ): + super(AzureMachineLearningServiceInputColumn, self).__init__(**kwargs) + self.name = name + self.data_type = data_type + self.map_to = map_to + + +class AzureMachineLearningServiceInputs(msrest.serialization.Model): + """The inputs for the Azure Machine Learning web service endpoint. + + :param name: The name of the input. This is the name provided while authoring the endpoint. + :type name: str + :param column_names: A list of input columns for the Azure Machine Learning web service + endpoint. + :type column_names: + list[~stream_analytics_management_client.models.AzureMachineLearningServiceInputColumn] + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'column_names': {'key': 'columnNames', 'type': '[AzureMachineLearningServiceInputColumn]'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + column_names: Optional[List["AzureMachineLearningServiceInputColumn"]] = None, + **kwargs + ): + super(AzureMachineLearningServiceInputs, self).__init__(**kwargs) + self.name = name + self.column_names = column_names + + +class AzureMachineLearningServiceOutputColumn(msrest.serialization.Model): + """Describes an output column for the Azure Machine Learning web service endpoint. + + :param name: The name of the output column. + :type name: str + :param data_type: The (Azure Machine Learning supported) data type of the output column. + :type data_type: str + :param map_to: The zero based index of the function parameter this input maps to. 
+ :type map_to: int
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'data_type': {'key': 'dataType', 'type': 'str'},
+ 'map_to': {'key': 'mapTo', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[str] = None,
+ data_type: Optional[str] = None,
+ map_to: Optional[int] = None,
+ **kwargs
+ ):
+ super(AzureMachineLearningServiceOutputColumn, self).__init__(**kwargs)
+ self.name = name
+ self.data_type = data_type
+ self.map_to = map_to
+
+
+class AzureMachineLearningStudioFunctionBinding(FunctionBinding):
+ """The binding to an Azure Machine Learning Studio.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param type: Required. Indicates the function binding type. Constant filled by server.
+ :type type: str
+ :param endpoint: The Request-Response execute endpoint of the Azure Machine Learning Studio.
+ Find out more here:
+ https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs.
+ :type endpoint: str
+ :param api_key: The API key used to authenticate with Request-Response endpoint.
+ :type api_key: str
+ :param inputs: The inputs for the Azure Machine Learning Studio endpoint.
+ :type inputs: ~stream_analytics_management_client.models.AzureMachineLearningStudioInputs
+ :param outputs: A list of outputs from the Azure Machine Learning Studio endpoint execution.
+ :type outputs:
+ list[~stream_analytics_management_client.models.AzureMachineLearningStudioOutputColumn]
+ :param batch_size: Number between 1 and 10000 describing the maximum number of rows for every
+ Azure ML RRS execute request. Default is 1000.
+ :type batch_size: int
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'endpoint': {'key': 'properties.endpoint', 'type': 'str'},
+ 'api_key': {'key': 'properties.apiKey', 'type': 'str'},
+ 'inputs': {'key': 'properties.inputs', 'type': 'AzureMachineLearningStudioInputs'},
+ 'outputs': {'key': 'properties.outputs', 'type': '[AzureMachineLearningStudioOutputColumn]'},
+ 'batch_size': {'key': 'properties.batchSize', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ endpoint: Optional[str] = None,
+ api_key: Optional[str] = None,
+ inputs: Optional["AzureMachineLearningStudioInputs"] = None,
+ outputs: Optional[List["AzureMachineLearningStudioOutputColumn"]] = None,
+ batch_size: Optional[int] = None,
+ **kwargs
+ ):
+ super(AzureMachineLearningStudioFunctionBinding, self).__init__(**kwargs)
+ self.type = 'Microsoft.MachineLearning/WebService' # type: str
+ self.endpoint = endpoint
+ self.api_key = api_key
+ self.inputs = inputs
+ self.outputs = outputs
+ self.batch_size = batch_size
+
+
+class AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters):
+ """The parameters needed to retrieve the default function definition for an Azure Machine Learning Studio function.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param binding_type: Required. Indicates the function binding type. Constant filled by server.
+ :type binding_type: str
+ :param execute_endpoint: The Request-Response execute endpoint of the Azure Machine Learning
+ Studio. Find out more here:
+ https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs.
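+
+ # -- Editor's sketch (not part of the generated module): requesting the default
+ # -- function definition for a Studio endpoint. Only the execute endpoint (a
+ # -- placeholder here) is supplied; binding_type and udf_type are constants
+ # -- fixed by the class itself.
+ from azure.mgmt.streamanalytics.models import (
+     AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters,
+ )
+
+ example_retrieve_parameters = AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters(
+     execute_endpoint="https://services.azureml.net/workspaces/<ws>/services/<svc>/execute",
+ )
+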
+ :type execute_endpoint: str
+ :ivar udf_type: The function type. Default value: "Scalar".
+ :vartype udf_type: str
+ """
+
+ _validation = {
+ 'binding_type': {'required': True},
+ 'udf_type': {'constant': True},
+ }
+
+ _attribute_map = {
+ 'binding_type': {'key': 'bindingType', 'type': 'str'},
+ 'execute_endpoint': {'key': 'bindingRetrievalProperties.executeEndpoint', 'type': 'str'},
+ 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'str'},
+ }
+
+ udf_type = "Scalar"
+
+ def __init__(
+ self,
+ *,
+ execute_endpoint: Optional[str] = None,
+ **kwargs
+ ):
+ super(AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs)
+ self.binding_type = 'Microsoft.MachineLearning/WebService' # type: str
+ self.execute_endpoint = execute_endpoint
+
+
+class AzureMachineLearningStudioInputColumn(msrest.serialization.Model):
+ """Describes an input column for the Azure Machine Learning Studio endpoint.
+
+ :param name: The name of the input column.
+ :type name: str
+ :param data_type: The (Azure Machine Learning supported) data type of the input column. A list
+ of valid Azure Machine Learning data types is described at
+ https://msdn.microsoft.com/en-us/library/azure/dn905923.aspx.
+ :type data_type: str
+ :param map_to: The zero based index of the function parameter this input maps to.
+ :type map_to: int
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'data_type': {'key': 'dataType', 'type': 'str'},
+ 'map_to': {'key': 'mapTo', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[str] = None,
+ data_type: Optional[str] = None,
+ map_to: Optional[int] = None,
+ **kwargs
+ ):
+ super(AzureMachineLearningStudioInputColumn, self).__init__(**kwargs)
+ self.name = name
+ self.data_type = data_type
+ self.map_to = map_to
+
+
+class AzureMachineLearningStudioInputs(msrest.serialization.Model):
+ """The inputs for the Azure Machine Learning Studio endpoint.
+
+ :param name: The name of the input. This is the name provided while authoring the endpoint.
+ :type name: str
+ :param column_names: A list of input columns for the Azure Machine Learning Studio endpoint.
+ :type column_names:
+ list[~stream_analytics_management_client.models.AzureMachineLearningStudioInputColumn]
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'column_names': {'key': 'columnNames', 'type': '[AzureMachineLearningStudioInputColumn]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[str] = None,
+ column_names: Optional[List["AzureMachineLearningStudioInputColumn"]] = None,
+ **kwargs
+ ):
+ super(AzureMachineLearningStudioInputs, self).__init__(**kwargs)
+ self.name = name
+ self.column_names = column_names
+
+
+class AzureMachineLearningStudioOutputColumn(msrest.serialization.Model):
+ """Describes an output column for the Azure Machine Learning Studio endpoint.
+
+ :param name: The name of the output column.
+ :type name: str
+ :param data_type: The (Azure Machine Learning supported) data type of the output column. A list
+ of valid Azure Machine Learning data types is described at
+ https://msdn.microsoft.com/en-us/library/azure/dn905923.aspx.
+ :type data_type: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'data_type': {'key': 'dataType', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[str] = None,
+ data_type: Optional[str] = None,
+ **kwargs
+ ):
+ super(AzureMachineLearningStudioOutputColumn, self).__init__(**kwargs)
+ self.name = name
+ self.data_type = data_type
+
+
+class AzureSqlDatabaseDataSourceProperties(msrest.serialization.Model):
+ """The properties that are associated with an Azure SQL database data source.
+
+ :param server: The name of the SQL server containing the Azure SQL database. Required on PUT
+ (CreateOrReplace) requests.
+ :type server: str
+ :param database: The name of the Azure SQL database. Required on PUT (CreateOrReplace)
+ requests.
+ :type database: str
+ :param user: The user name that will be used to connect to the Azure SQL database. Required on
+ PUT (CreateOrReplace) requests.
+ :type user: str
+ :param password: The password that will be used to connect to the Azure SQL database. Required
+ on PUT (CreateOrReplace) requests.
+ :type password: str
+ :param table: The name of the table in the Azure SQL database. Required on PUT
+ (CreateOrReplace) requests.
+ :type table: str
+ :param max_batch_count: The maximum batch count for writes to the SQL database; the default
+ value is 10,000. Optional on PUT requests.
+ :type max_batch_count: float
+ :param max_writer_count: The maximum writer count; currently only 1 (single writer) and 0
+ (based on query partition) are available. Optional on PUT requests.
+ :type max_writer_count: float
+ :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken",
+ "ConnectionString".
+ :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode
+ """
+
+ _attribute_map = {
+ 'server': {'key': 'server', 'type': 'str'},
+ 'database': {'key': 'database', 'type': 'str'},
+ 'user': {'key': 'user', 'type': 'str'},
+ 'password': {'key': 'password', 'type': 'str'},
+ 'table': {'key': 'table', 'type': 'str'},
+ 'max_batch_count': {'key': 'maxBatchCount', 'type': 'float'},
+ 'max_writer_count': {'key': 'maxWriterCount', 'type': 'float'},
+ 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ server: Optional[str] = None,
+ database: Optional[str] = None,
+ user: Optional[str] = None,
+ password: Optional[str] = None,
+ table: Optional[str] = None,
+ max_batch_count: Optional[float] = None,
+ max_writer_count: Optional[float] = None,
+ authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None,
+ **kwargs
+ ):
+ super(AzureSqlDatabaseDataSourceProperties, self).__init__(**kwargs)
+ self.server = server
+ self.database = database
+ self.user = user
+ self.password = password
+ self.table = table
+ self.max_batch_count = max_batch_count
+ self.max_writer_count = max_writer_count
+ self.authentication_mode = authentication_mode
+
+
+class AzureSqlDatabaseOutputDataSource(OutputDataSource):
+ """Describes an Azure SQL database output data source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param type: Required. Indicates the type of data source output will be written to. Required
+ on PUT (CreateOrReplace) requests. Constant filled by server.
+ :type type: str
+ :param server: The name of the SQL server containing the Azure SQL database. Required on PUT
+ (CreateOrReplace) requests.
+ :type server: str
+ :param database: The name of the Azure SQL database. Required on PUT (CreateOrReplace)
+ requests.
+ :type database: str
+ :param user: The user name that will be used to connect to the Azure SQL database. Required on
+ PUT (CreateOrReplace) requests.
+ :type user: str
+ :param password: The password that will be used to connect to the Azure SQL database. Required
+ on PUT (CreateOrReplace) requests.
+ :type password: str
+ :param table: The name of the table in the Azure SQL database. Required on PUT
+ (CreateOrReplace) requests.
+ :type table: str
+ :param max_batch_count: The maximum batch count for writes to the SQL database; the default
+ value is 10,000. Optional on PUT requests.
+ :type max_batch_count: float
+ :param max_writer_count: The maximum writer count; currently only 1 (single writer) and 0
+ (based on query partition) are available. Optional on PUT requests.
+ :type max_writer_count: float
+ :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken",
+ "ConnectionString".
+ :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'server': {'key': 'properties.server', 'type': 'str'},
+ 'database': {'key': 'properties.database', 'type': 'str'},
+ 'user': {'key': 'properties.user', 'type': 'str'},
+ 'password': {'key': 'properties.password', 'type': 'str'},
+ 'table': {'key': 'properties.table', 'type': 'str'},
+ 'max_batch_count': {'key': 'properties.maxBatchCount', 'type': 'float'},
+ 'max_writer_count': {'key': 'properties.maxWriterCount', 'type': 'float'},
+ 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ server: Optional[str] = None,
+ database: Optional[str] = None,
+ user: Optional[str] = None,
+ password: Optional[str] = None,
+ table: Optional[str] = None,
+ max_batch_count: Optional[float] = None,
+ max_writer_count: Optional[float] = None,
+ authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None,
+ **kwargs
+ ):
+ super(AzureSqlDatabaseOutputDataSource, self).__init__(**kwargs)
+ self.type = 'Microsoft.Sql/Server/Database' # type: str
+ self.server = server
+ self.database = database
+ self.user = user
+ self.password = password
+ self.table = table
+ self.max_batch_count = max_batch_count
+ self.max_writer_count = max_writer_count
+ self.authentication_mode = authentication_mode
+
+
+class AzureSqlDatabaseOutputDataSourceProperties(AzureSqlDatabaseDataSourceProperties):
+ """The properties that are associated with an Azure SQL database output.
+
+ :param server: The name of the SQL server containing the Azure SQL database. Required on PUT
+ (CreateOrReplace) requests.
+ :type server: str
+ :param database: The name of the Azure SQL database. Required on PUT (CreateOrReplace)
+ requests.
+ :type database: str
+ :param user: The user name that will be used to connect to the Azure SQL database. Required on
+ PUT (CreateOrReplace) requests.
+ :type user: str
+ :param password: The password that will be used to connect to the Azure SQL database. Required
+ on PUT (CreateOrReplace) requests.
+ :type password: str
+ :param table: The name of the table in the Azure SQL database. Required on PUT
+ (CreateOrReplace) requests.
+ :type table: str
+ :param max_batch_count: The maximum batch count for writes to the SQL database; the default
+ value is 10,000. Optional on PUT requests.
+ :type max_batch_count: float
+ :param max_writer_count: The maximum writer count; currently only 1 (single writer) and 0
+ (based on query partition) are available. Optional on PUT requests.
+ :type max_writer_count: float
+ :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken",
+ "ConnectionString".
+ :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode
+ """
+
+ _attribute_map = {
+ 'server': {'key': 'server', 'type': 'str'},
+ 'database': {'key': 'database', 'type': 'str'},
+ 'user': {'key': 'user', 'type': 'str'},
+ 'password': {'key': 'password', 'type': 'str'},
+ 'table': {'key': 'table', 'type': 'str'},
+ 'max_batch_count': {'key': 'maxBatchCount', 'type': 'float'},
+ 'max_writer_count': {'key': 'maxWriterCount', 'type': 'float'},
+ 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ server: Optional[str] = None,
+ database: Optional[str] = None,
+ user: Optional[str] = None,
+ password: Optional[str] = None,
+ table: Optional[str] = None,
+ max_batch_count: Optional[float] = None,
+ max_writer_count: Optional[float] = None,
+ authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None,
+ **kwargs
+ ):
+ super(AzureSqlDatabaseOutputDataSourceProperties, self).__init__(server=server, database=database, user=user, password=password, table=table, max_batch_count=max_batch_count, max_writer_count=max_writer_count, authentication_mode=authentication_mode, **kwargs)
+
+
+class ReferenceInputDataSource(msrest.serialization.Model):
+ """Describes an input data source that contains reference data.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AzureSqlReferenceInputDataSource, BlobReferenceInputDataSource.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param type: Required. Indicates the type of input data source containing reference data.
+ Required on PUT (CreateOrReplace) requests. Constant filled by server.
+ :type type: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'type': {'Microsoft.Sql/Server/Database': 'AzureSqlReferenceInputDataSource', 'Microsoft.Storage/Blob': 'BlobReferenceInputDataSource'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ReferenceInputDataSource, self).__init__(**kwargs)
+ self.type = None # type: Optional[str]
+
+
+class AzureSqlReferenceInputDataSource(ReferenceInputDataSource):
+ """Describes an Azure SQL database reference input data source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param type: Required. Indicates the type of input data source containing reference data.
+ Required on PUT (CreateOrReplace) requests. Constant filled by server.
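+ For this data source the server fills in the constant ``Microsoft.Sql/Server/Database``.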
+ :type type: str
+ :param properties: The properties that are associated with this data source.
+ :type properties:
+ ~stream_analytics_management_client.models.AzureSqlReferenceInputDataSourceProperties
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': 'AzureSqlReferenceInputDataSourceProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ properties: Optional["AzureSqlReferenceInputDataSourceProperties"] = None,
+ **kwargs
+ ):
+ super(AzureSqlReferenceInputDataSource, self).__init__(**kwargs)
+ self.type = 'Microsoft.Sql/Server/Database' # type: str
+ self.properties = properties
+
+
+class AzureSqlReferenceInputDataSourceProperties(msrest.serialization.Model):
+ """AzureSqlReferenceInputDataSourceProperties.
+
+ :param server: This element is associated with the datasource element. This is the name of the
+ server that contains the database that will be written to.
+ :type server: str
+ :param database: This element is associated with the datasource element. This is the name of
+ the database that output will be written to.
+ :type database: str
+ :param user: This element is associated with the datasource element. This is the user name
+ that will be used to connect to the SQL Database instance.
+ :type user: str
+ :param password: This element is associated with the datasource element. This is the password
+ that will be used to connect to the SQL Database instance.
+ :type password: str
+ :param table: This element is associated with the datasource element. The name of the table in
+ the Azure SQL database.
+ :type table: str
+ :param refresh_type: This element is associated with the datasource element. This element is
+ of enum type. It indicates which data refresh option to use: Static,
+ RefreshPeriodicallyWithFull, or RefreshPeriodicallyWithDelta.
+ :type refresh_type: str
+ :param refresh_rate: This element is associated with the datasource element. This indicates
+ how frequently the data will be fetched from the database. It is of DateTime format.
+ :type refresh_rate: str
+ :param full_snapshot_query: This element is associated with the datasource element. This query
+ is used to fetch data from the SQL database.
+ :type full_snapshot_query: str
+ :param delta_snapshot_query: This element is associated with the datasource element. This
+ query is used to fetch incremental changes from the SQL database. To use this option, we
+ recommend using temporal tables in Azure SQL Database.
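+ With a temporal table, a hypothetical delta query can filter on
+ ``FOR SYSTEM_TIME BETWEEN @deltaStartTime AND @deltaEndTime`` so that only the rows that
+ changed since the last refresh are fetched.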
+ :type delta_snapshot_query: str
+ """
+
+ _attribute_map = {
+ 'server': {'key': 'server', 'type': 'str'},
+ 'database': {'key': 'database', 'type': 'str'},
+ 'user': {'key': 'user', 'type': 'str'},
+ 'password': {'key': 'password', 'type': 'str'},
+ 'table': {'key': 'table', 'type': 'str'},
+ 'refresh_type': {'key': 'refreshType', 'type': 'str'},
+ 'refresh_rate': {'key': 'refreshRate', 'type': 'str'},
+ 'full_snapshot_query': {'key': 'fullSnapshotQuery', 'type': 'str'},
+ 'delta_snapshot_query': {'key': 'deltaSnapshotQuery', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ server: Optional[str] = None,
+ database: Optional[str] = None,
+ user: Optional[str] = None,
+ password: Optional[str] = None,
+ table: Optional[str] = None,
+ refresh_type: Optional[str] = None,
+ refresh_rate: Optional[str] = None,
+ full_snapshot_query: Optional[str] = None,
+ delta_snapshot_query: Optional[str] = None,
+ **kwargs
+ ):
+ super(AzureSqlReferenceInputDataSourceProperties, self).__init__(**kwargs)
+ self.server = server
+ self.database = database
+ self.user = user
+ self.password = password
+ self.table = table
+ self.refresh_type = refresh_type
+ self.refresh_rate = refresh_rate
+ self.full_snapshot_query = full_snapshot_query
+ self.delta_snapshot_query = delta_snapshot_query
+
+
+class AzureSynapseDataSourceProperties(msrest.serialization.Model):
+ """The properties that are associated with an Azure Synapse data source.
+
+ :param server: The name of the SQL server containing the Azure SQL database. Required on PUT
+ (CreateOrReplace) requests.
+ :type server: str
+ :param database: The name of the Azure SQL database. Required on PUT (CreateOrReplace)
+ requests.
+ :type database: str
+ :param table: The name of the table in the Azure SQL database. Required on PUT
+ (CreateOrReplace) requests.
+ :type table: str
+ :param user: The user name that will be used to connect to the Azure SQL database. Required on
+ PUT (CreateOrReplace) requests.
+ :type user: str
+ :param password: The password that will be used to connect to the Azure SQL database. Required
+ on PUT (CreateOrReplace) requests.
+ :type password: str
+ """
+
+ _attribute_map = {
+ 'server': {'key': 'server', 'type': 'str'},
+ 'database': {'key': 'database', 'type': 'str'},
+ 'table': {'key': 'table', 'type': 'str'},
+ 'user': {'key': 'user', 'type': 'str'},
+ 'password': {'key': 'password', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ server: Optional[str] = None,
+ database: Optional[str] = None,
+ table: Optional[str] = None,
+ user: Optional[str] = None,
+ password: Optional[str] = None,
+ **kwargs
+ ):
+ super(AzureSynapseDataSourceProperties, self).__init__(**kwargs)
+ self.server = server
+ self.database = database
+ self.table = table
+ self.user = user
+ self.password = password
+
+
+class AzureSynapseOutputDataSource(OutputDataSource):
+ """Describes an Azure Synapse output data source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param type: Required. Indicates the type of data source output will be written to. Required
+ on PUT (CreateOrReplace) requests. Constant filled by server.
+ :type type: str
+ :param server: The name of the SQL server containing the Azure SQL database. Required on PUT
+ (CreateOrReplace) requests.
+ :type server: str
+ :param database: The name of the Azure SQL database. Required on PUT (CreateOrReplace)
+ requests.
+ :type database: str
+ :param table: The name of the table in the Azure SQL database. Required on PUT
+ (CreateOrReplace) requests.
+ :type table: str + :param user: The user name that will be used to connect to the Azure SQL database. Required on + PUT (CreateOrReplace) requests. + :type user: str + :param password: The password that will be used to connect to the Azure SQL database. Required + on PUT (CreateOrReplace) requests. + :type password: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'properties.server', 'type': 'str'}, + 'database': {'key': 'properties.database', 'type': 'str'}, + 'table': {'key': 'properties.table', 'type': 'str'}, + 'user': {'key': 'properties.user', 'type': 'str'}, + 'password': {'key': 'properties.password', 'type': 'str'}, + } + + def __init__( + self, + *, + server: Optional[str] = None, + database: Optional[str] = None, + table: Optional[str] = None, + user: Optional[str] = None, + password: Optional[str] = None, + **kwargs + ): + super(AzureSynapseOutputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.Sql/Server/DataWarehouse' # type: str + self.server = server + self.database = database + self.table = table + self.user = user + self.password = password + + +class AzureSynapseOutputDataSourceProperties(AzureSynapseDataSourceProperties): + """The properties that are associated with an Azure Synapse output. + + :param server: The name of the SQL server containing the Azure SQL database. Required on PUT + (CreateOrReplace) requests. + :type server: str + :param database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) + requests. + :type database: str + :param table: The name of the table in the Azure SQL database. Required on PUT + (CreateOrReplace) requests. + :type table: str + :param user: The user name that will be used to connect to the Azure SQL database. Required on + PUT (CreateOrReplace) requests. + :type user: str + :param password: The password that will be used to connect to the Azure SQL database. Required + on PUT (CreateOrReplace) requests. + :type password: str + """ + + _attribute_map = { + 'server': {'key': 'server', 'type': 'str'}, + 'database': {'key': 'database', 'type': 'str'}, + 'table': {'key': 'table', 'type': 'str'}, + 'user': {'key': 'user', 'type': 'str'}, + 'password': {'key': 'password', 'type': 'str'}, + } + + def __init__( + self, + *, + server: Optional[str] = None, + database: Optional[str] = None, + table: Optional[str] = None, + user: Optional[str] = None, + password: Optional[str] = None, + **kwargs + ): + super(AzureSynapseOutputDataSourceProperties, self).__init__(server=server, database=database, table=table, user=user, password=password, **kwargs) + + +class AzureTableOutputDataSource(OutputDataSource): + """Describes an Azure Table output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of data source output will be written to. Required on + PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param account_name: The name of the Azure Storage account. Required on PUT (CreateOrReplace) + requests. + :type account_name: str + :param account_key: The account key for the Azure Storage account. Required on PUT + (CreateOrReplace) requests. + :type account_key: str + :param table: The name of the Azure Table. Required on PUT (CreateOrReplace) requests. 
+ :type table: str + :param partition_key: This element indicates the name of a column from the SELECT statement in + the query that will be used as the partition key for the Azure Table. Required on PUT + (CreateOrReplace) requests. + :type partition_key: str + :param row_key: This element indicates the name of a column from the SELECT statement in the + query that will be used as the row key for the Azure Table. Required on PUT (CreateOrReplace) + requests. + :type row_key: str + :param columns_to_remove: If specified, each item in the array is the name of a column to + remove (if present) from output event entities. + :type columns_to_remove: list[str] + :param batch_size: The number of rows to write to the Azure Table at a time. + :type batch_size: int + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'account_name': {'key': 'properties.accountName', 'type': 'str'}, + 'account_key': {'key': 'properties.accountKey', 'type': 'str'}, + 'table': {'key': 'properties.table', 'type': 'str'}, + 'partition_key': {'key': 'properties.partitionKey', 'type': 'str'}, + 'row_key': {'key': 'properties.rowKey', 'type': 'str'}, + 'columns_to_remove': {'key': 'properties.columnsToRemove', 'type': '[str]'}, + 'batch_size': {'key': 'properties.batchSize', 'type': 'int'}, + } + + def __init__( + self, + *, + account_name: Optional[str] = None, + account_key: Optional[str] = None, + table: Optional[str] = None, + partition_key: Optional[str] = None, + row_key: Optional[str] = None, + columns_to_remove: Optional[List[str]] = None, + batch_size: Optional[int] = None, + **kwargs + ): + super(AzureTableOutputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.Storage/Table' # type: str + self.account_name = account_name + self.account_key = account_key + self.table = table + self.partition_key = partition_key + self.row_key = row_key + self.columns_to_remove = columns_to_remove + self.batch_size = batch_size + + +class BlobDataSourceProperties(msrest.serialization.Model): + """The properties that are associated with a blob data source. + + :param storage_accounts: A list of one or more Azure Storage accounts. Required on PUT + (CreateOrReplace) requests. + :type storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] + :param container: The name of a container within the associated Storage account. This container + contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) + requests. + :type container: str + :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern + against which blob names will be matched to determine whether or not they should be included as + input or output to the job. See https://docs.microsoft.com/en- + us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- + us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and + example. + :type path_pattern: str + :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this + property is used as the date format instead. + :type date_format: str + :param time_format: The time format. Wherever {time} appears in pathPattern, the value of this + property is used as the time format instead. 
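+ For example (hypothetical values), with a ``pathPattern`` of ``logs/{date}/{time}``, a
+ ``dateFormat`` of ``yyyy/MM/dd`` and a ``timeFormat`` of ``HH``, a blob under
+ ``logs/2020/09/18/14/`` would match.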
+ :type time_format: str + """ + + _attribute_map = { + 'storage_accounts': {'key': 'storageAccounts', 'type': '[StorageAccount]'}, + 'container': {'key': 'container', 'type': 'str'}, + 'path_pattern': {'key': 'pathPattern', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'str'}, + 'time_format': {'key': 'timeFormat', 'type': 'str'}, + } + + def __init__( + self, + *, + storage_accounts: Optional[List["StorageAccount"]] = None, + container: Optional[str] = None, + path_pattern: Optional[str] = None, + date_format: Optional[str] = None, + time_format: Optional[str] = None, + **kwargs + ): + super(BlobDataSourceProperties, self).__init__(**kwargs) + self.storage_accounts = storage_accounts + self.container = container + self.path_pattern = path_pattern + self.date_format = date_format + self.time_format = time_format + + +class BlobOutputDataSource(OutputDataSource): + """Describes a blob output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of data source output will be written to. Required on + PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param storage_accounts: A list of one or more Azure Storage accounts. Required on PUT + (CreateOrReplace) requests. + :type storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] + :param container: The name of a container within the associated Storage account. This container + contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) + requests. + :type container: str + :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern + against which blob names will be matched to determine whether or not they should be included as + input or output to the job. See https://docs.microsoft.com/en- + us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- + us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and + example. + :type path_pattern: str + :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this + property is used as the date format instead. + :type date_format: str + :param time_format: The time format. Wherever {time} appears in pathPattern, the value of this + property is used as the time format instead. + :type time_format: str + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". 
+ :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'storage_accounts': {'key': 'properties.storageAccounts', 'type': '[StorageAccount]'}, + 'container': {'key': 'properties.container', 'type': 'str'}, + 'path_pattern': {'key': 'properties.pathPattern', 'type': 'str'}, + 'date_format': {'key': 'properties.dateFormat', 'type': 'str'}, + 'time_format': {'key': 'properties.timeFormat', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + } + + def __init__( + self, + *, + storage_accounts: Optional[List["StorageAccount"]] = None, + container: Optional[str] = None, + path_pattern: Optional[str] = None, + date_format: Optional[str] = None, + time_format: Optional[str] = None, + authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, + **kwargs + ): + super(BlobOutputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.Storage/Blob' # type: str + self.storage_accounts = storage_accounts + self.container = container + self.path_pattern = path_pattern + self.date_format = date_format + self.time_format = time_format + self.authentication_mode = authentication_mode + + +class BlobOutputDataSourceProperties(BlobDataSourceProperties): + """The properties that are associated with a blob output. + + :param storage_accounts: A list of one or more Azure Storage accounts. Required on PUT + (CreateOrReplace) requests. + :type storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] + :param container: The name of a container within the associated Storage account. This container + contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) + requests. + :type container: str + :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern + against which blob names will be matched to determine whether or not they should be included as + input or output to the job. See https://docs.microsoft.com/en- + us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- + us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and + example. + :type path_pattern: str + :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this + property is used as the date format instead. + :type date_format: str + :param time_format: The time format. Wherever {time} appears in pathPattern, the value of this + property is used as the time format instead. + :type time_format: str + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". 
+ :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + """ + + _attribute_map = { + 'storage_accounts': {'key': 'storageAccounts', 'type': '[StorageAccount]'}, + 'container': {'key': 'container', 'type': 'str'}, + 'path_pattern': {'key': 'pathPattern', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'str'}, + 'time_format': {'key': 'timeFormat', 'type': 'str'}, + 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, + } + + def __init__( + self, + *, + storage_accounts: Optional[List["StorageAccount"]] = None, + container: Optional[str] = None, + path_pattern: Optional[str] = None, + date_format: Optional[str] = None, + time_format: Optional[str] = None, + authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, + **kwargs + ): + super(BlobOutputDataSourceProperties, self).__init__(storage_accounts=storage_accounts, container=container, path_pattern=path_pattern, date_format=date_format, time_format=time_format, **kwargs) + self.authentication_mode = authentication_mode + + +class BlobReferenceInputDataSource(ReferenceInputDataSource): + """Describes a blob input data source that contains reference data. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of input data source containing reference data. + Required on PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param storage_accounts: A list of one or more Azure Storage accounts. Required on PUT + (CreateOrReplace) requests. + :type storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] + :param container: The name of a container within the associated Storage account. This container + contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) + requests. + :type container: str + :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern + against which blob names will be matched to determine whether or not they should be included as + input or output to the job. See https://docs.microsoft.com/en- + us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- + us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and + example. + :type path_pattern: str + :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this + property is used as the date format instead. + :type date_format: str + :param time_format: The time format. Wherever {time} appears in pathPattern, the value of this + property is used as the time format instead. 
+ :type time_format: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'storage_accounts': {'key': 'properties.storageAccounts', 'type': '[StorageAccount]'}, + 'container': {'key': 'properties.container', 'type': 'str'}, + 'path_pattern': {'key': 'properties.pathPattern', 'type': 'str'}, + 'date_format': {'key': 'properties.dateFormat', 'type': 'str'}, + 'time_format': {'key': 'properties.timeFormat', 'type': 'str'}, + } + + def __init__( + self, + *, + storage_accounts: Optional[List["StorageAccount"]] = None, + container: Optional[str] = None, + path_pattern: Optional[str] = None, + date_format: Optional[str] = None, + time_format: Optional[str] = None, + **kwargs + ): + super(BlobReferenceInputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.Storage/Blob' # type: str + self.storage_accounts = storage_accounts + self.container = container + self.path_pattern = path_pattern + self.date_format = date_format + self.time_format = time_format + + +class BlobReferenceInputDataSourceProperties(BlobDataSourceProperties): + """The properties that are associated with a blob input containing reference data. + + :param storage_accounts: A list of one or more Azure Storage accounts. Required on PUT + (CreateOrReplace) requests. + :type storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] + :param container: The name of a container within the associated Storage account. This container + contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) + requests. + :type container: str + :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern + against which blob names will be matched to determine whether or not they should be included as + input or output to the job. See https://docs.microsoft.com/en- + us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- + us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and + example. + :type path_pattern: str + :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this + property is used as the date format instead. + :type date_format: str + :param time_format: The time format. Wherever {time} appears in pathPattern, the value of this + property is used as the time format instead. + :type time_format: str + """ + + _attribute_map = { + 'storage_accounts': {'key': 'storageAccounts', 'type': '[StorageAccount]'}, + 'container': {'key': 'container', 'type': 'str'}, + 'path_pattern': {'key': 'pathPattern', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'str'}, + 'time_format': {'key': 'timeFormat', 'type': 'str'}, + } + + def __init__( + self, + *, + storage_accounts: Optional[List["StorageAccount"]] = None, + container: Optional[str] = None, + path_pattern: Optional[str] = None, + date_format: Optional[str] = None, + time_format: Optional[str] = None, + **kwargs + ): + super(BlobReferenceInputDataSourceProperties, self).__init__(storage_accounts=storage_accounts, container=container, path_pattern=path_pattern, date_format=date_format, time_format=time_format, **kwargs) + + +class StreamInputDataSource(msrest.serialization.Model): + """Describes an input data source that contains stream data. + + You probably want to use the sub-classes and not this class directly. 
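+ The ``type`` property is a discriminator: during deserialization the value in the
+ ``_subtype_map`` below selects which sub-class is instantiated.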
Known + sub-classes are: IoTHubStreamInputDataSource, EventHubV2StreamInputDataSource, EventHubStreamInputDataSource, BlobStreamInputDataSource. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of input data source containing stream data. Required + on PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Microsoft.Devices/IotHubs': 'IoTHubStreamInputDataSource', 'Microsoft.EventHub/EventHub': 'EventHubV2StreamInputDataSource', 'Microsoft.ServiceBus/EventHub': 'EventHubStreamInputDataSource', 'Microsoft.Storage/Blob': 'BlobStreamInputDataSource'} + } + + def __init__( + self, + **kwargs + ): + super(StreamInputDataSource, self).__init__(**kwargs) + self.type = None # type: Optional[str] + + +class BlobStreamInputDataSource(StreamInputDataSource): + """Describes a blob input data source that contains stream data. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of input data source containing stream data. Required + on PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param storage_accounts: A list of one or more Azure Storage accounts. Required on PUT + (CreateOrReplace) requests. + :type storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] + :param container: The name of a container within the associated Storage account. This container + contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) + requests. + :type container: str + :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern + against which blob names will be matched to determine whether or not they should be included as + input or output to the job. See https://docs.microsoft.com/en- + us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- + us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and + example. + :type path_pattern: str + :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this + property is used as the date format instead. + :type date_format: str + :param time_format: The time format. Wherever {time} appears in pathPattern, the value of this + property is used as the time format instead. + :type time_format: str + :param source_partition_count: The partition count of the blob input data source. Range 1 - + 256. 
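+ For example, a hypothetical value of ``16`` allows the input to be consumed with up to 16
+ parallel readers.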
+ :type source_partition_count: int + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'storage_accounts': {'key': 'properties.storageAccounts', 'type': '[StorageAccount]'}, + 'container': {'key': 'properties.container', 'type': 'str'}, + 'path_pattern': {'key': 'properties.pathPattern', 'type': 'str'}, + 'date_format': {'key': 'properties.dateFormat', 'type': 'str'}, + 'time_format': {'key': 'properties.timeFormat', 'type': 'str'}, + 'source_partition_count': {'key': 'properties.sourcePartitionCount', 'type': 'int'}, + } + + def __init__( + self, + *, + storage_accounts: Optional[List["StorageAccount"]] = None, + container: Optional[str] = None, + path_pattern: Optional[str] = None, + date_format: Optional[str] = None, + time_format: Optional[str] = None, + source_partition_count: Optional[int] = None, + **kwargs + ): + super(BlobStreamInputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.Storage/Blob' # type: str + self.storage_accounts = storage_accounts + self.container = container + self.path_pattern = path_pattern + self.date_format = date_format + self.time_format = time_format + self.source_partition_count = source_partition_count + + +class BlobStreamInputDataSourceProperties(BlobDataSourceProperties): + """The properties that are associated with a blob input containing stream data. + + :param storage_accounts: A list of one or more Azure Storage accounts. Required on PUT + (CreateOrReplace) requests. + :type storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] + :param container: The name of a container within the associated Storage account. This container + contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) + requests. + :type container: str + :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern + against which blob names will be matched to determine whether or not they should be included as + input or output to the job. See https://docs.microsoft.com/en- + us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- + us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and + example. + :type path_pattern: str + :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this + property is used as the date format instead. + :type date_format: str + :param time_format: The time format. Wherever {time} appears in pathPattern, the value of this + property is used as the time format instead. + :type time_format: str + :param source_partition_count: The partition count of the blob input data source. Range 1 - + 256. 
+ :type source_partition_count: int
+ """
+
+ _attribute_map = {
+ 'storage_accounts': {'key': 'storageAccounts', 'type': '[StorageAccount]'},
+ 'container': {'key': 'container', 'type': 'str'},
+ 'path_pattern': {'key': 'pathPattern', 'type': 'str'},
+ 'date_format': {'key': 'dateFormat', 'type': 'str'},
+ 'time_format': {'key': 'timeFormat', 'type': 'str'},
+ 'source_partition_count': {'key': 'sourcePartitionCount', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ storage_accounts: Optional[List["StorageAccount"]] = None,
+ container: Optional[str] = None,
+ path_pattern: Optional[str] = None,
+ date_format: Optional[str] = None,
+ time_format: Optional[str] = None,
+ source_partition_count: Optional[int] = None,
+ **kwargs
+ ):
+ super(BlobStreamInputDataSourceProperties, self).__init__(storage_accounts=storage_accounts, container=container, path_pattern=path_pattern, date_format=date_format, time_format=time_format, **kwargs)
+ self.source_partition_count = source_partition_count
+
+
+class Resource(msrest.serialization.Model):
+ """Resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Fully qualified resource Id for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or
+ Microsoft.Storage/storageAccounts.
+ :vartype type: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Resource, self).__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.type = None
+
+
+class TrackedResource(Resource):
+ """The resource model definition for an ARM tracked top-level resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Fully qualified resource Id for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or
+ Microsoft.Storage/storageAccounts.
+ :vartype type: str
+ :param tags: A set of tags. Resource tags.
+ :type tags: dict[str, str]
+ :param location: The geo-location where the resource lives.
+ :type location: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'location': {'key': 'location', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ tags: Optional[Dict[str, str]] = None,
+ location: Optional[str] = None,
+ **kwargs
+ ):
+ super(TrackedResource, self).__init__(**kwargs)
+ self.tags = tags
+ self.location = location
+
+
+class Cluster(TrackedResource):
+ """A Stream Analytics Cluster object.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
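+
+ A minimal construction sketch (the location and capacity are illustrative values; a capacity
+ must be a multiple of 36 between 36 and 216):
+
+ .. code-block:: python
+
+    cluster = Cluster(
+        location="West US",
+        sku=ClusterSku(name="Default", capacity=36),
+    )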
+ + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param location: The geo-location where the resource lives. + :type location: str + :param sku: The SKU of the cluster. This determines the size/capacity of the cluster. Required + on PUT (CreateOrUpdate) requests. + :type sku: ~stream_analytics_management_client.models.ClusterSku + :ivar etag: The current entity tag for the cluster. This is an opaque string. You can use it to + detect whether the resource has changed between requests. You can also use it in the If-Match + or If-None-Match headers for write operations for optimistic concurrency. + :vartype etag: str + :param properties: The properties associated with a Stream Analytics cluster. + :type properties: ~stream_analytics_management_client.models.ClusterProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'sku': {'key': 'sku', 'type': 'ClusterSku'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'ClusterProperties'}, + } + + def __init__( + self, + *, + tags: Optional[Dict[str, str]] = None, + location: Optional[str] = None, + sku: Optional["ClusterSku"] = None, + properties: Optional["ClusterProperties"] = None, + **kwargs + ): + super(Cluster, self).__init__(tags=tags, location=location, **kwargs) + self.sku = sku + self.etag = None + self.properties = properties + + +class ClusterInfo(msrest.serialization.Model): + """The properties associated with a Stream Analytics cluster. + + :param id: The resource id of cluster. + :type id: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + } + + def __init__( + self, + *, + id: Optional[str] = None, + **kwargs + ): + super(ClusterInfo, self).__init__(**kwargs) + self.id = id + + +class ClusterJob(msrest.serialization.Model): + """A streaming job. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource ID of the streaming job. + :vartype id: str + :ivar streaming_units: The number of streaming units that are used by the streaming job. + :vartype streaming_units: int + :ivar job_state: The current execution state of the streaming job. Possible values include: + "Created", "Starting", "Running", "Stopping", "Stopped", "Deleting", "Failed", "Degraded", + "Restarting", "Scaling". 
+ :vartype job_state: str or ~stream_analytics_management_client.models.JobState + """ + + _validation = { + 'id': {'readonly': True}, + 'streaming_units': {'readonly': True}, + 'job_state': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'streaming_units': {'key': 'streamingUnits', 'type': 'int'}, + 'job_state': {'key': 'jobState', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ClusterJob, self).__init__(**kwargs) + self.id = None + self.streaming_units = None + self.job_state = None + + +class ClusterJobListResult(msrest.serialization.Model): + """A list of streaming jobs. Populated by a List operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: A list of streaming jobs. + :vartype value: list[~stream_analytics_management_client.models.ClusterJob] + :ivar next_link: The URL to fetch the next set of streaming jobs. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[ClusterJob]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ClusterJobListResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class ClusterListResult(msrest.serialization.Model): + """A list of clusters populated by a 'list' operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: A list of clusters. + :vartype value: list[~stream_analytics_management_client.models.Cluster] + :ivar next_link: The URL to fetch the next set of clusters. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Cluster]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ClusterListResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class ClusterProperties(msrest.serialization.Model): + """The properties associated with a Stream Analytics cluster. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar created_date: The date this cluster was created. + :vartype created_date: ~datetime.datetime + :ivar cluster_id: Unique identifier for the cluster. + :vartype cluster_id: str + :ivar provisioning_state: The status of the cluster provisioning. The three terminal states + are: Succeeded, Failed and Canceled. Possible values include: "Succeeded", "Failed", + "Canceled", "InProgress". + :vartype provisioning_state: str or + ~stream_analytics_management_client.models.ClusterProvisioningState + :ivar capacity_allocated: Represents the number of streaming units currently being used on the + cluster. + :vartype capacity_allocated: int + :ivar capacity_assigned: Represents the sum of the SUs of all streaming jobs associated with + the cluster. If all of the jobs were running, this would be the capacity allocated. 
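+ For example (hypothetical numbers), two jobs assigned 36 streaming units each yield a
+ ``capacityAssigned`` of 72, even while one of the jobs is stopped.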
+ :vartype capacity_assigned: int
+ """
+
+ _validation = {
+ 'created_date': {'readonly': True},
+ 'cluster_id': {'readonly': True},
+ 'provisioning_state': {'readonly': True},
+ 'capacity_allocated': {'readonly': True},
+ 'capacity_assigned': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
+ 'cluster_id': {'key': 'clusterId', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'capacity_allocated': {'key': 'capacityAllocated', 'type': 'int'},
+ 'capacity_assigned': {'key': 'capacityAssigned', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ClusterProperties, self).__init__(**kwargs)
+ self.created_date = None
+ self.cluster_id = None
+ self.provisioning_state = None
+ self.capacity_allocated = None
+ self.capacity_assigned = None
+
+
+class ClusterSku(msrest.serialization.Model):
+ """The SKU of the cluster. This determines the size/capacity of the cluster. Required on PUT (CreateOrUpdate) requests.
+
+ :param name: Specifies the SKU name of the cluster. Required on PUT (CreateOrUpdate) requests.
+ Possible values include: "Default".
+ :type name: str or ~stream_analytics_management_client.models.ClusterSkuName
+ :param capacity: Denotes the number of streaming units the cluster can support. Valid values
+ for this property are multiples of 36 with a minimum value of 36 and maximum value of 216.
+ Required on PUT (CreateOrUpdate) requests.
+ :type capacity: int
+ """
+
+ _validation = {
+ 'capacity': {'maximum': 216, 'minimum': 36},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'capacity': {'key': 'capacity', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[Union[str, "ClusterSkuName"]] = None,
+ capacity: Optional[int] = None,
+ **kwargs
+ ):
+ super(ClusterSku, self).__init__(**kwargs)
+ self.name = name
+ self.capacity = capacity
+
+
+class Compression(msrest.serialization.Model):
+ """Describes how input data is compressed.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param type: Required.
+ :type type: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ type: str,
+ **kwargs
+ ):
+ super(Compression, self).__init__(**kwargs)
+ self.type = type
+
+
+class CSharpFunctionBinding(FunctionBinding):
+ """The binding to a CSharp function.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param type: Required. Indicates the function binding type. Constant filled by server.
+ :type type: str
+ :param script: The CSharp code containing a single function definition.
+ :type script: str
+ :param dll_path: The path to the DLL that contains the function implementation.
+ :type dll_path: str
+ :param class_property: The name of the class that contains the function.
+ :type class_property: str
+ :param method: The name of the method that implements the function.
+ :type method: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'script': {'key': 'properties.script', 'type': 'str'},
+ 'dll_path': {'key': 'properties.dllPath', 'type': 'str'},
+ 'class_property': {'key': 'properties.class', 'type': 'str'},
+ 'method': {'key': 'properties.method', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ script: Optional[str] = None,
+ dll_path: Optional[str] = None,
+ class_property: Optional[str] = None,
+ method: Optional[str] = None,
+ **kwargs
+ ):
+ super(CSharpFunctionBinding, self).__init__(**kwargs)
+ self.type = 'Microsoft.StreamAnalytics/CLRUdf' # type: str
+ self.script = script
+ self.dll_path = dll_path
+ self.class_property = class_property
+ self.method = method
+
+
+class CSharpFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters):
+ """The parameters needed to retrieve the default function definition for a CSharp function.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param binding_type: Required. Indicates the function binding type. Constant filled by server.
+ :type binding_type: str
+ :param script: The CSharp code containing a single function definition.
+ :type script: str
+ :ivar udf_type: The function type. Default value: "Scalar".
+ :vartype udf_type: str
+ """
+
+ _validation = {
+ 'binding_type': {'required': True},
+ 'udf_type': {'constant': True},
+ }
+
+ _attribute_map = {
+ 'binding_type': {'key': 'bindingType', 'type': 'str'},
+ 'script': {'key': 'bindingRetrievalProperties.script', 'type': 'str'},
+ 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'str'},
+ }
+
+ udf_type = "Scalar"
+
+ def __init__(
+ self,
+ *,
+ script: Optional[str] = None,
+ **kwargs
+ ):
+ super(CSharpFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs)
+ self.binding_type = 'Microsoft.StreamAnalytics/CLRUdf' # type: str
+ self.script = script
+
+
+class CsvSerialization(Serialization):
+ """Describes how data from an input is serialized or how data is serialized when written to an output in CSV format.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param type: Required. Indicates the type of serialization that the input or output uses.
+ Required on PUT (CreateOrReplace) requests. Constant filled by server. Possible values
+ include: "Csv", "Avro", "Json", "CustomClr", "Parquet".
+ :type type: str or ~stream_analytics_management_client.models.EventSerializationType
+ :param field_delimiter: Specifies the delimiter that will be used to separate comma-separated
+ value (CSV) records. See
+ https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or
+ https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a list
+ of supported values. Required on PUT (CreateOrReplace) requests.
+ :type field_delimiter: str
+ :param encoding: Specifies the encoding of the incoming data in the case of input and the
+ encoding of outgoing data in the case of output. Required on PUT (CreateOrReplace) requests.
+ Possible values include: "UTF8".
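+ For example (illustrative values), ``CsvSerialization(field_delimiter=",", encoding="UTF8")``
+ configures standard comma-separated output.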
+ :type encoding: str or ~stream_analytics_management_client.models.Encoding
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'field_delimiter': {'key': 'properties.fieldDelimiter', 'type': 'str'},
+ 'encoding': {'key': 'properties.encoding', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ field_delimiter: Optional[str] = None,
+ encoding: Optional[Union[str, "Encoding"]] = None,
+ **kwargs
+ ):
+ super(CsvSerialization, self).__init__(**kwargs)
+ self.type = 'Csv' # type: str
+ self.field_delimiter = field_delimiter
+ self.encoding = encoding
+
+
+class CustomClrSerialization(Serialization):
+ """Describes how data from an input is serialized or how data is serialized when written to an output in custom format.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param type: Required. Indicates the type of serialization that the input or output uses.
+ Required on PUT (CreateOrReplace) requests. Constant filled by server. Possible values
+ include: "Csv", "Avro", "Json", "CustomClr", "Parquet".
+ :type type: str or ~stream_analytics_management_client.models.EventSerializationType
+ :param serialization_dll_path: The serialization library path.
+ :type serialization_dll_path: str
+ :param serialization_class_name: The serialization class name.
+ :type serialization_class_name: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'serialization_dll_path': {'key': 'properties.serializationDllPath', 'type': 'str'},
+ 'serialization_class_name': {'key': 'properties.serializationClassName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ serialization_dll_path: Optional[str] = None,
+ serialization_class_name: Optional[str] = None,
+ **kwargs
+ ):
+ super(CustomClrSerialization, self).__init__(**kwargs)
+ self.type = 'CustomClr' # type: str
+ self.serialization_dll_path = serialization_dll_path
+ self.serialization_class_name = serialization_class_name
+
+
+class DiagnosticCondition(msrest.serialization.Model):
+ """A condition applicable to the resource, or to the job overall, that warrants customer attention.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar since: The UTC timestamp of when the condition started. Customers should be able to find
+ a corresponding event in the ops log around this time.
+ :vartype since: str
+ :ivar code: The opaque diagnostic code.
+ :vartype code: str
+ :ivar message: The human-readable message describing the condition in detail. Localized in the
+ Accept-Language of the client request.
+ :vartype message: str
+ """
+
+ _validation = {
+ 'since': {'readonly': True},
+ 'code': {'readonly': True},
+ 'message': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'since': {'key': 'since', 'type': 'str'},
+ 'code': {'key': 'code', 'type': 'str'},
+ 'message': {'key': 'message', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DiagnosticCondition, self).__init__(**kwargs)
+ self.since = None
+ self.code = None
+ self.message = None
+
+
+class Diagnostics(msrest.serialization.Model):
+ """Describes conditions applicable to the Input, Output, or the job overall, that warrant customer attention.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+ + :ivar conditions: A collection of zero or more conditions applicable to the resource, or to the + job overall, that warrant customer attention. + :vartype conditions: list[~stream_analytics_management_client.models.DiagnosticCondition] + """ + + _validation = { + 'conditions': {'readonly': True}, + } + + _attribute_map = { + 'conditions': {'key': 'conditions', 'type': '[DiagnosticCondition]'}, + } + + def __init__( + self, + **kwargs + ): + super(Diagnostics, self).__init__(**kwargs) + self.conditions = None + + +class DocumentDbOutputDataSource(OutputDataSource): + """Describes a DocumentDB output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of data source output will be written to. Required on + PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param account_id: The DocumentDB account name or ID. Required on PUT (CreateOrReplace) + requests. + :type account_id: str + :param account_key: The account key for the DocumentDB account. Required on PUT + (CreateOrReplace) requests. + :type account_key: str + :param database: The name of the DocumentDB database. Required on PUT (CreateOrReplace) + requests. + :type database: str + :param collection_name_pattern: The collection name pattern for the collections to be used. The + collection name format can be constructed using the optional {partition} token, where + partitions start from 0. See the DocumentDB section of https://docs.microsoft.com/en- + us/rest/api/streamanalytics/stream-analytics-output for more information. Required on PUT + (CreateOrReplace) requests. + :type collection_name_pattern: str + :param partition_key: The name of the field in output events used to specify the key for + partitioning output across collections. If 'collectionNamePattern' contains the {partition} + token, this property is required to be specified. + :type partition_key: str + :param document_id: The name of the field in output events used to specify the primary key + which insert or update operations are based on. + :type document_id: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'account_id': {'key': 'properties.accountId', 'type': 'str'}, + 'account_key': {'key': 'properties.accountKey', 'type': 'str'}, + 'database': {'key': 'properties.database', 'type': 'str'}, + 'collection_name_pattern': {'key': 'properties.collectionNamePattern', 'type': 'str'}, + 'partition_key': {'key': 'properties.partitionKey', 'type': 'str'}, + 'document_id': {'key': 'properties.documentId', 'type': 'str'}, + } + + def __init__( + self, + *, + account_id: Optional[str] = None, + account_key: Optional[str] = None, + database: Optional[str] = None, + collection_name_pattern: Optional[str] = None, + partition_key: Optional[str] = None, + document_id: Optional[str] = None, + **kwargs + ): + super(DocumentDbOutputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.Storage/DocumentDB' # type: str + self.account_id = account_id + self.account_key = account_key + self.database = database + self.collection_name_pattern = collection_name_pattern + self.partition_key = partition_key + self.document_id = document_id + + +class Error(msrest.serialization.Model): + """Common error representation. + + :param error: Error definition properties. 
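+
+ # Usage sketch (placeholder values): a DocumentDB output fanned out across
+ # collections via the optional {partition} token; partition_key then becomes
+ # required, per the docstring above.
+ docdb_output = DocumentDbOutputDataSource(
+     account_id="mydocdbaccount",
+     account_key="<account-key>",
+     database="mydb",
+     collection_name_pattern="collection{partition}",
+     partition_key="deviceId",
+     document_id="eventId",
+ )
+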
+ :type error: ~stream_analytics_management_client.models.ErrorAutoGenerated + """ + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorAutoGenerated'}, + } + + def __init__( + self, + *, + error: Optional["ErrorAutoGenerated"] = None, + **kwargs + ): + super(Error, self).__init__(**kwargs) + self.error = error + + +class ErrorAutoGenerated(msrest.serialization.Model): + """Error definition properties. + + :param code: Error code. + :type code: str + :param message: Error message. + :type message: str + :param target: Error target. + :type target: str + :param details: Error details. + :type details: list[~stream_analytics_management_client.models.ErrorDetails] + """ + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[ErrorDetails]'}, + } + + def __init__( + self, + *, + code: Optional[str] = None, + message: Optional[str] = None, + target: Optional[str] = None, + details: Optional[List["ErrorDetails"]] = None, + **kwargs + ): + super(ErrorAutoGenerated, self).__init__(**kwargs) + self.code = code + self.message = message + self.target = target + self.details = details + + +class ErrorDetails(msrest.serialization.Model): + """Common error details representation. + + :param code: Error code. + :type code: str + :param target: Error target. + :type target: str + :param message: Error message. + :type message: str + """ + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__( + self, + *, + code: Optional[str] = None, + target: Optional[str] = None, + message: Optional[str] = None, + **kwargs + ): + super(ErrorDetails, self).__init__(**kwargs) + self.code = code + self.target = target + self.message = message + + +class ErrorResponse(msrest.serialization.Model): + """Describes the error that occurred. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar code: Error code associated with the error that occurred. + :vartype code: str + :ivar message: Describes the error in detail. + :vartype message: str + """ + + _validation = { + 'code': {'readonly': True}, + 'message': {'readonly': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorResponse, self).__init__(**kwargs) + self.code = None + self.message = None + + +class ServiceBusDataSourceProperties(msrest.serialization.Model): + """The common properties that are associated with Service Bus data sources (Queues, Topics, Event Hubs, etc.). + + :param service_bus_namespace: The namespace that is associated with the desired Event Hub, + Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus + Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the specified shared access + policy. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". 
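+
+ # Usage sketch: the nested error shape above as a caller might inspect it;
+ # the sample payload is invented for illustration.
+ err = Error(error=ErrorAutoGenerated(
+     code="BadRequest",
+     message="Input validation failed.",
+     details=[ErrorDetails(code="InvalidValue", target="fieldDelimiter",
+                           message="The delimiter is not a supported value.")],
+ ))
+ for detail in err.error.details or []:
+     print(detail.target, detail.code, detail.message)
+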
+ :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + """ + + _attribute_map = { + 'service_bus_namespace': {'key': 'serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, + } + + def __init__( + self, + *, + service_bus_namespace: Optional[str] = None, + shared_access_policy_name: Optional[str] = None, + shared_access_policy_key: Optional[str] = None, + authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, + **kwargs + ): + super(ServiceBusDataSourceProperties, self).__init__(**kwargs) + self.service_bus_namespace = service_bus_namespace + self.shared_access_policy_name = shared_access_policy_name + self.shared_access_policy_key = shared_access_policy_key + self.authentication_mode = authentication_mode + + +class EventHubDataSourceProperties(ServiceBusDataSourceProperties): + """The common properties that are associated with Event Hub data sources. + + :param service_bus_namespace: The namespace that is associated with the desired Event Hub, + Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus + Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the specified shared access + policy. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + :param event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. + :type event_hub_name: str + """ + + _attribute_map = { + 'service_bus_namespace': {'key': 'serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, + 'event_hub_name': {'key': 'eventHubName', 'type': 'str'}, + } + + def __init__( + self, + *, + service_bus_namespace: Optional[str] = None, + shared_access_policy_name: Optional[str] = None, + shared_access_policy_key: Optional[str] = None, + authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, + event_hub_name: Optional[str] = None, + **kwargs + ): + super(EventHubDataSourceProperties, self).__init__(service_bus_namespace=service_bus_namespace, shared_access_policy_name=shared_access_policy_name, shared_access_policy_key=shared_access_policy_key, authentication_mode=authentication_mode, **kwargs) + self.event_hub_name = event_hub_name + + +class EventHubOutputDataSource(OutputDataSource): + """Describes an Event Hub output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of data source output will be written to. Required on + PUT (CreateOrReplace) requests.Constant filled by server. 
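+
+ # Usage sketch (placeholder values): the shared Service Bus settings plus the
+ # Event-Hub-specific name collected by the derived property bag above.
+ eh_props = EventHubDataSourceProperties(
+     service_bus_namespace="my-sb-namespace",
+     shared_access_policy_name="RootManageSharedAccessKey",
+     shared_access_policy_key="<policy-key>",
+     authentication_mode="ConnectionString",
+     event_hub_name="telemetry",
+ )
+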
+ :type type: str + :param service_bus_namespace: The namespace that is associated with the desired Event Hub, + Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus + Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the specified shared access + policy. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + :param event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. + :type event_hub_name: str + :param partition_key: The key/column that is used to determine to which partition to send event + data. + :type partition_key: str + :param property_columns: + :type property_columns: list[str] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + 'event_hub_name': {'key': 'properties.eventHubName', 'type': 'str'}, + 'partition_key': {'key': 'properties.partitionKey', 'type': 'str'}, + 'property_columns': {'key': 'properties.propertyColumns', 'type': '[str]'}, + } + + def __init__( + self, + *, + service_bus_namespace: Optional[str] = None, + shared_access_policy_name: Optional[str] = None, + shared_access_policy_key: Optional[str] = None, + authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, + event_hub_name: Optional[str] = None, + partition_key: Optional[str] = None, + property_columns: Optional[List[str]] = None, + **kwargs + ): + super(EventHubOutputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.ServiceBus/EventHub' # type: str + self.service_bus_namespace = service_bus_namespace + self.shared_access_policy_name = shared_access_policy_name + self.shared_access_policy_key = shared_access_policy_key + self.authentication_mode = authentication_mode + self.event_hub_name = event_hub_name + self.partition_key = partition_key + self.property_columns = property_columns + + +class EventHubOutputDataSourceProperties(EventHubDataSourceProperties): + """The properties that are associated with an Event Hub output. + + :param service_bus_namespace: The namespace that is associated with the desired Event Hub, + Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus + Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the specified shared access + policy. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. 
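+
+ # Usage sketch (placeholder values): an Event Hub output routed by a key
+ # column. property_columns carries no description above; presumably it names
+ # the columns attached to outgoing events as user properties.
+ eh_output = EventHubOutputDataSource(
+     service_bus_namespace="my-sb-namespace",
+     shared_access_policy_name="send-policy",
+     shared_access_policy_key="<policy-key>",
+     event_hub_name="enriched-events",
+     partition_key="deviceId",
+     property_columns=["eventType", "region"],
+ )
+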
Possible values include: "Msi", "UserToken", + "ConnectionString". + :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + :param event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. + :type event_hub_name: str + :param partition_key: The key/column that is used to determine to which partition to send event + data. + :type partition_key: str + :param property_columns: + :type property_columns: list[str] + """ + + _attribute_map = { + 'service_bus_namespace': {'key': 'serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, + 'event_hub_name': {'key': 'eventHubName', 'type': 'str'}, + 'partition_key': {'key': 'partitionKey', 'type': 'str'}, + 'property_columns': {'key': 'propertyColumns', 'type': '[str]'}, + } + + def __init__( + self, + *, + service_bus_namespace: Optional[str] = None, + shared_access_policy_name: Optional[str] = None, + shared_access_policy_key: Optional[str] = None, + authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, + event_hub_name: Optional[str] = None, + partition_key: Optional[str] = None, + property_columns: Optional[List[str]] = None, + **kwargs + ): + super(EventHubOutputDataSourceProperties, self).__init__(service_bus_namespace=service_bus_namespace, shared_access_policy_name=shared_access_policy_name, shared_access_policy_key=shared_access_policy_key, authentication_mode=authentication_mode, event_hub_name=event_hub_name, **kwargs) + self.partition_key = partition_key + self.property_columns = property_columns + + +class EventHubStreamInputDataSource(StreamInputDataSource): + """Describes an Event Hub input data source that contains stream data. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of input data source containing stream data. Required + on PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param service_bus_namespace: The namespace that is associated with the desired Event Hub, + Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus + Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the specified shared access + policy. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + :param event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. + :type event_hub_name: str + :param consumer_group_name: The name of an Event Hub Consumer Group that should be used to read + events from the Event Hub. Specifying distinct consumer group names for multiple inputs allows + each of those inputs to receive the same events from the Event Hub. If not specified, the input + uses the Event Hub’s default consumer group. 
+ :type consumer_group_name: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'},
+ 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'},
+ 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'},
+ 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'},
+ 'event_hub_name': {'key': 'properties.eventHubName', 'type': 'str'},
+ 'consumer_group_name': {'key': 'properties.consumerGroupName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ service_bus_namespace: Optional[str] = None,
+ shared_access_policy_name: Optional[str] = None,
+ shared_access_policy_key: Optional[str] = None,
+ authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None,
+ event_hub_name: Optional[str] = None,
+ consumer_group_name: Optional[str] = None,
+ **kwargs
+ ):
+ super(EventHubStreamInputDataSource, self).__init__(**kwargs)
+ self.type = 'Microsoft.ServiceBus/EventHub' # type: str
+ self.service_bus_namespace = service_bus_namespace
+ self.shared_access_policy_name = shared_access_policy_name
+ self.shared_access_policy_key = shared_access_policy_key
+ self.authentication_mode = authentication_mode
+ self.event_hub_name = event_hub_name
+ self.consumer_group_name = consumer_group_name
+
+
+ class EventHubStreamInputDataSourceProperties(EventHubDataSourceProperties):
+ """The properties that are associated with an Event Hub input containing stream data.
+
+ :param service_bus_namespace: The namespace that is associated with the desired Event Hub,
+ Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests.
+ :type service_bus_namespace: str
+ :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus
+ Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests.
+ :type shared_access_policy_name: str
+ :param shared_access_policy_key: The shared access policy key for the specified shared access
+ policy. Required on PUT (CreateOrReplace) requests.
+ :type shared_access_policy_key: str
+ :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken",
+ "ConnectionString".
+ :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode
+ :param event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests.
+ :type event_hub_name: str
+ :param consumer_group_name: The name of an Event Hub Consumer Group that should be used to read
+ events from the Event Hub. Specifying distinct consumer group names for multiple inputs allows
+ each of those inputs to receive the same events from the Event Hub. If not specified, the input
+ uses the Event Hub’s default consumer group.
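+
+ # Usage sketch (placeholder values): a stream input reading through a dedicated
+ # consumer group; omitting consumer_group_name falls back to the default group.
+ eh_input = EventHubStreamInputDataSource(
+     service_bus_namespace="my-sb-namespace",
+     shared_access_policy_name="listen-policy",
+     shared_access_policy_key="<policy-key>",
+     event_hub_name="telemetry",
+     consumer_group_name="asa-job-input",
+ )
+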
+ :type consumer_group_name: str + """ + + _attribute_map = { + 'service_bus_namespace': {'key': 'serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, + 'event_hub_name': {'key': 'eventHubName', 'type': 'str'}, + 'consumer_group_name': {'key': 'consumerGroupName', 'type': 'str'}, + } + + def __init__( + self, + *, + service_bus_namespace: Optional[str] = None, + shared_access_policy_name: Optional[str] = None, + shared_access_policy_key: Optional[str] = None, + authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, + event_hub_name: Optional[str] = None, + consumer_group_name: Optional[str] = None, + **kwargs + ): + super(EventHubStreamInputDataSourceProperties, self).__init__(service_bus_namespace=service_bus_namespace, shared_access_policy_name=shared_access_policy_name, shared_access_policy_key=shared_access_policy_key, authentication_mode=authentication_mode, event_hub_name=event_hub_name, **kwargs) + self.consumer_group_name = consumer_group_name + + +class EventHubV2OutputDataSource(OutputDataSource): + """Describes an Event Hub output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of data source output will be written to. Required on + PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param service_bus_namespace: The namespace that is associated with the desired Event Hub, + Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus + Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the specified shared access + policy. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + :param event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. + :type event_hub_name: str + :param partition_key: The key/column that is used to determine to which partition to send event + data. 
+ :type partition_key: str + :param property_columns: + :type property_columns: list[str] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + 'event_hub_name': {'key': 'properties.eventHubName', 'type': 'str'}, + 'partition_key': {'key': 'properties.partitionKey', 'type': 'str'}, + 'property_columns': {'key': 'properties.propertyColumns', 'type': '[str]'}, + } + + def __init__( + self, + *, + service_bus_namespace: Optional[str] = None, + shared_access_policy_name: Optional[str] = None, + shared_access_policy_key: Optional[str] = None, + authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, + event_hub_name: Optional[str] = None, + partition_key: Optional[str] = None, + property_columns: Optional[List[str]] = None, + **kwargs + ): + super(EventHubV2OutputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.EventHub/EventHub' # type: str + self.service_bus_namespace = service_bus_namespace + self.shared_access_policy_name = shared_access_policy_name + self.shared_access_policy_key = shared_access_policy_key + self.authentication_mode = authentication_mode + self.event_hub_name = event_hub_name + self.partition_key = partition_key + self.property_columns = property_columns + + +class EventHubV2StreamInputDataSource(StreamInputDataSource): + """Describes an Event Hub input data source that contains stream data. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of input data source containing stream data. Required + on PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param service_bus_namespace: The namespace that is associated with the desired Event Hub, + Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus + Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the specified shared access + policy. Required on PUT (CreateOrReplace) requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + :param event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. + :type event_hub_name: str + :param consumer_group_name: The name of an Event Hub Consumer Group that should be used to read + events from the Event Hub. Specifying distinct consumer group names for multiple inputs allows + each of those inputs to receive the same events from the Event Hub. If not specified, the input + uses the Event Hub’s default consumer group. 
+ :type consumer_group_name: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + 'event_hub_name': {'key': 'properties.eventHubName', 'type': 'str'}, + 'consumer_group_name': {'key': 'properties.consumerGroupName', 'type': 'str'}, + } + + def __init__( + self, + *, + service_bus_namespace: Optional[str] = None, + shared_access_policy_name: Optional[str] = None, + shared_access_policy_key: Optional[str] = None, + authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, + event_hub_name: Optional[str] = None, + consumer_group_name: Optional[str] = None, + **kwargs + ): + super(EventHubV2StreamInputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.EventHub/EventHub' # type: str + self.service_bus_namespace = service_bus_namespace + self.shared_access_policy_name = shared_access_policy_name + self.shared_access_policy_key = shared_access_policy_key + self.authentication_mode = authentication_mode + self.event_hub_name = event_hub_name + self.consumer_group_name = consumer_group_name + + +class External(msrest.serialization.Model): + """The storage account where the custom code artifacts are located. + + :param storage_account: The properties that are associated with an Azure Storage account. + :type storage_account: ~stream_analytics_management_client.models.StorageAccount + :param container: + :type container: str + :param path: + :type path: str + """ + + _attribute_map = { + 'storage_account': {'key': 'storageAccount', 'type': 'StorageAccount'}, + 'container': {'key': 'container', 'type': 'str'}, + 'path': {'key': 'path', 'type': 'str'}, + } + + def __init__( + self, + *, + storage_account: Optional["StorageAccount"] = None, + container: Optional[str] = None, + path: Optional[str] = None, + **kwargs + ): + super(External, self).__init__(**kwargs) + self.storage_account = storage_account + self.container = container + self.path = path + + +class SubResource(msrest.serialization.Model): + """The base sub-resource model definition. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource Id. + :vartype id: str + :param name: Resource name. + :type name: str + :ivar type: Resource type. + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + **kwargs + ): + super(SubResource, self).__init__(**kwargs) + self.id = None + self.name = name + self.type = None + + +class Function(SubResource): + """A function object, containing all information associated with the named function. All functions are contained under a streaming job. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource Id. + :vartype id: str + :param name: Resource name. + :type name: str + :ivar type: Resource type. + :vartype type: str + :param properties: The properties that are associated with a function. 
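+
+ # Usage sketch (placeholder values): pointing a job at custom code artifacts in
+ # blob storage. StorageAccount is defined later in this module.
+ external = External(
+     storage_account=StorageAccount(account_name="mystorage", account_key="<key>"),
+     container="user-code",
+     path="artifacts/udfs.zip",
+ )
+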
+ :type properties: ~stream_analytics_management_client.models.FunctionProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'FunctionProperties'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + properties: Optional["FunctionProperties"] = None, + **kwargs + ): + super(Function, self).__init__(name=name, **kwargs) + self.properties = properties + + +class FunctionInput(msrest.serialization.Model): + """Describes one input parameter of a function. + + :param data_type: The (Azure Stream Analytics supported) data type of the function input + parameter. A list of valid Azure Stream Analytics data types are described at + https://msdn.microsoft.com/en-us/library/azure/dn835065.aspx. + :type data_type: str + :param is_configuration_parameter: A flag indicating if the parameter is a configuration + parameter. True if this input parameter is expected to be a constant. Default is false. + :type is_configuration_parameter: bool + """ + + _attribute_map = { + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'is_configuration_parameter': {'key': 'isConfigurationParameter', 'type': 'bool'}, + } + + def __init__( + self, + *, + data_type: Optional[str] = None, + is_configuration_parameter: Optional[bool] = None, + **kwargs + ): + super(FunctionInput, self).__init__(**kwargs) + self.data_type = data_type + self.is_configuration_parameter = is_configuration_parameter + + +class FunctionListResult(msrest.serialization.Model): + """Object containing a list of functions under a streaming job. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: A list of functions under a streaming job. Populated by a 'List' operation. + :vartype value: list[~stream_analytics_management_client.models.Function] + :ivar next_link: The link (url) to the next page of results. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Function]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(FunctionListResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class FunctionOutput(msrest.serialization.Model): + """Describes the output of a function. + + :param data_type: The (Azure Stream Analytics supported) data type of the function output. A + list of valid Azure Stream Analytics data types are described at https://msdn.microsoft.com/en- + us/library/azure/dn835065.aspx. + :type data_type: str + """ + + _attribute_map = { + 'data_type': {'key': 'dataType', 'type': 'str'}, + } + + def __init__( + self, + *, + data_type: Optional[str] = None, + **kwargs + ): + super(FunctionOutput, self).__init__(**kwargs) + self.data_type = data_type + + +class Identity(msrest.serialization.Model): + """Describes how identity is verified. 
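+
+ # Usage sketch: declaring a scalar function signature with the shapes above —
+ # two bigint inputs (the second a constant configuration parameter) and a
+ # bigint output. "bigint" is one of the ASA data types the docstrings reference.
+ fn_inputs = [
+     FunctionInput(data_type="bigint"),
+     FunctionInput(data_type="bigint", is_configuration_parameter=True),
+ ]
+ fn_output = FunctionOutput(data_type="bigint")
+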
+ + :param tenant_id: + :type tenant_id: str + :param principal_id: + :type principal_id: str + :param type: + :type type: str + """ + + _attribute_map = { + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + *, + tenant_id: Optional[str] = None, + principal_id: Optional[str] = None, + type: Optional[str] = None, + **kwargs + ): + super(Identity, self).__init__(**kwargs) + self.tenant_id = tenant_id + self.principal_id = principal_id + self.type = type + + +class Input(SubResource): + """An input object, containing all information associated with the named input. All inputs are contained under a streaming job. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource Id. + :vartype id: str + :param name: Resource name. + :type name: str + :ivar type: Resource type. + :vartype type: str + :param properties: The properties that are associated with an input. Required on PUT + (CreateOrReplace) requests. + :type properties: ~stream_analytics_management_client.models.InputProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'InputProperties'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + properties: Optional["InputProperties"] = None, + **kwargs + ): + super(Input, self).__init__(name=name, **kwargs) + self.properties = properties + + +class InputListResult(msrest.serialization.Model): + """Object containing a list of inputs under a streaming job. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: A list of inputs under a streaming job. Populated by a 'List' operation. + :vartype value: list[~stream_analytics_management_client.models.Input] + :ivar next_link: The link (url) to the next page of results. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Input]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(InputListResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class InputProperties(msrest.serialization.Model): + """The properties that are associated with an input. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ReferenceInputProperties, StreamInputProperties. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates whether the input is a source of reference data or stream + data. Required on PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param serialization: Describes how data from an input is serialized or how data is serialized + when written to an output. Required on PUT (CreateOrReplace) requests. + :type serialization: ~stream_analytics_management_client.models.Serialization + :ivar diagnostics: Describes conditions applicable to the Input, Output, or the job overall, + that warrant customer attention. 
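+
+ # Usage sketch: Identity's fields are plain strings here; for a managed
+ # identity the service conventionally reports type "SystemAssigned" and fills
+ # in the tenant and principal IDs (an assumption, not stated above).
+ identity = Identity(type="SystemAssigned")
+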
+ :vartype diagnostics: ~stream_analytics_management_client.models.Diagnostics
+ :ivar etag: The current entity tag for the input. This is an opaque string. You can use it to
+ detect whether the resource has changed between requests. You can also use it in the If-Match
+ or If-None-Match headers for write operations for optimistic concurrency.
+ :vartype etag: str
+ :param compression: Describes how input data is compressed.
+ :type compression: ~stream_analytics_management_client.models.Compression
+ :param partition_key: Describes a key in the input data that is used for
+ partitioning the input data.
+ :type partition_key: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'diagnostics': {'readonly': True},
+ 'etag': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'serialization': {'key': 'serialization', 'type': 'Serialization'},
+ 'diagnostics': {'key': 'diagnostics', 'type': 'Diagnostics'},
+ 'etag': {'key': 'etag', 'type': 'str'},
+ 'compression': {'key': 'compression', 'type': 'Compression'},
+ 'partition_key': {'key': 'partitionKey', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'type': {'Reference': 'ReferenceInputProperties', 'Stream': 'StreamInputProperties'}
+ }
+
+ def __init__(
+ self,
+ *,
+ serialization: Optional["Serialization"] = None,
+ compression: Optional["Compression"] = None,
+ partition_key: Optional[str] = None,
+ **kwargs
+ ):
+ super(InputProperties, self).__init__(**kwargs)
+ self.type = None # type: Optional[str]
+ self.serialization = serialization
+ self.diagnostics = None
+ self.etag = None
+ self.compression = compression
+ self.partition_key = partition_key
+
+
+ class IoTHubStreamInputDataSource(StreamInputDataSource):
+ """Describes an IoT Hub input data source that contains stream data.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param type: Required. Indicates the type of input data source containing stream data. Required
+ on PUT (CreateOrReplace) requests.Constant filled by server.
+ :type type: str
+ :param iot_hub_namespace: The name or the URI of the IoT Hub. Required on PUT (CreateOrReplace)
+ requests.
+ :type iot_hub_namespace: str
+ :param shared_access_policy_name: The shared access policy name for the IoT Hub. This policy
+ must contain at least the Service connect permission. Required on PUT (CreateOrReplace)
+ requests.
+ :type shared_access_policy_name: str
+ :param shared_access_policy_key: The shared access policy key for the specified shared access
+ policy. Required on PUT (CreateOrReplace) requests.
+ :type shared_access_policy_key: str
+ :param consumer_group_name: The name of an IoT Hub Consumer Group that should be used to read
+ events from the IoT Hub. If not specified, the input uses the IoT Hub’s default consumer group.
+ :type consumer_group_name: str
+ :param endpoint: The IoT Hub endpoint to connect to (i.e. messages/events,
+ messages/operationsMonitoringEvents, etc.).
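+
+ # Usage sketch: InputProperties is polymorphic on 'type'; callers build one of
+ # the subtypes named in _subtype_map. Assuming StreamInputProperties (defined
+ # elsewhere in this module) accepts a datasource keyword:
+ stream_props = StreamInputProperties(
+     datasource=eh_input,              # e.g. the Event Hub input sketched above
+     serialization=csv_serialization,  # from the CsvSerialization sketch above
+ )
+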
+ :type endpoint: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'iot_hub_namespace': {'key': 'properties.iotHubNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'}, + 'consumer_group_name': {'key': 'properties.consumerGroupName', 'type': 'str'}, + 'endpoint': {'key': 'properties.endpoint', 'type': 'str'}, + } + + def __init__( + self, + *, + iot_hub_namespace: Optional[str] = None, + shared_access_policy_name: Optional[str] = None, + shared_access_policy_key: Optional[str] = None, + consumer_group_name: Optional[str] = None, + endpoint: Optional[str] = None, + **kwargs + ): + super(IoTHubStreamInputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.Devices/IotHubs' # type: str + self.iot_hub_namespace = iot_hub_namespace + self.shared_access_policy_name = shared_access_policy_name + self.shared_access_policy_key = shared_access_policy_key + self.consumer_group_name = consumer_group_name + self.endpoint = endpoint + + +class JavaScriptFunctionBinding(FunctionBinding): + """The binding to a JavaScript function. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the function binding type.Constant filled by server. + :type type: str + :param script: The JavaScript code containing a single function definition. For example: + 'function (x, y) { return x + y; }'. + :type script: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'script': {'key': 'properties.script', 'type': 'str'}, + } + + def __init__( + self, + *, + script: Optional[str] = None, + **kwargs + ): + super(JavaScriptFunctionBinding, self).__init__(**kwargs) + self.type = 'Microsoft.StreamAnalytics/JavascriptUdf' # type: str + self.script = script + + +class JavaScriptFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): + """The parameters needed to retrieve the default function definition for a JavaScript function. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param binding_type: Required. Indicates the function binding type.Constant filled by server. + :type binding_type: str + :param script: The JavaScript code containing a single function definition. For example: + 'function (x, y) { return x + y; }'. + :type script: str + :ivar udf_type: The function type. Default value: "Scalar". + :vartype udf_type: str + """ + + _validation = { + 'binding_type': {'required': True}, + 'udf_type': {'constant': True}, + } + + _attribute_map = { + 'binding_type': {'key': 'bindingType', 'type': 'str'}, + 'script': {'key': 'bindingRetrievalProperties.script', 'type': 'str'}, + 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'str'}, + } + + udf_type = "Scalar" + + def __init__( + self, + *, + script: Optional[str] = None, + **kwargs + ): + super(JavaScriptFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) + self.binding_type = 'Microsoft.StreamAnalytics/JavascriptUdf' # type: str + self.script = script + + +class StorageAccount(msrest.serialization.Model): + """The properties that are associated with an Azure Storage account. 
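+
+ # Usage sketch: a JavaScript UDF binding; together with the FunctionInput and
+ # FunctionOutput shapes above it describes a complete scalar function.
+ # Attaching them to a Function goes through a FunctionProperties subtype
+ # defined elsewhere in this module.
+ js_binding = JavaScriptFunctionBinding(script="function (x, y) { return x + y; }")
+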
+ + :param account_name: The name of the Azure Storage account. Required on PUT (CreateOrReplace) + requests. + :type account_name: str + :param account_key: The account key for the Azure Storage account. Required on PUT + (CreateOrReplace) requests. + :type account_key: str + """ + + _attribute_map = { + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'account_key': {'key': 'accountKey', 'type': 'str'}, + } + + def __init__( + self, + *, + account_name: Optional[str] = None, + account_key: Optional[str] = None, + **kwargs + ): + super(StorageAccount, self).__init__(**kwargs) + self.account_name = account_name + self.account_key = account_key + + +class JobStorageAccount(StorageAccount): + """The properties that are associated with an Azure Storage account with MSI. + + :param account_name: The name of the Azure Storage account. Required on PUT (CreateOrReplace) + requests. + :type account_name: str + :param account_key: The account key for the Azure Storage account. Required on PUT + (CreateOrReplace) requests. + :type account_key: str + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + """ + + _attribute_map = { + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'account_key': {'key': 'accountKey', 'type': 'str'}, + 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, + } + + def __init__( + self, + *, + account_name: Optional[str] = None, + account_key: Optional[str] = None, + authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, + **kwargs + ): + super(JobStorageAccount, self).__init__(account_name=account_name, account_key=account_key, **kwargs) + self.authentication_mode = authentication_mode + + +class JsonSerialization(Serialization): + """Describes how data from an input is serialized or how data is serialized when written to an output in JSON format. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of serialization that the input or output uses. + Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: + "Csv", "Avro", "Json", "CustomClr", "Parquet". + :type type: str or ~stream_analytics_management_client.models.EventSerializationType + :param encoding: Specifies the encoding of the incoming data in the case of input and the + encoding of outgoing data in the case of output. Required on PUT (CreateOrReplace) requests. + Possible values include: "UTF8". + :type encoding: str or ~stream_analytics_management_client.models.Encoding + :param format: This property only applies to JSON serialization of outputs only. It is not + applicable to inputs. This property specifies the format of the JSON the output will be written + in. The currently supported values are 'lineSeparated' indicating the output will be formatted + by having each JSON object separated by a new line and 'array' indicating the output will be + formatted as an array of JSON objects. Default value is 'lineSeparated' if left null. Possible + values include: "LineSeparated", "Array". 
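+
+ # Usage sketch: the job-level storage account variant adds authentication_mode;
+ # with "Msi" the account key can presumably be omitted (not stated above).
+ job_storage = JobStorageAccount(account_name="mystorage", authentication_mode="Msi")
+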
+ :type format: str or ~stream_analytics_management_client.models.JsonOutputSerializationFormat + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'encoding': {'key': 'properties.encoding', 'type': 'str'}, + 'format': {'key': 'properties.format', 'type': 'str'}, + } + + def __init__( + self, + *, + encoding: Optional[Union[str, "Encoding"]] = None, + format: Optional[Union[str, "JsonOutputSerializationFormat"]] = None, + **kwargs + ): + super(JsonSerialization, self).__init__(**kwargs) + self.type = 'Json' # type: str + self.encoding = encoding + self.format = format + + +class Operation(msrest.serialization.Model): + """A Stream Analytics REST API operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: The name of the operation being performed on this particular object. + :vartype name: str + :ivar display: Contains the localized display information for this particular operation / + action. + :vartype display: ~stream_analytics_management_client.models.OperationDisplay + """ + + _validation = { + 'name': {'readonly': True}, + 'display': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display': {'key': 'display', 'type': 'OperationDisplay'}, + } + + def __init__( + self, + **kwargs + ): + super(Operation, self).__init__(**kwargs) + self.name = None + self.display = None + + +class OperationDisplay(msrest.serialization.Model): + """Contains the localized display information for this particular operation / action. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar provider: The localized friendly form of the resource provider name. + :vartype provider: str + :ivar resource: The localized friendly form of the resource type related to this + action/operation. + :vartype resource: str + :ivar operation: The localized friendly name for the operation. + :vartype operation: str + :ivar description: The localized friendly description for the operation. + :vartype description: str + """ + + _validation = { + 'provider': {'readonly': True}, + 'resource': {'readonly': True}, + 'operation': {'readonly': True}, + 'description': {'readonly': True}, + } + + _attribute_map = { + 'provider': {'key': 'provider', 'type': 'str'}, + 'resource': {'key': 'resource', 'type': 'str'}, + 'operation': {'key': 'operation', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(OperationDisplay, self).__init__(**kwargs) + self.provider = None + self.resource = None + self.operation = None + self.description = None + + +class OperationListResult(msrest.serialization.Model): + """Result of the request to list Stream Analytics operations. It contains a list of operations and a URL link to get the next set of results. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: List of Stream Analytics operations supported by the Microsoft.StreamAnalytics + resource provider. + :vartype value: list[~stream_analytics_management_client.models.Operation] + :ivar next_link: URL to get the next set of operation list results if there are any. 
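+
+ # Usage sketch: line-separated JSON output framing; "Array" would emit a single
+ # JSON array instead, per the format docstring above.
+ json_serialization = JsonSerialization(encoding="UTF8", format="LineSeparated")
+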
+ :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Operation]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(OperationListResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class Output(SubResource): + """An output object, containing all information associated with the named output. All outputs are contained under a streaming job. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource Id. + :vartype id: str + :param name: Resource name. + :type name: str + :ivar type: Resource type. + :vartype type: str + :param datasource: Describes the data source that output will be written to. Required on PUT + (CreateOrReplace) requests. + :type datasource: ~stream_analytics_management_client.models.OutputDataSource + :param time_window: + :type time_window: str + :param size_window: + :type size_window: float + :param serialization: Describes how data from an input is serialized or how data is serialized + when written to an output. Required on PUT (CreateOrReplace) requests. + :type serialization: ~stream_analytics_management_client.models.Serialization + :ivar diagnostics: Describes conditions applicable to the Input, Output, or the job overall, + that warrant customer attention. + :vartype diagnostics: ~stream_analytics_management_client.models.Diagnostics + :ivar etag: The current entity tag for the output. This is an opaque string. You can use it to + detect whether the resource has changed between requests. You can also use it in the If-Match + or If-None-Match headers for write operations for optimistic concurrency. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'type': {'readonly': True}, + 'diagnostics': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'datasource': {'key': 'properties.datasource', 'type': 'OutputDataSource'}, + 'time_window': {'key': 'properties.timeWindow', 'type': 'str'}, + 'size_window': {'key': 'properties.sizeWindow', 'type': 'float'}, + 'serialization': {'key': 'properties.serialization', 'type': 'Serialization'}, + 'diagnostics': {'key': 'properties.diagnostics', 'type': 'Diagnostics'}, + 'etag': {'key': 'properties.etag', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + datasource: Optional["OutputDataSource"] = None, + time_window: Optional[str] = None, + size_window: Optional[float] = None, + serialization: Optional["Serialization"] = None, + **kwargs + ): + super(Output, self).__init__(name=name, **kwargs) + self.datasource = datasource + self.time_window = time_window + self.size_window = size_window + self.serialization = serialization + self.diagnostics = None + self.etag = None + + +class OutputListResult(msrest.serialization.Model): + """Object containing a list of outputs under a streaming job. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: A list of outputs under a streaming job. Populated by a 'List' operation. + :vartype value: list[~stream_analytics_management_client.models.Output] + :ivar next_link: The link (url) to the next page of results. 
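+
+ # Usage sketch (placeholder names): assembling an output from the pieces above;
+ # time_window and size_window are left unset, deferring to service defaults.
+ output = Output(
+     name="enriched-out",
+     datasource=eh_output,  # the Event Hub output sketched earlier
+     serialization=json_serialization,
+ )
+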
+ :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Output]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(OutputListResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class ParquetSerialization(Serialization): + """Describes how data from an input is serialized or how data is serialized when written to an output in Parquet format. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of serialization that the input or output uses. + Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: + "Csv", "Avro", "Json", "CustomClr", "Parquet". + :type type: str or ~stream_analytics_management_client.models.EventSerializationType + :param properties: The properties that are associated with the Parquet serialization type. + Required on PUT (CreateOrReplace) requests. + :type properties: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'object'}, + } + + def __init__( + self, + *, + properties: Optional[object] = None, + **kwargs + ): + super(ParquetSerialization, self).__init__(**kwargs) + self.type = 'Parquet' # type: str + self.properties = properties + + +class PowerBIOutputDataSource(OutputDataSource): + """Describes a Power BI output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of data source output will be written to. Required on + PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param refresh_token: A refresh token that can be used to obtain a valid access token that can + then be used to authenticate with the data source. A valid refresh token is currently only + obtainable via the Azure Portal. It is recommended to put a dummy string value here when + creating the data source and then going to the Azure Portal to authenticate the data source + which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) + requests. + :type refresh_token: str + :param token_user_principal_name: The user principal name (UPN) of the user that was used to + obtain the refresh token. Use this property to help remember which user was used to obtain the + refresh token. + :type token_user_principal_name: str + :param token_user_display_name: The user display name of the user that was used to obtain the + refresh token. Use this property to help remember which user was used to obtain the refresh + token. + :type token_user_display_name: str + :param dataset: The name of the Power BI dataset. Required on PUT (CreateOrReplace) requests. + :type dataset: str + :param table: The name of the Power BI table under the specified dataset. Required on PUT + (CreateOrReplace) requests. + :type table: str + :param group_id: The ID of the Power BI group. + :type group_id: str + :param group_name: The name of the Power BI group. Use this property to help remember which + specific Power BI group id was used. + :type group_name: str + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". 
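+
+ # Usage sketch: Parquet serialization carries an opaque properties object; an
+ # empty dict is shown only as a placeholder.
+ parquet_serialization = ParquetSerialization(properties={})
+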
+ :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'refresh_token': {'key': 'properties.refreshToken', 'type': 'str'}, + 'token_user_principal_name': {'key': 'properties.tokenUserPrincipalName', 'type': 'str'}, + 'token_user_display_name': {'key': 'properties.tokenUserDisplayName', 'type': 'str'}, + 'dataset': {'key': 'properties.dataset', 'type': 'str'}, + 'table': {'key': 'properties.table', 'type': 'str'}, + 'group_id': {'key': 'properties.groupId', 'type': 'str'}, + 'group_name': {'key': 'properties.groupName', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + } + + def __init__( + self, + *, + refresh_token: Optional[str] = None, + token_user_principal_name: Optional[str] = None, + token_user_display_name: Optional[str] = None, + dataset: Optional[str] = None, + table: Optional[str] = None, + group_id: Optional[str] = None, + group_name: Optional[str] = None, + authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, + **kwargs + ): + super(PowerBIOutputDataSource, self).__init__(**kwargs) + self.type = 'PowerBI' # type: str + self.refresh_token = refresh_token + self.token_user_principal_name = token_user_principal_name + self.token_user_display_name = token_user_display_name + self.dataset = dataset + self.table = table + self.group_id = group_id + self.group_name = group_name + self.authentication_mode = authentication_mode + + +class PowerBIOutputDataSourceProperties(OAuthBasedDataSourceProperties): + """The properties that are associated with a Power BI output. + + :param refresh_token: A refresh token that can be used to obtain a valid access token that can + then be used to authenticate with the data source. A valid refresh token is currently only + obtainable via the Azure Portal. It is recommended to put a dummy string value here when + creating the data source and then going to the Azure Portal to authenticate the data source + which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) + requests. + :type refresh_token: str + :param token_user_principal_name: The user principal name (UPN) of the user that was used to + obtain the refresh token. Use this property to help remember which user was used to obtain the + refresh token. + :type token_user_principal_name: str + :param token_user_display_name: The user display name of the user that was used to obtain the + refresh token. Use this property to help remember which user was used to obtain the refresh + token. + :type token_user_display_name: str + :param dataset: The name of the Power BI dataset. Required on PUT (CreateOrReplace) requests. + :type dataset: str + :param table: The name of the Power BI table under the specified dataset. Required on PUT + (CreateOrReplace) requests. + :type table: str + :param group_id: The ID of the Power BI group. + :type group_id: str + :param group_name: The name of the Power BI group. Use this property to help remember which + specific Power BI group id was used. + :type group_name: str + :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". 
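+
+ # Usage sketch: per the refresh_token docstring above, a dummy token is
+ # supplied on creation and later replaced by authenticating the data source in
+ # the Azure Portal. All values are placeholders.
+ pbi_output = PowerBIOutputDataSource(
+     refresh_token="dummy",
+     dataset="jobDataset",
+     table="jobTable",
+     group_id="<workspace-guid>",
+     group_name="My Workspace",
+ )
+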
+ :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + """ + + _attribute_map = { + 'refresh_token': {'key': 'refreshToken', 'type': 'str'}, + 'token_user_principal_name': {'key': 'tokenUserPrincipalName', 'type': 'str'}, + 'token_user_display_name': {'key': 'tokenUserDisplayName', 'type': 'str'}, + 'dataset': {'key': 'dataset', 'type': 'str'}, + 'table': {'key': 'table', 'type': 'str'}, + 'group_id': {'key': 'groupId', 'type': 'str'}, + 'group_name': {'key': 'groupName', 'type': 'str'}, + 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, + } + + def __init__( + self, + *, + refresh_token: Optional[str] = None, + token_user_principal_name: Optional[str] = None, + token_user_display_name: Optional[str] = None, + dataset: Optional[str] = None, + table: Optional[str] = None, + group_id: Optional[str] = None, + group_name: Optional[str] = None, + authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, + **kwargs + ): + super(PowerBIOutputDataSourceProperties, self).__init__(refresh_token=refresh_token, token_user_principal_name=token_user_principal_name, token_user_display_name=token_user_display_name, **kwargs) + self.dataset = dataset + self.table = table + self.group_id = group_id + self.group_name = group_name + self.authentication_mode = authentication_mode + + +class PrivateEndpoint(Resource): + """Complete information about the private endpoint. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :param properties: The properties associated with a private endpoint. + :type properties: ~stream_analytics_management_client.models.PrivateEndpointProperties + :ivar etag: Unique opaque string (generally a GUID) that represents the metadata state of the + resource (private endpoint) and changes whenever the resource is updated. Required on PUT + (CreateOrUpdate) requests. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'PrivateEndpointProperties'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + def __init__( + self, + *, + properties: Optional["PrivateEndpointProperties"] = None, + **kwargs + ): + super(PrivateEndpoint, self).__init__(**kwargs) + self.properties = properties + self.etag = None + + +class PrivateEndpointListResult(msrest.serialization.Model): + """A list of private endpoints. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: A list of private endpoints. + :vartype value: list[~stream_analytics_management_client.models.PrivateEndpoint] + :ivar next_link: The URL to fetch the next set of private endpoints. 
+ :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PrivateEndpoint]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateEndpointListResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class PrivateEndpointProperties(msrest.serialization.Model): + """The properties associated with a private endpoint. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar created_date: The date when this private endpoint was created. + :vartype created_date: str + :param manual_private_link_service_connections: A list of connections to the remote resource. + Immutable after it is set. + :type manual_private_link_service_connections: + list[~stream_analytics_management_client.models.PrivateLinkServiceConnection] + """ + + _validation = { + 'created_date': {'readonly': True}, + } + + _attribute_map = { + 'created_date': {'key': 'createdDate', 'type': 'str'}, + 'manual_private_link_service_connections': {'key': 'manualPrivateLinkServiceConnections', 'type': '[PrivateLinkServiceConnection]'}, + } + + def __init__( + self, + *, + manual_private_link_service_connections: Optional[List["PrivateLinkServiceConnection"]] = None, + **kwargs + ): + super(PrivateEndpointProperties, self).__init__(**kwargs) + self.created_date = None + self.manual_private_link_service_connections = manual_private_link_service_connections + + +class PrivateLinkConnectionState(msrest.serialization.Model): + """A collection of read-only information about the state of the connection to the private remote resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar status: Indicates whether the connection has been Approved/Rejected/Removed by the owner + of the remote resource/service. + :vartype status: str + :ivar description: The reason for approval/rejection of the connection. + :vartype description: str + :ivar actions_required: A message indicating if changes on the service provider require any + updates on the consumer. + :vartype actions_required: str + """ + + _validation = { + 'status': {'readonly': True}, + 'description': {'readonly': True}, + 'actions_required': {'readonly': True}, + } + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateLinkConnectionState, self).__init__(**kwargs) + self.status = None + self.description = None + self.actions_required = None + + +class PrivateLinkServiceConnection(msrest.serialization.Model): + """A grouping of information about the connection to the remote resource. + + :param private_link_service_id: The resource id of the private link service. Required on PUT + (CreateOrUpdate) requests. + :type private_link_service_id: str + :param group_ids: The ID(s) of the group(s) obtained from the remote resource that this private + endpoint should connect to. Required on PUT (CreateOrUpdate) requests. + :type group_ids: list[str] + :param request_message: A message passed to the owner of the remote resource with this + connection request. Restricted to 140 chars. 
+    :type request_message: str
+    :param private_link_service_connection_state: A collection of read-only information about the
+     state of the connection to the private remote resource.
+    :type private_link_service_connection_state:
+     ~stream_analytics_management_client.models.PrivateLinkConnectionState
+    """
+
+    _attribute_map = {
+        'private_link_service_id': {'key': 'properties.privateLinkServiceId', 'type': 'str'},
+        'group_ids': {'key': 'properties.groupIds', 'type': '[str]'},
+        'request_message': {'key': 'properties.requestMessage', 'type': 'str'},
+        'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkConnectionState'},
+    }
+
+    def __init__(
+        self,
+        *,
+        private_link_service_id: Optional[str] = None,
+        group_ids: Optional[List[str]] = None,
+        request_message: Optional[str] = None,
+        private_link_service_connection_state: Optional["PrivateLinkConnectionState"] = None,
+        **kwargs
+    ):
+        super(PrivateLinkServiceConnection, self).__init__(**kwargs)
+        self.private_link_service_id = private_link_service_id
+        self.group_ids = group_ids
+        self.request_message = request_message
+        self.private_link_service_connection_state = private_link_service_connection_state
+
+
+class ProxyResource(Resource):
+    """The resource model definition for an ARM proxy resource. It will have everything other than required location and tags.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: Fully qualified resource Id for the resource. Ex -
+     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+    :vartype id: str
+    :ivar name: The name of the resource.
+    :vartype name: str
+    :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or
+     Microsoft.Storage/storageAccounts.
+    :vartype type: str
+    """
+
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ProxyResource, self).__init__(**kwargs)
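+
+# Editor's note -- illustrative sketch only, not part of the generated code. It
+# wires a manual private link service connection into a private endpoint
+# definition using the classes above; the resource id and group id are
+# hypothetical placeholders.
+#
+#     connection = PrivateLinkServiceConnection(
+#         private_link_service_id="/subscriptions/.../providers/Microsoft.EventHub/namespaces/myNamespace",
+#         group_ids=["namespace"],
+#     )
+#     endpoint = PrivateEndpoint(
+#         properties=PrivateEndpointProperties(
+#             manual_private_link_service_connections=[connection],
+#         ),
+#     )
+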
+
+
+class ReferenceInputProperties(InputProperties):
+    """The properties that are associated with an input containing reference data.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param type: Required. Indicates whether the input is a source of reference data or stream
+     data. Required on PUT (CreateOrReplace) requests. Constant filled by server.
+    :type type: str
+    :param serialization: Describes how data from an input is serialized or how data is serialized
+     when written to an output. Required on PUT (CreateOrReplace) requests.
+    :type serialization: ~stream_analytics_management_client.models.Serialization
+    :ivar diagnostics: Describes conditions applicable to the Input, Output, or the job overall,
+     that warrant customer attention.
+    :vartype diagnostics: ~stream_analytics_management_client.models.Diagnostics
+    :ivar etag: The current entity tag for the input. This is an opaque string. You can use it to
+     detect whether the resource has changed between requests. You can also use it in the If-Match
+     or If-None-Match headers for write operations for optimistic concurrency.
+    :vartype etag: str
+    :param compression: Describes how input data is compressed.
+    :type compression: ~stream_analytics_management_client.models.Compression
+    :param partition_key: Describes a key in the input data that is used for partitioning the
+     input data.
+    :type partition_key: str
+    :param datasource: Describes an input data source that contains reference data. Required on
+     PUT (CreateOrReplace) requests.
+    :type datasource: ~stream_analytics_management_client.models.ReferenceInputDataSource
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'diagnostics': {'readonly': True},
+        'etag': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'serialization': {'key': 'serialization', 'type': 'Serialization'},
+        'diagnostics': {'key': 'diagnostics', 'type': 'Diagnostics'},
+        'etag': {'key': 'etag', 'type': 'str'},
+        'compression': {'key': 'compression', 'type': 'Compression'},
+        'partition_key': {'key': 'partitionKey', 'type': 'str'},
+        'datasource': {'key': 'datasource', 'type': 'ReferenceInputDataSource'},
+    }
+
+    def __init__(
+        self,
+        *,
+        serialization: Optional["Serialization"] = None,
+        compression: Optional["Compression"] = None,
+        partition_key: Optional[str] = None,
+        datasource: Optional["ReferenceInputDataSource"] = None,
+        **kwargs
+    ):
+        super(ReferenceInputProperties, self).__init__(serialization=serialization, compression=compression, partition_key=partition_key, **kwargs)
+        self.type = 'Reference'  # type: str
+        self.datasource = datasource
+
+
+class ResourceTestStatus(msrest.serialization.Model):
+    """Describes the status of the test operation along with error information, if applicable.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar status: The status of the test operation.
+    :vartype status: str
+    :ivar error: Describes the error that occurred.
+    :vartype error: ~stream_analytics_management_client.models.ErrorResponse
+    """
+
+    _validation = {
+        'status': {'readonly': True},
+        'error': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'status': {'key': 'status', 'type': 'str'},
+        'error': {'key': 'error', 'type': 'ErrorResponse'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ResourceTestStatus, self).__init__(**kwargs)
+        self.status = None
+        self.error = None
+
+
+class ScalarFunctionProperties(FunctionProperties):
+    """The properties that are associated with a scalar function.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param type: Required. Indicates the type of function. Constant filled by server.
+    :type type: str
+    :ivar etag: The current entity tag for the function. This is an opaque string. You can use it
+     to detect whether the resource has changed between requests. You can also use it in the
+     If-Match or If-None-Match headers for write operations for optimistic concurrency.
+    :vartype etag: str
+    :param inputs:
+    :type inputs: list[~stream_analytics_management_client.models.FunctionInput]
+    :param output: Describes the output of a function.
+    :type output: ~stream_analytics_management_client.models.FunctionOutput
+    :param binding: The physical binding of the function. For example, in the Azure Machine
+     Learning web service’s case, this describes the endpoint.
+    :type binding: ~stream_analytics_management_client.models.FunctionBinding
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'etag': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'etag': {'key': 'etag', 'type': 'str'},
+        'inputs': {'key': 'properties.inputs', 'type': '[FunctionInput]'},
+        'output': {'key': 'properties.output', 'type': 'FunctionOutput'},
+        'binding': {'key': 'properties.binding', 'type': 'FunctionBinding'},
+    }
+
+    def __init__(
+        self,
+        *,
+        inputs: Optional[List["FunctionInput"]] = None,
+        output: Optional["FunctionOutput"] = None,
+        binding: Optional["FunctionBinding"] = None,
+        **kwargs
+    ):
+        super(ScalarFunctionProperties, self).__init__(inputs=inputs, output=output, binding=binding, **kwargs)
+        self.type = 'Scalar'  # type: str
+
+
+class ServiceBusQueueOutputDataSource(OutputDataSource):
+    """Describes a Service Bus Queue output data source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param type: Required. Indicates the type of data source the output will be written to.
+     Required on PUT (CreateOrReplace) requests. Constant filled by server.
+    :type type: str
+    :param service_bus_namespace: The namespace that is associated with the desired Event Hub,
+     Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests.
+    :type service_bus_namespace: str
+    :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus
+     Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests.
+    :type shared_access_policy_name: str
+    :param shared_access_policy_key: The shared access policy key for the specified shared access
+     policy. Required on PUT (CreateOrReplace) requests.
+    :type shared_access_policy_key: str
+    :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken",
+     "ConnectionString".
+    :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode
+    :param queue_name: The name of the Service Bus Queue. Required on PUT (CreateOrReplace)
+     requests.
+    :type queue_name: str
+    :param property_columns: A string array of the names of output columns to be attached to
+     Service Bus messages as custom properties.
+    :type property_columns: list[str]
+    :param system_property_columns: Dictionary of string key/value pairs describing the system
+     properties to be attached to Service Bus messages.
+    :type system_property_columns: dict[str, str]
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'},
+        'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'},
+        'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'},
+        'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'},
+        'queue_name': {'key': 'properties.queueName', 'type': 'str'},
+        'property_columns': {'key': 'properties.propertyColumns', 'type': '[str]'},
+        'system_property_columns': {'key': 'properties.systemPropertyColumns', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        service_bus_namespace: Optional[str] = None,
+        shared_access_policy_name: Optional[str] = None,
+        shared_access_policy_key: Optional[str] = None,
+        authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None,
+        queue_name: Optional[str] = None,
+        property_columns: Optional[List[str]] = None,
+        system_property_columns: Optional[Dict[str, str]] = None,
+        **kwargs
+    ):
+        super(ServiceBusQueueOutputDataSource, self).__init__(**kwargs)
+        self.type = 'Microsoft.ServiceBus/Queue'  # type: str
+        self.service_bus_namespace = service_bus_namespace
+        self.shared_access_policy_name = shared_access_policy_name
+        self.shared_access_policy_key = shared_access_policy_key
+        self.authentication_mode = authentication_mode
+        self.queue_name = queue_name
+        self.property_columns = property_columns
+        self.system_property_columns = system_property_columns
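+
+# Editor's note -- illustrative sketch only, not part of the generated code.
+# Constructing a Service Bus Queue output with hypothetical placeholder values;
+# the shared access policy key would normally come from configuration, not
+# source code.
+#
+#     queue_output = ServiceBusQueueOutputDataSource(
+#         service_bus_namespace="mySbNamespace",
+#         shared_access_policy_name="RootManageSharedAccessKey",
+#         shared_access_policy_key="<policy-key>",
+#         queue_name="myQueue",
+#         property_columns=["column1"],
+#     )
+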
+
+
+class ServiceBusQueueOutputDataSourceProperties(ServiceBusDataSourceProperties):
+    """The properties that are associated with a Service Bus Queue output.
+
+    :param service_bus_namespace: The namespace that is associated with the desired Event Hub,
+     Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests.
+    :type service_bus_namespace: str
+    :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus
+     Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests.
+    :type shared_access_policy_name: str
+    :param shared_access_policy_key: The shared access policy key for the specified shared access
+     policy. Required on PUT (CreateOrReplace) requests.
+    :type shared_access_policy_key: str
+    :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken",
+     "ConnectionString".
+    :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode
+    :param queue_name: The name of the Service Bus Queue. Required on PUT (CreateOrReplace)
+     requests.
+    :type queue_name: str
+    :param property_columns: A string array of the names of output columns to be attached to
+     Service Bus messages as custom properties.
+    :type property_columns: list[str]
+    :param system_property_columns: Dictionary of string key/value pairs describing the system
+     properties to be attached to Service Bus messages.
+    :type system_property_columns: dict[str, str]
+    """
+
+    _attribute_map = {
+        'service_bus_namespace': {'key': 'serviceBusNamespace', 'type': 'str'},
+        'shared_access_policy_name': {'key': 'sharedAccessPolicyName', 'type': 'str'},
+        'shared_access_policy_key': {'key': 'sharedAccessPolicyKey', 'type': 'str'},
+        'authentication_mode': {'key': 'authenticationMode', 'type': 'str'},
+        'queue_name': {'key': 'queueName', 'type': 'str'},
+        'property_columns': {'key': 'propertyColumns', 'type': '[str]'},
+        'system_property_columns': {'key': 'systemPropertyColumns', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        service_bus_namespace: Optional[str] = None,
+        shared_access_policy_name: Optional[str] = None,
+        shared_access_policy_key: Optional[str] = None,
+        authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None,
+        queue_name: Optional[str] = None,
+        property_columns: Optional[List[str]] = None,
+        system_property_columns: Optional[Dict[str, str]] = None,
+        **kwargs
+    ):
+        super(ServiceBusQueueOutputDataSourceProperties, self).__init__(service_bus_namespace=service_bus_namespace, shared_access_policy_name=shared_access_policy_name, shared_access_policy_key=shared_access_policy_key, authentication_mode=authentication_mode, **kwargs)
+        self.queue_name = queue_name
+        self.property_columns = property_columns
+        self.system_property_columns = system_property_columns
+
+
+class ServiceBusTopicOutputDataSource(OutputDataSource):
+    """Describes a Service Bus Topic output data source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param type: Required. Indicates the type of data source the output will be written to.
+     Required on PUT (CreateOrReplace) requests. Constant filled by server.
+    :type type: str
+    :param service_bus_namespace: The namespace that is associated with the desired Event Hub,
+     Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests.
+    :type service_bus_namespace: str
+    :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus
+     Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests.
+    :type shared_access_policy_name: str
+    :param shared_access_policy_key: The shared access policy key for the specified shared access
+     policy. Required on PUT (CreateOrReplace) requests.
+    :type shared_access_policy_key: str
+    :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken",
+     "ConnectionString".
+    :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode
+    :param topic_name: The name of the Service Bus Topic. Required on PUT (CreateOrReplace)
+     requests.
+    :type topic_name: str
+    :param property_columns: A string array of the names of output columns to be attached to
+     Service Bus messages as custom properties.
+    :type property_columns: list[str]
+    :param system_property_columns: Dictionary of string key/value pairs describing the system
+     properties to be attached to Service Bus messages.
+    :type system_property_columns: dict[str, str]
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'},
+        'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'},
+        'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'},
+        'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'},
+        'topic_name': {'key': 'properties.topicName', 'type': 'str'},
+        'property_columns': {'key': 'properties.propertyColumns', 'type': '[str]'},
+        'system_property_columns': {'key': 'properties.systemPropertyColumns', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        service_bus_namespace: Optional[str] = None,
+        shared_access_policy_name: Optional[str] = None,
+        shared_access_policy_key: Optional[str] = None,
+        authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None,
+        topic_name: Optional[str] = None,
+        property_columns: Optional[List[str]] = None,
+        system_property_columns: Optional[Dict[str, str]] = None,
+        **kwargs
+    ):
+        super(ServiceBusTopicOutputDataSource, self).__init__(**kwargs)
+        self.type = 'Microsoft.ServiceBus/Topic'  # type: str
+        self.service_bus_namespace = service_bus_namespace
+        self.shared_access_policy_name = shared_access_policy_name
+        self.shared_access_policy_key = shared_access_policy_key
+        self.authentication_mode = authentication_mode
+        self.topic_name = topic_name
+        self.property_columns = property_columns
+        self.system_property_columns = system_property_columns
+
+
+class ServiceBusTopicOutputDataSourceProperties(ServiceBusDataSourceProperties):
+    """The properties that are associated with a Service Bus Topic output.
+
+    :param service_bus_namespace: The namespace that is associated with the desired Event Hub,
+     Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests.
+    :type service_bus_namespace: str
+    :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus
+     Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests.
+    :type shared_access_policy_name: str
+    :param shared_access_policy_key: The shared access policy key for the specified shared access
+     policy. Required on PUT (CreateOrReplace) requests.
+    :type shared_access_policy_key: str
+    :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken",
+     "ConnectionString".
+    :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode
+    :param topic_name: The name of the Service Bus Topic. Required on PUT (CreateOrReplace)
+     requests.
+    :type topic_name: str
+    :param property_columns: A string array of the names of output columns to be attached to
+     Service Bus messages as custom properties.
+    :type property_columns: list[str]
+    :param system_property_columns: Dictionary of string key/value pairs describing the system
+     properties to be attached to Service Bus messages.
+ :type system_property_columns: dict[str, str] + """ + + _attribute_map = { + 'service_bus_namespace': {'key': 'serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, + 'topic_name': {'key': 'topicName', 'type': 'str'}, + 'property_columns': {'key': 'propertyColumns', 'type': '[str]'}, + 'system_property_columns': {'key': 'systemPropertyColumns', 'type': '{str}'}, + } + + def __init__( + self, + *, + service_bus_namespace: Optional[str] = None, + shared_access_policy_name: Optional[str] = None, + shared_access_policy_key: Optional[str] = None, + authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, + topic_name: Optional[str] = None, + property_columns: Optional[List[str]] = None, + system_property_columns: Optional[Dict[str, str]] = None, + **kwargs + ): + super(ServiceBusTopicOutputDataSourceProperties, self).__init__(service_bus_namespace=service_bus_namespace, shared_access_policy_name=shared_access_policy_name, shared_access_policy_key=shared_access_policy_key, authentication_mode=authentication_mode, **kwargs) + self.topic_name = topic_name + self.property_columns = property_columns + self.system_property_columns = system_property_columns + + +class StartStreamingJobParameters(msrest.serialization.Model): + """Parameters supplied to the Start Streaming Job operation. + + :param output_start_mode: Value may be JobStartTime, CustomTime, or LastOutputEventTime to + indicate whether the starting point of the output event stream should start whenever the job is + started, start at a custom user time stamp specified via the outputStartTime property, or start + from the last event output time. Possible values include: "JobStartTime", "CustomTime", + "LastOutputEventTime". + :type output_start_mode: str or ~stream_analytics_management_client.models.OutputStartMode + :param output_start_time: Value is either an ISO-8601 formatted time stamp that indicates the + starting point of the output event stream, or null to indicate that the output event stream + will start whenever the streaming job is started. This property must have a value if + outputStartMode is set to CustomTime. + :type output_start_time: ~datetime.datetime + """ + + _attribute_map = { + 'output_start_mode': {'key': 'outputStartMode', 'type': 'str'}, + 'output_start_time': {'key': 'outputStartTime', 'type': 'iso-8601'}, + } + + def __init__( + self, + *, + output_start_mode: Optional[Union[str, "OutputStartMode"]] = None, + output_start_time: Optional[datetime.datetime] = None, + **kwargs + ): + super(StartStreamingJobParameters, self).__init__(**kwargs) + self.output_start_mode = output_start_mode + self.output_start_time = output_start_time + + +class StreamingJob(TrackedResource): + """A streaming job object, containing all information associated with the named streaming job. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. 
+    :vartype type: str
+    :param tags: A set of tags. Resource tags.
+    :type tags: dict[str, str]
+    :param location: The geo-location where the resource lives.
+    :type location: str
+    :param identity: Describes the system-assigned managed identity assigned to this job that can
+     be used to authenticate with inputs and outputs.
+    :type identity: ~stream_analytics_management_client.models.Identity
+    :param sku: Describes the SKU of the streaming job. Required on PUT (CreateOrReplace) requests.
+    :type sku: ~stream_analytics_management_client.models.StreamingJobSku
+    :ivar job_id: A GUID uniquely identifying the streaming job. This GUID is generated upon
+     creation of the streaming job.
+    :vartype job_id: str
+    :ivar provisioning_state: Describes the provisioning status of the streaming job.
+    :vartype provisioning_state: str
+    :ivar job_state: Describes the state of the streaming job.
+    :vartype job_state: str
+    :param job_type: Describes the type of the job. Valid modes are ``Cloud`` and ``Edge``.
+     Possible values include: "Cloud", "Edge".
+    :type job_type: str or ~stream_analytics_management_client.models.JobType
+    :param output_start_mode: This property should only be used when you want the job to start
+     immediately upon creation. Value may be JobStartTime, CustomTime, or LastOutputEventTime to
+     indicate whether the starting point of the output event stream should start whenever the job
+     is started, start at a custom user time stamp specified via the outputStartTime property, or
+     start from the last event output time. Possible values include: "JobStartTime", "CustomTime",
+     "LastOutputEventTime".
+    :type output_start_mode: str or ~stream_analytics_management_client.models.OutputStartMode
+    :param output_start_time: Value is either an ISO-8601 formatted time stamp that indicates the
+     starting point of the output event stream, or null to indicate that the output event stream
+     will start whenever the streaming job is started. This property must have a value if
+     outputStartMode is set to CustomTime.
+    :type output_start_time: ~datetime.datetime
+    :ivar last_output_event_time: Value is either an ISO-8601 formatted timestamp indicating the
+     last output event time of the streaming job or null indicating that output has not yet been
+     produced. In case of multiple outputs or multiple streams, this shows the latest value in that
+     set.
+    :vartype last_output_event_time: ~datetime.datetime
+    :param events_out_of_order_policy: Indicates the policy to apply to events that arrive out of
+     order in the input event stream. Possible values include: "Adjust", "Drop".
+    :type events_out_of_order_policy: str or
+     ~stream_analytics_management_client.models.EventsOutOfOrderPolicy
+    :param output_error_policy: Indicates the policy to apply to events that arrive at the output
+     and cannot be written to the external storage due to being malformed (missing column values,
+     column values of wrong type or size). Possible values include: "Stop", "Drop".
+    :type output_error_policy: str or ~stream_analytics_management_client.models.OutputErrorPolicy
+    :param events_out_of_order_max_delay_in_seconds: The maximum tolerable delay in seconds where
+     out-of-order events can be adjusted to be back in order.
+    :type events_out_of_order_max_delay_in_seconds: int
+    :param events_late_arrival_max_delay_in_seconds: The maximum tolerable delay in seconds where
+     events arriving late could be included. Supported range is -1 to 1814399 (20.23:59:59 days),
+     where -1 is used to specify an indefinite wait. If the property is absent, it is interpreted
+     to have a value of -1.
+    :type events_late_arrival_max_delay_in_seconds: int
+    :param data_locale: The data locale of the stream analytics job. Value should be the name of a
+     supported .NET Culture from the set
+     https://msdn.microsoft.com/en-us/library/system.globalization.culturetypes(v=vs.110).aspx.
+     Defaults to 'en-US' if none specified.
+    :type data_locale: str
+    :param compatibility_level: Controls certain runtime behaviors of the streaming job. Possible
+     values include: "1.0".
+    :type compatibility_level: str or ~stream_analytics_management_client.models.CompatibilityLevel
+    :ivar created_date: Value is an ISO-8601 formatted UTC timestamp indicating when the streaming
+     job was created.
+    :vartype created_date: ~datetime.datetime
+    :param inputs: A list of one or more inputs to the streaming job. The name property for each
+     input is required when specifying this property in a PUT request. This property cannot be
+     modified via a PATCH operation. You must use the PATCH API available for the individual input.
+    :type inputs: list[~stream_analytics_management_client.models.Input]
+    :param transformation: Indicates the query and the number of streaming units to use for the
+     streaming job. The name property of the transformation is required when specifying this
+     property in a PUT request. This property cannot be modified via a PATCH operation. You must
+     use the PATCH API available for the individual transformation.
+    :type transformation: ~stream_analytics_management_client.models.Transformation
+    :param outputs: A list of one or more outputs for the streaming job. The name property for each
+     output is required when specifying this property in a PUT request. This property cannot be
+     modified via a PATCH operation. You must use the PATCH API available for the individual
+     output.
+    :type outputs: list[~stream_analytics_management_client.models.Output]
+    :param functions: A list of one or more functions for the streaming job. The name property for
+     each function is required when specifying this property in a PUT request. This property cannot
+     be modified via a PATCH operation. You must use the PATCH API available for the individual
+     function.
+    :type functions: list[~stream_analytics_management_client.models.Function]
+    :ivar etag: The current entity tag for the streaming job. This is an opaque string. You can use
+     it to detect whether the resource has changed between requests. You can also use it in the
+     If-Match or If-None-Match headers for write operations for optimistic concurrency.
+    :vartype etag: str
+    :param job_storage_account: The properties that are associated with an Azure Storage account
+     with MSI.
+    :type job_storage_account: ~stream_analytics_management_client.models.JobStorageAccount
+    :ivar content_storage_policy: Valid values are JobStorageAccount and SystemAccount. If set to
+     JobStorageAccount, this requires the user to also specify the jobStorageAccount property.
+     Possible values include: "SystemAccount", "JobStorageAccount".
+    :vartype content_storage_policy: str or
+     ~stream_analytics_management_client.models.ContentStoragePolicy
+    :param externals: The storage account where the custom code artifacts are located.
+    :type externals: ~stream_analytics_management_client.models.External
+    :param cluster: The cluster on which streaming jobs will run.
+ :type cluster: ~stream_analytics_management_client.models.ClusterInfo + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'job_id': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'job_state': {'readonly': True}, + 'last_output_event_time': {'readonly': True}, + 'created_date': {'readonly': True}, + 'etag': {'readonly': True}, + 'content_storage_policy': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'Identity'}, + 'sku': {'key': 'properties.sku', 'type': 'StreamingJobSku'}, + 'job_id': {'key': 'properties.jobId', 'type': 'str'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'job_state': {'key': 'properties.jobState', 'type': 'str'}, + 'job_type': {'key': 'properties.jobType', 'type': 'str'}, + 'output_start_mode': {'key': 'properties.outputStartMode', 'type': 'str'}, + 'output_start_time': {'key': 'properties.outputStartTime', 'type': 'iso-8601'}, + 'last_output_event_time': {'key': 'properties.lastOutputEventTime', 'type': 'iso-8601'}, + 'events_out_of_order_policy': {'key': 'properties.eventsOutOfOrderPolicy', 'type': 'str'}, + 'output_error_policy': {'key': 'properties.outputErrorPolicy', 'type': 'str'}, + 'events_out_of_order_max_delay_in_seconds': {'key': 'properties.eventsOutOfOrderMaxDelayInSeconds', 'type': 'int'}, + 'events_late_arrival_max_delay_in_seconds': {'key': 'properties.eventsLateArrivalMaxDelayInSeconds', 'type': 'int'}, + 'data_locale': {'key': 'properties.dataLocale', 'type': 'str'}, + 'compatibility_level': {'key': 'properties.compatibilityLevel', 'type': 'str'}, + 'created_date': {'key': 'properties.createdDate', 'type': 'iso-8601'}, + 'inputs': {'key': 'properties.inputs', 'type': '[Input]'}, + 'transformation': {'key': 'properties.transformation', 'type': 'Transformation'}, + 'outputs': {'key': 'properties.outputs', 'type': '[Output]'}, + 'functions': {'key': 'properties.functions', 'type': '[Function]'}, + 'etag': {'key': 'properties.etag', 'type': 'str'}, + 'job_storage_account': {'key': 'properties.jobStorageAccount', 'type': 'JobStorageAccount'}, + 'content_storage_policy': {'key': 'properties.contentStoragePolicy', 'type': 'str'}, + 'externals': {'key': 'properties.externals', 'type': 'External'}, + 'cluster': {'key': 'properties.cluster', 'type': 'ClusterInfo'}, + } + + def __init__( + self, + *, + tags: Optional[Dict[str, str]] = None, + location: Optional[str] = None, + identity: Optional["Identity"] = None, + sku: Optional["StreamingJobSku"] = None, + job_type: Optional[Union[str, "JobType"]] = None, + output_start_mode: Optional[Union[str, "OutputStartMode"]] = None, + output_start_time: Optional[datetime.datetime] = None, + events_out_of_order_policy: Optional[Union[str, "EventsOutOfOrderPolicy"]] = None, + output_error_policy: Optional[Union[str, "OutputErrorPolicy"]] = None, + events_out_of_order_max_delay_in_seconds: Optional[int] = None, + events_late_arrival_max_delay_in_seconds: Optional[int] = None, + data_locale: Optional[str] = None, + compatibility_level: Optional[Union[str, "CompatibilityLevel"]] = None, + inputs: Optional[List["Input"]] = None, + transformation: Optional["Transformation"] = None, + outputs: Optional[List["Output"]] = None, + functions: 
Optional[List["Function"]] = None, + job_storage_account: Optional["JobStorageAccount"] = None, + externals: Optional["External"] = None, + cluster: Optional["ClusterInfo"] = None, + **kwargs + ): + super(StreamingJob, self).__init__(tags=tags, location=location, **kwargs) + self.identity = identity + self.sku = sku + self.job_id = None + self.provisioning_state = None + self.job_state = None + self.job_type = job_type + self.output_start_mode = output_start_mode + self.output_start_time = output_start_time + self.last_output_event_time = None + self.events_out_of_order_policy = events_out_of_order_policy + self.output_error_policy = output_error_policy + self.events_out_of_order_max_delay_in_seconds = events_out_of_order_max_delay_in_seconds + self.events_late_arrival_max_delay_in_seconds = events_late_arrival_max_delay_in_seconds + self.data_locale = data_locale + self.compatibility_level = compatibility_level + self.created_date = None + self.inputs = inputs + self.transformation = transformation + self.outputs = outputs + self.functions = functions + self.etag = None + self.job_storage_account = job_storage_account + self.content_storage_policy = None + self.externals = externals + self.cluster = cluster + + +class StreamingJobListResult(msrest.serialization.Model): + """Object containing a list of streaming jobs. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: A list of streaming jobs. Populated by a 'List' operation. + :vartype value: list[~stream_analytics_management_client.models.StreamingJob] + :ivar next_link: The link (url) to the next page of results. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[StreamingJob]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(StreamingJobListResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class StreamingJobSku(msrest.serialization.Model): + """The properties that are associated with a SKU. + + :param name: The name of the SKU. Required on PUT (CreateOrReplace) requests. Possible values + include: "Standard". + :type name: str or ~stream_analytics_management_client.models.StreamingJobSkuName + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Optional[Union[str, "StreamingJobSkuName"]] = None, + **kwargs + ): + super(StreamingJobSku, self).__init__(**kwargs) + self.name = name + + +class StreamInputProperties(InputProperties): + """The properties that are associated with an input containing stream data. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates whether the input is a source of reference data or stream + data. Required on PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param serialization: Describes how data from an input is serialized or how data is serialized + when written to an output. Required on PUT (CreateOrReplace) requests. + :type serialization: ~stream_analytics_management_client.models.Serialization + :ivar diagnostics: Describes conditions applicable to the Input, Output, or the job overall, + that warrant customer attention. 
+
+
+class StreamInputProperties(InputProperties):
+    """The properties that are associated with an input containing stream data.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param type: Required. Indicates whether the input is a source of reference data or stream
+     data. Required on PUT (CreateOrReplace) requests. Constant filled by server.
+    :type type: str
+    :param serialization: Describes how data from an input is serialized or how data is serialized
+     when written to an output. Required on PUT (CreateOrReplace) requests.
+    :type serialization: ~stream_analytics_management_client.models.Serialization
+    :ivar diagnostics: Describes conditions applicable to the Input, Output, or the job overall,
+     that warrant customer attention.
+    :vartype diagnostics: ~stream_analytics_management_client.models.Diagnostics
+    :ivar etag: The current entity tag for the input. This is an opaque string. You can use it to
+     detect whether the resource has changed between requests. You can also use it in the If-Match
+     or If-None-Match headers for write operations for optimistic concurrency.
+    :vartype etag: str
+    :param compression: Describes how input data is compressed.
+    :type compression: ~stream_analytics_management_client.models.Compression
+    :param partition_key: Describes a key in the input data that is used for partitioning the
+     input data.
+    :type partition_key: str
+    :param datasource: Describes an input data source that contains stream data. Required on PUT
+     (CreateOrReplace) requests.
+    :type datasource: ~stream_analytics_management_client.models.StreamInputDataSource
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'diagnostics': {'readonly': True},
+        'etag': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'serialization': {'key': 'serialization', 'type': 'Serialization'},
+        'diagnostics': {'key': 'diagnostics', 'type': 'Diagnostics'},
+        'etag': {'key': 'etag', 'type': 'str'},
+        'compression': {'key': 'compression', 'type': 'Compression'},
+        'partition_key': {'key': 'partitionKey', 'type': 'str'},
+        'datasource': {'key': 'datasource', 'type': 'StreamInputDataSource'},
+    }
+
+    def __init__(
+        self,
+        *,
+        serialization: Optional["Serialization"] = None,
+        compression: Optional["Compression"] = None,
+        partition_key: Optional[str] = None,
+        datasource: Optional["StreamInputDataSource"] = None,
+        **kwargs
+    ):
+        super(StreamInputProperties, self).__init__(serialization=serialization, compression=compression, partition_key=partition_key, **kwargs)
+        self.type = 'Stream'  # type: str
+        self.datasource = datasource
+
+
+class SubscriptionQuota(SubResource):
+    """Describes the current quota for the subscription.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: Resource Id.
+    :vartype id: str
+    :param name: Resource name.
+    :type name: str
+    :ivar type: Resource type.
+    :vartype type: str
+    :ivar max_count: The max permitted usage of this resource.
+    :vartype max_count: int
+    :ivar current_count: The current usage of this resource.
+    :vartype current_count: int
+    """
+
+    _validation = {
+        'id': {'readonly': True},
+        'type': {'readonly': True},
+        'max_count': {'readonly': True},
+        'current_count': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'max_count': {'key': 'properties.maxCount', 'type': 'int'},
+        'current_count': {'key': 'properties.currentCount', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        *,
+        name: Optional[str] = None,
+        **kwargs
+    ):
+        super(SubscriptionQuota, self).__init__(name=name, **kwargs)
+        self.max_count = None
+        self.current_count = None
+
+
+class SubscriptionQuotasListResult(msrest.serialization.Model):
+    """Result of the GetQuotas operation. It contains a list of quotas for the subscription in a particular region.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar value: List of quotas for the subscription in a particular region.
+ :vartype value: list[~stream_analytics_management_client.models.SubscriptionQuota] + """ + + _validation = { + 'value': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[SubscriptionQuota]'}, + } + + def __init__( + self, + **kwargs + ): + super(SubscriptionQuotasListResult, self).__init__(**kwargs) + self.value = None + + +class Transformation(SubResource): + """A transformation object, containing all information associated with the named transformation. All transformations are contained under a streaming job. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource Id. + :vartype id: str + :param name: Resource name. + :type name: str + :ivar type: Resource type. + :vartype type: str + :param streaming_units: Specifies the number of streaming units that the streaming job uses. + :type streaming_units: int + :param query: Specifies the query that will be run in the streaming job. You can learn more + about the Stream Analytics Query Language (SAQL) here: + https://msdn.microsoft.com/library/azure/dn834998 . Required on PUT (CreateOrReplace) requests. + :type query: str + :ivar etag: The current entity tag for the transformation. This is an opaque string. You can + use it to detect whether the resource has changed between requests. You can also use it in the + If-Match or If-None-Match headers for write operations for optimistic concurrency. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'streaming_units': {'key': 'properties.streamingUnits', 'type': 'int'}, + 'query': {'key': 'properties.query', 'type': 'str'}, + 'etag': {'key': 'properties.etag', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + streaming_units: Optional[int] = None, + query: Optional[str] = None, + **kwargs + ): + super(Transformation, self).__init__(name=name, **kwargs) + self.streaming_units = streaming_units + self.query = query + self.etag = None diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_stream_analytics_management_client_enums.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_stream_analytics_management_client_enums.py new file mode 100644 index 000000000000..4ddfdea9c290 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_stream_analytics_management_client_enums.py @@ -0,0 +1,148 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# --------------------------------------------------------------------------
+
+from enum import Enum, EnumMeta
+from six import with_metaclass
+
+class _CaseInsensitiveEnumMeta(EnumMeta):
+    def __getitem__(self, name):
+        return super().__getitem__(name.upper())
+
+    def __getattr__(cls, name):
+        """Return the enum member matching `name`.
+
+        We use __getattr__ instead of descriptors or inserting into the enum
+        class' __dict__ in order to support `name` and `value` being both
+        properties for enum members (which live in the class' __dict__) and
+        enum members themselves.
+        """
+        try:
+            return cls._member_map_[name.upper()]
+        except KeyError:
+            raise AttributeError(name)
+
+
+class AuthenticationMode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """Authentication Mode. Valid modes are ``ConnectionString``, ``Msi`` and ``UserToken``.
+    """
+
+    MSI = "Msi"
+    USER_TOKEN = "UserToken"
+    CONNECTION_STRING = "ConnectionString"
+
+class ClusterProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """The status of the cluster provisioning. The three terminal states are: Succeeded, Failed and
+    Canceled.
+    """
+
+    SUCCEEDED = "Succeeded"  #: The cluster provisioning succeeded.
+    FAILED = "Failed"  #: The cluster provisioning failed.
+    CANCELED = "Canceled"  #: The cluster provisioning was canceled.
+    IN_PROGRESS = "InProgress"  #: The cluster provisioning is in progress.
+
+class ClusterSkuName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """Specifies the SKU name of the cluster. Required on PUT (CreateOrUpdate) requests.
+    """
+
+    DEFAULT = "Default"  #: The default SKU.
+
+class CompatibilityLevel(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """Controls certain runtime behaviors of the streaming job.
+    """
+
+    ONE0 = "1.0"
+
+class ContentStoragePolicy(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """Valid values are JobStorageAccount and SystemAccount. If set to JobStorageAccount, this
+    requires the user to also specify the jobStorageAccount property.
+    """
+
+    SYSTEM_ACCOUNT = "SystemAccount"
+    JOB_STORAGE_ACCOUNT = "JobStorageAccount"
+
+class Encoding(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """Specifies the encoding of the incoming data in the case of input and the encoding of outgoing
+    data in the case of output.
+    """
+
+    UTF8 = "UTF8"
+
+class EventSerializationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """Indicates the type of serialization that the input or output uses. Required on PUT
+    (CreateOrReplace) requests.
+    """
+
+    CSV = "Csv"
+    AVRO = "Avro"
+    JSON = "Json"
+    CUSTOM_CLR = "CustomClr"
+    PARQUET = "Parquet"
+
+class EventsOutOfOrderPolicy(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """Indicates the policy to apply to events that arrive out of order in the input event stream.
+    """
+
+    ADJUST = "Adjust"
+    DROP = "Drop"
+
+class JobState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """The current execution state of the streaming job.
+    """
+
+    CREATED = "Created"  #: The job is currently in the Created state.
+    STARTING = "Starting"  #: The job is currently in the Starting state.
+    RUNNING = "Running"  #: The job is currently in the Running state.
+    STOPPING = "Stopping"  #: The job is currently in the Stopping state.
+    STOPPED = "Stopped"  #: The job is currently in the Stopped state.
+    DELETING = "Deleting"  #: The job is currently in the Deleting state.
+    FAILED = "Failed"  #: The job is currently in the Failed state.
+    DEGRADED = "Degraded"  #: The job is currently in the Degraded state.
+    RESTARTING = "Restarting"  #: The job is currently in the Restarting state.
+    SCALING = "Scaling"  #: The job is currently in the Scaling state.
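+
+# Editor's note -- illustrative sketch only, not part of the generated code.
+# Because of _CaseInsensitiveEnumMeta above, member lookup ignores case:
+#
+#     assert JobState["running"] is JobState.RUNNING
+#     assert AuthenticationMode["MSI"] is AuthenticationMode.MSI
+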
+ DEGRADED = "Degraded" #: The job is currently in the Degraded state. + RESTARTING = "Restarting" #: The job is currently in the Restarting state. + SCALING = "Scaling" #: The job is currently in the Scaling state. + +class JobType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Describes the type of the job. Valid modes are ``Cloud`` and 'Edge'. + """ + + CLOUD = "Cloud" + EDGE = "Edge" + +class JsonOutputSerializationFormat(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Specifies the format of the JSON the output will be written in. The currently supported values + are 'lineSeparated' indicating the output will be formatted by having each JSON object + separated by a new line and 'array' indicating the output will be formatted as an array of JSON + objects. + """ + + LINE_SEPARATED = "LineSeparated" + ARRAY = "Array" + +class OutputErrorPolicy(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Indicates the policy to apply to events that arrive at the output and cannot be written to the + external storage due to being malformed (missing column values, column values of wrong type or + size). + """ + + STOP = "Stop" + DROP = "Drop" + +class OutputStartMode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Value may be JobStartTime, CustomTime, or LastOutputEventTime to indicate whether the starting + point of the output event stream should start whenever the job is started, start at a custom + user time stamp specified via the outputStartTime property, or start from the last event output + time. + """ + + JOB_START_TIME = "JobStartTime" + CUSTOM_TIME = "CustomTime" + LAST_OUTPUT_EVENT_TIME = "LastOutputEventTime" + +class StreamingJobSkuName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The name of the SKU. Required on PUT (CreateOrReplace) requests. + """ + + STANDARD = "Standard" diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/__init__.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/__init__.py new file mode 100644 index 000000000000..a247559efb05 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/__init__.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from ._functions_operations import FunctionsOperations +from ._inputs_operations import InputsOperations +from ._outputs_operations import OutputsOperations +from ._streaming_jobs_operations import StreamingJobsOperations +from ._subscriptions_operations import SubscriptionsOperations +from ._transformations_operations import TransformationsOperations +from ._operations import Operations +from ._clusters_operations import ClustersOperations +from ._private_endpoints_operations import PrivateEndpointsOperations + +__all__ = [ + 'FunctionsOperations', + 'InputsOperations', + 'OutputsOperations', + 'StreamingJobsOperations', + 'SubscriptionsOperations', + 'TransformationsOperations', + 'Operations', + 'ClustersOperations', + 'PrivateEndpointsOperations', +] diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_clusters_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_clusters_operations.py new file mode 100644 index 000000000000..2b0d23f60677 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_clusters_operations.py @@ -0,0 +1,706 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class ClustersOperations(object): + """ClustersOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~stream_analytics_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. 
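+
+    Typical access pattern (editor's sketch; the credential and subscription id
+    are hypothetical placeholders)::
+
+        client = StreamAnalyticsManagementClient(credential, "<subscription-id>")
+        clusters = client.clusters.list_by_subscription()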
+ """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def _create_or_update_initial( + self, + resource_group_name, # type: str + cluster_name, # type: str + cluster, # type: "models.Cluster" + if_match=None, # type: Optional[str] + if_none_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.Cluster" + cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-03-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._create_or_update_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(cluster, 'Cluster') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('Cluster', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('Cluster', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}'} # type: ignore + + def begin_create_or_update( + self, + resource_group_name, # type: str + cluster_name, # type: str + cluster, # type: "models.Cluster" + if_match=None, # type: Optional[str] + if_none_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> LROPoller["models.Cluster"] + """Creates a Stream Analytics Cluster or replaces an already existing cluster. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_name: The name of the cluster. + :type cluster_name: str + :param cluster: The definition of the cluster that will be used to create a new cluster or + replace the existing one. + :type cluster: ~stream_analytics_management_client.models.Cluster + :param if_match: The ETag of the resource. Omit this value to always overwrite the current + resource. Specify the last-seen ETag value to prevent accidentally overwriting concurrent + changes. + :type if_match: str + :param if_none_match: Set to '*' to allow a new resource to be created, but to prevent updating + an existing resource. Other values will result in a 412 Pre-condition Failed response. + :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either Cluster or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~stream_analytics_management_client.models.Cluster] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + cluster_name=cluster_name, + cluster=cluster, + if_match=if_match, + if_none_match=if_none_match, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('Cluster', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}'} # type: ignore + + def _update_initial( + self, + resource_group_name, # type: str + cluster_name, # type: str + cluster, # type: "models.Cluster" + if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...)
-> Optional["models.Cluster"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Cluster"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-03-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._update_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(cluster, 'Cluster') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('Cluster', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}'} # type: ignore + + def begin_update( + self, + resource_group_name, # type: str + cluster_name, # type: str + cluster, # type: "models.Cluster" + if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> LROPoller["models.Cluster"] + """Updates an existing cluster. This can be used to partially update (ie. update one or two + properties) a cluster without affecting the rest of the cluster definition. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_name: The name of the cluster. + :type cluster_name: str + :param cluster: The properties specified here will overwrite the corresponding properties in + the existing cluster (ie. Those properties will be updated). + :type cluster: ~stream_analytics_management_client.models.Cluster + :param if_match: The ETag of the resource. Omit this value to always overwrite the current + record set. Specify the last-seen ETag value to prevent accidentally overwriting concurrent + changes. 
+ :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either Cluster or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~stream_analytics_management_client.models.Cluster] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._update_initial( + resource_group_name=resource_group_name, + cluster_name=cluster_name, + cluster=cluster, + if_match=if_match, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('Cluster', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + cluster_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.Cluster" + """Gets information about the specified cluster. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_name: The name of the cluster. 
+ :type cluster_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Cluster, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.Cluster + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-03-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Cluster', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}'} # type: ignore + + def _delete_initial( + self, + resource_group_name, # type: str + cluster_name, # type: str + **kwargs # type: Any + ): + # type: (...) 
-> None + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-03-01-preview" + accept = "application/json" + + # Construct URL + url = self._delete_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}'} # type: ignore + + def begin_delete( + self, + resource_group_name, # type: str + cluster_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Deletes the specified cluster. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_name: The name of the cluster. + :type cluster_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + cluster_name=cluster_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}'} # type: ignore + + def list_by_subscription( + self, + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.ClusterListResult"] + """Lists all of the clusters in the given subscription. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ClusterListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.ClusterListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ClusterListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-03-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_subscription.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('ClusterListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + 
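+            # ItemPaged drives this closure: it is called once per page, first
+            # with next_link=None and then with each continuation link the
+            # service returns, so callers simply iterate the pager, e.g.
+            # (placeholder names):
+            #
+            #     for cluster in client.clusters.list_by_subscription():
+            #         print(cluster.id)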
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.Error, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/clusters'} # type: ignore + + def list_by_resource_group( + self, + resource_group_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.ClusterListResult"] + """Lists all of the clusters in the given resource group. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ClusterListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.ClusterListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ClusterListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-03-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('ClusterListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.Error, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_resource_group.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters'} # type: ignore + + def list_streaming_jobs( + self, + resource_group_name, # type: str + cluster_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.ClusterJobListResult"] + """Lists all of the streaming jobs in the given cluster. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_name: The name of the cluster. + :type cluster_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ClusterJobListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.ClusterJobListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ClusterJobListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-03-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_streaming_jobs.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('ClusterJobListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.Error, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_streaming_jobs.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/listStreamingJobs'} # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_functions_operations.py 
b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_functions_operations.py new file mode 100644 index 000000000000..31063c85850b --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_functions_operations.py @@ -0,0 +1,642 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class FunctionsOperations(object): + """FunctionsOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~stream_analytics_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def create_or_replace( + self, + resource_group_name, # type: str + job_name, # type: str + function_name, # type: str + function, # type: "models.Function" + if_match=None, # type: Optional[str] + if_none_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.Function" + """Creates a function or replaces an already existing function under an existing streaming job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param function_name: The name of the function. + :type function_name: str + :param function: The definition of the function that will be used to create a new function or + replace the existing one under the streaming job. + :type function: ~stream_analytics_management_client.models.Function + :param if_match: The ETag of the function. Omit this value to always overwrite the current + function. Specify the last-seen ETag value to prevent accidentally overwriting concurrent + changes. 
+ :type if_match: str + :param if_none_match: Set to '*' to allow a new function to be created, but to prevent updating + an existing function. Other values will result in a 412 Pre-condition Failed response. + :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Function, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.Function + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Function"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_replace.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'functionName': self._serialize.url("function_name", function_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(function, 'Function') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Function', pipeline_response) + + if response.status_code == 201: + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Function', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + create_or_replace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}'} # type: ignore + + def update( + self, + resource_group_name, # type: str + job_name, # type: str + function_name, # type: str + function, # type: "models.Function" + 
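+        # Optimistic concurrency: if_match carries the last-seen ETag (for
+        # example, captured from a prior get() or create_or_replace() call) so
+        # the service can reject stale writes instead of silently overwriting
+        # concurrent changes; omitting it overwrites unconditionally.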
if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.Function" + """Updates an existing function under an existing streaming job. This can be used to partially + update (i.e., update one or two properties) a function without affecting the rest of the job or + function definition. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param function_name: The name of the function. + :type function_name: str + :param function: A function object. The properties specified here will overwrite the + corresponding properties in the existing function (i.e., those properties will be updated). Any + properties that are set to null here will mean that the corresponding property in the existing + function will remain the same and not change as a result of this PATCH operation. + :type function: ~stream_analytics_management_client.models.Function + :param if_match: The ETag of the function. Omit this value to always overwrite the current + function. Specify the last-seen ETag value to prevent accidentally overwriting concurrent + changes. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Function, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.Function + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Function"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'functionName': self._serialize.url("function_name", function_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(function, 'Function') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response,
error_format=ARMErrorFormat) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Function', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + job_name, # type: str + function_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes a function from the streaming job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param function_name: The name of the function. + :type function_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'functionName': self._serialize.url("function_name", function_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + job_name, # type: str + function_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.Function" + """Gets details about the specified function. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param function_name: The name of the function. 
+ :type function_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Function, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.Function + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Function"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'functionName': self._serialize.url("function_name", function_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Function', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}'} # type: ignore + + def list_by_streaming_job( + self, + resource_group_name, # type: str + job_name, # type: str + select=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.FunctionListResult"] + """Lists all of the functions under the specified streaming job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param select: The $select OData query parameter. This is a comma-separated list of structural + properties to include in the response, or '*' to include all properties. By default, all + properties are returned except diagnostics. Currently only accepts '*' as a valid value.
+ :type select: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either FunctionListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.FunctionListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.FunctionListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_streaming_job.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('FunctionListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_streaming_job.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions'} # type: ignore + + def _test_initial( + self, + resource_group_name, # type: str + job_name, # type: str + function_name, # type: str + function=None, # type: Optional["models.Function"] + **kwargs # type: Any + ): + # type: (...) 
-> Optional["models.ResourceTestStatus"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ResourceTestStatus"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._test_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'functionName': self._serialize.url("function_name", function_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if function is not None: + body_content = self._serialize.body(function, 'Function') + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('ResourceTestStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _test_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/test'} # type: ignore + + def begin_test( + self, + resource_group_name, # type: str + job_name, # type: str + function_name, # type: str + function=None, # type: Optional["models.Function"] + **kwargs # type: Any + ): + # type: (...) -> LROPoller["models.ResourceTestStatus"] + """Tests if the information provided for a function is valid. This can range from testing the + connection to the underlying web service behind the function or making sure the function code + provided is syntactically correct. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param function_name: The name of the function. + :type function_name: str + :param function: If the function specified does not already exist, this parameter must contain + the full function definition intended to be tested. 
If the function specified already exists, + this parameter can be left null to test the existing function as is or if specified, the + properties specified will overwrite the corresponding properties in the existing function + (exactly like a PATCH operation) and the resulting function will be tested. + :type function: ~stream_analytics_management_client.models.Function + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either ResourceTestStatus or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~stream_analytics_management_client.models.ResourceTestStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.ResourceTestStatus"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._test_initial( + resource_group_name=resource_group_name, + job_name=job_name, + function_name=function_name, + function=function, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('ResourceTestStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_test.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/test'} # type: ignore + + def retrieve_default_definition( + self, + resource_group_name, # type: str + job_name, # type: str + function_name, # type: str + function_retrieve_default_definition_parameters=None, # type: Optional["models.FunctionRetrieveDefaultDefinitionParameters"] + **kwargs # type: Any + ): + # type: (...) -> "models.Function" + """Retrieves the default definition of a function based on the parameters specified. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param function_name: The name of the function. + :type function_name: str + :param function_retrieve_default_definition_parameters: Parameters used to specify the type of + function to retrieve the default definition for. 
+ :type function_retrieve_default_definition_parameters: ~stream_analytics_management_client.models.FunctionRetrieveDefaultDefinitionParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Function, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.Function + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Function"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.retrieve_default_definition.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'functionName': self._serialize.url("function_name", function_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if function_retrieve_default_definition_parameters is not None: + body_content = self._serialize.body(function_retrieve_default_definition_parameters, 'FunctionRetrieveDefaultDefinitionParameters') + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Function', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + retrieve_default_definition.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/RetrieveDefaultDefinition'} # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_inputs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_inputs_operations.py new file mode 100644 index 000000000000..890d33f1b8b1 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_inputs_operations.py @@ -0,0 +1,564 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class InputsOperations(object): + """InputsOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~stream_analytics_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def create_or_replace( + self, + resource_group_name, # type: str + job_name, # type: str + input_name, # type: str + input, # type: "models.Input" + if_match=None, # type: Optional[str] + if_none_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.Input" + """Creates an input or replaces an already existing input under an existing streaming job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param input_name: The name of the input. + :type input_name: str + :param input: The definition of the input that will be used to create a new input or replace + the existing one under the streaming job. + :type input: ~stream_analytics_management_client.models.Input + :param if_match: The ETag of the input. Omit this value to always overwrite the current input. + Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. + :type if_match: str + :param if_none_match: Set to '*' to allow a new input to be created, but to prevent updating an + existing input. Other values will result in a 412 Pre-condition Failed response. 
+ :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Input, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.Input + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Input"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_replace.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'inputName': self._serialize.url("input_name", input_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(input, 'Input') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Input', pipeline_response) + + if response.status_code == 201: + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Input', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + create_or_replace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}'} # type: ignore + + def update( + self, + resource_group_name, # type: str + job_name, # type: str + input_name, # type: str + input, # type: "models.Input" + if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.Input" + """Updates an existing input under an existing streaming job. This can be used to partially update + (ie. 
update one or two properties) an input without affecting the rest of the job or input
+        definition.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param job_name: The name of the streaming job.
+        :type job_name: str
+        :param input_name: The name of the input.
+        :type input_name: str
+        :param input: An Input object. The properties specified here will overwrite the corresponding
+         properties in the existing input (ie. Those properties will be updated). Any properties that
+         are set to null here will mean that the corresponding property in the existing input will
+         remain the same and not change as a result of this PATCH operation.
+        :type input: ~stream_analytics_management_client.models.Input
+        :param if_match: The ETag of the input. Omit this value to always overwrite the current input.
+         Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes.
+        :type if_match: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Input, or the result of cls(response)
+        :rtype: ~stream_analytics_management_client.models.Input
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.Input"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2017-04-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
+            'jobName': self._serialize.url("job_name", job_name, 'str'),
+            'inputName': self._serialize.url("input_name", input_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        if if_match is not None:
+            header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(input, 'Input')
+        body_content_kwargs['content'] = body_content
+        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
+        deserialized = self._deserialize('Input', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)
+
+        return deserialized
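+    # Editorial usage sketch, not generated code: one plausible ETag-guarded update flow
+    # for the PATCH method above. `client` is assumed to be a StreamAnalyticsManagementClient;
+    # the resource names and the `etag` value are placeholders (a real ETag would come from
+    # the ETag response header of a prior GET):
+    #
+    #     etag = "<last-seen-etag>"                    # hypothetical value
+    #     patch = models.Input(properties=new_props)   # set only the properties to change
+    #     client.inputs.update("my-rg", "my-job", "my-input", patch, if_match=etag)
+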
update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + job_name, # type: str + input_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes an input from the streaming job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param input_name: The name of the input. + :type input_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'inputName': self._serialize.url("input_name", input_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + job_name, # type: str + input_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.Input" + """Gets details about the specified input. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param input_name: The name of the input. 
+ :type input_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Input, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.Input + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Input"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'inputName': self._serialize.url("input_name", input_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Input', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}'} # type: ignore + + def list_by_streaming_job( + self, + resource_group_name, # type: str + job_name, # type: str + select=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.InputListResult"] + """Lists all of the inputs under the specified streaming job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param select: The $select OData query parameter. This is a comma-separated list of structural + properties to include in the response, or "\ *" to include all properties. By default, all + properties are returned except diagnostics. Currently only accepts '*\ ' as a valid value. 
+ :type select: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either InputListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.InputListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.InputListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_streaming_job.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('InputListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_streaming_job.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs'} # type: ignore + + def _test_initial( + self, + resource_group_name, # type: str + job_name, # type: str + input_name, # type: str + input=None, # type: Optional["models.Input"] + **kwargs # type: Any + ): + # type: (...) 
-> Optional["models.ResourceTestStatus"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ResourceTestStatus"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._test_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'inputName': self._serialize.url("input_name", input_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if input is not None: + body_content = self._serialize.body(input, 'Input') + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('ResourceTestStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _test_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}/test'} # type: ignore + + def begin_test( + self, + resource_group_name, # type: str + job_name, # type: str + input_name, # type: str + input=None, # type: Optional["models.Input"] + **kwargs # type: Any + ): + # type: (...) -> LROPoller["models.ResourceTestStatus"] + """Tests whether an input’s datasource is reachable and usable by the Azure Stream Analytics + service. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param input_name: The name of the input. + :type input_name: str + :param input: If the input specified does not already exist, this parameter must contain the + full input definition intended to be tested. If the input specified already exists, this + parameter can be left null to test the existing input as is or if specified, the properties + specified will overwrite the corresponding properties in the existing input (exactly like a + PATCH operation) and the resulting input will be tested. 
+ :type input: ~stream_analytics_management_client.models.Input + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either ResourceTestStatus or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~stream_analytics_management_client.models.ResourceTestStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.ResourceTestStatus"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._test_initial( + resource_group_name=resource_group_name, + job_name=job_name, + input_name=input_name, + input=input, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('ResourceTestStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_test.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}/test'} # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_operations.py new file mode 100644 index 000000000000..1a63db586859 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_operations.py @@ -0,0 +1,109 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class Operations(object): + """Operations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~stream_analytics_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.OperationListResult"] + """Lists all of the available Stream Analytics related operations. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either OperationListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.OperationListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('OperationListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + 
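+                # 401/404/409 responses are translated to typed exceptions via error_map;
+                # any other non-200 status raises HttpResponseError with ARM error formatting.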
map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/providers/Microsoft.StreamAnalytics/operations'} # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_outputs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_outputs_operations.py new file mode 100644 index 000000000000..a18f17686979 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_outputs_operations.py @@ -0,0 +1,566 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class OutputsOperations(object): + """OutputsOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~stream_analytics_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def create_or_replace( + self, + resource_group_name, # type: str + job_name, # type: str + output_name, # type: str + output, # type: "models.Output" + if_match=None, # type: Optional[str] + if_none_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.Output" + """Creates an output or replaces an already existing output under an existing streaming job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param output_name: The name of the output. 
+ :type output_name: str + :param output: The definition of the output that will be used to create a new output or replace + the existing one under the streaming job. + :type output: ~stream_analytics_management_client.models.Output + :param if_match: The ETag of the output. Omit this value to always overwrite the current + output. Specify the last-seen ETag value to prevent accidentally overwriting concurrent + changes. + :type if_match: str + :param if_none_match: Set to '*' to allow a new output to be created, but to prevent updating + an existing output. Other values will result in a 412 Pre-condition Failed response. + :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Output, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.Output + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Output"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_replace.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'outputName': self._serialize.url("output_name", output_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(output, 'Output') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Output', pipeline_response) + + if response.status_code == 201: + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Output', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, 
response_headers)
+
+        return deserialized
+    create_or_replace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}'}  # type: ignore
+
+    def update(
+        self,
+        resource_group_name,  # type: str
+        job_name,  # type: str
+        output_name,  # type: str
+        output,  # type: "models.Output"
+        if_match=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.Output"
+        """Updates an existing output under an existing streaming job. This can be used to partially
+        update (ie. update one or two properties) an output without affecting the rest of the job or
+        output definition.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param job_name: The name of the streaming job.
+        :type job_name: str
+        :param output_name: The name of the output.
+        :type output_name: str
+        :param output: An Output object. The properties specified here will overwrite the corresponding
+         properties in the existing output (ie. Those properties will be updated). Any properties that
+         are set to null here will mean that the corresponding property in the existing output will
+         remain the same and not change as a result of this PATCH operation.
+        :type output: ~stream_analytics_management_client.models.Output
+        :param if_match: The ETag of the output. Omit this value to always overwrite the current
+         output. Specify the last-seen ETag value to prevent accidentally overwriting concurrent
+         changes.
+        :type if_match: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Output, or the result of cls(response)
+        :rtype: ~stream_analytics_management_client.models.Output
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.Output"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2017-04-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
+            'jobName': self._serialize.url("job_name", job_name, 'str'),
+            'outputName': self._serialize.url("output_name", output_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        if if_match is not None:
+            header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(output, 'Output')
+        body_content_kwargs['content'] = body_content
+        request = self._client.patch(url, query_parameters, header_parameters,
**body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Output', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + job_name, # type: str + output_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes an output from the streaming job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param output_name: The name of the output. + :type output_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'outputName': self._serialize.url("output_name", output_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + job_name, # type: str + output_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.Output" + """Gets details about the specified output. + + :param resource_group_name: The name of the resource group. The name is case insensitive. 
+ :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param output_name: The name of the output. + :type output_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Output, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.Output + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Output"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'outputName': self._serialize.url("output_name", output_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Output', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}'} # type: ignore + + def list_by_streaming_job( + self, + resource_group_name, # type: str + job_name, # type: str + select=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.OutputListResult"] + """Lists all of the outputs under the specified streaming job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param select: The $select OData query parameter. This is a comma-separated list of structural + properties to include in the response, or "\ *" to include all properties. By default, all + properties are returned except diagnostics. Currently only accepts '*\ ' as a valid value. 
+ :type select: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either OutputListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.OutputListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.OutputListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_streaming_job.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('OutputListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_streaming_job.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs'} # type: ignore + + def _test_initial( + self, + resource_group_name, # type: str + job_name, # type: str + output_name, # type: str + output=None, # type: Optional["models.Output"] + **kwargs # type: Any + ): + # type: (...) 
-> Optional["models.ResourceTestStatus"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ResourceTestStatus"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._test_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'outputName': self._serialize.url("output_name", output_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if output is not None: + body_content = self._serialize.body(output, 'Output') + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('ResourceTestStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _test_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}/test'} # type: ignore + + def begin_test( + self, + resource_group_name, # type: str + job_name, # type: str + output_name, # type: str + output=None, # type: Optional["models.Output"] + **kwargs # type: Any + ): + # type: (...) -> LROPoller["models.ResourceTestStatus"] + """Tests whether an output’s datasource is reachable and usable by the Azure Stream Analytics + service. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param output_name: The name of the output. + :type output_name: str + :param output: If the output specified does not already exist, this parameter must contain the + full output definition intended to be tested. If the output specified already exists, this + parameter can be left null to test the existing output as is or if specified, the properties + specified will overwrite the corresponding properties in the existing output (exactly like a + PATCH operation) and the resulting output will be tested. 
+ :type output: ~stream_analytics_management_client.models.Output + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either ResourceTestStatus or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~stream_analytics_management_client.models.ResourceTestStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.ResourceTestStatus"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._test_initial( + resource_group_name=resource_group_name, + job_name=job_name, + output_name=output_name, + output=output, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('ResourceTestStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_test.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}/test'} # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_private_endpoints_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_private_endpoints_operations.py new file mode 100644 index 000000000000..e194d816d90c --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_private_endpoints_operations.py @@ -0,0 +1,389 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class PrivateEndpointsOperations(object): + """PrivateEndpointsOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~stream_analytics_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def create_or_update( + self, + resource_group_name, # type: str + cluster_name, # type: str + private_endpoint_name, # type: str + private_endpoint, # type: "models.PrivateEndpoint" + if_match=None, # type: Optional[str] + if_none_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.PrivateEndpoint" + """Creates a Stream Analytics Private Endpoint or replaces an already existing Private Endpoint. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_name: The name of the cluster. + :type cluster_name: str + :param private_endpoint_name: The name of the private endpoint. + :type private_endpoint_name: str + :param private_endpoint: The definition of the private endpoint that will be used to create a + new cluster or replace the existing one. + :type private_endpoint: ~stream_analytics_management_client.models.PrivateEndpoint + :param if_match: The ETag of the resource. Omit this value to always overwrite the current + record set. Specify the last-seen ETag value to prevent accidentally overwriting concurrent + changes. + :type if_match: str + :param if_none_match: Set to '*' to allow a new resource to be created, but to prevent updating + an existing record set. Other values will result in a 412 Pre-condition Failed response. 
+ :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpoint, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.PrivateEndpoint + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpoint"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-03-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + 'privateEndpointName': self._serialize.url("private_endpoint_name", private_endpoint_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(private_endpoint, 'PrivateEndpoint') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('PrivateEndpoint', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('PrivateEndpoint', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + cluster_name, # type: str + private_endpoint_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.PrivateEndpoint" + """Gets information about the specified Private Endpoint. + + :param resource_group_name: The name of the resource group. The name is case insensitive. 
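The If-Match/If-None-Match plumbing in create_or_update above implements standard ARM optimistic concurrency. A brief sketch, assuming `client` and a models.PrivateEndpoint instance `endpoint_definition` built elsewhere; the resource names are placeholders and the `etag` attribute follows the generated model.

    # Create-only: '*' makes the service answer 412 if the endpoint already exists.
    created = client.private_endpoints.create_or_update(
        "myResourceGroup", "myCluster", "myEndpoint", endpoint_definition,
        if_none_match='*',
    )

    # Guarded replace: pin the write to the ETag we last saw, so a concurrent
    # writer produces a 412 instead of a silent lost update.
    client.private_endpoints.create_or_update(
        "myResourceGroup", "myCluster", "myEndpoint", endpoint_definition,
        if_match=created.etag,
    )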
+ :type resource_group_name: str + :param cluster_name: The name of the cluster. + :type cluster_name: str + :param private_endpoint_name: The name of the private endpoint. + :type private_endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpoint, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.PrivateEndpoint + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpoint"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-03-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + 'privateEndpointName': self._serialize.url("private_endpoint_name", private_endpoint_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PrivateEndpoint', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}'} # type: ignore + + def _delete_initial( + self, + resource_group_name, # type: str + cluster_name, # type: str + private_endpoint_name, # type: str + **kwargs # type: Any + ): + # type: (...) 
-> None + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-03-01-preview" + accept = "application/json" + + # Construct URL + url = self._delete_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + 'privateEndpointName': self._serialize.url("private_endpoint_name", private_endpoint_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}'} # type: ignore + + def begin_delete( + self, + resource_group_name, # type: str + cluster_name, # type: str + private_endpoint_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Delete the specified private endpoint. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_name: The name of the cluster. + :type cluster_name: str + :param private_endpoint_name: The name of the private endpoint. + :type private_endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + cluster_name=cluster_name, + private_endpoint_name=private_endpoint_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}'} # type: ignore + + def list_by_cluster( + self, + resource_group_name, # type: str + cluster_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.PrivateEndpointListResult"] + """Lists the private endpoints in the cluster. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_name: The name of the cluster. 
+ :type cluster_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either PrivateEndpointListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.PrivateEndpointListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-03-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_cluster.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('PrivateEndpointListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.Error, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_cluster.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints'} # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_streaming_jobs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_streaming_jobs_operations.py new file mode 100644 index 000000000000..f3228537a7ed --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_streaming_jobs_operations.py @@ -0,0 +1,821 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
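The pager assembled by list_by_cluster above (prepare_request/extract_data/get_next) is consumed lazily through azure.core.paging.ItemPaged. A minimal sketch, assuming `client` exists and the names are placeholders:

    # Item-level iteration; pages are fetched on demand as the loop advances.
    for endpoint in client.private_endpoints.list_by_cluster("myResourceGroup", "myCluster"):
        print(endpoint.name)

    # Page-level iteration, e.g. to checkpoint progress between service round-trips.
    for page in client.private_endpoints.list_by_cluster("myResourceGroup", "myCluster").by_page():
        for endpoint in page:
            print(endpoint.id)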
+# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class StreamingJobsOperations(object): + """StreamingJobsOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~stream_analytics_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def _create_or_replace_initial( + self, + resource_group_name, # type: str + job_name, # type: str + streaming_job, # type: "models.StreamingJob" + if_match=None, # type: Optional[str] + if_none_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) 
-> "models.StreamingJob" + cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJob"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._create_or_replace_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(streaming_job, 'StreamingJob') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('StreamingJob', pipeline_response) + + if response.status_code == 201: + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('StreamingJob', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + _create_or_replace_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}'} # type: ignore + + def begin_create_or_replace( + self, + resource_group_name, # type: str + job_name, # type: str + streaming_job, # type: "models.StreamingJob" + if_match=None, # type: Optional[str] + if_none_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> LROPoller["models.StreamingJob"] + """Creates a streaming job or replaces an already existing streaming job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. 
+        :type job_name: str
+        :param streaming_job: The definition of the streaming job that will be used to create a new
+         streaming job or replace the existing one.
+        :type streaming_job: ~stream_analytics_management_client.models.StreamingJob
+        :param if_match: The ETag of the streaming job. Omit this value to always overwrite the current
+         streaming job. Specify the last-seen ETag value to prevent accidentally overwriting concurrent
+         changes.
+        :type if_match: str
+        :param if_none_match: Set to '*' to allow a new streaming job to be created, but to prevent
+         updating an existing streaming job. Other values will result in a 412 Pre-condition Failed
+         response.
+        :type if_none_match: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either StreamingJob or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[~stream_analytics_management_client.models.StreamingJob]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.StreamingJob"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        if cont_token is None:
+            raw_result = self._create_or_replace_initial(
+                resource_group_name=resource_group_name,
+                job_name=job_name,
+                streaming_job=streaming_job,
+                if_match=if_match,
+                if_none_match=if_none_match,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            response_headers = {}
+            response = pipeline_response.http_response
+            response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
+            deserialized = self._deserialize('StreamingJob', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, response_headers)
+            return deserialized
+
+        if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_create_or_replace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}'}  # type: ignore
+
+    def update(
+        self,
+        resource_group_name,  # type: str
+        job_name,  # type: str
+        streaming_job,  # type: "models.StreamingJob"
+        if_match=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.StreamingJob"
+        """Updates an existing streaming job. This can be used to partially update (i.e., update one or
+        two properties) a streaming job without affecting the rest of the job definition.
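A sketch of the partial-update semantics just described, assuming `client` exists; the flattened property name is illustrative of the generated StreamingJob model, and the names are placeholders.

    from azure.mgmt.streamanalytics.models import StreamingJob

    # Only the fields set on the patch object are sent; everything else on the
    # job (inputs, outputs, transformation, ...) is left untouched by the PATCH.
    patch = StreamingJob(events_out_of_order_max_delay_in_seconds=10)
    updated = client.streaming_jobs.update("myResourceGroup", "myJob", patch)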
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param job_name: The name of the streaming job.
+        :type job_name: str
+        :param streaming_job: A streaming job object. The properties specified here will overwrite the
+         corresponding properties in the existing streaming job (i.e., those properties will be
+         updated). Any properties that are set to null here will mean that the corresponding property
+         in the existing streaming job will remain the same and not change as a result of this PATCH
+         operation.
+        :type streaming_job: ~stream_analytics_management_client.models.StreamingJob
+        :param if_match: The ETag of the streaming job. Omit this value to always overwrite the current
+         streaming job. Specify the last-seen ETag value to prevent accidentally overwriting concurrent
+         changes.
+        :type if_match: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: StreamingJob, or the result of cls(response)
+        :rtype: ~stream_analytics_management_client.models.StreamingJob
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.StreamingJob"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2017-04-01-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
+            'jobName': self._serialize.url("job_name", job_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        if if_match is not None:
+            header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        body_content = self._serialize.body(streaming_job, 'StreamingJob')
+        body_content_kwargs['content'] = body_content
+        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
+        deserialized = self._deserialize('StreamingJob', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)
+
+        return deserialized
+    update.metadata = {'url':
'/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}'} # type: ignore + + def _delete_initial( + self, + resource_group_name, # type: str + job_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + + # Construct URL + url = self._delete_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}'} # type: ignore + + def begin_delete( + self, + resource_group_name, # type: str + job_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Deletes a streaming job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
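The polling keywords documented here behave as in any azure-core LRO. A brief sketch, assuming `client` exists and the names are placeholders:

    # Poll every 5 seconds whenever the service sends no Retry-After header.
    client.streaming_jobs.begin_delete(
        "myResourceGroup", "myJob", polling_interval=5,
    ).wait()  # block until deletion completes; raises on failure

    # polling=False returns right after the initial DELETE is accepted;
    # the poller then wraps the raw initial response instead of polling.
    poller = client.streaming_jobs.begin_delete(
        "myResourceGroup", "myJob", polling=False,
    )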
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + job_name=job_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + job_name, # type: str + expand=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.StreamingJob" + """Gets details about the specified streaming job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param expand: The $expand OData query parameter. This is a comma-separated list of additional + streaming job properties to include in the response, beyond the default set returned when this + parameter is absent. The default set is all streaming job properties other than 'inputs', + 'transformation', 'outputs', and 'functions'. 
+ :type expand: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StreamingJob, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.StreamingJob + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJob"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if expand is not None: + query_parameters['$expand'] = self._serialize.query("expand", expand, 'str') + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('StreamingJob', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}'} # type: ignore + + def list_by_resource_group( + self, + resource_group_name, # type: str + expand=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.StreamingJobListResult"] + """Lists all of the streaming jobs in the specified resource group. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param expand: The $expand OData query parameter. This is a comma-separated list of additional + streaming job properties to include in the response, beyond the default set returned when this + parameter is absent. The default set is all streaming job properties other than 'inputs', + 'transformation', 'outputs', and 'functions'. 
+ :type expand: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either StreamingJobListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.StreamingJobListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJobListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if expand is not None: + query_parameters['$expand'] = self._serialize.query("expand", expand, 'str') + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('StreamingJobListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs'} # type: ignore + + def list( + self, + expand=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.StreamingJobListResult"] + """Lists all of the streaming jobs in the given subscription. + + :param expand: The $expand OData query parameter. This is a comma-separated list of additional + streaming job properties to include in the response, beyond the default set returned when this + parameter is absent. The default set is all streaming job properties other than 'inputs', + 'transformation', 'outputs', and 'functions'. 
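A sketch of the $expand behaviour described above, assuming `client` exists; with the parameter set, the normally omitted collections come back populated on each job.

    for job in client.streaming_jobs.list(expand="inputs,outputs"):
        # 'inputs' and 'outputs' are present here only because of $expand.
        print(job.name)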
+ :type expand: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either StreamingJobListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.StreamingJobListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJobListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if expand is not None: + query_parameters['$expand'] = self._serialize.query("expand", expand, 'str') + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('StreamingJobListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/streamingjobs'} # type: ignore + + def _start_initial( + self, + resource_group_name, # type: str + job_name, # type: str + start_job_parameters=None, # type: Optional["models.StartStreamingJobParameters"] + **kwargs # type: Any + ): + # type: (...) 
-> None + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self._start_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if start_job_parameters is not None: + body_content = self._serialize.body(start_job_parameters, 'StartStreamingJobParameters') + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/start'} # type: ignore + + def begin_start( + self, + resource_group_name, # type: str + job_name, # type: str + start_job_parameters=None, # type: Optional["models.StartStreamingJobParameters"] + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Starts a streaming job. Once a job is started it will start processing input events and produce + output. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param start_job_parameters: Parameters applicable to a start streaming job operation. + :type start_job_parameters: ~stream_analytics_management_client.models.StartStreamingJobParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
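A sketch of the begin_start operation above, assuming `client` exists; StartStreamingJobParameters and its output_start_mode values come from the models package added in this patch, and the names are placeholders.

    from azure.mgmt.streamanalytics.models import StartStreamingJobParameters

    # Begin emitting output from the job's start time; 'CustomTime' plus an
    # output_start_time would resume from an explicit point in time instead.
    params = StartStreamingJobParameters(output_start_mode="JobStartTime")
    client.streaming_jobs.begin_start("myResourceGroup", "myJob", params).result()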
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._start_initial( + resource_group_name=resource_group_name, + job_name=job_name, + start_job_parameters=start_job_parameters, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/start'} # type: ignore + + def _stop_initial( + self, + resource_group_name, # type: str + job_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + + # Construct URL + url = self._stop_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/stop'} # type: ignore + + def begin_stop( + self, + resource_group_name, # type: str + job_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Stops a running streaming job. 
This will cause a running streaming job to stop processing input + events and producing output. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._stop_initial( + resource_group_name=resource_group_name, + job_name=job_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/stop'} # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_subscriptions_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_subscriptions_operations.py new file mode 100644 index 000000000000..81d2bbf70cdb --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_subscriptions_operations.py @@ -0,0 +1,102 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. 
import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class SubscriptionsOperations(object): + """SubscriptionsOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~stream_analytics_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_quotas( + self, + location, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.SubscriptionQuotasListResult" + """Retrieves the subscription's current quota information in a particular region. + + :param location: The region in which to retrieve the subscription's quota information. You can + find out which regions Azure Stream Analytics is supported in here: + https://azure.microsoft.com/en-us/regions/. + :type location: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SubscriptionQuotasListResult, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.SubscriptionQuotasListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.SubscriptionQuotasListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + accept = "application/json" + + # Construct URL + url = self.list_quotas.metadata['url'] # type: ignore + path_format_arguments = { + 'location': self._serialize.url("location", location, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('SubscriptionQuotasListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_quotas.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/quotas'} # type: ignore diff --git 
a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_transformations_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_transformations_operations.py new file mode 100644 index 000000000000..49f318e3a748 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_transformations_operations.py @@ -0,0 +1,290 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class TransformationsOperations(object): + """TransformationsOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~stream_analytics_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def create_or_replace( + self, + resource_group_name, # type: str + job_name, # type: str + transformation_name, # type: str + transformation, # type: "models.Transformation" + if_match=None, # type: Optional[str] + if_none_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.Transformation" + """Creates a transformation or replaces an already existing transformation under an existing + streaming job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param transformation_name: The name of the transformation. + :type transformation_name: str + :param transformation: The definition of the transformation that will be used to create a new + transformation or replace the existing one under the streaming job. + :type transformation: ~stream_analytics_management_client.models.Transformation + :param if_match: The ETag of the transformation. Omit this value to always overwrite the + current transformation. Specify the last-seen ETag value to prevent accidentally overwriting + concurrent changes. 
+ :type if_match: str
+ :param if_none_match: Set to '*' to allow a new transformation to be created, but to prevent
+ updating an existing transformation. Other values will result in a 412 Precondition Failed
+ response.
+ :type if_none_match: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: Transformation, or the result of cls(response)
+ :rtype: ~stream_analytics_management_client.models.Transformation
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.Transformation"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2017-04-01-preview"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.create_or_replace.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
+ 'jobName': self._serialize.url("job_name", job_name, 'str'),
+ 'transformationName': self._serialize.url("transformation_name", transformation_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ if if_match is not None:
+ header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
+ if if_none_match is not None:
+ header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(transformation, 'Transformation')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 200:
+ response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
+ deserialized = self._deserialize('Transformation', pipeline_response)
+
+ if response.status_code == 201:
+ response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
+ deserialized = self._deserialize('Transformation', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+
+ return deserialized
+ create_or_replace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}'} # type: ignore
+
+ def update(
+ self,
+ resource_group_name, # type: str
+ job_name, # type: str
+ transformation_name, # type: str
+ transformation, # type: "models.Transformation"
+ if_match=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.Transformation"
+ """Updates an existing transformation under an existing streaming job. This can be used to
+ partially update (i.e. update one or two properties) a transformation without affecting the rest
+ of the job or transformation definition.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param job_name: The name of the streaming job.
+ :type job_name: str
+ :param transformation_name: The name of the transformation.
+ :type transformation_name: str
+ :param transformation: A Transformation object. The properties specified here will overwrite
+ the corresponding properties in the existing transformation (i.e. those properties will be
+ updated). Any properties that are set to null here will mean that the corresponding property in
+ the existing transformation will remain the same and not change as a result of this PATCH
+ operation.
+ :type transformation: ~stream_analytics_management_client.models.Transformation
+ :param if_match: The ETag of the transformation. Omit this value to always overwrite the
+ current transformation. Specify the last-seen ETag value to prevent accidentally overwriting
+ concurrent changes.
+ :type if_match: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: Transformation, or the result of cls(response)
+ :rtype: ~stream_analytics_management_client.models.Transformation
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.Transformation"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2017-04-01-preview"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
+ 'jobName': self._serialize.url("job_name", job_name, 'str'),
+ 'transformationName': self._serialize.url("transformation_name", transformation_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ if if_match is not None:
+ header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(transformation, 'Transformation')
+ body_content_kwargs['content'] = body_content
+ request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+
response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Transformation', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + job_name, # type: str + transformation_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.Transformation" + """Gets details about the specified transformation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param transformation_name: The name of the transformation. + :type transformation_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Transformation, or the result of cls(response) + :rtype: ~stream_analytics_management_client.models.Transformation + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Transformation"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'transformationName': self._serialize.url("transformation_name", transformation_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Transformation', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}'} # type: ignore
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/py.typed b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/py.typed
new file mode 100644
index 000000000000..e5aff4f83af8
--- /dev/null
+++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/py.typed
@@ -0,0 +1 @@
+# Marker file for PEP 561.
\ No newline at end of file
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/dev_requirements.txt b/sdk/streamanalytics/azure-mgmt-streamanalytics/dev_requirements.txt
new file mode 100644
index 000000000000..8625fcf6ef8d
--- /dev/null
+++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/dev_requirements.txt
@@ -0,0 +1 @@
+aiohttp>=3.0; python_version >= '3.5'
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/sdk_packaging.toml b/sdk/streamanalytics/azure-mgmt-streamanalytics/sdk_packaging.toml
new file mode 100644
index 000000000000..7f400816b472
--- /dev/null
+++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/sdk_packaging.toml
@@ -0,0 +1,9 @@
+[packaging]
+package_name = "azure-mgmt-streamanalytics"
+package_nspkg = "azure-mgmt-nspkg"
+package_pprint_name = "Stream Analytics Management"
+package_doc_id = "?view=azure-python-preview"
+is_stable = false
+is_arm = true
+need_msrestazure = false
+need_azuremgmtcore = true
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/setup.cfg b/sdk/streamanalytics/azure-mgmt-streamanalytics/setup.cfg
new file mode 100644
index 000000000000..3c6e79cf31da
--- /dev/null
+++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/setup.cfg
@@ -0,0 +1,2 @@
+[bdist_wheel]
+universal=1
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/setup.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/setup.py
new file mode 100644
index 000000000000..bebe1ecec2de
--- /dev/null
+++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/setup.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python
+
+#-------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#--------------------------------------------------------------------------
+
+import re
+import os.path
+from io import open
+from setuptools import find_packages, setup
+
+# Change the PACKAGE_NAME only; the folder path and namespace below are derived from it
+PACKAGE_NAME = "azure-mgmt-streamanalytics"
+PACKAGE_PPRINT_NAME = "Stream Analytics Management"
+
+# a-b-c => a/b/c
+package_folder_path = PACKAGE_NAME.replace('-', '/')
+# a-b-c => a.b.c
+namespace_name = PACKAGE_NAME.replace('-', '.')
+
+# azure v0.x is not compatible with this package
+# azure v0.x used to have a __version__ attribute (newer versions don't)
+try:
+ import azure
+ try:
+ ver = azure.__version__
+ raise Exception(
+ 'This package is incompatible with azure=={}. '.format(ver) +
+ 'Uninstall it with "pip uninstall azure".'
+ )
+ except AttributeError:
+ pass
+except ImportError:
+ pass
+
+# Version extraction inspired by 'requests'
+with open(os.path.join(package_folder_path, 'version.py')
+ if os.path.exists(os.path.join(package_folder_path, 'version.py'))
+ else os.path.join(package_folder_path, '_version.py'), 'r') as fd:
+ version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]',
+ fd.read(), re.MULTILINE).group(1)
+
+if not version:
+ raise RuntimeError('Cannot find version information')
+
+with open('README.md', encoding='utf-8') as f:
+ readme = f.read()
+with open('CHANGELOG.md', encoding='utf-8') as f:
+ changelog = f.read()
+
+setup(
+ name=PACKAGE_NAME,
+ version=version,
+ description='Microsoft Azure {} Client Library for Python'.format(PACKAGE_PPRINT_NAME),
+ long_description=readme + '\n\n' + changelog,
+ long_description_content_type='text/markdown',
+ license='MIT License',
+ author='Microsoft Corporation',
+ author_email='azpysdkhelp@microsoft.com',
+ url='https://github.com/Azure/azure-sdk-for-python',
+ classifiers=[
+ 'Development Status :: 4 - Beta',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
+ 'Programming Language :: Python :: 3.8',
+ 'License :: OSI Approved :: MIT License',
+ ],
+ zip_safe=False,
+ packages=find_packages(exclude=[
+ 'tests',
+ # Exclude packages that will be covered by PEP420 or nspkg
+ 'azure',
+ 'azure.mgmt',
+ ]),
+ install_requires=[
+ 'msrest>=0.5.0',
+ 'azure-common~=1.1',
+ 'azure-mgmt-core>=1.2.0,<2.0.0',
+ ],
+ extras_require={
+ ":python_version<'3.0'": ['azure-mgmt-nspkg'],
+ }
+)
From cfb561dc565390664d70a9bf77db94e47483051c Mon Sep 17 00:00:00 2001
From: Changlong Liu
Date: Fri, 18 Sep 2020 16:32:57 +0800
Subject: [PATCH 2/2] add init.py
---
sdk/streamanalytics/azure-mgmt-streamanalytics/azure/__init__.py | 1 +
.../azure-mgmt-streamanalytics/azure/mgmt/__init__.py | 1 +
2 files changed, 2 insertions(+)
create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/__init__.py
create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/__init__.py
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/__init__.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/__init__.py
new file mode 100644
index 000000000000..0260537a02bb
--- /dev/null
+++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/__init__.py
@@ -0,0 +1 @@
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
\ No newline at end of file
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/__init__.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/__init__.py
new file mode 100644
index 000000000000..0260537a02bb
--- /dev/null
+++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/__init__.py
@@ -0,0 +1 @@
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
\ No newline at end of file
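
Usage note (not part of the patch): a minimal sketch of the create_or_replace operation added in _transformations_operations.py above. It assumes azure-identity is installed alongside this package for DefaultAzureCredential, the subscription and resource names are hypothetical placeholders, and the streaming_units and query fields are assumed from this package's Transformation model.

from azure.identity import DefaultAzureCredential
from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient
from azure.mgmt.streamanalytics.models import Transformation

# "<subscription-id>" and the resource names below are hypothetical placeholders
client = StreamAnalyticsManagementClient(DefaultAzureCredential(), "<subscription-id>")

# streaming_units and query are assumed fields of this package's Transformation model
transformation = Transformation(
    streaming_units=6,
    query="SELECT * INTO [output] FROM [input]",
)

# if_none_match='*' makes this a create-only call: if the transformation
# already exists, the service returns 412 Precondition Failed instead of
# replacing it, per the docstring above
created = client.transformations.create_or_replace(
    "my-resource-group",
    "my-streaming-job",
    "Transformation",
    transformation,
    if_none_match="*",
)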
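
A sketch of the conditional-update flow that update's if_match parameter enables, continuing the example above. The cls callback shape (pipeline_response, deserialized, response_headers) and the 'ETag' response header are taken directly from the generated code; the resource names remain hypothetical.

# Re-read the transformation and capture the ETag response header via cls;
# the callback receives exactly (pipeline_response, deserialized, response_headers)
current, headers = client.transformations.get(
    "my-resource-group",
    "my-streaming-job",
    "Transformation",
    cls=lambda pipeline_response, deserialized, response_headers: (deserialized, response_headers),
)

# A PATCH body: properties left unset (null) keep their existing values,
# per the update docstring
patch = Transformation(streaming_units=12)

# Passing the last-seen ETag as If-Match makes the PATCH fail with 412 if the
# transformation was changed concurrently, instead of silently overwriting it
updated = client.transformations.update(
    "my-resource-group",
    "my-streaming-job",
    "Transformation",
    patch,
    if_match=headers["ETag"],
)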
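
Finally, a sketch of error handling under the same assumptions. The generated error_map turns 401/404/409 into typed azure.core exceptions; anything else, including 412 from a failed If-Match or If-None-Match precondition, is raised as HttpResponseError formatted via ARMErrorFormat.

from azure.core.exceptions import HttpResponseError, ResourceNotFoundError

try:
    client.transformations.get("my-resource-group", "no-such-job", "Transformation")
except ResourceNotFoundError:
    # 404 is mapped to a typed exception by the error_map in the operations above
    print("streaming job or transformation not found")
except HttpResponseError as err:
    # everything else, e.g. 412 from a stale If-Match, surfaces here
    print(err.status_code, err.message)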