diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/CHANGELOG.md b/sdk/streamanalytics/azure-mgmt-streamanalytics/CHANGELOG.md index 15d67b7d6167..fd5715094782 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/CHANGELOG.md +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/CHANGELOG.md @@ -1,5 +1,39 @@ # Release History +## 1.0.0 (2022-01-06) + +**Features** + + - Added operation StreamingJobsOperations.begin_scale + - Model AzureSqlReferenceInputDataSource has a new parameter database + - Model AzureSqlReferenceInputDataSource has a new parameter delta_snapshot_query + - Model AzureSqlReferenceInputDataSource has a new parameter full_snapshot_query + - Model AzureSqlReferenceInputDataSource has a new parameter password + - Model AzureSqlReferenceInputDataSource has a new parameter refresh_rate + - Model AzureSqlReferenceInputDataSource has a new parameter refresh_type + - Model AzureSqlReferenceInputDataSource has a new parameter server + - Model AzureSqlReferenceInputDataSource has a new parameter table + - Model AzureSqlReferenceInputDataSource has a new parameter user + - Model Cluster has a new parameter capacity_allocated + - Model Cluster has a new parameter capacity_assigned + - Model Cluster has a new parameter cluster_id + - Model Cluster has a new parameter created_date + - Model Cluster has a new parameter provisioning_state + - Model Operation has a new parameter is_data_action + - Model PrivateEndpoint has a new parameter created_date + - Model PrivateEndpoint has a new parameter manual_private_link_service_connections + - Model Transformation has a new parameter valid_streaming_units + +**Breaking changes** + + - Model AzureSqlReferenceInputDataSource no longer has parameter properties + - Model Cluster no longer has parameter properties + - Model FunctionProperties no longer has parameter binding + - Model FunctionProperties no longer has parameter inputs + - Model FunctionProperties no longer has parameter output + - Model PrivateEndpoint no longer has parameter properties + - Model StreamingJob no longer has parameter externals + ## 1.0.0rc1 (2020-09-18) - Initial Release diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/_meta.json b/sdk/streamanalytics/azure-mgmt-streamanalytics/_meta.json new file mode 100644 index 000000000000..9ecc2a71e645 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/_meta.json @@ -0,0 +1,11 @@ +{ + "autorest": "3.7.2", + "use": [ + "@autorest/python@5.12.0", + "@autorest/modelerfour@4.19.3" + ], + "commit": "0ca8399b0e4fb4ae4608ffd0a6c056213664dacd", + "repository_url": "https://github.com/Azure/azure-rest-api-specs", + "autorest_command": "autorest specification/streamanalytics/resource-manager/readme.md --multiapi --python --python-mode=update --python-sdks-folder=/home/vsts/work/1/s/azure-sdk-for-python/sdk --python3-only --track2 --use=@autorest/python@5.12.0 --use=@autorest/modelerfour@4.19.3 --version=3.7.2", + "readme": "specification/streamanalytics/resource-manager/readme.md" +} \ No newline at end of file diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/__init__.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/__init__.py index 83390f96aa83..8d459f9a001f 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/__init__.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/__init__.py @@ -12,8 +12,7 @@ __version__ = VERSION __all__ = 
['StreamAnalyticsManagementClient'] -try: - from ._patch import patch_sdk # type: ignore - patch_sdk() -except ImportError: - pass +# `._patch.py` is used for handwritten extensions to the generated code +# Example: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md +from ._patch import patch_sdk +patch_sdk() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_configuration.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_configuration.py index 1034fe20616d..93696b9ba2dc 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_configuration.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_configuration.py @@ -6,18 +6,16 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING +from typing import Any, TYPE_CHECKING from azure.core.configuration import Configuration from azure.core.pipeline import policies -from azure.mgmt.core.policies import ARMHttpLoggingPolicy +from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy from ._version import VERSION if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from typing import Any - from azure.core.credentials import TokenCredential @@ -35,19 +33,19 @@ class StreamAnalyticsManagementClientConfiguration(Configuration): def __init__( self, - credential, # type: "TokenCredential" - subscription_id, # type: str - **kwargs # type: Any - ): - # type: (...) -> None + credential: "TokenCredential", + subscription_id: str, + **kwargs: Any + ) -> None: + super(StreamAnalyticsManagementClientConfiguration, self).__init__(**kwargs) if credential is None: raise ValueError("Parameter 'credential' must not be None.") if subscription_id is None: raise ValueError("Parameter 'subscription_id' must not be None.") - super(StreamAnalyticsManagementClientConfiguration, self).__init__(**kwargs) self.credential = credential self.subscription_id = subscription_id + self.api_version = "2020-03-01" self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) kwargs.setdefault('sdk_moniker', 'mgmt-streamanalytics/{}'.format(VERSION)) self._configure(**kwargs) @@ -67,4 +65,4 @@ def _configure( self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) self.authentication_policy = kwargs.get('authentication_policy') if self.credential and not self.authentication_policy: - self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) + self.authentication_policy = ARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_metadata.json b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_metadata.json index 5ff7c85f8f06..7cf93e4b6c02 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_metadata.json +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_metadata.json @@ -1,39 +1,42 @@ { - "chosen_version": "", - "total_api_version_list": ["2017-04-01-preview", "2020-03-01-preview"], + "chosen_version": "2020-03-01", + "total_api_version_list": ["2020-03-01"], "client": { "name": 
"StreamAnalyticsManagementClient", "filename": "_stream_analytics_management_client", "description": "Stream Analytics Client.", - "base_url": "\u0027https://management.azure.com\u0027", - "custom_base_url": null, + "host_value": "\"https://management.azure.com\"", + "parameterized_host_template": null, "azure_arm": true, - "has_lro_operations": true + "has_lro_operations": true, + "client_side_validation": false, + "sync_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials\": [\"TokenCredential\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"msrest\": [\"Deserializer\", \"Serializer\"], \"azure.mgmt.core\": [\"ARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"StreamAnalyticsManagementClientConfiguration\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}}}", + "async_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials_async\": [\"AsyncTokenCredential\"], \"azure.core.credentials\": [\"TokenCredential\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"msrest\": [\"Deserializer\", \"Serializer\"], \"azure.mgmt.core\": [\"AsyncARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"StreamAnalyticsManagementClientConfiguration\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}}}" }, "global_parameters": { - "sync_method": { + "sync": { "credential": { - "method_signature": "credential, # type: \"TokenCredential\"", + "signature": "credential, # type: \"TokenCredential\"", "description": "Credential needed for the client to connect to Azure.", "docstring_type": "~azure.core.credentials.TokenCredential", "required": true }, "subscription_id": { - "method_signature": "subscription_id, # type: str", + "signature": "subscription_id, # type: str", "description": "The ID of the target subscription.", "docstring_type": "str", "required": true } }, - "async_method": { + "async": { "credential": { - "method_signature": "credential, # type: \"AsyncTokenCredential\"", + "signature": "credential: \"AsyncTokenCredential\",", "description": "Credential needed for the client to connect to Azure.", "docstring_type": "~azure.core.credentials_async.AsyncTokenCredential", "required": true }, "subscription_id": { - "method_signature": "subscription_id, # type: str", + "signature": "subscription_id: str,", "description": "The ID of the target subscription.", "docstring_type": "str", "required": true @@ -41,28 +44,67 @@ }, "constant": { }, - "call": "credential, subscription_id" + "call": "credential, subscription_id", + "service_client_specific": { + "sync": { + "api_version": { + "signature": "api_version=None, # type: Optional[str]", + "description": "API version to use if no profile is provided, or if missing in profile.", + "docstring_type": "str", + "required": false + }, + "base_url": { + "signature": "base_url=\"https://management.azure.com\", # type: str", + "description": "Service URL", + "docstring_type": "str", + "required": false + }, + "profile": { + "signature": "profile=KnownProfiles.default, # type: KnownProfiles", + "description": "A profile definition, from KnownProfiles to dict.", + "docstring_type": "azure.profiles.KnownProfiles", + "required": false + } + }, + "async": { + "api_version": { + "signature": "api_version: Optional[str] = None,", + "description": "API version to use if no 
profile is provided, or if missing in profile.", + "docstring_type": "str", + "required": false + }, + "base_url": { + "signature": "base_url: str = \"https://management.azure.com\",", + "description": "Service URL", + "docstring_type": "str", + "required": false + }, + "profile": { + "signature": "profile: KnownProfiles = KnownProfiles.default,", + "description": "A profile definition, from KnownProfiles to dict.", + "docstring_type": "azure.profiles.KnownProfiles", + "required": false + } + } + } }, "config": { "credential": true, "credential_scopes": ["https://management.azure.com/.default"], - "credential_default_policy_type": "BearerTokenCredentialPolicy", - "credential_default_policy_type_has_async_version": true, - "credential_key_header_name": null + "credential_call_sync": "ARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs)", + "credential_call_async": "AsyncARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs)", + "sync_imports": "{\"regular\": {\"azurecore\": {\"azure.core.configuration\": [\"Configuration\"], \"azure.core.pipeline\": [\"policies\"], \"azure.mgmt.core.policies\": [\"ARMChallengeAuthenticationPolicy\", \"ARMHttpLoggingPolicy\"]}, \"local\": {\"._version\": [\"VERSION\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\"]}}, \"typing\": {\"azurecore\": {\"azure.core.credentials\": [\"TokenCredential\"]}}}", + "async_imports": "{\"regular\": {\"azurecore\": {\"azure.core.configuration\": [\"Configuration\"], \"azure.core.pipeline\": [\"policies\"], \"azure.mgmt.core.policies\": [\"ARMHttpLoggingPolicy\", \"AsyncARMChallengeAuthenticationPolicy\"]}, \"local\": {\".._version\": [\"VERSION\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\"]}}, \"typing\": {\"azurecore\": {\"azure.core.credentials_async\": [\"AsyncTokenCredential\"]}}}" }, "operation_groups": { - "functions": "FunctionsOperations", + "operations": "Operations", + "streaming_jobs": "StreamingJobsOperations", "inputs": "InputsOperations", "outputs": "OutputsOperations", - "streaming_jobs": "StreamingJobsOperations", - "subscriptions": "SubscriptionsOperations", "transformations": "TransformationsOperations", - "operations": "Operations", + "functions": "FunctionsOperations", + "subscriptions": "SubscriptionsOperations", "clusters": "ClustersOperations", "private_endpoints": "PrivateEndpointsOperations" - }, - "operation_mixins": { - }, - "sync_imports": "None", - "async_imports": "None" + } } \ No newline at end of file diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_patch.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_patch.py new file mode 100644 index 000000000000..74e48ecd07cf --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_patch.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +# This file is used for handwritten extensions to the generated code. Example: +# https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md +def patch_sdk(): + pass \ No newline at end of file diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_stream_analytics_management_client.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_stream_analytics_management_client.py index 7322c3fb1613..aa0fbce8834f 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_stream_analytics_management_client.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_stream_analytics_management_client.py @@ -6,95 +6,104 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING +from copy import deepcopy +from typing import Any, Optional, TYPE_CHECKING +from azure.core.rest import HttpRequest, HttpResponse from azure.mgmt.core import ARMPipelineClient from msrest import Deserializer, Serializer +from . import models +from ._configuration import StreamAnalyticsManagementClientConfiguration +from .operations import ClustersOperations, FunctionsOperations, InputsOperations, Operations, OutputsOperations, PrivateEndpointsOperations, StreamingJobsOperations, SubscriptionsOperations, TransformationsOperations + if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Optional - from azure.core.credentials import TokenCredential -from ._configuration import StreamAnalyticsManagementClientConfiguration -from .operations import FunctionsOperations -from .operations import InputsOperations -from .operations import OutputsOperations -from .operations import StreamingJobsOperations -from .operations import SubscriptionsOperations -from .operations import TransformationsOperations -from .operations import Operations -from .operations import ClustersOperations -from .operations import PrivateEndpointsOperations -from . import models - - -class StreamAnalyticsManagementClient(object): +class StreamAnalyticsManagementClient: """Stream Analytics Client. 
- :ivar functions: FunctionsOperations operations - :vartype functions: stream_analytics_management_client.operations.FunctionsOperations + :ivar operations: Operations operations + :vartype operations: stream_analytics_management_client.operations.Operations + :ivar streaming_jobs: StreamingJobsOperations operations + :vartype streaming_jobs: stream_analytics_management_client.operations.StreamingJobsOperations :ivar inputs: InputsOperations operations :vartype inputs: stream_analytics_management_client.operations.InputsOperations :ivar outputs: OutputsOperations operations :vartype outputs: stream_analytics_management_client.operations.OutputsOperations - :ivar streaming_jobs: StreamingJobsOperations operations - :vartype streaming_jobs: stream_analytics_management_client.operations.StreamingJobsOperations + :ivar transformations: TransformationsOperations operations + :vartype transformations: + stream_analytics_management_client.operations.TransformationsOperations + :ivar functions: FunctionsOperations operations + :vartype functions: stream_analytics_management_client.operations.FunctionsOperations :ivar subscriptions: SubscriptionsOperations operations :vartype subscriptions: stream_analytics_management_client.operations.SubscriptionsOperations - :ivar transformations: TransformationsOperations operations - :vartype transformations: stream_analytics_management_client.operations.TransformationsOperations - :ivar operations: Operations operations - :vartype operations: stream_analytics_management_client.operations.Operations :ivar clusters: ClustersOperations operations :vartype clusters: stream_analytics_management_client.operations.ClustersOperations :ivar private_endpoints: PrivateEndpointsOperations operations - :vartype private_endpoints: stream_analytics_management_client.operations.PrivateEndpointsOperations + :vartype private_endpoints: + stream_analytics_management_client.operations.PrivateEndpointsOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials.TokenCredential :param subscription_id: The ID of the target subscription. :type subscription_id: str - :param str base_url: Service URL - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :param base_url: Service URL. Default value is 'https://management.azure.com'. + :type base_url: str + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. """ def __init__( self, - credential, # type: "TokenCredential" - subscription_id, # type: str - base_url=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) 
-> None - if not base_url: - base_url = 'https://management.azure.com' - self._config = StreamAnalyticsManagementClientConfiguration(credential, subscription_id, **kwargs) + credential: "TokenCredential", + subscription_id: str, + base_url: str = "https://management.azure.com", + **kwargs: Any + ) -> None: + self._config = StreamAnalyticsManagementClientConfiguration(credential=credential, subscription_id=subscription_id, **kwargs) self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) - self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) + self._serialize.client_side_validation = False + self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) + self.streaming_jobs = StreamingJobsOperations(self._client, self._config, self._serialize, self._deserialize) + self.inputs = InputsOperations(self._client, self._config, self._serialize, self._deserialize) + self.outputs = OutputsOperations(self._client, self._config, self._serialize, self._deserialize) + self.transformations = TransformationsOperations(self._client, self._config, self._serialize, self._deserialize) + self.functions = FunctionsOperations(self._client, self._config, self._serialize, self._deserialize) + self.subscriptions = SubscriptionsOperations(self._client, self._config, self._serialize, self._deserialize) + self.clusters = ClustersOperations(self._client, self._config, self._serialize, self._deserialize) + self.private_endpoints = PrivateEndpointsOperations(self._client, self._config, self._serialize, self._deserialize) + + + def _send_request( + self, + request, # type: HttpRequest + **kwargs: Any + ) -> HttpResponse: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = client._send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. 
+ :rtype: ~azure.core.rest.HttpResponse + """ - self.functions = FunctionsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.inputs = InputsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.outputs = OutputsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.streaming_jobs = StreamingJobsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.subscriptions = SubscriptionsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.transformations = TransformationsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.operations = Operations( - self._client, self._config, self._serialize, self._deserialize) - self.clusters = ClustersOperations( - self._client, self._config, self._serialize, self._deserialize) - self.private_endpoints = PrivateEndpointsOperations( - self._client, self._config, self._serialize, self._deserialize) + request_copy = deepcopy(request) + request_copy.url = self._client.format_url(request_copy.url) + return self._client.send_request(request_copy, **kwargs) def close(self): # type: () -> None diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_vendor.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_vendor.py new file mode 100644 index 000000000000..138f663c53a4 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_vendor.py @@ -0,0 +1,27 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.core.pipeline.transport import HttpRequest + +def _convert_request(request, files=None): + data = request.content if not files else None + request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data) + if files: + request.set_formdata_body(files) + return request + +def _format_url_section(template, **kwargs): + components = template.split("/") + while components: + try: + return template.format(**kwargs) + except KeyError as key: + formatted_components = template.split("/") + components = [ + c for c in formatted_components if "{}".format(key.args[0]) not in c + ] + template = "/".join(components) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_version.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_version.py index 3b909b5c8886..c47f66669f1b 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_version.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -VERSION = "1.0.0rc1" +VERSION = "1.0.0" diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/__init__.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/__init__.py index 372ab5c830f2..a23c47af3d77 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/__init__.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/__init__.py @@ -8,3 +8,8 @@ from ._stream_analytics_management_client import StreamAnalyticsManagementClient __all__ = ['StreamAnalyticsManagementClient'] + +# `._patch.py` is used for handwritten extensions to the generated code +# Example: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md +from ._patch import patch_sdk +patch_sdk() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_configuration.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_configuration.py index 444e27397d52..e5b4066e7159 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_configuration.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_configuration.py @@ -10,7 +10,7 @@ from azure.core.configuration import Configuration from azure.core.pipeline import policies -from azure.mgmt.core.policies import ARMHttpLoggingPolicy +from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy from .._version import VERSION @@ -37,14 +37,15 @@ def __init__( subscription_id: str, **kwargs: Any ) -> None: + super(StreamAnalyticsManagementClientConfiguration, self).__init__(**kwargs) if credential is None: raise ValueError("Parameter 'credential' must not be None.") if subscription_id is None: raise ValueError("Parameter 'subscription_id' must not be None.") - super(StreamAnalyticsManagementClientConfiguration, self).__init__(**kwargs) self.credential = credential self.subscription_id = subscription_id + self.api_version = "2020-03-01" self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) kwargs.setdefault('sdk_moniker', 'mgmt-streamanalytics/{}'.format(VERSION)) self._configure(**kwargs) @@ -63,4 +64,4 @@ def _configure( self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) self.authentication_policy = kwargs.get('authentication_policy') if self.credential and not self.authentication_policy: - self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) + self.authentication_policy = AsyncARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_patch.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_patch.py new file mode 100644 index 000000000000..74e48ecd07cf --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_patch.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +# This file is used for handwritten extensions to the generated code. Example: +# https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md +def patch_sdk(): + pass \ No newline at end of file diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_stream_analytics_management_client.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_stream_analytics_management_client.py index 99472b9429a0..087a69a6403c 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_stream_analytics_management_client.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_stream_analytics_management_client.py @@ -6,92 +6,106 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Optional, TYPE_CHECKING +from copy import deepcopy +from typing import Any, Awaitable, Optional, TYPE_CHECKING +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.mgmt.core import AsyncARMPipelineClient from msrest import Deserializer, Serializer +from .. import models +from ._configuration import StreamAnalyticsManagementClientConfiguration +from .operations import ClustersOperations, FunctionsOperations, InputsOperations, Operations, OutputsOperations, PrivateEndpointsOperations, StreamingJobsOperations, SubscriptionsOperations, TransformationsOperations + if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials_async import AsyncTokenCredential -from ._configuration import StreamAnalyticsManagementClientConfiguration -from .operations import FunctionsOperations -from .operations import InputsOperations -from .operations import OutputsOperations -from .operations import StreamingJobsOperations -from .operations import SubscriptionsOperations -from .operations import TransformationsOperations -from .operations import Operations -from .operations import ClustersOperations -from .operations import PrivateEndpointsOperations -from .. import models - - -class StreamAnalyticsManagementClient(object): +class StreamAnalyticsManagementClient: """Stream Analytics Client. 
- :ivar functions: FunctionsOperations operations - :vartype functions: stream_analytics_management_client.aio.operations.FunctionsOperations + :ivar operations: Operations operations + :vartype operations: stream_analytics_management_client.aio.operations.Operations + :ivar streaming_jobs: StreamingJobsOperations operations + :vartype streaming_jobs: + stream_analytics_management_client.aio.operations.StreamingJobsOperations :ivar inputs: InputsOperations operations :vartype inputs: stream_analytics_management_client.aio.operations.InputsOperations :ivar outputs: OutputsOperations operations :vartype outputs: stream_analytics_management_client.aio.operations.OutputsOperations - :ivar streaming_jobs: StreamingJobsOperations operations - :vartype streaming_jobs: stream_analytics_management_client.aio.operations.StreamingJobsOperations - :ivar subscriptions: SubscriptionsOperations operations - :vartype subscriptions: stream_analytics_management_client.aio.operations.SubscriptionsOperations :ivar transformations: TransformationsOperations operations - :vartype transformations: stream_analytics_management_client.aio.operations.TransformationsOperations - :ivar operations: Operations operations - :vartype operations: stream_analytics_management_client.aio.operations.Operations + :vartype transformations: + stream_analytics_management_client.aio.operations.TransformationsOperations + :ivar functions: FunctionsOperations operations + :vartype functions: stream_analytics_management_client.aio.operations.FunctionsOperations + :ivar subscriptions: SubscriptionsOperations operations + :vartype subscriptions: + stream_analytics_management_client.aio.operations.SubscriptionsOperations :ivar clusters: ClustersOperations operations :vartype clusters: stream_analytics_management_client.aio.operations.ClustersOperations :ivar private_endpoints: PrivateEndpointsOperations operations - :vartype private_endpoints: stream_analytics_management_client.aio.operations.PrivateEndpointsOperations + :vartype private_endpoints: + stream_analytics_management_client.aio.operations.PrivateEndpointsOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param subscription_id: The ID of the target subscription. :type subscription_id: str - :param str base_url: Service URL - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :param base_url: Service URL. Default value is 'https://management.azure.com'. + :type base_url: str + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
""" def __init__( self, credential: "AsyncTokenCredential", subscription_id: str, - base_url: Optional[str] = None, + base_url: str = "https://management.azure.com", **kwargs: Any ) -> None: - if not base_url: - base_url = 'https://management.azure.com' - self._config = StreamAnalyticsManagementClientConfiguration(credential, subscription_id, **kwargs) + self._config = StreamAnalyticsManagementClientConfiguration(credential=credential, subscription_id=subscription_id, **kwargs) self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) - self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) + self._serialize.client_side_validation = False + self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) + self.streaming_jobs = StreamingJobsOperations(self._client, self._config, self._serialize, self._deserialize) + self.inputs = InputsOperations(self._client, self._config, self._serialize, self._deserialize) + self.outputs = OutputsOperations(self._client, self._config, self._serialize, self._deserialize) + self.transformations = TransformationsOperations(self._client, self._config, self._serialize, self._deserialize) + self.functions = FunctionsOperations(self._client, self._config, self._serialize, self._deserialize) + self.subscriptions = SubscriptionsOperations(self._client, self._config, self._serialize, self._deserialize) + self.clusters = ClustersOperations(self._client, self._config, self._serialize, self._deserialize) + self.private_endpoints = PrivateEndpointsOperations(self._client, self._config, self._serialize, self._deserialize) + + + def _send_request( + self, + request: HttpRequest, + **kwargs: Any + ) -> Awaitable[AsyncHttpResponse]: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = await client._send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. 
+ :rtype: ~azure.core.rest.AsyncHttpResponse + """ - self.functions = FunctionsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.inputs = InputsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.outputs = OutputsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.streaming_jobs = StreamingJobsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.subscriptions = SubscriptionsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.transformations = TransformationsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.operations = Operations( - self._client, self._config, self._serialize, self._deserialize) - self.clusters = ClustersOperations( - self._client, self._config, self._serialize, self._deserialize) - self.private_endpoints = PrivateEndpointsOperations( - self._client, self._config, self._serialize, self._deserialize) + request_copy = deepcopy(request) + request_copy.url = self._client.format_url(request_copy.url) + return self._client.send_request(request_copy, **kwargs) async def close(self) -> None: await self._client.close() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/__init__.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/__init__.py index a247559efb05..72cfdc41ec92 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/__init__.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/__init__.py @@ -6,24 +6,24 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from ._functions_operations import FunctionsOperations +from ._operations import Operations +from ._streaming_jobs_operations import StreamingJobsOperations from ._inputs_operations import InputsOperations from ._outputs_operations import OutputsOperations -from ._streaming_jobs_operations import StreamingJobsOperations -from ._subscriptions_operations import SubscriptionsOperations from ._transformations_operations import TransformationsOperations -from ._operations import Operations +from ._functions_operations import FunctionsOperations +from ._subscriptions_operations import SubscriptionsOperations from ._clusters_operations import ClustersOperations from ._private_endpoints_operations import PrivateEndpointsOperations __all__ = [ - 'FunctionsOperations', + 'Operations', + 'StreamingJobsOperations', 'InputsOperations', 'OutputsOperations', - 'StreamingJobsOperations', - 'SubscriptionsOperations', 'TransformationsOperations', - 'Operations', + 'FunctionsOperations', + 'SubscriptionsOperations', 'ClustersOperations', 'PrivateEndpointsOperations', ] diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_clusters_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_clusters_operations.py index 29088ce6710f..1de006867e79 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_clusters_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_clusters_operations.py @@ -5,19 +5,24 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import functools from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling -from ... import models - +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._clusters_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_request, build_list_by_resource_group_request, build_list_by_subscription_request, build_list_streaming_jobs_request, build_update_request_initial T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -35,7 +40,7 @@ class ClustersOperations: :param deserializer: An object model deserializer. 
""" - models = models + models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -47,53 +52,40 @@ async def _create_or_update_initial( self, resource_group_name: str, cluster_name: str, - cluster: "models.Cluster", + cluster: "_models.Cluster", if_match: Optional[str] = None, if_none_match: Optional[str] = None, - **kwargs - ) -> "models.Cluster": - cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + **kwargs: Any + ) -> "_models.Cluster": + cls = kwargs.pop('cls', None) # type: ClsType["_models.Cluster"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._create_or_update_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(cluster, 'Cluster') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(cluster, 'Cluster') + + request = build_create_or_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_name=cluster_name, + content_type=content_type, + json=_json, + if_match=if_match, + if_none_match=if_none_match, + template_url=self._create_or_update_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) if response.status_code == 200: deserialized = self._deserialize('Cluster', pipeline_response) @@ -105,17 +97,20 @@ async def _create_or_update_initial( return cls(pipeline_response, deserialized, {}) return 
deserialized + _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}'} # type: ignore + + @distributed_trace_async async def begin_create_or_update( self, resource_group_name: str, cluster_name: str, - cluster: "models.Cluster", + cluster: "_models.Cluster", if_match: Optional[str] = None, if_none_match: Optional[str] = None, - **kwargs - ) -> AsyncLROPoller["models.Cluster"]: + **kwargs: Any + ) -> AsyncLROPoller["_models.Cluster"]: """Creates a Stream Analytics Cluster or replaces an already existing cluster. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -134,16 +129,20 @@ async def begin_create_or_update( :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either Cluster or the result of cls(response) + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either Cluster or the result of + cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~stream_analytics_management_client.models.Cluster] - :raises ~azure.core.exceptions.HttpResponseError: + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Cluster"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -156,21 +155,21 @@ async def begin_create_or_update( cluster=cluster, if_match=if_match, if_none_match=if_none_match, + content_type=content_type, cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): + response = pipeline_response.http_response deserialized = self._deserialize('Cluster', pipeline_response) - if cls: return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -182,56 +181,45 @@ def get_long_running_output(pipeline_response): ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}'} # type: ignore async def _update_initial( self, resource_group_name: str, cluster_name: str, - cluster: "models.Cluster", + cluster: "_models.Cluster", if_match: Optional[str] = None, - **kwargs - ) -> Optional["models.Cluster"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Cluster"]] + **kwargs: Any + ) -> Optional["_models.Cluster"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.Cluster"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._update_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(cluster, 'Cluster') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(cluster, 'Cluster') + + request = build_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_name=cluster_name, + content_type=content_type, + json=_json, + if_match=if_match, + template_url=self._update_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: @@ -241,16 +229,19 @@ async def _update_initial( return cls(pipeline_response, deserialized, {}) return deserialized + _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}'} # type: ignore + + @distributed_trace_async async def begin_update( self, 
resource_group_name: str, cluster_name: str, - cluster: "models.Cluster", + cluster: "_models.Cluster", if_match: Optional[str] = None, - **kwargs - ) -> AsyncLROPoller["models.Cluster"]: + **kwargs: Any + ) -> AsyncLROPoller["_models.Cluster"]: """Updates an existing cluster. This can be used to partially update (ie. update one or two properties) a cluster without affecting the rest of the cluster definition. @@ -267,16 +258,20 @@ async def begin_update( :type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either Cluster or the result of cls(response) + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either Cluster or the result of + cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~stream_analytics_management_client.models.Cluster] - :raises ~azure.core.exceptions.HttpResponseError: + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Cluster"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -288,21 +283,21 @@ async def begin_update( cluster_name=cluster_name, cluster=cluster, if_match=if_match, + content_type=content_type, cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): + response = pipeline_response.http_response deserialized = self._deserialize('Cluster', pipeline_response) - if cls: return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -314,14 +309,16 @@ def get_long_running_output(pipeline_response): ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}'} # type: ignore + @distributed_trace_async async def get( self, resource_group_name: str, cluster_name: str, - **kwargs - ) -> "models.Cluster": + **kwargs: Any + ) -> "_models.Cluster": """Gets information about the specified cluster. :param resource_group_name: The name of the resource group. 
The name is case insensitive. @@ -333,38 +330,28 @@ async def get( :rtype: ~stream_analytics_management_client.models.Cluster :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Cluster"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_name=cluster_name, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('Cluster', pipeline_response) @@ -373,58 +360,51 @@ async def get( return cls(pipeline_response, deserialized, {}) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}'} # type: ignore + async def _delete_initial( self, resource_group_name: str, cluster_name: str, - **kwargs + **kwargs: Any ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" - accept = "application/json" - - # Construct URL - url = self._delete_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = 
self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_delete_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_name=cluster_name, + template_url=self._delete_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}'} # type: ignore + + @distributed_trace_async async def begin_delete( self, resource_group_name: str, cluster_name: str, - **kwargs + **kwargs: Any ) -> AsyncLROPoller[None]: """Deletes the specified cluster. @@ -434,15 +414,17 @@ async def begin_delete( :type cluster_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod] cls = kwargs.pop('cls', None) # type: ClsType[None] lro_delay = kwargs.pop( 'polling_interval', @@ -456,15 +438,14 @@ async def begin_delete( cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -476,52 +457,50 @@ def get_long_running_output(pipeline_response): ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}'} # type: ignore + @distributed_trace def list_by_subscription( self, - **kwargs - ) -> AsyncIterable["models.ClusterListResult"]: + **kwargs: Any + ) -> AsyncIterable["_models.ClusterListResult"]: """Lists all of the clusters in the given subscription. :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ClusterListResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.ClusterListResult] + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.ClusterListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ClusterListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ClusterListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_subscription.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_subscription_request( + subscription_id=self._config.subscription_id, + template_url=self.list_by_subscription.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, 
query_parameters, header_parameters) + + request = build_list_by_subscription_request( + subscription_id=self._config.subscription_id, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('ClusterListResult', pipeline_response) + deserialized = self._deserialize("ClusterListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -534,65 +513,64 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response + return AsyncItemPaged( get_next, extract_data ) list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/clusters'} # type: ignore + @distributed_trace def list_by_resource_group( self, resource_group_name: str, - **kwargs - ) -> AsyncIterable["models.ClusterListResult"]: + **kwargs: Any + ) -> AsyncIterable["_models.ClusterListResult"]: """Lists all of the clusters in the given resource group. :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ClusterListResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.ClusterListResult] + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.ClusterListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ClusterListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ClusterListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_resource_group.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_resource_group_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + template_url=self.list_by_resource_group.metadata['url'], + ) + request 
= _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_resource_group_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('ClusterListResult', pipeline_response) + deserialized = self._deserialize("ClusterListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -605,23 +583,25 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response + return AsyncItemPaged( get_next, extract_data ) list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters'} # type: ignore + @distributed_trace def list_streaming_jobs( self, resource_group_name: str, cluster_name: str, - **kwargs - ) -> AsyncIterable["models.ClusterJobListResult"]: + **kwargs: Any + ) -> AsyncIterable["_models.ClusterJobListResult"]: """Lists all of the streaming jobs in the given cluster. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -629,45 +609,44 @@ def list_streaming_jobs( :param cluster_name: The name of the cluster. 
:type cluster_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ClusterJobListResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.ClusterJobListResult] + :return: An iterator like instance of either ClusterJobListResult or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.ClusterJobListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ClusterJobListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ClusterJobListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_streaming_jobs.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.post(url, query_parameters, header_parameters) + + request = build_list_streaming_jobs_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_name=cluster_name, + template_url=self.list_streaming_jobs.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_streaming_jobs_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_name=cluster_name, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('ClusterJobListResult', pipeline_response) + deserialized = self._deserialize("ClusterJobListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -680,12 +659,13 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response + return AsyncItemPaged( get_next, extract_data ) diff --git 
a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_functions_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_functions_operations.py index 1d04fcab6693..7257e585966a 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_functions_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_functions_operations.py @@ -5,19 +5,24 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import functools from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling -from ... import models - +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._functions_operations import build_create_or_replace_request, build_delete_request, build_get_request, build_list_by_streaming_job_request, build_retrieve_default_definition_request, build_test_request_initial, build_update_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -35,7 +40,7 @@ class FunctionsOperations: :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -43,16 +48,17 @@ def __init__(self, client, config, serializer, deserializer) -> None: self._deserialize = deserializer self._config = config + @distributed_trace_async async def create_or_replace( self, resource_group_name: str, job_name: str, function_name: str, - function: "models.Function", + function: "_models.Function", if_match: Optional[str] = None, if_none_match: Optional[str] = None, - **kwargs - ) -> "models.Function": + **kwargs: Any + ) -> "_models.Function": """Creates a function or replaces an already existing function under an existing streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. 
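(Editor's aside, not part of the diff: a minimal usage sketch of the regenerated async surface. It assumes the aio client exposes this operation group as `client.functions`, as is conventional for generated management clients, and that azure-identity is installed; every resource name below is a placeholder.)

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.streamanalytics.aio import StreamAnalyticsManagementClient
from azure.mgmt.streamanalytics import models

async def main() -> None:
    credential = DefaultAzureCredential()
    # The async client mirrors the sync one: a TokenCredential plus a subscription id.
    async with StreamAnalyticsManagementClient(credential, "<subscription-id>") as client:
        # PUT semantics: If-None-Match="*" makes the call fail if the function
        # already exists, so this is a pure create rather than a replace.
        created = await client.functions.create_or_replace(
            resource_group_name="my-rg",        # placeholder
            job_name="my-streaming-job",        # placeholder
            function_name="my-udf",             # placeholder
            function=models.Function(),         # empty shell; real calls populate .properties
            if_none_match="*",
        )
        print(created.name, created.type)
    await credential.close()

if __name__ == "__main__":
    asyncio.run(main())
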
@@ -76,73 +82,67 @@ async def create_or_replace( :rtype: ~stream_analytics_management_client.models.Function :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Function"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Function"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create_or_replace.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'functionName': self._serialize.url("function_name", function_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(function, 'Function') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(function, 'Function') + + request = build_create_or_replace_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + function_name=function_name, + content_type=content_type, + json=_json, + if_match=if_match, + if_none_match=if_none_match, + template_url=self.create_or_replace.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Function', pipeline_response) if response.status_code == 201: response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Function', pipeline_response) if cls: 
return cls(pipeline_response, deserialized, response_headers) return deserialized + create_or_replace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}'} # type: ignore + + @distributed_trace_async async def update( self, resource_group_name: str, job_name: str, function_name: str, - function: "models.Function", + function: "_models.Function", if_match: Optional[str] = None, - **kwargs - ) -> "models.Function": + **kwargs: Any + ) -> "_models.Function": """Updates an existing function under an existing streaming job. This can be used to partially update (ie. update one or two properties) a function without affecting the rest the job or function definition. @@ -167,63 +167,57 @@ async def update( :rtype: ~stream_analytics_management_client.models.Function :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Function"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Function"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'functionName': self._serialize.url("function_name", function_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(function, 'Function') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(function, 'Function') + + request = build_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + function_name=function_name, + content_type=content_type, + json=_json, + if_match=if_match, + template_url=self.update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, 
error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Function', pipeline_response) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}'} # type: ignore + + @distributed_trace_async async def delete( self, resource_group_name: str, job_name: str, function_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes a function from the streaming job. @@ -243,45 +237,40 @@ async def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'functionName': self._serialize.url("function_name", function_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + function_name=function_name, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}'} # type: ignore + + @distributed_trace_async async def get( self, resource_group_name: str, job_name: str, function_name: str, - **kwargs - ) -> "models.Function": + **kwargs: Any + ) -> "_models.Function": """Gets details about the specified function. :param resource_group_name: The name of the resource group. The name is case insensitive. 
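(Editor's aside, not part of the diff: one practical effect of the switch to failsafe_deserialize above is that a failed call now raises HttpResponseError carrying the service's Error model whenever the payload can be parsed. A hedged sketch, reusing the hypothetical client from the previous aside.)

from azure.core.exceptions import HttpResponseError, ResourceNotFoundError

async def show_function(client) -> None:
    try:
        function = await client.functions.get(
            resource_group_name="my-rg",      # placeholder
            job_name="my-streaming-job",      # placeholder
            function_name="missing-udf",      # placeholder
        )
        print(function.id)
    except ResourceNotFoundError as exc:
        # 404 is mapped through error_map before the generic handler runs,
        # so it surfaces as this more specific subclass.
        print("not found:", exc.message)
    except HttpResponseError as exc:
        # exc.model holds the deserialized Error when available;
        # failsafe_deserialize leaves it as None if parsing failed.
        print(exc.status_code, exc.model)
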
@@ -295,57 +284,52 @@ async def get( :rtype: ~stream_analytics_management_client.models.Function :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Function"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Function"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'functionName': self._serialize.url("function_name", function_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + function_name=function_name, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Function', pipeline_response) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}'} # type: ignore + + @distributed_trace def list_by_streaming_job( self, resource_group_name: str, job_name: str, select: Optional[str] = None, - **kwargs - ) -> AsyncIterable["models.FunctionListResult"]: + **kwargs: Any + ) -> AsyncIterable["_models.FunctionListResult"]: """Lists all of the functions under the specified streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. 
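(Editor's aside, not part of the diff: list_by_streaming_job returns an AsyncItemPaged rather than a coroutine, so callers iterate with `async for` and pages are fetched lazily; the optional `select` argument maps to the $select OData query parameter and trims the properties returned. Sketch under the same hypothetical-client assumption as above.)

async def list_udfs(client) -> None:
    # Note: no `await` on the call itself; awaiting happens per page
    # inside the async iteration.
    pager = client.functions.list_by_streaming_job(
        resource_group_name="my-rg",   # placeholder
        job_name="my-streaming-job",   # placeholder
        select="name",                 # forwarded as the $select query parameter
    )
    async for function in pager:
        print(function.name)
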
@@ -358,46 +342,44 @@ def list_by_streaming_job( :type select: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either FunctionListResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.FunctionListResult] + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.FunctionListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.FunctionListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.FunctionListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_streaming_job.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if select is not None: - query_parameters['$select'] = self._serialize.query("select", select, 'str') - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_streaming_job_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + select=select, + template_url=self.list_by_streaming_job.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_streaming_job_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + select=select, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('FunctionListResult', pipeline_response) + deserialized = self._deserialize("FunctionListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -411,10 +393,12 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response + return AsyncItemPaged( get_next, 
extract_data ) @@ -425,44 +409,34 @@ async def _test_initial( resource_group_name: str, job_name: str, function_name: str, - function: Optional["models.Function"] = None, - **kwargs - ) -> Optional["models.ResourceTestStatus"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ResourceTestStatus"]] + function: Optional["_models.Function"] = None, + **kwargs: Any + ) -> Optional["_models.ResourceTestStatus"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ResourceTestStatus"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._test_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'functionName': self._serialize.url("function_name", function_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] if function is not None: - body_content = self._serialize.body(function, 'Function') + _json = self._serialize.body(function, 'Function') else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + _json = None + + request = build_test_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + function_name=function_name, + content_type=content_type, + json=_json, + template_url=self._test_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -478,16 +452,19 @@ async def _test_initial( return cls(pipeline_response, deserialized, {}) return deserialized + _test_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/test'} # type: ignore + + @distributed_trace_async async def begin_test( self, resource_group_name: str, job_name: str, function_name: str, - function: Optional["models.Function"] = None, - **kwargs - ) -> AsyncLROPoller["models.ResourceTestStatus"]: + function: Optional["_models.Function"] = None, + **kwargs: Any + ) -> AsyncLROPoller["_models.ResourceTestStatus"]: """Tests if the information provided for a function is valid. 
This can range from testing the connection to the underlying web service behind the function or making sure the function code provided is syntactically correct. @@ -506,16 +483,21 @@ async def begin_test( :type function: ~stream_analytics_management_client.models.Function :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either ResourceTestStatus or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~stream_analytics_management_client.models.ResourceTestStatus] - :raises ~azure.core.exceptions.HttpResponseError: + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ResourceTestStatus or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~stream_analytics_management_client.models.ResourceTestStatus] + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.ResourceTestStatus"] + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ResourceTestStatus"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -527,21 +509,21 @@ async def begin_test( job_name=job_name, function_name=function_name, function=function, + content_type=content_type, cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): + response = pipeline_response.http_response deserialized = self._deserialize('ResourceTestStatus', pipeline_response) - if cls: return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -553,16 +535,18 @@ def get_long_running_output(pipeline_response): ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_test.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/test'} # type: ignore + @distributed_trace_async async def retrieve_default_definition( self, resource_group_name: str, job_name: str, function_name: str, - function_retrieve_default_definition_parameters: Optional["models.FunctionRetrieveDefaultDefinitionParameters"] = None, - **kwargs - ) -> "models.Function": 
+ function_retrieve_default_definition_parameters: Optional["_models.FunctionRetrieveDefaultDefinitionParameters"] = None, + **kwargs: Any + ) -> "_models.Function": """Retrieves the default definition of a function based on the parameters specified. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -573,53 +557,45 @@ async def retrieve_default_definition( :type function_name: str :param function_retrieve_default_definition_parameters: Parameters used to specify the type of function to retrieve the default definition for. - :type function_retrieve_default_definition_parameters: ~stream_analytics_management_client.models.FunctionRetrieveDefaultDefinitionParameters + :type function_retrieve_default_definition_parameters: + ~stream_analytics_management_client.models.FunctionRetrieveDefaultDefinitionParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: Function, or the result of cls(response) :rtype: ~stream_analytics_management_client.models.Function :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Function"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Function"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.retrieve_default_definition.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'functionName': self._serialize.url("function_name", function_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] if function_retrieve_default_definition_parameters is not None: - body_content = self._serialize.body(function_retrieve_default_definition_parameters, 'FunctionRetrieveDefaultDefinitionParameters') + _json = self._serialize.body(function_retrieve_default_definition_parameters, 'FunctionRetrieveDefaultDefinitionParameters') else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + _json = None + + request = build_retrieve_default_definition_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + function_name=function_name, + content_type=content_type, + json=_json, + template_url=self.retrieve_default_definition.metadata['url'], + ) + request = _convert_request(request) + 
request.url = self._client.format_url(request.url) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('Function', pipeline_response) @@ -627,4 +603,6 @@ async def retrieve_default_definition( return cls(pipeline_response, deserialized, {}) return deserialized - retrieve_default_definition.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/RetrieveDefaultDefinition'} # type: ignore + + retrieve_default_definition.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/retrieveDefaultDefinition'} # type: ignore + diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_inputs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_inputs_operations.py index f7451e9e8aab..a6f8bfe6c39c 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_inputs_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_inputs_operations.py @@ -5,19 +5,24 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import functools from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling -from ... import models - +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._inputs_operations import build_create_or_replace_request, build_delete_request, build_get_request, build_list_by_streaming_job_request, build_test_request_initial, build_update_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -35,7 +40,7 @@ class InputsOperations: :param deserializer: An object model deserializer. 
""" - models = models + models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -43,16 +48,17 @@ def __init__(self, client, config, serializer, deserializer) -> None: self._deserialize = deserializer self._config = config + @distributed_trace_async async def create_or_replace( self, resource_group_name: str, job_name: str, input_name: str, - input: "models.Input", + input: "_models.Input", if_match: Optional[str] = None, if_none_match: Optional[str] = None, - **kwargs - ) -> "models.Input": + **kwargs: Any + ) -> "_models.Input": """Creates an input or replaces an already existing input under an existing streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -75,73 +81,67 @@ async def create_or_replace( :rtype: ~stream_analytics_management_client.models.Input :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Input"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Input"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create_or_replace.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'inputName': self._serialize.url("input_name", input_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(input, 'Input') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(input, 'Input') + + request = build_create_or_replace_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + input_name=input_name, + content_type=content_type, + json=_json, + if_match=if_match, + if_none_match=if_none_match, + template_url=self.create_or_replace.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if 
response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Input', pipeline_response) if response.status_code == 201: response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Input', pipeline_response) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized + create_or_replace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}'} # type: ignore + + @distributed_trace_async async def update( self, resource_group_name: str, job_name: str, input_name: str, - input: "models.Input", + input: "_models.Input", if_match: Optional[str] = None, - **kwargs - ) -> "models.Input": + **kwargs: Any + ) -> "_models.Input": """Updates an existing input under an existing streaming job. This can be used to partially update (ie. update one or two properties) an input without affecting the rest the job or input definition. @@ -165,63 +165,57 @@ async def update( :rtype: ~stream_analytics_management_client.models.Input :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Input"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Input"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'inputName': self._serialize.url("input_name", input_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(input, 'Input') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(input, 'Input') + + 
request = build_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + input_name=input_name, + content_type=content_type, + json=_json, + if_match=if_match, + template_url=self.update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Input', pipeline_response) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}'} # type: ignore + + @distributed_trace_async async def delete( self, resource_group_name: str, job_name: str, input_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes an input from the streaming job. @@ -241,45 +235,40 @@ async def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'inputName': self._serialize.url("input_name", input_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + input_name=input_name, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) delete.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}'} # type: ignore + + @distributed_trace_async async def get( self, resource_group_name: str, job_name: str, input_name: str, - **kwargs - ) -> "models.Input": + **kwargs: Any + ) -> "_models.Input": """Gets details about the specified input. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -293,57 +282,52 @@ async def get( :rtype: ~stream_analytics_management_client.models.Input :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Input"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Input"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'inputName': self._serialize.url("input_name", input_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + input_name=input_name, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Input', pipeline_response) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}'} # type: ignore + + @distributed_trace def list_by_streaming_job( self, resource_group_name: str, job_name: str, select: Optional[str] = None, - **kwargs - ) -> AsyncIterable["models.InputListResult"]: + **kwargs: Any + ) -> AsyncIterable["_models.InputListResult"]: """Lists all of the inputs under the specified streaming job. :param resource_group_name: The name of the resource group. 
The name is case insensitive. @@ -356,46 +340,44 @@ def list_by_streaming_job( :type select: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either InputListResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.InputListResult] + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.InputListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.InputListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.InputListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_streaming_job.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if select is not None: - query_parameters['$select'] = self._serialize.query("select", select, 'str') - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_streaming_job_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + select=select, + template_url=self.list_by_streaming_job.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_streaming_job_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + select=select, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('InputListResult', pipeline_response) + deserialized = self._deserialize("InputListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -409,10 +391,12 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response + return AsyncItemPaged( get_next, 
extract_data ) @@ -423,44 +407,34 @@ async def _test_initial( resource_group_name: str, job_name: str, input_name: str, - input: Optional["models.Input"] = None, - **kwargs - ) -> Optional["models.ResourceTestStatus"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ResourceTestStatus"]] + input: Optional["_models.Input"] = None, + **kwargs: Any + ) -> Optional["_models.ResourceTestStatus"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ResourceTestStatus"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._test_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'inputName': self._serialize.url("input_name", input_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - body_content_kwargs = {} # type: Dict[str, Any] if input is not None: - body_content = self._serialize.body(input, 'Input') + _json = self._serialize.body(input, 'Input') else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + _json = None + + request = build_test_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + input_name=input_name, + content_type=content_type, + json=_json, + template_url=self._test_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -476,16 +450,19 @@ async def _test_initial( return cls(pipeline_response, deserialized, {}) return deserialized + _test_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}/test'} # type: ignore + + @distributed_trace_async async def begin_test( self, resource_group_name: str, job_name: str, input_name: str, - input: Optional["models.Input"] = None, - **kwargs - ) -> AsyncLROPoller["models.ResourceTestStatus"]: + input: Optional["_models.Input"] = None, + **kwargs: Any + ) -> AsyncLROPoller["_models.ResourceTestStatus"]: """Tests whether an input’s datasource is reachable and usable by the Azure Stream Analytics service. 
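For orientation, begin_test above starts a long-running check that an input's datasource is reachable and resolves to a ResourceTestStatus. A minimal usage sketch, assuming the aio client exposes this group as client.inputs, that azure-identity supplies the credential, and placeholder resource names throughout:

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.streamanalytics.aio import StreamAnalyticsManagementClient

async def test_input_datasource() -> None:
    async with DefaultAzureCredential() as credential:
        async with StreamAnalyticsManagementClient(credential, "<subscription-id>") as client:
            # Awaiting begin_test yields an AsyncLROPoller[ResourceTestStatus];
            # input=None (the default) tests the input as defined on the service.
            poller = await client.inputs.begin_test(
                resource_group_name="<resource-group>",
                job_name="<job-name>",
                input_name="<input-name>",
            )
            status = await poller.result()
            print(status.status)  # e.g. "TestSucceeded"; status.error carries details on failure

asyncio.run(test_input_datasource())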
@@ -503,16 +480,21 @@ async def begin_test( :type input: ~stream_analytics_management_client.models.Input :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either ResourceTestStatus or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~stream_analytics_management_client.models.ResourceTestStatus] - :raises ~azure.core.exceptions.HttpResponseError: + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ResourceTestStatus or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~stream_analytics_management_client.models.ResourceTestStatus] + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.ResourceTestStatus"] + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ResourceTestStatus"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -524,21 +506,21 @@ async def begin_test( job_name=job_name, input_name=input_name, input=input, + content_type=content_type, cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): + response = pipeline_response.http_response deserialized = self._deserialize('ResourceTestStatus', pipeline_response) - if cls: return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -550,4 +532,5 @@ def get_long_running_output(pipeline_response): ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_test.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}/test'} # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_operations.py index 6e073694446f..cd934d596162 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_operations.py +++ 
b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_operations.py @@ -5,17 +5,22 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import functools from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async from azure.mgmt.core.exceptions import ARMErrorFormat -from ... import models - +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._operations import build_list_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -33,7 +38,7 @@ class Operations: :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -41,46 +46,45 @@ def __init__(self, client, config, serializer, deserializer) -> None: self._deserialize = deserializer self._config = config + @distributed_trace def list( self, - **kwargs - ) -> AsyncIterable["models.OperationListResult"]: + **kwargs: Any + ) -> AsyncIterable["_models.OperationListResult"]: """Lists all of the available Stream Analytics related operations. 
:keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either OperationListResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.OperationListResult] + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.OperationListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = build_list_request( + template_url=self.list.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('OperationListResult', pipeline_response) + deserialized = self._deserialize("OperationListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -94,10 +98,12 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response + return AsyncItemPaged( get_next, extract_data ) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_outputs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_outputs_operations.py index a9d08028e8aa..652f9cb13b08 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_outputs_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_outputs_operations.py @@ -5,19 +5,24 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +import functools from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling -from ... import models - +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._outputs_operations import build_create_or_replace_request, build_delete_request, build_get_request, build_list_by_streaming_job_request, build_test_request_initial, build_update_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -35,7 +40,7 @@ class OutputsOperations: :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -43,16 +48,17 @@ def __init__(self, client, config, serializer, deserializer) -> None: self._deserialize = deserializer self._config = config + @distributed_trace_async async def create_or_replace( self, resource_group_name: str, job_name: str, output_name: str, - output: "models.Output", + output: "_models.Output", if_match: Optional[str] = None, if_none_match: Optional[str] = None, - **kwargs - ) -> "models.Output": + **kwargs: Any + ) -> "_models.Output": """Creates an output or replaces an already existing output under an existing streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. 
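The hunk below moves create_or_replace onto the request-builder path; the If-Match/If-None-Match parameters it forwards are ARM etag guards. A hedged sketch of a create-only call (the populated Output model, the client variable, and the "*" create-only semantics are assumptions here):

from azure.mgmt.streamanalytics.aio import StreamAnalyticsManagementClient
from azure.mgmt.streamanalytics.models import Output

async def create_output_if_absent(
    client: StreamAnalyticsManagementClient, output: Output
) -> Output:
    # if_none_match="*" asks the service to create the output but to refuse
    # to overwrite an existing one (assumed to surface as a 412 HttpResponseError).
    return await client.outputs.create_or_replace(
        resource_group_name="<resource-group>",
        job_name="<job-name>",
        output_name="<output-name>",
        output=output,
        if_none_match="*",
    )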
@@ -76,73 +82,67 @@ async def create_or_replace( :rtype: ~stream_analytics_management_client.models.Output :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Output"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Output"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create_or_replace.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'outputName': self._serialize.url("output_name", output_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(output, 'Output') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(output, 'Output') + + request = build_create_or_replace_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + output_name=output_name, + content_type=content_type, + json=_json, + if_match=if_match, + if_none_match=if_none_match, + template_url=self.create_or_replace.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Output', pipeline_response) if response.status_code == 201: response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Output', pipeline_response) if cls: return cls(pipeline_response, 
deserialized, response_headers) return deserialized + create_or_replace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}'} # type: ignore + + @distributed_trace_async async def update( self, resource_group_name: str, job_name: str, output_name: str, - output: "models.Output", + output: "_models.Output", if_match: Optional[str] = None, - **kwargs - ) -> "models.Output": + **kwargs: Any + ) -> "_models.Output": """Updates an existing output under an existing streaming job. This can be used to partially update (i.e. update one or two properties) an output without affecting the rest of the job or output definition. @@ -167,63 +167,57 @@ async def update( :rtype: ~stream_analytics_management_client.models.Output :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Output"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Output"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'outputName': self._serialize.url("output_name", output_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(output, 'Output') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(output, 'Output') + + request = build_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + output_name=output_name, + content_type=content_type, + json=_json, + if_match=if_match, + template_url=self.update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = 
self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Output', pipeline_response) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}'} # type: ignore + + @distributed_trace_async async def delete( self, resource_group_name: str, job_name: str, output_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes an output from the streaming job. @@ -243,45 +237,40 @@ async def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'outputName': self._serialize.url("output_name", output_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + output_name=output_name, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}'} # type: ignore + + @distributed_trace_async async def get( self, resource_group_name: str, job_name: str, output_name: str, - **kwargs - ) -> "models.Output": + **kwargs: Any + ) -> "_models.Output": """Gets details about the specified output. :param resource_group_name: The name of the resource group. The name is case insensitive. 
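As the body below shows, get deserializes the ETag response header into response_headers and passes it to the optional cls callback together with the pipeline response and the model; that callback is the supported way to read the etag alongside the Output. A short sketch matching the (pipeline_response, deserialized, response_headers) signature used above:

async def get_output_with_etag(client, resource_group_name: str, job_name: str, output_name: str):
    # cls receives exactly what the operation builds: the raw pipeline
    # response, the deserialized Output, and the response headers.
    def capture(pipeline_response, deserialized, response_headers):
        return deserialized, response_headers.get('ETag')

    return await client.outputs.get(
        resource_group_name, job_name, output_name, cls=capture,
    )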
@@ -295,57 +284,52 @@ async def get( :rtype: ~stream_analytics_management_client.models.Output :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Output"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Output"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'outputName': self._serialize.url("output_name", output_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + output_name=output_name, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Output', pipeline_response) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}'} # type: ignore + + @distributed_trace def list_by_streaming_job( self, resource_group_name: str, job_name: str, select: Optional[str] = None, - **kwargs - ) -> AsyncIterable["models.OutputListResult"]: + **kwargs: Any + ) -> AsyncIterable["_models.OutputListResult"]: """Lists all of the outputs under the specified streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. 
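list_by_streaming_job itself is not awaited: it returns an AsyncItemPaged that issues the paged requests lazily, and select flows through as the $select OData query parameter. A usage sketch with placeholder arguments:

async def print_output_names(client, resource_group_name: str, job_name: str) -> None:
    # No await on the call; iterating drives the page fetches.
    pager = client.outputs.list_by_streaming_job(
        resource_group_name, job_name, select="*",  # "*" selects all properties
    )
    async for output in pager:
        print(output.name)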
@@ -358,46 +342,44 @@ def list_by_streaming_job( :type select: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either OutputListResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.OutputListResult] + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.OutputListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.OutputListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.OutputListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_streaming_job.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if select is not None: - query_parameters['$select'] = self._serialize.query("select", select, 'str') - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_streaming_job_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + select=select, + template_url=self.list_by_streaming_job.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_streaming_job_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + select=select, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('OutputListResult', pipeline_response) + deserialized = self._deserialize("OutputListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -411,10 +393,12 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response + return AsyncItemPaged( get_next, extract_data ) @@ -425,44 
+409,34 @@ async def _test_initial( resource_group_name: str, job_name: str, output_name: str, - output: Optional["models.Output"] = None, - **kwargs - ) -> Optional["models.ResourceTestStatus"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ResourceTestStatus"]] + output: Optional["_models.Output"] = None, + **kwargs: Any + ) -> Optional["_models.ResourceTestStatus"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ResourceTestStatus"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._test_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'outputName': self._serialize.url("output_name", output_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - body_content_kwargs = {} # type: Dict[str, Any] if output is not None: - body_content = self._serialize.body(output, 'Output') + _json = self._serialize.body(output, 'Output') else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + _json = None + + request = build_test_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + output_name=output_name, + content_type=content_type, + json=_json, + template_url=self._test_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -478,16 +452,19 @@ async def _test_initial( return cls(pipeline_response, deserialized, {}) return deserialized + _test_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}/test'} # type: ignore + + @distributed_trace_async async def begin_test( self, resource_group_name: str, job_name: str, output_name: str, - output: Optional["models.Output"] = None, - **kwargs - ) -> AsyncLROPoller["models.ResourceTestStatus"]: + output: Optional["_models.Output"] = None, + **kwargs: Any + ) -> AsyncLROPoller["_models.ResourceTestStatus"]: """Tests whether an output’s datasource is reachable and usable by the Azure Stream Analytics service. 
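The docstring rewrite in the next hunk spells out the polling contract for begin_test: polling=True (the default) uses AsyncARMPolling, polling=False disables polling, a custom AsyncPollingMethod may be passed instead, and polling_interval only applies when the service sends no Retry-After header. A brief sketch of both modes:

async def test_output_patiently(client, rg: str, job: str, name: str):
    # Poll every 5 seconds when no Retry-After header is returned.
    poller = await client.outputs.begin_test(rg, job, name, polling_interval=5)
    return await poller.result()

async def test_output_no_polling(client, rg: str, job: str, name: str):
    # polling=False returns after the initial request; result() then yields
    # the first response without waiting for the operation to complete.
    poller = await client.outputs.begin_test(rg, job, name, polling=False)
    return await poller.result()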
@@ -505,16 +482,21 @@ async def begin_test( :type output: ~stream_analytics_management_client.models.Output :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either ResourceTestStatus or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~stream_analytics_management_client.models.ResourceTestStatus] - :raises ~azure.core.exceptions.HttpResponseError: + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ResourceTestStatus or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~stream_analytics_management_client.models.ResourceTestStatus] + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.ResourceTestStatus"] + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ResourceTestStatus"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -526,21 +508,21 @@ async def begin_test( job_name=job_name, output_name=output_name, output=output, + content_type=content_type, cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): + response = pipeline_response.http_response deserialized = self._deserialize('ResourceTestStatus', pipeline_response) - if cls: return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -552,4 +534,5 @@ def get_long_running_output(pipeline_response): ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_test.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}/test'} # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_private_endpoints_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_private_endpoints_operations.py index ba2975090858..8ce95c7d32f7 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_private_endpoints_operations.py +++ 
b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_private_endpoints_operations.py @@ -5,19 +5,24 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import functools from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling -from ... import models - +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._private_endpoints_operations import build_create_or_update_request, build_delete_request_initial, build_get_request, build_list_by_cluster_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -35,7 +40,7 @@ class PrivateEndpointsOperations: :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -43,16 +48,17 @@ def __init__(self, client, config, serializer, deserializer) -> None: self._deserialize = deserializer self._config = config + @distributed_trace_async async def create_or_update( self, resource_group_name: str, cluster_name: str, private_endpoint_name: str, - private_endpoint: "models.PrivateEndpoint", + private_endpoint: "_models.PrivateEndpoint", if_match: Optional[str] = None, if_none_match: Optional[str] = None, - **kwargs - ) -> "models.PrivateEndpoint": + **kwargs: Any + ) -> "_models.PrivateEndpoint": """Creates a Stream Analytics Private Endpoint or replaces an already existing Private Endpoint. :param resource_group_name: The name of the resource group. The name is case insensitive. 
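Before the create_or_update hunks: the call is a plain awaitable returning the PrivateEndpoint, with the same etag guards as the job-scoped operations. A hedged sketch (the near-empty model body and the client.private_endpoints attribute name are assumptions; real calls populate the private link connection details on the model):

from azure.mgmt.streamanalytics.models import PrivateEndpoint

async def ensure_private_endpoint(client, resource_group_name: str, cluster_name: str, name: str) -> PrivateEndpoint:
    endpoint = PrivateEndpoint()  # placeholder body; populate before real use
    return await client.private_endpoints.create_or_update(
        resource_group_name=resource_group_name,
        cluster_name=cluster_name,
        private_endpoint_name=name,
        private_endpoint=endpoint,
        if_none_match="*",  # create-only guard, same convention as above
    )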
@@ -76,48 +82,36 @@ async def create_or_update( :rtype: ~stream_analytics_management_client.models.PrivateEndpoint :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpoint"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpoint"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), - 'privateEndpointName': self._serialize.url("private_endpoint_name", private_endpoint_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(private_endpoint, 'PrivateEndpoint') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(private_endpoint, 'PrivateEndpoint') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_name=cluster_name, + private_endpoint_name=private_endpoint_name, + content_type=content_type, + json=_json, + if_match=if_match, + if_none_match=if_none_match, + template_url=self.create_or_update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -130,15 +124,18 @@ async def create_or_update( return cls(pipeline_response, deserialized, {}) return deserialized + create_or_update.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}'} # type: ignore + + @distributed_trace_async async def get( self, resource_group_name: str, cluster_name: str, private_endpoint_name: str, - **kwargs - ) -> "models.PrivateEndpoint": + **kwargs: Any + ) -> "_models.PrivateEndpoint": """Gets information about the specified Private Endpoint. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -152,39 +149,29 @@ async def get( :rtype: ~stream_analytics_management_client.models.PrivateEndpoint :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpoint"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpoint"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), - 'privateEndpointName': self._serialize.url("private_endpoint_name", private_endpoint_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_name=cluster_name, + private_endpoint_name=private_endpoint_name, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('PrivateEndpoint', pipeline_response) @@ -193,61 +180,54 @@ async def get( return cls(pipeline_response, deserialized, {}) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}'} # type: ignore + async def _delete_initial( self, resource_group_name: str, cluster_name: str, private_endpoint_name: str, - **kwargs + **kwargs: Any ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: 
ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" - accept = "application/json" - - # Construct URL - url = self._delete_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), - 'privateEndpointName': self._serialize.url("private_endpoint_name", private_endpoint_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_delete_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_name=cluster_name, + private_endpoint_name=private_endpoint_name, + template_url=self._delete_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}'} # type: ignore + + @distributed_trace_async async def begin_delete( self, resource_group_name: str, cluster_name: str, private_endpoint_name: str, - **kwargs + **kwargs: Any ) -> AsyncLROPoller[None]: """Delete the specified private endpoint. @@ -259,15 +239,17 @@ async def begin_delete( :type private_endpoint_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod] cls = kwargs.pop('cls', None) # type: ClsType[None] lro_delay = kwargs.pop( 'polling_interval', @@ -282,15 +264,14 @@ async def begin_delete( cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -302,14 +283,16 @@ def get_long_running_output(pipeline_response): ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}'} # type: ignore + @distributed_trace def list_by_cluster( self, resource_group_name: str, cluster_name: str, - **kwargs - ) -> AsyncIterable["models.PrivateEndpointListResult"]: + **kwargs: Any + ) -> AsyncIterable["_models.PrivateEndpointListResult"]: """Lists the private endpoints in the cluster. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -317,45 +300,44 @@ def list_by_cluster( :param cluster_name: The name of the cluster. 
:type cluster_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either PrivateEndpointListResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.PrivateEndpointListResult] + :return: An iterator like instance of either PrivateEndpointListResult or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.PrivateEndpointListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_cluster.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_cluster_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_name=cluster_name, + template_url=self.list_by_cluster.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_cluster_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_name=cluster_name, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('PrivateEndpointListResult', pipeline_response) + deserialized = self._deserialize("PrivateEndpointListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -368,12 +350,13 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response + return AsyncItemPaged( get_next, extract_data 
) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_streaming_jobs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_streaming_jobs_operations.py index d6cdd13560d3..6776ea16d840 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_streaming_jobs_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_streaming_jobs_operations.py @@ -5,19 +5,24 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import functools from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling -from ... import models - +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._streaming_jobs_operations import build_create_or_replace_request_initial, build_delete_request_initial, build_get_request, build_list_by_resource_group_request, build_list_request, build_scale_request_initial, build_start_request_initial, build_stop_request_initial, build_update_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -35,7 +40,7 @@ class StreamingJobsOperations: :param deserializer: An object model deserializer. 
""" - models = models + models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -47,46 +52,34 @@ async def _create_or_replace_initial( self, resource_group_name: str, job_name: str, - streaming_job: "models.StreamingJob", + streaming_job: "_models.StreamingJob", if_match: Optional[str] = None, if_none_match: Optional[str] = None, - **kwargs - ) -> "models.StreamingJob": - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJob"] + **kwargs: Any + ) -> "_models.StreamingJob": + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJob"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._create_or_replace_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(streaming_job, 'StreamingJob') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(streaming_job, 'StreamingJob') + + request = build_create_or_replace_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + content_type=content_type, + json=_json, + if_match=if_match, + if_none_match=if_none_match, + template_url=self._create_or_replace_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -97,27 +90,32 @@ async def _create_or_replace_initial( response_headers = {} if response.status_code == 200: response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('StreamingJob', pipeline_response) if response.status_code == 201: response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('StreamingJob', pipeline_response) if cls: return cls(pipeline_response, deserialized, 
response_headers) return deserialized + _create_or_replace_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}'} # type: ignore + + @distributed_trace_async async def begin_create_or_replace( self, resource_group_name: str, job_name: str, - streaming_job: "models.StreamingJob", + streaming_job: "_models.StreamingJob", if_match: Optional[str] = None, if_none_match: Optional[str] = None, - **kwargs - ) -> AsyncLROPoller["models.StreamingJob"]: + **kwargs: Any + ) -> AsyncLROPoller["_models.StreamingJob"]: """Creates a streaming job or replaces an already existing streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -137,16 +135,21 @@ async def begin_create_or_replace( :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either StreamingJob or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~stream_analytics_management_client.models.StreamingJob] - :raises ~azure.core.exceptions.HttpResponseError: + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either StreamingJob or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~stream_analytics_management_client.models.StreamingJob] + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJob"] + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJob"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = await self._create_or_replace_initial( resource_group_name=resource_group_name, job_name=job_name, streaming_job=streaming_job, if_match=if_match, if_none_match=if_none_match, + content_type=content_type, cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): response_headers = {} response = pipeline_response.http_response response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('StreamingJob', pipeline_response) - if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -188,16 +191,18 @@ def get_long_running_output(pipeline_response): ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_replace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}'} # type: ignore + @distributed_trace_async async def update( self, resource_group_name: str, job_name: str, - streaming_job: "models.StreamingJob", + streaming_job: "_models.StreamingJob", if_match: Optional[str] = None, - **kwargs - ) -> "models.StreamingJob": + **kwargs: Any + ) -> "_models.StreamingJob": """Updates an existing streaming job. This can be used to partially update (i.e. update one or two properties) a streaming job without affecting the rest of the job definition.
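To make the PATCH semantics described in that docstring concrete, here is a minimal usage sketch of the async update operation. It is illustrative only and not part of the generated diff; the subscription ID, resource group, and job name are placeholder values.

# Illustrative sketch only -- not part of the generated diff.
# <subscription-id>, <resource-group>, and <job-name> are placeholders.
from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.streamanalytics.aio import StreamAnalyticsManagementClient
from azure.mgmt.streamanalytics.models import StreamingJob

async def patch_job() -> None:
    async with DefaultAzureCredential() as credential:
        async with StreamAnalyticsManagementClient(credential, "<subscription-id>") as client:
            # Only the fields set on this model are sent; the rest of the job
            # definition is left untouched (PATCH semantics).
            partial = StreamingJob(events_out_of_order_max_delay_in_seconds=5)
            job = await client.streaming_jobs.update(
                "<resource-group>", "<job-name>", partial
            )
            print(job.name, job.etag)

Passing if_match with a previously returned ETag would make the update conditional, failing if the job has changed since that ETag was issued.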
@@ -219,86 +224,71 @@ async def update( :rtype: ~stream_analytics_management_client.models.StreamingJob :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJob"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJob"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(streaming_job, 'StreamingJob') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(streaming_job, 'StreamingJob') + + request = build_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + content_type=content_type, + json=_json, + if_match=if_match, + template_url=self.update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('StreamingJob', pipeline_response) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}'} # type: ignore + async def _delete_initial( self, resource_group_name: str, job_name: str, - **kwargs + **kwargs: Any ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 
409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - - # Construct URL - url = self._delete_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] + + request = build_delete_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + template_url=self._delete_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -311,11 +301,13 @@ async def _delete_initial( _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}'} # type: ignore + + @distributed_trace_async async def begin_delete( self, resource_group_name: str, job_name: str, - **kwargs + **kwargs: Any ) -> AsyncLROPoller[None]: """Deletes a streaming job. @@ -325,15 +317,17 @@ async def begin_delete( :type job_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod] cls = kwargs.pop('cls', None) # type: ClsType[None] lro_delay = kwargs.pop( 'polling_interval', @@ -347,15 +341,14 @@ async def begin_delete( cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -367,15 +360,17 @@ def get_long_running_output(pipeline_response): ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}'} # type: ignore + @distributed_trace_async async def get( self, resource_group_name: str, job_name: str, expand: Optional[str] = None, - **kwargs - ) -> "models.StreamingJob": + **kwargs: Any + ) -> "_models.StreamingJob": """Gets details about the specified streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -392,57 +387,51 @@ async def get( :rtype: ~stream_analytics_management_client.models.StreamingJob :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJob"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJob"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if expand is not None: - query_parameters['$expand'] = self._serialize.query("expand", expand, 'str') - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + expand=expand, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await 
self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('StreamingJob', pipeline_response) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}'} # type: ignore + + @distributed_trace def list_by_resource_group( self, resource_group_name: str, expand: Optional[str] = None, - **kwargs - ) -> AsyncIterable["models.StreamingJobListResult"]: + **kwargs: Any + ) -> AsyncIterable["_models.StreamingJobListResult"]: """Lists all of the streaming jobs in the specified resource group. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -453,46 +442,44 @@ def list_by_resource_group( 'transformation', 'outputs', and 'functions'. :type expand: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either StreamingJobListResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.StreamingJobListResult] + :return: An iterator like instance of either StreamingJobListResult or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.StreamingJobListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJobListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJobListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_resource_group.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if expand is not None: - query_parameters['$expand'] = self._serialize.query("expand", expand, 'str') - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_resource_group_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + expand=expand, + 
template_url=self.list_by_resource_group.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_resource_group_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + expand=expand, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('StreamingJobListResult', pipeline_response) + deserialized = self._deserialize("StreamingJobListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -506,20 +493,23 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response + return AsyncItemPaged( get_next, extract_data ) list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs'} # type: ignore + @distributed_trace def list( self, expand: Optional[str] = None, - **kwargs - ) -> AsyncIterable["models.StreamingJobListResult"]: + **kwargs: Any + ) -> AsyncIterable["_models.StreamingJobListResult"]: """Lists all of the streaming jobs in the given subscription. :param expand: The $expand OData query parameter. This is a comma-separated list of additional @@ -528,45 +518,42 @@ def list( 'transformation', 'outputs', and 'functions'. 
:type expand: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either StreamingJobListResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.StreamingJobListResult] + :return: An iterator like instance of either StreamingJobListResult or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.StreamingJobListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJobListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJobListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if expand is not None: - query_parameters['$expand'] = self._serialize.query("expand", expand, 'str') - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + subscription_id=self._config.subscription_id, + expand=expand, + template_url=self.list.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + subscription_id=self._config.subscription_id, + expand=expand, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('StreamingJobListResult', pipeline_response) + deserialized = self._deserialize("StreamingJobListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -580,10 +567,12 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response + return AsyncItemPaged( get_next, extract_data ) @@ -593,41 +582,33 @@ async def _start_initial( self, resource_group_name: str, job_name: str, - start_job_parameters: Optional["models.StartStreamingJobParameters"] = None, - **kwargs + start_job_parameters: Optional["_models.StartStreamingJobParameters"] = None, + **kwargs: Any ) -> None: cls = kwargs.pop('cls', None) # 
type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self._start_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] if start_job_parameters is not None: - body_content = self._serialize.body(start_job_parameters, 'StartStreamingJobParameters') + _json = self._serialize.body(start_job_parameters, 'StartStreamingJobParameters') else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + _json = None + + request = build_start_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + content_type=content_type, + json=_json, + template_url=self._start_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -640,12 +621,14 @@ async def _start_initial( _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/start'} # type: ignore + + @distributed_trace_async async def begin_start( self, resource_group_name: str, job_name: str, - start_job_parameters: Optional["models.StartStreamingJobParameters"] = None, - **kwargs + start_job_parameters: Optional["_models.StartStreamingJobParameters"] = None, + **kwargs: Any ) -> AsyncLROPoller[None]: """Starts a streaming job. Once a job is started it will start processing input events and produce output. @@ -655,18 +638,22 @@ async def begin_start( :param job_name: The name of the streaming job. :type job_name: str :param start_job_parameters: Parameters applicable to a start streaming job operation. - :type start_job_parameters: ~stream_analytics_management_client.models.StartStreamingJobParameters + :type start_job_parameters: + ~stream_analytics_management_client.models.StartStreamingJobParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod] cls = kwargs.pop('cls', None) # type: ClsType[None] lro_delay = kwargs.pop( 'polling_interval', @@ -678,18 +665,18 @@ async def begin_start( resource_group_name=resource_group_name, job_name=job_name, start_job_parameters=start_job_parameters, + content_type=content_type, cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -701,38 +688,31 @@ def get_long_running_output(pipeline_response): ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/start'} # type: ignore async def _stop_initial( self, resource_group_name: str, job_name: str, - **kwargs + **kwargs: Any ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - - # Construct URL - url = self._stop_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] + + request = build_stop_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + template_url=self._stop_initial.metadata['url'], + ) + request = 
_convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -745,11 +725,13 @@ async def _stop_initial( _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/stop'} # type: ignore + + @distributed_trace_async async def begin_stop( self, resource_group_name: str, job_name: str, - **kwargs + **kwargs: Any ) -> AsyncLROPoller[None]: """Stops a running streaming job. This will cause a running streaming job to stop processing input events and producing output. @@ -760,15 +742,17 @@ async def begin_stop( :type job_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod] cls = kwargs.pop('cls', None) # type: ClsType[None] lro_delay = kwargs.pop( 'polling_interval', @@ -782,15 +766,14 @@ async def begin_stop( cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -802,4 +785,117 @@ def get_long_running_output(pipeline_response): ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/stop'} # type: ignore + + async def _scale_initial( + self, + resource_group_name: str, + job_name: str, + scale_job_parameters: Optional["_models.ScaleStreamingJobParameters"] = None, + **kwargs: Any + ) -> None: + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + if 
scale_job_parameters is not None: + _json = self._serialize.body(scale_job_parameters, 'ScaleStreamingJobParameters') + else: + _json = None + + request = build_scale_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + content_type=content_type, + json=_json, + template_url=self._scale_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _scale_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/scale'} # type: ignore + + + @distributed_trace_async + async def begin_scale( + self, + resource_group_name: str, + job_name: str, + scale_job_parameters: Optional["_models.ScaleStreamingJobParameters"] = None, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Scales a streaming job when the job is running. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param scale_job_parameters: Parameters applicable to a scale streaming job operation. + :type scale_job_parameters: + ~stream_analytics_management_client.models.ScaleStreamingJobParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._scale_initial( + resource_group_name=resource_group_name, + job_name=job_name, + scale_job_parameters=scale_job_parameters, + content_type=content_type, + cls=lambda x,y,z: x, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_scale.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/scale'} # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_subscriptions_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_subscriptions_operations.py index 7a94e1139245..1c779379caaf 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_subscriptions_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_subscriptions_operations.py @@ -5,16 +5,20 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import functools from typing import Any, Callable, Dict, Generic, Optional, TypeVar import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async from azure.mgmt.core.exceptions import ARMErrorFormat -from ... import models - +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._subscriptions_operations import build_list_quotas_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -32,7 +36,7 @@ class SubscriptionsOperations: :param deserializer: An object model deserializer. 
""" - models = models + models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -40,11 +44,12 @@ def __init__(self, client, config, serializer, deserializer) -> None: self._deserialize = deserializer self._config = config + @distributed_trace_async async def list_quotas( self, location: str, - **kwargs - ) -> "models.SubscriptionQuotasListResult": + **kwargs: Any + ) -> "_models.SubscriptionQuotasListResult": """Retrieves the subscription's current quota information in a particular region. :param location: The region in which to retrieve the subscription's quota information. You can @@ -56,37 +61,28 @@ async def list_quotas( :rtype: ~stream_analytics_management_client.models.SubscriptionQuotasListResult :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.SubscriptionQuotasListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.SubscriptionQuotasListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - accept = "application/json" - - # Construct URL - url = self.list_quotas.metadata['url'] # type: ignore - path_format_arguments = { - 'location': self._serialize.url("location", location, 'str'), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_list_quotas_request( + location=location, + subscription_id=self._config.subscription_id, + template_url=self.list_quotas.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('SubscriptionQuotasListResult', pipeline_response) @@ -94,4 +90,6 @@ async def list_quotas( return cls(pipeline_response, deserialized, {}) return deserialized + list_quotas.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/quotas'} # type: ignore + diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_transformations_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_transformations_operations.py index 066f372f8baa..a4afa53f1705 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_transformations_operations.py +++ 
b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_transformations_operations.py @@ -5,16 +5,20 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import functools from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async from azure.mgmt.core.exceptions import ARMErrorFormat -from ... import models - +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._transformations_operations import build_create_or_replace_request, build_get_request, build_update_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -32,7 +36,7 @@ class TransformationsOperations: :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -40,16 +44,17 @@ def __init__(self, client, config, serializer, deserializer) -> None: self._deserialize = deserializer self._config = config + @distributed_trace_async async def create_or_replace( self, resource_group_name: str, job_name: str, transformation_name: str, - transformation: "models.Transformation", + transformation: "_models.Transformation", if_match: Optional[str] = None, if_none_match: Optional[str] = None, - **kwargs - ) -> "models.Transformation": + **kwargs: Any + ) -> "_models.Transformation": """Creates a transformation or replaces an already existing transformation under an existing streaming job. 
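For orientation, the long-running `begin_scale` operation whose polling tail appears earlier in this diff is consumed through the usual `AsyncLROPoller` pattern. A minimal call-site sketch follows; the subscription ID, resource names, and the `streaming_units` value are placeholder assumptions, not values taken from this diff:

```python
# Sketch only: drives the new begin_scale LRO from the async client.
# Subscription ID, resource names, and streaming_units are placeholders.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.streamanalytics.aio import StreamAnalyticsManagementClient
from azure.mgmt.streamanalytics.models import ScaleStreamingJobParameters


async def scale_job() -> None:
    credential = DefaultAzureCredential()
    client = StreamAnalyticsManagementClient(credential, "<subscription-id>")
    async with client, credential:
        # begin_scale returns AsyncLROPoller[None]; polling defaults to
        # AsyncARMPolling, as shown in the hunk earlier in this diff.
        poller = await client.streaming_jobs.begin_scale(
            resource_group_name="my-resource-group",
            job_name="my-streaming-job",
            scale_job_parameters=ScaleStreamingJobParameters(streaming_units=6),
        )
        await poller.result()  # returns None once the scale completes


asyncio.run(scale_job())
```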
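The `_subscriptions_operations.py` hunk above only swaps hand-built URL, query, and header construction for `build_list_quotas_request` plus `_convert_request`; the public call shape is unchanged. A sketch of the call, where the region string is a placeholder and the `current_count`/`max_count` fields are an assumption about what `SubscriptionQuota` exposes in this API version:

```python
# Sketch only: lists regional quota usage via SubscriptionsOperations.
# The region string is a placeholder; field names assume the 2020-03-01 models.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.streamanalytics.aio import StreamAnalyticsManagementClient


async def show_quotas() -> None:
    credential = DefaultAzureCredential()
    client = StreamAnalyticsManagementClient(credential, "<subscription-id>")
    async with client, credential:
        result = await client.subscriptions.list_quotas(location="West US")
        for quota in result.value or []:
            print(quota.name, quota.current_count, quota.max_count)


asyncio.run(show_quotas())
```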
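The hunks that follow make the same request-builder swap for `create_or_replace`, `update`, and `get` on `TransformationsOperations`. A sketch of creating (or replacing) a job's transformation through the async client; the resource names, `streaming_units`, and query text are illustrative assumptions:

```python
# Sketch only: upserts a job's transformation with the async client.
# Resource names, streaming_units, and the query text are placeholders.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.streamanalytics.aio import StreamAnalyticsManagementClient
from azure.mgmt.streamanalytics.models import Transformation


async def upsert_transformation() -> None:
    credential = DefaultAzureCredential()
    client = StreamAnalyticsManagementClient(credential, "<subscription-id>")
    async with client, credential:
        transformation = await client.transformations.create_or_replace(
            resource_group_name="my-resource-group",
            job_name="my-streaming-job",
            transformation_name="Transformation",
            transformation=Transformation(
                streaming_units=1,
                query="SELECT * INTO [YourOutputAlias] FROM [YourInputAlias]",
            ),
        )
        print(transformation.etag)  # ETag is surfaced via response headers


asyncio.run(upsert_transformation())
```

The ETag printed above can be passed back through the `if_match` parameter of `update` or `create_or_replace` for optimistic concurrency, as the docstrings in these hunks describe.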
@@ -75,73 +80,67 @@ async def create_or_replace( :rtype: ~stream_analytics_management_client.models.Transformation :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Transformation"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Transformation"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create_or_replace.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'transformationName': self._serialize.url("transformation_name", transformation_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(transformation, 'Transformation') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(transformation, 'Transformation') + + request = build_create_or_replace_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + transformation_name=transformation_name, + content_type=content_type, + json=_json, + if_match=if_match, + if_none_match=if_none_match, + template_url=self.create_or_replace.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Transformation', pipeline_response) if response.status_code == 201: response_headers['ETag']=self._deserialize('str', 
response.headers.get('ETag')) + deserialized = self._deserialize('Transformation', pipeline_response) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized + create_or_replace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}'} # type: ignore + + @distributed_trace_async async def update( self, resource_group_name: str, job_name: str, transformation_name: str, - transformation: "models.Transformation", + transformation: "_models.Transformation", if_match: Optional[str] = None, - **kwargs - ) -> "models.Transformation": + **kwargs: Any + ) -> "_models.Transformation": """Updates an existing transformation under an existing streaming job. This can be used to partially update (ie. update one or two properties) a transformation without affecting the rest the job or transformation definition. @@ -167,64 +166,58 @@ async def update( :rtype: ~stream_analytics_management_client.models.Transformation :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Transformation"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Transformation"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'transformationName': self._serialize.url("transformation_name", transformation_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(transformation, 'Transformation') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(transformation, 'Transformation') + + request = build_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + transformation_name=transformation_name, + content_type=content_type, + json=_json, + if_match=if_match, + template_url=self.update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = await 
self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Transformation', pipeline_response) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}'} # type: ignore + + @distributed_trace_async async def get( self, resource_group_name: str, job_name: str, transformation_name: str, - **kwargs - ) -> "models.Transformation": + **kwargs: Any + ) -> "_models.Transformation": """Gets details about the specified transformation. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -238,46 +231,40 @@ async def get( :rtype: ~stream_analytics_management_client.models.Transformation :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Transformation"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Transformation"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'transformationName': self._serialize.url("transformation_name", transformation_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + transformation_name=transformation_name, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise 
HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Transformation', pipeline_response) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}'} # type: ignore + diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/__init__.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/__init__.py index bdaa063b8a5d..cc323de0b32e 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/__init__.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/__init__.py @@ -6,240 +6,114 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -try: - from ._models_py3 import AggregateFunctionProperties - from ._models_py3 import AvroSerialization - from ._models_py3 import AzureDataLakeStoreOutputDataSource - from ._models_py3 import AzureDataLakeStoreOutputDataSourceProperties - from ._models_py3 import AzureFunctionOutputDataSource - from ._models_py3 import AzureMachineLearningServiceFunctionBinding - from ._models_py3 import AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters - from ._models_py3 import AzureMachineLearningServiceInputColumn - from ._models_py3 import AzureMachineLearningServiceInputs - from ._models_py3 import AzureMachineLearningServiceOutputColumn - from ._models_py3 import AzureMachineLearningStudioFunctionBinding - from ._models_py3 import AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters - from ._models_py3 import AzureMachineLearningStudioInputColumn - from ._models_py3 import AzureMachineLearningStudioInputs - from ._models_py3 import AzureMachineLearningStudioOutputColumn - from ._models_py3 import AzureSqlDatabaseDataSourceProperties - from ._models_py3 import AzureSqlDatabaseOutputDataSource - from ._models_py3 import AzureSqlDatabaseOutputDataSourceProperties - from ._models_py3 import AzureSqlReferenceInputDataSource - from ._models_py3 import AzureSqlReferenceInputDataSourceProperties - from ._models_py3 import AzureSynapseDataSourceProperties - from ._models_py3 import AzureSynapseOutputDataSource - from ._models_py3 import AzureSynapseOutputDataSourceProperties - from ._models_py3 import AzureTableOutputDataSource - from ._models_py3 import BlobDataSourceProperties - from ._models_py3 import BlobOutputDataSource - from ._models_py3 import BlobOutputDataSourceProperties - from ._models_py3 import BlobReferenceInputDataSource - from ._models_py3 import BlobReferenceInputDataSourceProperties - from ._models_py3 import BlobStreamInputDataSource - from ._models_py3 import BlobStreamInputDataSourceProperties - from ._models_py3 import CSharpFunctionBinding - from ._models_py3 import CSharpFunctionRetrieveDefaultDefinitionParameters - from ._models_py3 import Cluster - from ._models_py3 import ClusterInfo - from ._models_py3 import ClusterJob - from ._models_py3 import ClusterJobListResult - from ._models_py3 import ClusterListResult - from ._models_py3 import ClusterProperties - from ._models_py3 import 
ClusterSku - from ._models_py3 import Compression - from ._models_py3 import CsvSerialization - from ._models_py3 import CustomClrSerialization - from ._models_py3 import DiagnosticCondition - from ._models_py3 import Diagnostics - from ._models_py3 import DocumentDbOutputDataSource - from ._models_py3 import Error - from ._models_py3 import ErrorAutoGenerated - from ._models_py3 import ErrorDetails - from ._models_py3 import ErrorResponse - from ._models_py3 import EventHubDataSourceProperties - from ._models_py3 import EventHubOutputDataSource - from ._models_py3 import EventHubOutputDataSourceProperties - from ._models_py3 import EventHubStreamInputDataSource - from ._models_py3 import EventHubStreamInputDataSourceProperties - from ._models_py3 import EventHubV2OutputDataSource - from ._models_py3 import EventHubV2StreamInputDataSource - from ._models_py3 import External - from ._models_py3 import Function - from ._models_py3 import FunctionBinding - from ._models_py3 import FunctionInput - from ._models_py3 import FunctionListResult - from ._models_py3 import FunctionOutput - from ._models_py3 import FunctionProperties - from ._models_py3 import FunctionRetrieveDefaultDefinitionParameters - from ._models_py3 import Identity - from ._models_py3 import Input - from ._models_py3 import InputListResult - from ._models_py3 import InputProperties - from ._models_py3 import IoTHubStreamInputDataSource - from ._models_py3 import JavaScriptFunctionBinding - from ._models_py3 import JavaScriptFunctionRetrieveDefaultDefinitionParameters - from ._models_py3 import JobStorageAccount - from ._models_py3 import JsonSerialization - from ._models_py3 import OAuthBasedDataSourceProperties - from ._models_py3 import Operation - from ._models_py3 import OperationDisplay - from ._models_py3 import OperationListResult - from ._models_py3 import Output - from ._models_py3 import OutputDataSource - from ._models_py3 import OutputListResult - from ._models_py3 import ParquetSerialization - from ._models_py3 import PowerBIOutputDataSource - from ._models_py3 import PowerBIOutputDataSourceProperties - from ._models_py3 import PrivateEndpoint - from ._models_py3 import PrivateEndpointListResult - from ._models_py3 import PrivateEndpointProperties - from ._models_py3 import PrivateLinkConnectionState - from ._models_py3 import PrivateLinkServiceConnection - from ._models_py3 import ProxyResource - from ._models_py3 import ReferenceInputDataSource - from ._models_py3 import ReferenceInputProperties - from ._models_py3 import Resource - from ._models_py3 import ResourceTestStatus - from ._models_py3 import ScalarFunctionProperties - from ._models_py3 import Serialization - from ._models_py3 import ServiceBusDataSourceProperties - from ._models_py3 import ServiceBusQueueOutputDataSource - from ._models_py3 import ServiceBusQueueOutputDataSourceProperties - from ._models_py3 import ServiceBusTopicOutputDataSource - from ._models_py3 import ServiceBusTopicOutputDataSourceProperties - from ._models_py3 import StartStreamingJobParameters - from ._models_py3 import StorageAccount - from ._models_py3 import StreamInputDataSource - from ._models_py3 import StreamInputProperties - from ._models_py3 import StreamingJob - from ._models_py3 import StreamingJobListResult - from ._models_py3 import StreamingJobSku - from ._models_py3 import SubResource - from ._models_py3 import SubscriptionQuota - from ._models_py3 import SubscriptionQuotasListResult - from ._models_py3 import TrackedResource - from ._models_py3 import 
Transformation -except (SyntaxError, ImportError): - from ._models import AggregateFunctionProperties # type: ignore - from ._models import AvroSerialization # type: ignore - from ._models import AzureDataLakeStoreOutputDataSource # type: ignore - from ._models import AzureDataLakeStoreOutputDataSourceProperties # type: ignore - from ._models import AzureFunctionOutputDataSource # type: ignore - from ._models import AzureMachineLearningServiceFunctionBinding # type: ignore - from ._models import AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters # type: ignore - from ._models import AzureMachineLearningServiceInputColumn # type: ignore - from ._models import AzureMachineLearningServiceInputs # type: ignore - from ._models import AzureMachineLearningServiceOutputColumn # type: ignore - from ._models import AzureMachineLearningStudioFunctionBinding # type: ignore - from ._models import AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters # type: ignore - from ._models import AzureMachineLearningStudioInputColumn # type: ignore - from ._models import AzureMachineLearningStudioInputs # type: ignore - from ._models import AzureMachineLearningStudioOutputColumn # type: ignore - from ._models import AzureSqlDatabaseDataSourceProperties # type: ignore - from ._models import AzureSqlDatabaseOutputDataSource # type: ignore - from ._models import AzureSqlDatabaseOutputDataSourceProperties # type: ignore - from ._models import AzureSqlReferenceInputDataSource # type: ignore - from ._models import AzureSqlReferenceInputDataSourceProperties # type: ignore - from ._models import AzureSynapseDataSourceProperties # type: ignore - from ._models import AzureSynapseOutputDataSource # type: ignore - from ._models import AzureSynapseOutputDataSourceProperties # type: ignore - from ._models import AzureTableOutputDataSource # type: ignore - from ._models import BlobDataSourceProperties # type: ignore - from ._models import BlobOutputDataSource # type: ignore - from ._models import BlobOutputDataSourceProperties # type: ignore - from ._models import BlobReferenceInputDataSource # type: ignore - from ._models import BlobReferenceInputDataSourceProperties # type: ignore - from ._models import BlobStreamInputDataSource # type: ignore - from ._models import BlobStreamInputDataSourceProperties # type: ignore - from ._models import CSharpFunctionBinding # type: ignore - from ._models import CSharpFunctionRetrieveDefaultDefinitionParameters # type: ignore - from ._models import Cluster # type: ignore - from ._models import ClusterInfo # type: ignore - from ._models import ClusterJob # type: ignore - from ._models import ClusterJobListResult # type: ignore - from ._models import ClusterListResult # type: ignore - from ._models import ClusterProperties # type: ignore - from ._models import ClusterSku # type: ignore - from ._models import Compression # type: ignore - from ._models import CsvSerialization # type: ignore - from ._models import CustomClrSerialization # type: ignore - from ._models import DiagnosticCondition # type: ignore - from ._models import Diagnostics # type: ignore - from ._models import DocumentDbOutputDataSource # type: ignore - from ._models import Error # type: ignore - from ._models import ErrorAutoGenerated # type: ignore - from ._models import ErrorDetails # type: ignore - from ._models import ErrorResponse # type: ignore - from ._models import EventHubDataSourceProperties # type: ignore - from ._models import EventHubOutputDataSource # type: ignore - from 
._models import EventHubOutputDataSourceProperties # type: ignore - from ._models import EventHubStreamInputDataSource # type: ignore - from ._models import EventHubStreamInputDataSourceProperties # type: ignore - from ._models import EventHubV2OutputDataSource # type: ignore - from ._models import EventHubV2StreamInputDataSource # type: ignore - from ._models import External # type: ignore - from ._models import Function # type: ignore - from ._models import FunctionBinding # type: ignore - from ._models import FunctionInput # type: ignore - from ._models import FunctionListResult # type: ignore - from ._models import FunctionOutput # type: ignore - from ._models import FunctionProperties # type: ignore - from ._models import FunctionRetrieveDefaultDefinitionParameters # type: ignore - from ._models import Identity # type: ignore - from ._models import Input # type: ignore - from ._models import InputListResult # type: ignore - from ._models import InputProperties # type: ignore - from ._models import IoTHubStreamInputDataSource # type: ignore - from ._models import JavaScriptFunctionBinding # type: ignore - from ._models import JavaScriptFunctionRetrieveDefaultDefinitionParameters # type: ignore - from ._models import JobStorageAccount # type: ignore - from ._models import JsonSerialization # type: ignore - from ._models import OAuthBasedDataSourceProperties # type: ignore - from ._models import Operation # type: ignore - from ._models import OperationDisplay # type: ignore - from ._models import OperationListResult # type: ignore - from ._models import Output # type: ignore - from ._models import OutputDataSource # type: ignore - from ._models import OutputListResult # type: ignore - from ._models import ParquetSerialization # type: ignore - from ._models import PowerBIOutputDataSource # type: ignore - from ._models import PowerBIOutputDataSourceProperties # type: ignore - from ._models import PrivateEndpoint # type: ignore - from ._models import PrivateEndpointListResult # type: ignore - from ._models import PrivateEndpointProperties # type: ignore - from ._models import PrivateLinkConnectionState # type: ignore - from ._models import PrivateLinkServiceConnection # type: ignore - from ._models import ProxyResource # type: ignore - from ._models import ReferenceInputDataSource # type: ignore - from ._models import ReferenceInputProperties # type: ignore - from ._models import Resource # type: ignore - from ._models import ResourceTestStatus # type: ignore - from ._models import ScalarFunctionProperties # type: ignore - from ._models import Serialization # type: ignore - from ._models import ServiceBusDataSourceProperties # type: ignore - from ._models import ServiceBusQueueOutputDataSource # type: ignore - from ._models import ServiceBusQueueOutputDataSourceProperties # type: ignore - from ._models import ServiceBusTopicOutputDataSource # type: ignore - from ._models import ServiceBusTopicOutputDataSourceProperties # type: ignore - from ._models import StartStreamingJobParameters # type: ignore - from ._models import StorageAccount # type: ignore - from ._models import StreamInputDataSource # type: ignore - from ._models import StreamInputProperties # type: ignore - from ._models import StreamingJob # type: ignore - from ._models import StreamingJobListResult # type: ignore - from ._models import StreamingJobSku # type: ignore - from ._models import SubResource # type: ignore - from ._models import SubscriptionQuota # type: ignore - from ._models import SubscriptionQuotasListResult # 
type: ignore - from ._models import TrackedResource # type: ignore - from ._models import Transformation # type: ignore +from ._models_py3 import AvroSerialization +from ._models_py3 import AzureDataLakeStoreOutputDataSource +from ._models_py3 import AzureDataLakeStoreOutputDataSourceProperties +from ._models_py3 import AzureMachineLearningWebServiceFunctionBinding +from ._models_py3 import AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters +from ._models_py3 import AzureMachineLearningWebServiceInputColumn +from ._models_py3 import AzureMachineLearningWebServiceInputs +from ._models_py3 import AzureMachineLearningWebServiceOutputColumn +from ._models_py3 import AzureSqlDatabaseDataSourceProperties +from ._models_py3 import AzureSqlDatabaseOutputDataSource +from ._models_py3 import AzureSqlDatabaseOutputDataSourceProperties +from ._models_py3 import AzureSqlReferenceInputDataSource +from ._models_py3 import AzureSynapseDataSourceProperties +from ._models_py3 import AzureSynapseOutputDataSource +from ._models_py3 import AzureSynapseOutputDataSourceProperties +from ._models_py3 import AzureTableOutputDataSource +from ._models_py3 import BlobDataSourceProperties +from ._models_py3 import BlobOutputDataSource +from ._models_py3 import BlobOutputDataSourceProperties +from ._models_py3 import BlobReferenceInputDataSource +from ._models_py3 import BlobReferenceInputDataSourceProperties +from ._models_py3 import BlobStreamInputDataSource +from ._models_py3 import BlobStreamInputDataSourceProperties +from ._models_py3 import Cluster +from ._models_py3 import ClusterInfo +from ._models_py3 import ClusterJob +from ._models_py3 import ClusterJobListResult +from ._models_py3 import ClusterListResult +from ._models_py3 import ClusterSku +from ._models_py3 import Compression +from ._models_py3 import CsvSerialization +from ._models_py3 import DiagnosticCondition +from ._models_py3 import Diagnostics +from ._models_py3 import DocumentDbOutputDataSource +from ._models_py3 import Error +from ._models_py3 import ErrorDetails +from ._models_py3 import ErrorError +from ._models_py3 import ErrorResponse +from ._models_py3 import EventHubDataSourceProperties +from ._models_py3 import EventHubOutputDataSource +from ._models_py3 import EventHubOutputDataSourceProperties +from ._models_py3 import EventHubStreamInputDataSource +from ._models_py3 import EventHubStreamInputDataSourceProperties +from ._models_py3 import EventHubV2OutputDataSource +from ._models_py3 import EventHubV2StreamInputDataSource +from ._models_py3 import Function +from ._models_py3 import FunctionBinding +from ._models_py3 import FunctionInput +from ._models_py3 import FunctionListResult +from ._models_py3 import FunctionOutput +from ._models_py3 import FunctionProperties +from ._models_py3 import FunctionRetrieveDefaultDefinitionParameters +from ._models_py3 import Identity +from ._models_py3 import Input +from ._models_py3 import InputListResult +from ._models_py3 import InputProperties +from ._models_py3 import IoTHubStreamInputDataSource +from ._models_py3 import JavaScriptFunctionBinding +from ._models_py3 import JavaScriptFunctionRetrieveDefaultDefinitionParameters +from ._models_py3 import JobStorageAccount +from ._models_py3 import JsonSerialization +from ._models_py3 import OAuthBasedDataSourceProperties +from ._models_py3 import Operation +from ._models_py3 import OperationDisplay +from ._models_py3 import OperationListResult +from ._models_py3 import Output +from ._models_py3 import OutputDataSource +from 
._models_py3 import OutputListResult +from ._models_py3 import ParquetSerialization +from ._models_py3 import PowerBIOutputDataSource +from ._models_py3 import PowerBIOutputDataSourceProperties +from ._models_py3 import PrivateEndpoint +from ._models_py3 import PrivateEndpointListResult +from ._models_py3 import PrivateLinkConnectionState +from ._models_py3 import PrivateLinkServiceConnection +from ._models_py3 import ProxyResource +from ._models_py3 import ReferenceInputDataSource +from ._models_py3 import ReferenceInputProperties +from ._models_py3 import Resource +from ._models_py3 import ResourceTestStatus +from ._models_py3 import ScalarFunctionProperties +from ._models_py3 import ScaleStreamingJobParameters +from ._models_py3 import Serialization +from ._models_py3 import ServiceBusDataSourceProperties +from ._models_py3 import ServiceBusQueueOutputDataSource +from ._models_py3 import ServiceBusQueueOutputDataSourceProperties +from ._models_py3 import ServiceBusTopicOutputDataSource +from ._models_py3 import ServiceBusTopicOutputDataSourceProperties +from ._models_py3 import Sku +from ._models_py3 import StartStreamingJobParameters +from ._models_py3 import StorageAccount +from ._models_py3 import StreamInputDataSource +from ._models_py3 import StreamInputProperties +from ._models_py3 import StreamingJob +from ._models_py3 import StreamingJobListResult +from ._models_py3 import SubResource +from ._models_py3 import SubscriptionQuota +from ._models_py3 import SubscriptionQuotasListResult +from ._models_py3 import TrackedResource +from ._models_py3 import Transformation + from ._stream_analytics_management_client_enums import ( AuthenticationMode, ClusterProvisioningState, ClusterSkuName, CompatibilityLevel, + CompressionType, ContentStoragePolicy, Encoding, EventSerializationType, @@ -249,30 +123,23 @@ JsonOutputSerializationFormat, OutputErrorPolicy, OutputStartMode, - StreamingJobSkuName, + RefreshType, + SkuName, ) __all__ = [ - 'AggregateFunctionProperties', 'AvroSerialization', 'AzureDataLakeStoreOutputDataSource', 'AzureDataLakeStoreOutputDataSourceProperties', - 'AzureFunctionOutputDataSource', - 'AzureMachineLearningServiceFunctionBinding', - 'AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters', - 'AzureMachineLearningServiceInputColumn', - 'AzureMachineLearningServiceInputs', - 'AzureMachineLearningServiceOutputColumn', - 'AzureMachineLearningStudioFunctionBinding', - 'AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters', - 'AzureMachineLearningStudioInputColumn', - 'AzureMachineLearningStudioInputs', - 'AzureMachineLearningStudioOutputColumn', + 'AzureMachineLearningWebServiceFunctionBinding', + 'AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters', + 'AzureMachineLearningWebServiceInputColumn', + 'AzureMachineLearningWebServiceInputs', + 'AzureMachineLearningWebServiceOutputColumn', 'AzureSqlDatabaseDataSourceProperties', 'AzureSqlDatabaseOutputDataSource', 'AzureSqlDatabaseOutputDataSourceProperties', 'AzureSqlReferenceInputDataSource', - 'AzureSqlReferenceInputDataSourceProperties', 'AzureSynapseDataSourceProperties', 'AzureSynapseOutputDataSource', 'AzureSynapseOutputDataSourceProperties', @@ -284,24 +151,20 @@ 'BlobReferenceInputDataSourceProperties', 'BlobStreamInputDataSource', 'BlobStreamInputDataSourceProperties', - 'CSharpFunctionBinding', - 'CSharpFunctionRetrieveDefaultDefinitionParameters', 'Cluster', 'ClusterInfo', 'ClusterJob', 'ClusterJobListResult', 'ClusterListResult', - 'ClusterProperties', 
'ClusterSku', 'Compression', 'CsvSerialization', - 'CustomClrSerialization', 'DiagnosticCondition', 'Diagnostics', 'DocumentDbOutputDataSource', 'Error', - 'ErrorAutoGenerated', 'ErrorDetails', + 'ErrorError', 'ErrorResponse', 'EventHubDataSourceProperties', 'EventHubOutputDataSource', @@ -310,7 +173,6 @@ 'EventHubStreamInputDataSourceProperties', 'EventHubV2OutputDataSource', 'EventHubV2StreamInputDataSource', - 'External', 'Function', 'FunctionBinding', 'FunctionInput', @@ -339,7 +201,6 @@ 'PowerBIOutputDataSourceProperties', 'PrivateEndpoint', 'PrivateEndpointListResult', - 'PrivateEndpointProperties', 'PrivateLinkConnectionState', 'PrivateLinkServiceConnection', 'ProxyResource', @@ -348,19 +209,20 @@ 'Resource', 'ResourceTestStatus', 'ScalarFunctionProperties', + 'ScaleStreamingJobParameters', 'Serialization', 'ServiceBusDataSourceProperties', 'ServiceBusQueueOutputDataSource', 'ServiceBusQueueOutputDataSourceProperties', 'ServiceBusTopicOutputDataSource', 'ServiceBusTopicOutputDataSourceProperties', + 'Sku', 'StartStreamingJobParameters', 'StorageAccount', 'StreamInputDataSource', 'StreamInputProperties', 'StreamingJob', 'StreamingJobListResult', - 'StreamingJobSku', 'SubResource', 'SubscriptionQuota', 'SubscriptionQuotasListResult', @@ -370,6 +232,7 @@ 'ClusterProvisioningState', 'ClusterSkuName', 'CompatibilityLevel', + 'CompressionType', 'ContentStoragePolicy', 'Encoding', 'EventSerializationType', @@ -379,5 +242,6 @@ 'JsonOutputSerializationFormat', 'OutputErrorPolicy', 'OutputStartMode', - 'StreamingJobSkuName', + 'RefreshType', + 'SkuName', ] diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models.py deleted file mode 100644 index 100ff571855a..000000000000 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models.py +++ /dev/null @@ -1,4712 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from azure.core.exceptions import HttpResponseError -import msrest.serialization - - -class FunctionProperties(msrest.serialization.Model): - """The properties that are associated with a function. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AggregateFunctionProperties, ScalarFunctionProperties. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of function.Constant filled by server. - :type type: str - :ivar etag: The current entity tag for the function. This is an opaque string. You can use it - to detect whether the resource has changed between requests. You can also use it in the If- - Match or If-None-Match headers for write operations for optimistic concurrency. - :vartype etag: str - :param inputs: - :type inputs: list[~stream_analytics_management_client.models.FunctionInput] - :param output: Describes the output of a function. 
- :type output: ~stream_analytics_management_client.models.FunctionOutput - :param binding: The physical binding of the function. For example, in the Azure Machine - Learning web service’s case, this describes the endpoint. - :type binding: ~stream_analytics_management_client.models.FunctionBinding - """ - - _validation = { - 'type': {'required': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'inputs': {'key': 'properties.inputs', 'type': '[FunctionInput]'}, - 'output': {'key': 'properties.output', 'type': 'FunctionOutput'}, - 'binding': {'key': 'properties.binding', 'type': 'FunctionBinding'}, - } - - _subtype_map = { - 'type': {'Aggregate': 'AggregateFunctionProperties', 'Scalar': 'ScalarFunctionProperties'} - } - - def __init__( - self, - **kwargs - ): - super(FunctionProperties, self).__init__(**kwargs) - self.type = None # type: Optional[str] - self.etag = None - self.inputs = kwargs.get('inputs', None) - self.output = kwargs.get('output', None) - self.binding = kwargs.get('binding', None) - - -class AggregateFunctionProperties(FunctionProperties): - """The properties that are associated with an aggregate function. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of function.Constant filled by server. - :type type: str - :ivar etag: The current entity tag for the function. This is an opaque string. You can use it - to detect whether the resource has changed between requests. You can also use it in the If- - Match or If-None-Match headers for write operations for optimistic concurrency. - :vartype etag: str - :param inputs: - :type inputs: list[~stream_analytics_management_client.models.FunctionInput] - :param output: Describes the output of a function. - :type output: ~stream_analytics_management_client.models.FunctionOutput - :param binding: The physical binding of the function. For example, in the Azure Machine - Learning web service’s case, this describes the endpoint. - :type binding: ~stream_analytics_management_client.models.FunctionBinding - """ - - _validation = { - 'type': {'required': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'inputs': {'key': 'properties.inputs', 'type': '[FunctionInput]'}, - 'output': {'key': 'properties.output', 'type': 'FunctionOutput'}, - 'binding': {'key': 'properties.binding', 'type': 'FunctionBinding'}, - } - - def __init__( - self, - **kwargs - ): - super(AggregateFunctionProperties, self).__init__(**kwargs) - self.type = 'Aggregate' # type: str - - -class Serialization(msrest.serialization.Model): - """Describes how data from an input is serialized or how data is serialized when written to an output. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSerialization, CsvSerialization, CustomClrSerialization, JsonSerialization, ParquetSerialization. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of serialization that the input or output uses. - Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: - "Csv", "Avro", "Json", "CustomClr", "Parquet". 
- :type type: str or ~stream_analytics_management_client.models.EventSerializationType - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'Avro': 'AvroSerialization', 'Csv': 'CsvSerialization', 'CustomClr': 'CustomClrSerialization', 'Json': 'JsonSerialization', 'Parquet': 'ParquetSerialization'} - } - - def __init__( - self, - **kwargs - ): - super(Serialization, self).__init__(**kwargs) - self.type = None # type: Optional[str] - - -class AvroSerialization(Serialization): - """Describes how data from an input is serialized or how data is serialized when written to an output in Avro format. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of serialization that the input or output uses. - Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: - "Csv", "Avro", "Json", "CustomClr", "Parquet". - :type type: str or ~stream_analytics_management_client.models.EventSerializationType - :param properties: The properties that are associated with the Avro serialization type. - Required on PUT (CreateOrReplace) requests. - :type properties: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AvroSerialization, self).__init__(**kwargs) - self.type = 'Avro' # type: str - self.properties = kwargs.get('properties', None) - - -class OutputDataSource(msrest.serialization.Model): - """Describes the data source that output will be written to. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureFunctionOutputDataSource, AzureDataLakeStoreOutputDataSource, EventHubV2OutputDataSource, EventHubOutputDataSource, ServiceBusQueueOutputDataSource, ServiceBusTopicOutputDataSource, AzureSynapseOutputDataSource, AzureSqlDatabaseOutputDataSource, BlobOutputDataSource, DocumentDbOutputDataSource, AzureTableOutputDataSource, PowerBIOutputDataSource. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of data source output will be written to. Required on - PUT (CreateOrReplace) requests.Constant filled by server. 
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'Microsoft.AzureFunction': 'AzureFunctionOutputDataSource', 'Microsoft.DataLake/Accounts': 'AzureDataLakeStoreOutputDataSource', 'Microsoft.EventHub/EventHub': 'EventHubV2OutputDataSource', 'Microsoft.ServiceBus/EventHub': 'EventHubOutputDataSource', 'Microsoft.ServiceBus/Queue': 'ServiceBusQueueOutputDataSource', 'Microsoft.ServiceBus/Topic': 'ServiceBusTopicOutputDataSource', 'Microsoft.Sql/Server/DataWarehouse': 'AzureSynapseOutputDataSource', 'Microsoft.Sql/Server/Database': 'AzureSqlDatabaseOutputDataSource', 'Microsoft.Storage/Blob': 'BlobOutputDataSource', 'Microsoft.Storage/DocumentDB': 'DocumentDbOutputDataSource', 'Microsoft.Storage/Table': 'AzureTableOutputDataSource', 'PowerBI': 'PowerBIOutputDataSource'} - } - - def __init__( - self, - **kwargs - ): - super(OutputDataSource, self).__init__(**kwargs) - self.type = None # type: Optional[str] - - -class AzureDataLakeStoreOutputDataSource(OutputDataSource): - """Describes an Azure Data Lake Store output data source. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of data source output will be written to. Required on - PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param refresh_token: A refresh token that can be used to obtain a valid access token that can - then be used to authenticate with the data source. A valid refresh token is currently only - obtainable via the Azure Portal. It is recommended to put a dummy string value here when - creating the data source and then going to the Azure Portal to authenticate the data source - which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) - requests. - :type refresh_token: str - :param token_user_principal_name: The user principal name (UPN) of the user that was used to - obtain the refresh token. Use this property to help remember which user was used to obtain the - refresh token. - :type token_user_principal_name: str - :param token_user_display_name: The user display name of the user that was used to obtain the - refresh token. Use this property to help remember which user was used to obtain the refresh - token. - :type token_user_display_name: str - :param account_name: The name of the Azure Data Lake Store account. Required on PUT - (CreateOrReplace) requests. - :type account_name: str - :param tenant_id: The tenant id of the user used to obtain the refresh token. Required on PUT - (CreateOrReplace) requests. - :type tenant_id: str - :param file_path_prefix: The location of the file to which the output should be written to. - Required on PUT (CreateOrReplace) requests. - :type file_path_prefix: str - :param date_format: The date format. Wherever {date} appears in filePathPrefix, the value of - this property is used as the date format instead. - :type date_format: str - :param time_format: The time format. Wherever {time} appears in filePathPrefix, the value of - this property is used as the time format instead. - :type time_format: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", - "ConnectionString". 
- :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'refresh_token': {'key': 'properties.refreshToken', 'type': 'str'}, - 'token_user_principal_name': {'key': 'properties.tokenUserPrincipalName', 'type': 'str'}, - 'token_user_display_name': {'key': 'properties.tokenUserDisplayName', 'type': 'str'}, - 'account_name': {'key': 'properties.accountName', 'type': 'str'}, - 'tenant_id': {'key': 'properties.tenantId', 'type': 'str'}, - 'file_path_prefix': {'key': 'properties.filePathPrefix', 'type': 'str'}, - 'date_format': {'key': 'properties.dateFormat', 'type': 'str'}, - 'time_format': {'key': 'properties.timeFormat', 'type': 'str'}, - 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureDataLakeStoreOutputDataSource, self).__init__(**kwargs) - self.type = 'Microsoft.DataLake/Accounts' # type: str - self.refresh_token = kwargs.get('refresh_token', None) - self.token_user_principal_name = kwargs.get('token_user_principal_name', None) - self.token_user_display_name = kwargs.get('token_user_display_name', None) - self.account_name = kwargs.get('account_name', None) - self.tenant_id = kwargs.get('tenant_id', None) - self.file_path_prefix = kwargs.get('file_path_prefix', None) - self.date_format = kwargs.get('date_format', None) - self.time_format = kwargs.get('time_format', None) - self.authentication_mode = kwargs.get('authentication_mode', None) - - -class OAuthBasedDataSourceProperties(msrest.serialization.Model): - """The properties that are associated with data sources that use OAuth as their authentication model. - - :param refresh_token: A refresh token that can be used to obtain a valid access token that can - then be used to authenticate with the data source. A valid refresh token is currently only - obtainable via the Azure Portal. It is recommended to put a dummy string value here when - creating the data source and then going to the Azure Portal to authenticate the data source - which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) - requests. - :type refresh_token: str - :param token_user_principal_name: The user principal name (UPN) of the user that was used to - obtain the refresh token. Use this property to help remember which user was used to obtain the - refresh token. - :type token_user_principal_name: str - :param token_user_display_name: The user display name of the user that was used to obtain the - refresh token. Use this property to help remember which user was used to obtain the refresh - token. - :type token_user_display_name: str - """ - - _attribute_map = { - 'refresh_token': {'key': 'refreshToken', 'type': 'str'}, - 'token_user_principal_name': {'key': 'tokenUserPrincipalName', 'type': 'str'}, - 'token_user_display_name': {'key': 'tokenUserDisplayName', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(OAuthBasedDataSourceProperties, self).__init__(**kwargs) - self.refresh_token = kwargs.get('refresh_token', None) - self.token_user_principal_name = kwargs.get('token_user_principal_name', None) - self.token_user_display_name = kwargs.get('token_user_display_name', None) - - -class AzureDataLakeStoreOutputDataSourceProperties(OAuthBasedDataSourceProperties): - """The properties that are associated with an Azure Data Lake Store. 
- - :param refresh_token: A refresh token that can be used to obtain a valid access token that can - then be used to authenticate with the data source. A valid refresh token is currently only - obtainable via the Azure Portal. It is recommended to put a dummy string value here when - creating the data source and then going to the Azure Portal to authenticate the data source - which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) - requests. - :type refresh_token: str - :param token_user_principal_name: The user principal name (UPN) of the user that was used to - obtain the refresh token. Use this property to help remember which user was used to obtain the - refresh token. - :type token_user_principal_name: str - :param token_user_display_name: The user display name of the user that was used to obtain the - refresh token. Use this property to help remember which user was used to obtain the refresh - token. - :type token_user_display_name: str - :param account_name: The name of the Azure Data Lake Store account. Required on PUT - (CreateOrReplace) requests. - :type account_name: str - :param tenant_id: The tenant id of the user used to obtain the refresh token. Required on PUT - (CreateOrReplace) requests. - :type tenant_id: str - :param file_path_prefix: The location of the file to which the output should be written to. - Required on PUT (CreateOrReplace) requests. - :type file_path_prefix: str - :param date_format: The date format. Wherever {date} appears in filePathPrefix, the value of - this property is used as the date format instead. - :type date_format: str - :param time_format: The time format. Wherever {time} appears in filePathPrefix, the value of - this property is used as the time format instead. - :type time_format: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", - "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode - """ - - _attribute_map = { - 'refresh_token': {'key': 'refreshToken', 'type': 'str'}, - 'token_user_principal_name': {'key': 'tokenUserPrincipalName', 'type': 'str'}, - 'token_user_display_name': {'key': 'tokenUserDisplayName', 'type': 'str'}, - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - 'file_path_prefix': {'key': 'filePathPrefix', 'type': 'str'}, - 'date_format': {'key': 'dateFormat', 'type': 'str'}, - 'time_format': {'key': 'timeFormat', 'type': 'str'}, - 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureDataLakeStoreOutputDataSourceProperties, self).__init__(**kwargs) - self.account_name = kwargs.get('account_name', None) - self.tenant_id = kwargs.get('tenant_id', None) - self.file_path_prefix = kwargs.get('file_path_prefix', None) - self.date_format = kwargs.get('date_format', None) - self.time_format = kwargs.get('time_format', None) - self.authentication_mode = kwargs.get('authentication_mode', None) - - -class AzureFunctionOutputDataSource(OutputDataSource): - """Defines the metadata of AzureFunctionOutputDataSource. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of data source output will be written to. Required on - PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param function_app_name: The name of your Azure Functions app. 
- :type function_app_name: str - :param function_name: The name of the function in your Azure Functions app. - :type function_name: str - :param api_key: If you want to use an Azure Function from another subscription, you can do so - by providing the key to access your function. - :type api_key: str - :param max_batch_size: A property that lets you set the maximum size for each output batch - that's sent to your Azure function. The input unit is in bytes. By default, this value is - 262,144 bytes (256 KB). - :type max_batch_size: float - :param max_batch_count: A property that lets you specify the maximum number of events in each - batch that's sent to Azure Functions. The default value is 100. - :type max_batch_count: float - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'function_app_name': {'key': 'properties.functionAppName', 'type': 'str'}, - 'function_name': {'key': 'properties.functionName', 'type': 'str'}, - 'api_key': {'key': 'properties.apiKey', 'type': 'str'}, - 'max_batch_size': {'key': 'properties.maxBatchSize', 'type': 'float'}, - 'max_batch_count': {'key': 'properties.maxBatchCount', 'type': 'float'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureFunctionOutputDataSource, self).__init__(**kwargs) - self.type = 'Microsoft.AzureFunction' # type: str - self.function_app_name = kwargs.get('function_app_name', None) - self.function_name = kwargs.get('function_name', None) - self.api_key = kwargs.get('api_key', None) - self.max_batch_size = kwargs.get('max_batch_size', None) - self.max_batch_count = kwargs.get('max_batch_count', None) - - -class FunctionBinding(msrest.serialization.Model): - """The physical binding of the function. For example, in the Azure Machine Learning web service’s case, this describes the endpoint. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureMachineLearningStudioFunctionBinding, AzureMachineLearningServiceFunctionBinding, CSharpFunctionBinding, JavaScriptFunctionBinding. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the function binding type.Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'Microsoft.MachineLearning/WebService': 'AzureMachineLearningStudioFunctionBinding', 'Microsoft.MachineLearningServices': 'AzureMachineLearningServiceFunctionBinding', 'Microsoft.StreamAnalytics/CLRUdf': 'CSharpFunctionBinding', 'Microsoft.StreamAnalytics/JavascriptUdf': 'JavaScriptFunctionBinding'} - } - - def __init__( - self, - **kwargs - ): - super(FunctionBinding, self).__init__(**kwargs) - self.type = None # type: Optional[str] - - -class AzureMachineLearningServiceFunctionBinding(FunctionBinding): - """The binding to an Azure Machine Learning web service. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the function binding type.Constant filled by server. - :type type: str - :param endpoint: The Request-Response execute endpoint of the Azure Machine Learning web - service. - :type endpoint: str - :param api_key: The API key used to authenticate with Request-Response endpoint. - :type api_key: str - :param inputs: The inputs for the Azure Machine Learning web service endpoint. 
- :type inputs: - list[~stream_analytics_management_client.models.AzureMachineLearningServiceInputColumn] - :param outputs: A list of outputs from the Azure Machine Learning web service endpoint - execution. - :type outputs: - list[~stream_analytics_management_client.models.AzureMachineLearningServiceOutputColumn] - :param batch_size: Number between 1 and 10000 describing maximum number of rows for every Azure - ML RRS execute request. Default is 1000. - :type batch_size: int - :param number_of_parallel_requests: The number of parallel requests that will be sent per - partition of your job to the machine learning service. Default is 1. - :type number_of_parallel_requests: int - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'properties.endpoint', 'type': 'str'}, - 'api_key': {'key': 'properties.apiKey', 'type': 'str'}, - 'inputs': {'key': 'properties.inputs', 'type': '[AzureMachineLearningServiceInputColumn]'}, - 'outputs': {'key': 'properties.outputs', 'type': '[AzureMachineLearningServiceOutputColumn]'}, - 'batch_size': {'key': 'properties.batchSize', 'type': 'int'}, - 'number_of_parallel_requests': {'key': 'properties.numberOfParallelRequests', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureMachineLearningServiceFunctionBinding, self).__init__(**kwargs) - self.type = 'Microsoft.MachineLearningServices' # type: str - self.endpoint = kwargs.get('endpoint', None) - self.api_key = kwargs.get('api_key', None) - self.inputs = kwargs.get('inputs', None) - self.outputs = kwargs.get('outputs', None) - self.batch_size = kwargs.get('batch_size', None) - self.number_of_parallel_requests = kwargs.get('number_of_parallel_requests', None) - - -class FunctionRetrieveDefaultDefinitionParameters(msrest.serialization.Model): - """Parameters used to specify the type of function to retrieve the default definition for. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters, AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters, CSharpFunctionRetrieveDefaultDefinitionParameters, JavaScriptFunctionRetrieveDefaultDefinitionParameters. - - All required parameters must be populated in order to send to Azure. - - :param binding_type: Required. Indicates the function binding type.Constant filled by server. - :type binding_type: str - """ - - _validation = { - 'binding_type': {'required': True}, - } - - _attribute_map = { - 'binding_type': {'key': 'bindingType', 'type': 'str'}, - } - - _subtype_map = { - 'binding_type': {'Microsoft.MachineLearning/WebService': 'AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters', 'Microsoft.MachineLearningServices': 'AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters', 'Microsoft.StreamAnalytics/CLRUdf': 'CSharpFunctionRetrieveDefaultDefinitionParameters', 'Microsoft.StreamAnalytics/JavascriptUdf': 'JavaScriptFunctionRetrieveDefaultDefinitionParameters'} - } - - def __init__( - self, - **kwargs - ): - super(FunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) - self.binding_type = None # type: Optional[str] - - -class AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): - """The parameters needed to retrieve the default function definition for an Azure Machine Learning web service function. 
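
# The _subtype_map tables above drive msrest's polymorphic deserialization: deserializing via
# the base class returns the subclass matching the discriminator. A hedged sketch using msrest's
# Model.deserialize(); the payload is illustrative.
from azure.mgmt.streamanalytics.models import FunctionBinding, JavaScriptFunctionBinding

payload = {
    "type": "Microsoft.StreamAnalytics/JavascriptUdf",
    "properties": {"script": "function (x) { return 2 * x; }"},
}
binding = FunctionBinding.deserialize(payload)
assert isinstance(binding, JavaScriptFunctionBinding)  # chosen via _subtype_map on 'type'
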
- - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param binding_type: Required. Indicates the function binding type.Constant filled by server. - :type binding_type: str - :param execute_endpoint: The Request-Response execute endpoint of the Azure Machine Learning - web service. - :type execute_endpoint: str - :ivar udf_type: The function type. Default value: "Scalar". - :vartype udf_type: str - """ - - _validation = { - 'binding_type': {'required': True}, - 'udf_type': {'constant': True}, - } - - _attribute_map = { - 'binding_type': {'key': 'bindingType', 'type': 'str'}, - 'execute_endpoint': {'key': 'bindingRetrievalProperties.executeEndpoint', 'type': 'str'}, - 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'str'}, - } - - udf_type = "Scalar" - - def __init__( - self, - **kwargs - ): - super(AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) - self.binding_type = 'Microsoft.MachineLearningServices' # type: str - self.execute_endpoint = kwargs.get('execute_endpoint', None) - - -class AzureMachineLearningServiceInputColumn(msrest.serialization.Model): - """Describes an input column for the Azure Machine Learning web service endpoint. - - :param name: The name of the input column. - :type name: str - :param data_type: The (Azure Machine Learning supported) data type of the input column. - :type data_type: str - :param map_to: The zero based index of the function parameter this input maps to. - :type map_to: int - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'map_to': {'key': 'mapTo', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureMachineLearningServiceInputColumn, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.data_type = kwargs.get('data_type', None) - self.map_to = kwargs.get('map_to', None) - - -class AzureMachineLearningServiceInputs(msrest.serialization.Model): - """The inputs for the Azure Machine Learning web service endpoint. - - :param name: The name of the input. This is the name provided while authoring the endpoint. - :type name: str - :param column_names: A list of input columns for the Azure Machine Learning web service - endpoint. - :type column_names: - list[~stream_analytics_management_client.models.AzureMachineLearningServiceInputColumn] - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'column_names': {'key': 'columnNames', 'type': '[AzureMachineLearningServiceInputColumn]'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureMachineLearningServiceInputs, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.column_names = kwargs.get('column_names', None) - - -class AzureMachineLearningServiceOutputColumn(msrest.serialization.Model): - """Describes an output column for the Azure Machine Learning web service endpoint. - - :param name: The name of the output column. - :type name: str - :param data_type: The (Azure Machine Learning supported) data type of the output column. - :type data_type: str - :param map_to: The zero based index of the function parameter this input maps to. 
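
# How the input-column mapping fits together: each AzureMachineLearningServiceInputColumn binds
# a named stream column to a zero-based function parameter index via map_to. A hedged sketch
# with placeholder endpoint and key:
from azure.mgmt.streamanalytics.models import (
    AzureMachineLearningServiceFunctionBinding,
    AzureMachineLearningServiceInputColumn,
)

aml_binding = AzureMachineLearningServiceFunctionBinding(
    endpoint="https://example.azureml.net/score",  # placeholder RRS endpoint
    api_key="<api-key>",                           # placeholder secret
    inputs=[
        AzureMachineLearningServiceInputColumn(name="tweet", data_type="nvarchar(max)", map_to=0),
        AzureMachineLearningServiceInputColumn(name="lang", data_type="nvarchar(max)", map_to=1),
    ],
    batch_size=1000,                # docstring default: max rows per RRS request
    number_of_parallel_requests=1,  # docstring default
)
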
- :type map_to: int - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'map_to': {'key': 'mapTo', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureMachineLearningServiceOutputColumn, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.data_type = kwargs.get('data_type', None) - self.map_to = kwargs.get('map_to', None) - - -class AzureMachineLearningStudioFunctionBinding(FunctionBinding): - """The binding to an Azure Machine Learning Studio. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the function binding type.Constant filled by server. - :type type: str - :param endpoint: The Request-Response execute endpoint of the Azure Machine Learning Studio. - Find out more here: https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning- - consume-web-services#request-response-service-rrs. - :type endpoint: str - :param api_key: The API key used to authenticate with Request-Response endpoint. - :type api_key: str - :param inputs: The inputs for the Azure Machine Learning Studio endpoint. - :type inputs: ~stream_analytics_management_client.models.AzureMachineLearningStudioInputs - :param outputs: A list of outputs from the Azure Machine Learning Studio endpoint execution. - :type outputs: - list[~stream_analytics_management_client.models.AzureMachineLearningStudioOutputColumn] - :param batch_size: Number between 1 and 10000 describing maximum number of rows for every Azure - ML RRS execute request. Default is 1000. - :type batch_size: int - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'properties.endpoint', 'type': 'str'}, - 'api_key': {'key': 'properties.apiKey', 'type': 'str'}, - 'inputs': {'key': 'properties.inputs', 'type': 'AzureMachineLearningStudioInputs'}, - 'outputs': {'key': 'properties.outputs', 'type': '[AzureMachineLearningStudioOutputColumn]'}, - 'batch_size': {'key': 'properties.batchSize', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureMachineLearningStudioFunctionBinding, self).__init__(**kwargs) - self.type = 'Microsoft.MachineLearning/WebService' # type: str - self.endpoint = kwargs.get('endpoint', None) - self.api_key = kwargs.get('api_key', None) - self.inputs = kwargs.get('inputs', None) - self.outputs = kwargs.get('outputs', None) - self.batch_size = kwargs.get('batch_size', None) - - -class AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): - """The parameters needed to retrieve the default function definition for an Azure Machine Learning Studio function. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param binding_type: Required. Indicates the function binding type.Constant filled by server. - :type binding_type: str - :param execute_endpoint: The Request-Response execute endpoint of the Azure Machine Learning - Studio. Find out more here: https://docs.microsoft.com/en-us/azure/machine-learning/machine- - learning-consume-web-services#request-response-service-rrs. - :type execute_endpoint: str - :ivar udf_type: The function type. Default value: "Scalar". 
- :vartype udf_type: str - """ - - _validation = { - 'binding_type': {'required': True}, - 'udf_type': {'constant': True}, - } - - _attribute_map = { - 'binding_type': {'key': 'bindingType', 'type': 'str'}, - 'execute_endpoint': {'key': 'bindingRetrievalProperties.executeEndpoint', 'type': 'str'}, - 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'str'}, - } - - udf_type = "Scalar" - - def __init__( - self, - **kwargs - ): - super(AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) - self.binding_type = 'Microsoft.MachineLearning/WebService' # type: str - self.execute_endpoint = kwargs.get('execute_endpoint', None) - - -class AzureMachineLearningStudioInputColumn(msrest.serialization.Model): - """Describes an input column for the Azure Machine Learning Studio endpoint. - - :param name: The name of the input column. - :type name: str - :param data_type: The (Azure Machine Learning supported) data type of the input column. A list - of valid Azure Machine Learning data types are described at https://msdn.microsoft.com/en- - us/library/azure/dn905923.aspx . - :type data_type: str - :param map_to: The zero based index of the function parameter this input maps to. - :type map_to: int - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'map_to': {'key': 'mapTo', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureMachineLearningStudioInputColumn, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.data_type = kwargs.get('data_type', None) - self.map_to = kwargs.get('map_to', None) - - -class AzureMachineLearningStudioInputs(msrest.serialization.Model): - """The inputs for the Azure Machine Learning Studio endpoint. - - :param name: The name of the input. This is the name provided while authoring the endpoint. - :type name: str - :param column_names: A list of input columns for the Azure Machine Learning Studio endpoint. - :type column_names: - list[~stream_analytics_management_client.models.AzureMachineLearningStudioInputColumn] - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'column_names': {'key': 'columnNames', 'type': '[AzureMachineLearningStudioInputColumn]'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureMachineLearningStudioInputs, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.column_names = kwargs.get('column_names', None) - - -class AzureMachineLearningStudioOutputColumn(msrest.serialization.Model): - """Describes an output column for the Azure Machine Learning Studio endpoint. - - :param name: The name of the output column. - :type name: str - :param data_type: The (Azure Machine Learning supported) data type of the output column. A list - of valid Azure Machine Learning data types are described at https://msdn.microsoft.com/en- - us/library/azure/dn905923.aspx . - :type data_type: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureMachineLearningStudioOutputColumn, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.data_type = kwargs.get('data_type', None) - - -class AzureSqlDatabaseDataSourceProperties(msrest.serialization.Model): - """The properties that are associated with an Azure SQL database data source. - - :param server: The name of the SQL server containing the Azure SQL database. 
Required on PUT
-     (CreateOrReplace) requests.
-    :type server: str
-    :param database: The name of the Azure SQL database. Required on PUT (CreateOrReplace)
-     requests.
-    :type database: str
-    :param user: The user name that will be used to connect to the Azure SQL database. Required on
-     PUT (CreateOrReplace) requests.
-    :type user: str
-    :param password: The password that will be used to connect to the Azure SQL database. Required
-     on PUT (CreateOrReplace) requests.
-    :type password: str
-    :param table: The name of the table in the Azure SQL database. Required on PUT
-     (CreateOrReplace) requests.
-    :type table: str
-    :param max_batch_count: Max Batch count for write to SQL database, the default value is 10,000.
-     Optional on PUT requests.
-    :type max_batch_count: float
-    :param max_writer_count: Max Writer count, currently only 1 (single writer) and 0 (based on
-     query partition) are available. Optional on PUT requests.
-    :type max_writer_count: float
-    :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken",
-     "ConnectionString".
-    :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode
-    """
-
-    _attribute_map = {
-        'server': {'key': 'server', 'type': 'str'},
-        'database': {'key': 'database', 'type': 'str'},
-        'user': {'key': 'user', 'type': 'str'},
-        'password': {'key': 'password', 'type': 'str'},
-        'table': {'key': 'table', 'type': 'str'},
-        'max_batch_count': {'key': 'maxBatchCount', 'type': 'float'},
-        'max_writer_count': {'key': 'maxWriterCount', 'type': 'float'},
-        'authentication_mode': {'key': 'authenticationMode', 'type': 'str'},
-    }
-
-    def __init__(
-        self,
-        **kwargs
-    ):
-        super(AzureSqlDatabaseDataSourceProperties, self).__init__(**kwargs)
-        self.server = kwargs.get('server', None)
-        self.database = kwargs.get('database', None)
-        self.user = kwargs.get('user', None)
-        self.password = kwargs.get('password', None)
-        self.table = kwargs.get('table', None)
-        self.max_batch_count = kwargs.get('max_batch_count', None)
-        self.max_writer_count = kwargs.get('max_writer_count', None)
-        self.authentication_mode = kwargs.get('authentication_mode', None)
-
-
-class AzureSqlDatabaseOutputDataSource(OutputDataSource):
-    """Describes an Azure SQL database output data source.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param type: Required. Indicates the type of data source output will be written to. Required on
-     PUT (CreateOrReplace) requests.Constant filled by server.
-    :type type: str
-    :param server: The name of the SQL server containing the Azure SQL database. Required on PUT
-     (CreateOrReplace) requests.
-    :type server: str
-    :param database: The name of the Azure SQL database. Required on PUT (CreateOrReplace)
-     requests.
-    :type database: str
-    :param user: The user name that will be used to connect to the Azure SQL database. Required on
-     PUT (CreateOrReplace) requests.
-    :type user: str
-    :param password: The password that will be used to connect to the Azure SQL database. Required
-     on PUT (CreateOrReplace) requests.
-    :type password: str
-    :param table: The name of the table in the Azure SQL database. Required on PUT
-     (CreateOrReplace) requests.
-    :type table: str
-    :param max_batch_count: Max Batch count for write to SQL database, the default value is 10,000.
-     Optional on PUT requests.
-    :type max_batch_count: float
-    :param max_writer_count: Max Writer count, currently only 1 (single writer) and 0 (based on
-     query partition) are available. Optional on PUT requests.
-    :type max_writer_count: float
-    :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken",
-     "ConnectionString".
-    :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'type': {'key': 'type', 'type': 'str'},
-        'server': {'key': 'properties.server', 'type': 'str'},
-        'database': {'key': 'properties.database', 'type': 'str'},
-        'user': {'key': 'properties.user', 'type': 'str'},
-        'password': {'key': 'properties.password', 'type': 'str'},
-        'table': {'key': 'properties.table', 'type': 'str'},
-        'max_batch_count': {'key': 'properties.maxBatchCount', 'type': 'float'},
-        'max_writer_count': {'key': 'properties.maxWriterCount', 'type': 'float'},
-        'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'},
-    }
-
-    def __init__(
-        self,
-        **kwargs
-    ):
-        super(AzureSqlDatabaseOutputDataSource, self).__init__(**kwargs)
-        self.type = 'Microsoft.Sql/Server/Database'  # type: str
-        self.server = kwargs.get('server', None)
-        self.database = kwargs.get('database', None)
-        self.user = kwargs.get('user', None)
-        self.password = kwargs.get('password', None)
-        self.table = kwargs.get('table', None)
-        self.max_batch_count = kwargs.get('max_batch_count', None)
-        self.max_writer_count = kwargs.get('max_writer_count', None)
-        self.authentication_mode = kwargs.get('authentication_mode', None)
-
-
-class AzureSqlDatabaseOutputDataSourceProperties(AzureSqlDatabaseDataSourceProperties):
-    """The properties that are associated with an Azure SQL database output.
-
-    :param server: The name of the SQL server containing the Azure SQL database. Required on PUT
-     (CreateOrReplace) requests.
-    :type server: str
-    :param database: The name of the Azure SQL database. Required on PUT (CreateOrReplace)
-     requests.
-    :type database: str
-    :param user: The user name that will be used to connect to the Azure SQL database. Required on
-     PUT (CreateOrReplace) requests.
-    :type user: str
-    :param password: The password that will be used to connect to the Azure SQL database. Required
-     on PUT (CreateOrReplace) requests.
-    :type password: str
-    :param table: The name of the table in the Azure SQL database. Required on PUT
-     (CreateOrReplace) requests.
-    :type table: str
-    :param max_batch_count: Max Batch count for write to SQL database, the default value is 10,000.
-     Optional on PUT requests.
-    :type max_batch_count: float
-    :param max_writer_count: Max Writer count, currently only 1 (single writer) and 0 (based on
-     query partition) are available. Optional on PUT requests.
-    :type max_writer_count: float
-    :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken",
-     "ConnectionString".
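
# Pulling the SQL output model together, a hedged sketch with placeholder values;
# max_writer_count=0 asks for one writer per query partition, 1 for a single writer.
from azure.mgmt.streamanalytics.models import AzureSqlDatabaseOutputDataSource

sql_out = AzureSqlDatabaseOutputDataSource(
    server="myserver",          # placeholder logical SQL server
    database="mydb",
    user="sa_user",
    password="<password>",      # placeholder secret
    table="Events",
    max_batch_count=10000,      # docstring default
    max_writer_count=0,         # parallel writers, one per query partition
    authentication_mode="ConnectionString",
)
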
- :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode - """ - - _attribute_map = { - 'server': {'key': 'server', 'type': 'str'}, - 'database': {'key': 'database', 'type': 'str'}, - 'user': {'key': 'user', 'type': 'str'}, - 'password': {'key': 'password', 'type': 'str'}, - 'table': {'key': 'table', 'type': 'str'}, - 'max_batch_count': {'key': 'maxBatchCount', 'type': 'float'}, - 'max_writer_count': {'key': 'maxWriterCount', 'type': 'float'}, - 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureSqlDatabaseOutputDataSourceProperties, self).__init__(**kwargs) - - -class ReferenceInputDataSource(msrest.serialization.Model): - """Describes an input data source that contains reference data. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureSqlReferenceInputDataSource, BlobReferenceInputDataSource. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of input data source containing reference data. - Required on PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'Microsoft.Sql/Server/Database': 'AzureSqlReferenceInputDataSource', 'Microsoft.Storage/Blob': 'BlobReferenceInputDataSource'} - } - - def __init__( - self, - **kwargs - ): - super(ReferenceInputDataSource, self).__init__(**kwargs) - self.type = None # type: Optional[str] - - -class AzureSqlReferenceInputDataSource(ReferenceInputDataSource): - """Describes an Azure SQL database reference input data source. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of input data source containing reference data. - Required on PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param properties: - :type properties: - ~stream_analytics_management_client.models.AzureSqlReferenceInputDataSourceProperties - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'AzureSqlReferenceInputDataSourceProperties'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureSqlReferenceInputDataSource, self).__init__(**kwargs) - self.type = 'Microsoft.Sql/Server/Database' # type: str - self.properties = kwargs.get('properties', None) - - -class AzureSqlReferenceInputDataSourceProperties(msrest.serialization.Model): - """AzureSqlReferenceInputDataSourceProperties. - - :param server: This element is associated with the datasource element. This is the name of the - server that contains the database that will be written to. - :type server: str - :param database: This element is associated with the datasource element. This is the name of - the database that output will be written to. - :type database: str - :param user: This element is associated with the datasource element. This is the user name that - will be used to connect to the SQL Database instance. - :type user: str - :param password: This element is associated with the datasource element. This is the password - that will be used to connect to the SQL Database instance. - :type password: str - :param table: This element is associated with the datasource element. 
The name of the
-     table in the Azure SQL database.
-    :type table: str
-    :param refresh_type: This element is associated with the datasource element. This element is of
-     enum type. It indicates which data refresh option to use: Static,
-     RefreshPeriodicallyWithFull, or RefreshPeriodicallyWithDelta.
-    :type refresh_type: str
-    :param refresh_rate: This element is associated with the datasource element. This indicates how
-     frequently the data will be fetched from the database. It is of DateTime format.
-    :type refresh_rate: str
-    :param full_snapshot_query: This element is associated with the datasource element. This query
-     is used to fetch data from the SQL database.
-    :type full_snapshot_query: str
-    :param delta_snapshot_query: This element is associated with the datasource element. This query
-     is used to fetch incremental changes from the SQL database. To use this option, we recommend
-     using temporal tables in Azure SQL Database.
-    :type delta_snapshot_query: str
-    """
-
-    _attribute_map = {
-        'server': {'key': 'server', 'type': 'str'},
-        'database': {'key': 'database', 'type': 'str'},
-        'user': {'key': 'user', 'type': 'str'},
-        'password': {'key': 'password', 'type': 'str'},
-        'table': {'key': 'table', 'type': 'str'},
-        'refresh_type': {'key': 'refreshType', 'type': 'str'},
-        'refresh_rate': {'key': 'refreshRate', 'type': 'str'},
-        'full_snapshot_query': {'key': 'fullSnapshotQuery', 'type': 'str'},
-        'delta_snapshot_query': {'key': 'deltaSnapshotQuery', 'type': 'str'},
-    }
-
-    def __init__(
-        self,
-        **kwargs
-    ):
-        super(AzureSqlReferenceInputDataSourceProperties, self).__init__(**kwargs)
-        self.server = kwargs.get('server', None)
-        self.database = kwargs.get('database', None)
-        self.user = kwargs.get('user', None)
-        self.password = kwargs.get('password', None)
-        self.table = kwargs.get('table', None)
-        self.refresh_type = kwargs.get('refresh_type', None)
-        self.refresh_rate = kwargs.get('refresh_rate', None)
-        self.full_snapshot_query = kwargs.get('full_snapshot_query', None)
-        self.delta_snapshot_query = kwargs.get('delta_snapshot_query', None)
-
-
-class AzureSynapseDataSourceProperties(msrest.serialization.Model):
-    """The properties that are associated with an Azure SQL database data source.
-
-    :param server: The name of the SQL server containing the Azure SQL database. Required on PUT
-     (CreateOrReplace) requests.
-    :type server: str
-    :param database: The name of the Azure SQL database. Required on PUT (CreateOrReplace)
-     requests.
-    :type database: str
-    :param table: The name of the table in the Azure SQL database. Required on PUT
-     (CreateOrReplace) requests.
-    :type table: str
-    :param user: The user name that will be used to connect to the Azure SQL database. Required on
-     PUT (CreateOrReplace) requests.
-    :type user: str
-    :param password: The password that will be used to connect to the Azure SQL database. Required
-     on PUT (CreateOrReplace) requests.
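
# The reference-input shape above nests all SQL settings under 'properties'. A hedged sketch of
# a periodically refreshed reference input (placeholder values):
from azure.mgmt.streamanalytics.models import (
    AzureSqlReferenceInputDataSource,
    AzureSqlReferenceInputDataSourceProperties,
)

ref_input = AzureSqlReferenceInputDataSource(
    properties=AzureSqlReferenceInputDataSourceProperties(
        server="myserver",
        database="refdata",
        user="reader",
        password="<password>",                       # placeholder secret
        refresh_type="RefreshPeriodicallyWithFull",  # or Static / RefreshPeriodicallyWithDelta
        refresh_rate="00:05:00",                     # DateTime-format interval per the docstring
        full_snapshot_query="SELECT Id, Value FROM dbo.Reference",
    )
)
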
- :type password: str - """ - - _attribute_map = { - 'server': {'key': 'server', 'type': 'str'}, - 'database': {'key': 'database', 'type': 'str'}, - 'table': {'key': 'table', 'type': 'str'}, - 'user': {'key': 'user', 'type': 'str'}, - 'password': {'key': 'password', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureSynapseDataSourceProperties, self).__init__(**kwargs) - self.server = kwargs.get('server', None) - self.database = kwargs.get('database', None) - self.table = kwargs.get('table', None) - self.user = kwargs.get('user', None) - self.password = kwargs.get('password', None) - - -class AzureSynapseOutputDataSource(OutputDataSource): - """Describes an Azure Synapse output data source. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of data source output will be written to. Required on - PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param server: The name of the SQL server containing the Azure SQL database. Required on PUT - (CreateOrReplace) requests. - :type server: str - :param database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) - requests. - :type database: str - :param table: The name of the table in the Azure SQL database. Required on PUT - (CreateOrReplace) requests. - :type table: str - :param user: The user name that will be used to connect to the Azure SQL database. Required on - PUT (CreateOrReplace) requests. - :type user: str - :param password: The password that will be used to connect to the Azure SQL database. Required - on PUT (CreateOrReplace) requests. - :type password: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'server': {'key': 'properties.server', 'type': 'str'}, - 'database': {'key': 'properties.database', 'type': 'str'}, - 'table': {'key': 'properties.table', 'type': 'str'}, - 'user': {'key': 'properties.user', 'type': 'str'}, - 'password': {'key': 'properties.password', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureSynapseOutputDataSource, self).__init__(**kwargs) - self.type = 'Microsoft.Sql/Server/DataWarehouse' # type: str - self.server = kwargs.get('server', None) - self.database = kwargs.get('database', None) - self.table = kwargs.get('table', None) - self.user = kwargs.get('user', None) - self.password = kwargs.get('password', None) - - -class AzureSynapseOutputDataSourceProperties(AzureSynapseDataSourceProperties): - """The properties that are associated with an Azure Synapse output. - - :param server: The name of the SQL server containing the Azure SQL database. Required on PUT - (CreateOrReplace) requests. - :type server: str - :param database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) - requests. - :type database: str - :param table: The name of the table in the Azure SQL database. Required on PUT - (CreateOrReplace) requests. - :type table: str - :param user: The user name that will be used to connect to the Azure SQL database. Required on - PUT (CreateOrReplace) requests. - :type user: str - :param password: The password that will be used to connect to the Azure SQL database. Required - on PUT (CreateOrReplace) requests. 
- :type password: str - """ - - _attribute_map = { - 'server': {'key': 'server', 'type': 'str'}, - 'database': {'key': 'database', 'type': 'str'}, - 'table': {'key': 'table', 'type': 'str'}, - 'user': {'key': 'user', 'type': 'str'}, - 'password': {'key': 'password', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureSynapseOutputDataSourceProperties, self).__init__(**kwargs) - - -class AzureTableOutputDataSource(OutputDataSource): - """Describes an Azure Table output data source. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of data source output will be written to. Required on - PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param account_name: The name of the Azure Storage account. Required on PUT (CreateOrReplace) - requests. - :type account_name: str - :param account_key: The account key for the Azure Storage account. Required on PUT - (CreateOrReplace) requests. - :type account_key: str - :param table: The name of the Azure Table. Required on PUT (CreateOrReplace) requests. - :type table: str - :param partition_key: This element indicates the name of a column from the SELECT statement in - the query that will be used as the partition key for the Azure Table. Required on PUT - (CreateOrReplace) requests. - :type partition_key: str - :param row_key: This element indicates the name of a column from the SELECT statement in the - query that will be used as the row key for the Azure Table. Required on PUT (CreateOrReplace) - requests. - :type row_key: str - :param columns_to_remove: If specified, each item in the array is the name of a column to - remove (if present) from output event entities. - :type columns_to_remove: list[str] - :param batch_size: The number of rows to write to the Azure Table at a time. - :type batch_size: int - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'account_name': {'key': 'properties.accountName', 'type': 'str'}, - 'account_key': {'key': 'properties.accountKey', 'type': 'str'}, - 'table': {'key': 'properties.table', 'type': 'str'}, - 'partition_key': {'key': 'properties.partitionKey', 'type': 'str'}, - 'row_key': {'key': 'properties.rowKey', 'type': 'str'}, - 'columns_to_remove': {'key': 'properties.columnsToRemove', 'type': '[str]'}, - 'batch_size': {'key': 'properties.batchSize', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureTableOutputDataSource, self).__init__(**kwargs) - self.type = 'Microsoft.Storage/Table' # type: str - self.account_name = kwargs.get('account_name', None) - self.account_key = kwargs.get('account_key', None) - self.table = kwargs.get('table', None) - self.partition_key = kwargs.get('partition_key', None) - self.row_key = kwargs.get('row_key', None) - self.columns_to_remove = kwargs.get('columns_to_remove', None) - self.batch_size = kwargs.get('batch_size', None) - - -class BlobDataSourceProperties(msrest.serialization.Model): - """The properties that are associated with a blob data source. - - :param storage_accounts: A list of one or more Azure Storage accounts. Required on PUT - (CreateOrReplace) requests. - :type storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] - :param container: The name of a container within the associated Storage account. This container - contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) - requests. 
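
# partition_key and row_key above name columns of the job's SELECT output. A hedged sketch with
# placeholder values:
from azure.mgmt.streamanalytics.models import AzureTableOutputDataSource

table_out = AzureTableOutputDataSource(
    account_name="mystorageaccount",  # placeholder
    account_key="<account-key>",      # placeholder secret
    table="DeviceReadings",
    partition_key="DeviceId",         # column from the SELECT statement
    row_key="EventEnqueuedUtcTime",   # column from the SELECT statement
    columns_to_remove=["DeviceId"],   # already captured by the partition key
    batch_size=100,
)
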
- :type container: str - :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern - against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. - :type path_pattern: str - :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this - property is used as the date format instead. - :type date_format: str - :param time_format: The time format. Wherever {time} appears in pathPattern, the value of this - property is used as the time format instead. - :type time_format: str - """ - - _attribute_map = { - 'storage_accounts': {'key': 'storageAccounts', 'type': '[StorageAccount]'}, - 'container': {'key': 'container', 'type': 'str'}, - 'path_pattern': {'key': 'pathPattern', 'type': 'str'}, - 'date_format': {'key': 'dateFormat', 'type': 'str'}, - 'time_format': {'key': 'timeFormat', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(BlobDataSourceProperties, self).__init__(**kwargs) - self.storage_accounts = kwargs.get('storage_accounts', None) - self.container = kwargs.get('container', None) - self.path_pattern = kwargs.get('path_pattern', None) - self.date_format = kwargs.get('date_format', None) - self.time_format = kwargs.get('time_format', None) - - -class BlobOutputDataSource(OutputDataSource): - """Describes a blob output data source. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of data source output will be written to. Required on - PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param storage_accounts: A list of one or more Azure Storage accounts. Required on PUT - (CreateOrReplace) requests. - :type storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] - :param container: The name of a container within the associated Storage account. This container - contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) - requests. - :type container: str - :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern - against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. - :type path_pattern: str - :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this - property is used as the date format instead. - :type date_format: str - :param time_format: The time format. Wherever {time} appears in pathPattern, the value of this - property is used as the time format instead. - :type time_format: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", - "ConnectionString". 
- :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'storage_accounts': {'key': 'properties.storageAccounts', 'type': '[StorageAccount]'}, - 'container': {'key': 'properties.container', 'type': 'str'}, - 'path_pattern': {'key': 'properties.pathPattern', 'type': 'str'}, - 'date_format': {'key': 'properties.dateFormat', 'type': 'str'}, - 'time_format': {'key': 'properties.timeFormat', 'type': 'str'}, - 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(BlobOutputDataSource, self).__init__(**kwargs) - self.type = 'Microsoft.Storage/Blob' # type: str - self.storage_accounts = kwargs.get('storage_accounts', None) - self.container = kwargs.get('container', None) - self.path_pattern = kwargs.get('path_pattern', None) - self.date_format = kwargs.get('date_format', None) - self.time_format = kwargs.get('time_format', None) - self.authentication_mode = kwargs.get('authentication_mode', None) - - -class BlobOutputDataSourceProperties(BlobDataSourceProperties): - """The properties that are associated with a blob output. - - :param storage_accounts: A list of one or more Azure Storage accounts. Required on PUT - (CreateOrReplace) requests. - :type storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] - :param container: The name of a container within the associated Storage account. This container - contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) - requests. - :type container: str - :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern - against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. - :type path_pattern: str - :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this - property is used as the date format instead. - :type date_format: str - :param time_format: The time format. Wherever {time} appears in pathPattern, the value of this - property is used as the time format instead. - :type time_format: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", - "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode - """ - - _attribute_map = { - 'storage_accounts': {'key': 'storageAccounts', 'type': '[StorageAccount]'}, - 'container': {'key': 'container', 'type': 'str'}, - 'path_pattern': {'key': 'pathPattern', 'type': 'str'}, - 'date_format': {'key': 'dateFormat', 'type': 'str'}, - 'time_format': {'key': 'timeFormat', 'type': 'str'}, - 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(BlobOutputDataSourceProperties, self).__init__(**kwargs) - self.authentication_mode = kwargs.get('authentication_mode', None) - - -class BlobReferenceInputDataSource(ReferenceInputDataSource): - """Describes a blob input data source that contains reference data. - - All required parameters must be populated in order to send to Azure. 
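
# Combining the blob output properties: path_pattern may embed {date}/{time}, expanded with
# date_format/time_format. A hedged sketch; StorageAccount is assumed to accept
# account_name/account_key kwargs, matching how it is referenced throughout this module.
from azure.mgmt.streamanalytics.models import BlobOutputDataSource, StorageAccount

blob_out = BlobOutputDataSource(
    storage_accounts=[StorageAccount(account_name="mystorage", account_key="<key>")],
    container="output",
    path_pattern="cluster1/{date}/{time}",  # tokens expanded per the two formats below
    date_format="yyyy/MM/dd",
    time_format="HH",
    authentication_mode="Msi",
)
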
- - :param type: Required. Indicates the type of input data source containing reference data. - Required on PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param storage_accounts: A list of one or more Azure Storage accounts. Required on PUT - (CreateOrReplace) requests. - :type storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] - :param container: The name of a container within the associated Storage account. This container - contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) - requests. - :type container: str - :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern - against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. - :type path_pattern: str - :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this - property is used as the date format instead. - :type date_format: str - :param time_format: The time format. Wherever {time} appears in pathPattern, the value of this - property is used as the time format instead. - :type time_format: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'storage_accounts': {'key': 'properties.storageAccounts', 'type': '[StorageAccount]'}, - 'container': {'key': 'properties.container', 'type': 'str'}, - 'path_pattern': {'key': 'properties.pathPattern', 'type': 'str'}, - 'date_format': {'key': 'properties.dateFormat', 'type': 'str'}, - 'time_format': {'key': 'properties.timeFormat', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(BlobReferenceInputDataSource, self).__init__(**kwargs) - self.type = 'Microsoft.Storage/Blob' # type: str - self.storage_accounts = kwargs.get('storage_accounts', None) - self.container = kwargs.get('container', None) - self.path_pattern = kwargs.get('path_pattern', None) - self.date_format = kwargs.get('date_format', None) - self.time_format = kwargs.get('time_format', None) - - -class BlobReferenceInputDataSourceProperties(BlobDataSourceProperties): - """The properties that are associated with a blob input containing reference data. - - :param storage_accounts: A list of one or more Azure Storage accounts. Required on PUT - (CreateOrReplace) requests. - :type storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] - :param container: The name of a container within the associated Storage account. This container - contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) - requests. - :type container: str - :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern - against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. - :type path_pattern: str - :param date_format: The date format. 
Wherever {date} appears in pathPattern, the value of this - property is used as the date format instead. - :type date_format: str - :param time_format: The time format. Wherever {time} appears in pathPattern, the value of this - property is used as the time format instead. - :type time_format: str - """ - - _attribute_map = { - 'storage_accounts': {'key': 'storageAccounts', 'type': '[StorageAccount]'}, - 'container': {'key': 'container', 'type': 'str'}, - 'path_pattern': {'key': 'pathPattern', 'type': 'str'}, - 'date_format': {'key': 'dateFormat', 'type': 'str'}, - 'time_format': {'key': 'timeFormat', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(BlobReferenceInputDataSourceProperties, self).__init__(**kwargs) - - -class StreamInputDataSource(msrest.serialization.Model): - """Describes an input data source that contains stream data. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: IoTHubStreamInputDataSource, EventHubV2StreamInputDataSource, EventHubStreamInputDataSource, BlobStreamInputDataSource. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of input data source containing stream data. Required - on PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'Microsoft.Devices/IotHubs': 'IoTHubStreamInputDataSource', 'Microsoft.EventHub/EventHub': 'EventHubV2StreamInputDataSource', 'Microsoft.ServiceBus/EventHub': 'EventHubStreamInputDataSource', 'Microsoft.Storage/Blob': 'BlobStreamInputDataSource'} - } - - def __init__( - self, - **kwargs - ): - super(StreamInputDataSource, self).__init__(**kwargs) - self.type = None # type: Optional[str] - - -class BlobStreamInputDataSource(StreamInputDataSource): - """Describes a blob input data source that contains stream data. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of input data source containing stream data. Required - on PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param storage_accounts: A list of one or more Azure Storage accounts. Required on PUT - (CreateOrReplace) requests. - :type storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] - :param container: The name of a container within the associated Storage account. This container - contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) - requests. - :type container: str - :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern - against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. - :type path_pattern: str - :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this - property is used as the date format instead. - :type date_format: str - :param time_format: The time format. Wherever {time} appears in pathPattern, the value of this - property is used as the time format instead. 
- :type time_format: str - :param source_partition_count: The partition count of the blob input data source. Range 1 - - 256. - :type source_partition_count: int - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'storage_accounts': {'key': 'properties.storageAccounts', 'type': '[StorageAccount]'}, - 'container': {'key': 'properties.container', 'type': 'str'}, - 'path_pattern': {'key': 'properties.pathPattern', 'type': 'str'}, - 'date_format': {'key': 'properties.dateFormat', 'type': 'str'}, - 'time_format': {'key': 'properties.timeFormat', 'type': 'str'}, - 'source_partition_count': {'key': 'properties.sourcePartitionCount', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - super(BlobStreamInputDataSource, self).__init__(**kwargs) - self.type = 'Microsoft.Storage/Blob' # type: str - self.storage_accounts = kwargs.get('storage_accounts', None) - self.container = kwargs.get('container', None) - self.path_pattern = kwargs.get('path_pattern', None) - self.date_format = kwargs.get('date_format', None) - self.time_format = kwargs.get('time_format', None) - self.source_partition_count = kwargs.get('source_partition_count', None) - - -class BlobStreamInputDataSourceProperties(BlobDataSourceProperties): - """The properties that are associated with a blob input containing stream data. - - :param storage_accounts: A list of one or more Azure Storage accounts. Required on PUT - (CreateOrReplace) requests. - :type storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] - :param container: The name of a container within the associated Storage account. This container - contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) - requests. - :type container: str - :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern - against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. - :type path_pattern: str - :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this - property is used as the date format instead. - :type date_format: str - :param time_format: The time format. Wherever {time} appears in pathPattern, the value of this - property is used as the time format instead. - :type time_format: str - :param source_partition_count: The partition count of the blob input data source. Range 1 - - 256. - :type source_partition_count: int - """ - - _attribute_map = { - 'storage_accounts': {'key': 'storageAccounts', 'type': '[StorageAccount]'}, - 'container': {'key': 'container', 'type': 'str'}, - 'path_pattern': {'key': 'pathPattern', 'type': 'str'}, - 'date_format': {'key': 'dateFormat', 'type': 'str'}, - 'time_format': {'key': 'timeFormat', 'type': 'str'}, - 'source_partition_count': {'key': 'sourcePartitionCount', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - super(BlobStreamInputDataSourceProperties, self).__init__(**kwargs) - self.source_partition_count = kwargs.get('source_partition_count', None) - - -class Resource(msrest.serialization.Model): - """Resource. - - Variables are only populated by the server, and will be ignored when sending a request. 
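
# Stream (rather than reference) blob inputs take the same blob properties plus
# source_partition_count (range 1 - 256). A hedged sketch with placeholder values:
from azure.mgmt.streamanalytics.models import BlobStreamInputDataSource, StorageAccount

blob_in = BlobStreamInputDataSource(
    storage_accounts=[StorageAccount(account_name="mystorage", account_key="<key>")],
    container="input",
    path_pattern="telemetry/{date}/{time}",
    date_format="yyyy/MM/dd",
    time_format="HH",
    source_partition_count=16,  # input parallelism hint, 1 - 256
)
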
-
-    :ivar id: Fully qualified resource Id for the resource. Ex -
-     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
-    :vartype id: str
-    :ivar name: The name of the resource.
-    :vartype name: str
-    :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or
-     Microsoft.Storage/storageAccounts.
-    :vartype type: str
-    """
-
-    _validation = {
-        'id': {'readonly': True},
-        'name': {'readonly': True},
-        'type': {'readonly': True},
-    }
-
-    _attribute_map = {
-        'id': {'key': 'id', 'type': 'str'},
-        'name': {'key': 'name', 'type': 'str'},
-        'type': {'key': 'type', 'type': 'str'},
-    }
-
-    def __init__(
-        self,
-        **kwargs
-    ):
-        super(Resource, self).__init__(**kwargs)
-        self.id = None
-        self.name = None
-        self.type = None
-
-
-class TrackedResource(Resource):
-    """The resource model definition for an ARM tracked top level resource.
-
-    Variables are only populated by the server, and will be ignored when sending a request.
-
-    :ivar id: Fully qualified resource Id for the resource. Ex -
-     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
-    :vartype id: str
-    :ivar name: The name of the resource.
-    :vartype name: str
-    :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or
-     Microsoft.Storage/storageAccounts.
-    :vartype type: str
-    :param tags: A set of tags. Resource tags.
-    :type tags: dict[str, str]
-    :param location: The geo-location where the resource lives.
-    :type location: str
-    """
-
-    _validation = {
-        'id': {'readonly': True},
-        'name': {'readonly': True},
-        'type': {'readonly': True},
-    }
-
-    _attribute_map = {
-        'id': {'key': 'id', 'type': 'str'},
-        'name': {'key': 'name', 'type': 'str'},
-        'type': {'key': 'type', 'type': 'str'},
-        'tags': {'key': 'tags', 'type': '{str}'},
-        'location': {'key': 'location', 'type': 'str'},
-    }
-
-    def __init__(
-        self,
-        **kwargs
-    ):
-        super(TrackedResource, self).__init__(**kwargs)
-        self.tags = kwargs.get('tags', None)
-        self.location = kwargs.get('location', None)
-
-
-class Cluster(TrackedResource):
-    """A Stream Analytics Cluster object.
-
-    Variables are only populated by the server, and will be ignored when sending a request.
-
-    :ivar id: Fully qualified resource Id for the resource. Ex -
-     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
-    :vartype id: str
-    :ivar name: The name of the resource.
-    :vartype name: str
-    :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or
-     Microsoft.Storage/storageAccounts.
-    :vartype type: str
-    :param tags: A set of tags. Resource tags.
-    :type tags: dict[str, str]
-    :param location: The geo-location where the resource lives.
-    :type location: str
-    :param sku: The SKU of the cluster. This determines the size/capacity of the cluster. Required
-     on PUT (CreateOrUpdate) requests.
-    :type sku: ~stream_analytics_management_client.models.ClusterSku
-    :ivar etag: The current entity tag for the cluster. This is an opaque string. You can use it to
-     detect whether the resource has changed between requests. You can also use it in the If-Match
-     or If-None-Match headers for write operations for optimistic concurrency.
-    :vartype etag: str
-    :param properties: The properties associated with a Stream Analytics cluster.
-    :type properties: ~stream_analytics_management_client.models.ClusterProperties
-    """
-
-    _validation = {
-        'id': {'readonly': True},
-        'name': {'readonly': True},
-        'type': {'readonly': True},
-        'etag': {'readonly': True},
-    }
-
-    _attribute_map = {
-        'id': {'key': 'id', 'type': 'str'},
-        'name': {'key': 'name', 'type': 'str'},
-        'type': {'key': 'type', 'type': 'str'},
-        'tags': {'key': 'tags', 'type': '{str}'},
-        'location': {'key': 'location', 'type': 'str'},
-        'sku': {'key': 'sku', 'type': 'ClusterSku'},
-        'etag': {'key': 'etag', 'type': 'str'},
-        'properties': {'key': 'properties', 'type': 'ClusterProperties'},
-    }
-
-    def __init__(
-        self,
-        **kwargs
-    ):
-        super(Cluster, self).__init__(**kwargs)
-        self.sku = kwargs.get('sku', None)
-        self.etag = None
-        self.properties = kwargs.get('properties', None)
-
-
-class ClusterInfo(msrest.serialization.Model):
-    """The properties associated with a Stream Analytics cluster.
-
-    :param id: The resource id of the cluster.
-    :type id: str
-    """
-
-    _attribute_map = {
-        'id': {'key': 'id', 'type': 'str'},
-    }
-
-    def __init__(
-        self,
-        **kwargs
-    ):
-        super(ClusterInfo, self).__init__(**kwargs)
-        self.id = kwargs.get('id', None)
-
-
-class ClusterJob(msrest.serialization.Model):
-    """A streaming job.
-
-    Variables are only populated by the server, and will be ignored when sending a request.
-
-    :ivar id: Resource ID of the streaming job.
-    :vartype id: str
-    :ivar streaming_units: The number of streaming units that are used by the streaming job.
-    :vartype streaming_units: int
-    :ivar job_state: The current execution state of the streaming job. Possible values include:
-     "Created", "Starting", "Running", "Stopping", "Stopped", "Deleting", "Failed", "Degraded",
-     "Restarting", "Scaling".
-    :vartype job_state: str or ~stream_analytics_management_client.models.JobState
-    """
-
-    _validation = {
-        'id': {'readonly': True},
-        'streaming_units': {'readonly': True},
-        'job_state': {'readonly': True},
-    }
-
-    _attribute_map = {
-        'id': {'key': 'id', 'type': 'str'},
-        'streaming_units': {'key': 'streamingUnits', 'type': 'int'},
-        'job_state': {'key': 'jobState', 'type': 'str'},
-    }
-
-    def __init__(
-        self,
-        **kwargs
-    ):
-        super(ClusterJob, self).__init__(**kwargs)
-        self.id = None
-        self.streaming_units = None
-        self.job_state = None
-
-
-class ClusterJobListResult(msrest.serialization.Model):
-    """A list of streaming jobs. Populated by a List operation.
-
-    Variables are only populated by the server, and will be ignored when sending a request.
-
-    :ivar value: A list of streaming jobs.
-    :vartype value: list[~stream_analytics_management_client.models.ClusterJob]
-    :ivar next_link: The URL to fetch the next set of streaming jobs.
-    :vartype next_link: str
-    """
-
-    _validation = {
-        'value': {'readonly': True},
-        'next_link': {'readonly': True},
-    }
-
-    _attribute_map = {
-        'value': {'key': 'value', 'type': '[ClusterJob]'},
-        'next_link': {'key': 'nextLink', 'type': 'str'},
-    }
-
-    def __init__(
-        self,
-        **kwargs
-    ):
-        super(ClusterJobListResult, self).__init__(**kwargs)
-        self.value = None
-        self.next_link = None
-
-
-class ClusterListResult(msrest.serialization.Model):
-    """A list of clusters populated by a 'list' operation.
-
-    Variables are only populated by the server, and will be ignored when sending a request.
-
-    :ivar value: A list of clusters.
-    :vartype value: list[~stream_analytics_management_client.models.Cluster]
-    :ivar next_link: The URL to fetch the next set of clusters.
- :vartype next_link: str - """ - - _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[Cluster]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ClusterListResult, self).__init__(**kwargs) - self.value = None - self.next_link = None - - -class ClusterProperties(msrest.serialization.Model): - """The properties associated with a Stream Analytics cluster. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar created_date: The date this cluster was created. - :vartype created_date: ~datetime.datetime - :ivar cluster_id: Unique identifier for the cluster. - :vartype cluster_id: str - :ivar provisioning_state: The status of the cluster provisioning. The three terminal states - are: Succeeded, Failed and Canceled. Possible values include: "Succeeded", "Failed", - "Canceled", "InProgress". - :vartype provisioning_state: str or - ~stream_analytics_management_client.models.ClusterProvisioningState - :ivar capacity_allocated: Represents the number of streaming units currently being used on the - cluster. - :vartype capacity_allocated: int - :ivar capacity_assigned: Represents the sum of the SUs of all streaming jobs associated with - the cluster. If all of the jobs were running, this would be the capacity allocated. - :vartype capacity_assigned: int - """ - - _validation = { - 'created_date': {'readonly': True}, - 'cluster_id': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - 'capacity_allocated': {'readonly': True}, - 'capacity_assigned': {'readonly': True}, - } - - _attribute_map = { - 'created_date': {'key': 'createdDate', 'type': 'iso-8601'}, - 'cluster_id': {'key': 'clusterId', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'capacity_allocated': {'key': 'capacityAllocated', 'type': 'int'}, - 'capacity_assigned': {'key': 'capacityAssigned', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - super(ClusterProperties, self).__init__(**kwargs) - self.created_date = None - self.cluster_id = None - self.provisioning_state = None - self.capacity_allocated = None - self.capacity_assigned = None - - -class ClusterSku(msrest.serialization.Model): - """The SKU of the cluster. This determines the size/capacity of the cluster. Required on PUT (CreateOrUpdate) requests. - - :param name: Specifies the SKU name of the cluster. Required on PUT (CreateOrUpdate) requests. - Possible values include: "Default". - :type name: str or ~stream_analytics_management_client.models.ClusterSkuName - :param capacity: Denotes the number of streaming units the cluster can support. Valid values - for this property are multiples of 36 with a minimum value of 36 and maximum value of 216. - Required on PUT (CreateOrUpdate) requests. - :type capacity: int - """ - - _validation = { - 'capacity': {'maximum': 216, 'minimum': 36}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'capacity': {'key': 'capacity', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - super(ClusterSku, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.capacity = kwargs.get('capacity', None) - - -class Compression(msrest.serialization.Model): - """Describes how input data is compressed. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. 
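
A minimal construction sketch for the cluster models above (values are illustrative; per the ClusterSku docstring, capacity must be a multiple of 36 between 36 and 216, and "Default" is the documented SKU name):

    from azure.mgmt.streamanalytics.models import Cluster, ClusterSku

    sku = ClusterSku(name="Default", capacity=36)      # smallest valid capacity
    cluster = Cluster(location="westus", tags={"env": "dev"}, sku=sku)
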
- :type type: str
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'type': {'key': 'type', 'type': 'str'},
- }
-
- def __init__(
- self,
- **kwargs
- ):
- super(Compression, self).__init__(**kwargs)
- self.type = kwargs['type']
-
-
- class CSharpFunctionBinding(FunctionBinding):
- """The binding to a CSharp function.
-
- All required parameters must be populated in order to send to Azure.
-
- :param type: Required. Indicates the function binding type. Constant filled by server.
- :type type: str
- :param script: The C# code containing a single function definition.
- :type script: str
- :param dll_path: The path to the DLL that contains the function definition.
- :type dll_path: str
- :param class_property: The name of the class that contains the function definition.
- :type class_property: str
- :param method: The name of the method that implements the function.
- :type method: str
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'type': {'key': 'type', 'type': 'str'},
- 'script': {'key': 'properties.script', 'type': 'str'},
- 'dll_path': {'key': 'properties.dllPath', 'type': 'str'},
- 'class_property': {'key': 'properties.class', 'type': 'str'},
- 'method': {'key': 'properties.method', 'type': 'str'},
- }
-
- def __init__(
- self,
- **kwargs
- ):
- super(CSharpFunctionBinding, self).__init__(**kwargs)
- self.type = 'Microsoft.StreamAnalytics/CLRUdf' # type: str
- self.script = kwargs.get('script', None)
- self.dll_path = kwargs.get('dll_path', None)
- self.class_property = kwargs.get('class_property', None)
- self.method = kwargs.get('method', None)
-
-
- class CSharpFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters):
- """The parameters needed to retrieve the default function definition for a CSharp function.
-
- Variables are only populated by the server, and will be ignored when sending a request.
-
- All required parameters must be populated in order to send to Azure.
-
- :param binding_type: Required. Indicates the function binding type. Constant filled by server.
- :type binding_type: str
- :param script: The CSharp code containing a single function definition.
- :type script: str
- :ivar udf_type: The function type. Default value: "Scalar".
- :vartype udf_type: str
- """
-
- _validation = {
- 'binding_type': {'required': True},
- 'udf_type': {'constant': True},
- }
-
- _attribute_map = {
- 'binding_type': {'key': 'bindingType', 'type': 'str'},
- 'script': {'key': 'bindingRetrievalProperties.script', 'type': 'str'},
- 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'str'},
- }
-
- udf_type = "Scalar"
-
- def __init__(
- self,
- **kwargs
- ):
- super(CSharpFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs)
- self.binding_type = 'Microsoft.StreamAnalytics/CLRUdf' # type: str
- self.script = kwargs.get('script', None)
-
-
- class CsvSerialization(Serialization):
- """Describes how data from an input is serialized or how data is serialized when written to an output in CSV format.
-
- All required parameters must be populated in order to send to Azure.
-
- :param type: Required. Indicates the type of serialization that the input or output uses.
- Required on PUT (CreateOrReplace) requests. Constant filled by server. Possible values include:
- "Csv", "Avro", "Json", "CustomClr", "Parquet".
- :type type: str or ~stream_analytics_management_client.models.EventSerializationType - :param field_delimiter: Specifies the delimiter that will be used to separate comma-separated - value (CSV) records. See https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream- - analytics-input or https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics- - output for a list of supported values. Required on PUT (CreateOrReplace) requests. - :type field_delimiter: str - :param encoding: Specifies the encoding of the incoming data in the case of input and the - encoding of outgoing data in the case of output. Required on PUT (CreateOrReplace) requests. - Possible values include: "UTF8". - :type encoding: str or ~stream_analytics_management_client.models.Encoding - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'field_delimiter': {'key': 'properties.fieldDelimiter', 'type': 'str'}, - 'encoding': {'key': 'properties.encoding', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(CsvSerialization, self).__init__(**kwargs) - self.type = 'Csv' # type: str - self.field_delimiter = kwargs.get('field_delimiter', None) - self.encoding = kwargs.get('encoding', None) - - -class CustomClrSerialization(Serialization): - """Describes how data from an input is serialized or how data is serialized when written to an output in custom format. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of serialization that the input or output uses. - Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: - "Csv", "Avro", "Json", "CustomClr", "Parquet". - :type type: str or ~stream_analytics_management_client.models.EventSerializationType - :param serialization_dll_path: The serialization library path. - :type serialization_dll_path: str - :param serialization_class_name: The serialization class name. - :type serialization_class_name: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'serialization_dll_path': {'key': 'properties.serializationDllPath', 'type': 'str'}, - 'serialization_class_name': {'key': 'properties.serializationClassName', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(CustomClrSerialization, self).__init__(**kwargs) - self.type = 'CustomClr' # type: str - self.serialization_dll_path = kwargs.get('serialization_dll_path', None) - self.serialization_class_name = kwargs.get('serialization_class_name', None) - - -class DiagnosticCondition(msrest.serialization.Model): - """Condition applicable to the resource, or to the job overall, that warrant customer attention. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar since: The UTC timestamp of when the condition started. Customers should be able to find - a corresponding event in the ops log around this time. - :vartype since: str - :ivar code: The opaque diagnostic code. - :vartype code: str - :ivar message: The human-readable message describing the condition in detail. Localized in the - Accept-Language of the client request. 
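
A short sketch of the serialization models above, using the kwargs-based constructors defined in this module ("UTF8" is the only documented encoding; the DLL path and class name are hypothetical):

    from azure.mgmt.streamanalytics.models import CsvSerialization, CustomClrSerialization

    csv = CsvSerialization(field_delimiter=",", encoding="UTF8")
    clr = CustomClrSerialization(
        serialization_dll_path="MySerializers.dll",          # hypothetical artifact
        serialization_class_name="MyCompany.CustomSerializer",
    )
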
- :vartype message: str - """ - - _validation = { - 'since': {'readonly': True}, - 'code': {'readonly': True}, - 'message': {'readonly': True}, - } - - _attribute_map = { - 'since': {'key': 'since', 'type': 'str'}, - 'code': {'key': 'code', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(DiagnosticCondition, self).__init__(**kwargs) - self.since = None - self.code = None - self.message = None - - -class Diagnostics(msrest.serialization.Model): - """Describes conditions applicable to the Input, Output, or the job overall, that warrant customer attention. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar conditions: A collection of zero or more conditions applicable to the resource, or to the - job overall, that warrant customer attention. - :vartype conditions: list[~stream_analytics_management_client.models.DiagnosticCondition] - """ - - _validation = { - 'conditions': {'readonly': True}, - } - - _attribute_map = { - 'conditions': {'key': 'conditions', 'type': '[DiagnosticCondition]'}, - } - - def __init__( - self, - **kwargs - ): - super(Diagnostics, self).__init__(**kwargs) - self.conditions = None - - -class DocumentDbOutputDataSource(OutputDataSource): - """Describes a DocumentDB output data source. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of data source output will be written to. Required on - PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param account_id: The DocumentDB account name or ID. Required on PUT (CreateOrReplace) - requests. - :type account_id: str - :param account_key: The account key for the DocumentDB account. Required on PUT - (CreateOrReplace) requests. - :type account_key: str - :param database: The name of the DocumentDB database. Required on PUT (CreateOrReplace) - requests. - :type database: str - :param collection_name_pattern: The collection name pattern for the collections to be used. The - collection name format can be constructed using the optional {partition} token, where - partitions start from 0. See the DocumentDB section of https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for more information. Required on PUT - (CreateOrReplace) requests. - :type collection_name_pattern: str - :param partition_key: The name of the field in output events used to specify the key for - partitioning output across collections. If 'collectionNamePattern' contains the {partition} - token, this property is required to be specified. - :type partition_key: str - :param document_id: The name of the field in output events used to specify the primary key - which insert or update operations are based on. 
- :type document_id: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'account_id': {'key': 'properties.accountId', 'type': 'str'}, - 'account_key': {'key': 'properties.accountKey', 'type': 'str'}, - 'database': {'key': 'properties.database', 'type': 'str'}, - 'collection_name_pattern': {'key': 'properties.collectionNamePattern', 'type': 'str'}, - 'partition_key': {'key': 'properties.partitionKey', 'type': 'str'}, - 'document_id': {'key': 'properties.documentId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(DocumentDbOutputDataSource, self).__init__(**kwargs) - self.type = 'Microsoft.Storage/DocumentDB' # type: str - self.account_id = kwargs.get('account_id', None) - self.account_key = kwargs.get('account_key', None) - self.database = kwargs.get('database', None) - self.collection_name_pattern = kwargs.get('collection_name_pattern', None) - self.partition_key = kwargs.get('partition_key', None) - self.document_id = kwargs.get('document_id', None) - - -class Error(msrest.serialization.Model): - """Common error representation. - - :param error: Error definition properties. - :type error: ~stream_analytics_management_client.models.ErrorAutoGenerated - """ - - _attribute_map = { - 'error': {'key': 'error', 'type': 'ErrorAutoGenerated'}, - } - - def __init__( - self, - **kwargs - ): - super(Error, self).__init__(**kwargs) - self.error = kwargs.get('error', None) - - -class ErrorAutoGenerated(msrest.serialization.Model): - """Error definition properties. - - :param code: Error code. - :type code: str - :param message: Error message. - :type message: str - :param target: Error target. - :type target: str - :param details: Error details. - :type details: list[~stream_analytics_management_client.models.ErrorDetails] - """ - - _attribute_map = { - 'code': {'key': 'code', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - 'target': {'key': 'target', 'type': 'str'}, - 'details': {'key': 'details', 'type': '[ErrorDetails]'}, - } - - def __init__( - self, - **kwargs - ): - super(ErrorAutoGenerated, self).__init__(**kwargs) - self.code = kwargs.get('code', None) - self.message = kwargs.get('message', None) - self.target = kwargs.get('target', None) - self.details = kwargs.get('details', None) - - -class ErrorDetails(msrest.serialization.Model): - """Common error details representation. - - :param code: Error code. - :type code: str - :param target: Error target. - :type target: str - :param message: Error message. - :type message: str - """ - - _attribute_map = { - 'code': {'key': 'code', 'type': 'str'}, - 'target': {'key': 'target', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ErrorDetails, self).__init__(**kwargs) - self.code = kwargs.get('code', None) - self.target = kwargs.get('target', None) - self.message = kwargs.get('message', None) - - -class ErrorResponse(msrest.serialization.Model): - """Describes the error that occurred. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar code: Error code associated with the error that occurred. - :vartype code: str - :ivar message: Describes the error in detail. 
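
A construction sketch for the DocumentDB output above (all values are hypothetical; the {partition} token in the collection name pattern is the one described in the docstring):

    from azure.mgmt.streamanalytics.models import DocumentDbOutputDataSource

    docdb = DocumentDbOutputDataSource(
        account_id="my-docdb-account",
        account_key="<account-key>",
        database="mydb",
        collection_name_pattern="collection{partition}",
        partition_key="deviceId",   # required because the pattern uses {partition}
        document_id="eventId",
    )
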
- :vartype message: str - """ - - _validation = { - 'code': {'readonly': True}, - 'message': {'readonly': True}, - } - - _attribute_map = { - 'code': {'key': 'code', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ErrorResponse, self).__init__(**kwargs) - self.code = None - self.message = None - - -class ServiceBusDataSourceProperties(msrest.serialization.Model): - """The common properties that are associated with Service Bus data sources (Queues, Topics, Event Hubs, etc.). - - :param service_bus_namespace: The namespace that is associated with the desired Event Hub, - Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type service_bus_namespace: str - :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus - Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_name: str - :param shared_access_policy_key: The shared access policy key for the specified shared access - policy. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_key: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", - "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode - """ - - _attribute_map = { - 'service_bus_namespace': {'key': 'serviceBusNamespace', 'type': 'str'}, - 'shared_access_policy_name': {'key': 'sharedAccessPolicyName', 'type': 'str'}, - 'shared_access_policy_key': {'key': 'sharedAccessPolicyKey', 'type': 'str'}, - 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ServiceBusDataSourceProperties, self).__init__(**kwargs) - self.service_bus_namespace = kwargs.get('service_bus_namespace', None) - self.shared_access_policy_name = kwargs.get('shared_access_policy_name', None) - self.shared_access_policy_key = kwargs.get('shared_access_policy_key', None) - self.authentication_mode = kwargs.get('authentication_mode', None) - - -class EventHubDataSourceProperties(ServiceBusDataSourceProperties): - """The common properties that are associated with Event Hub data sources. - - :param service_bus_namespace: The namespace that is associated with the desired Event Hub, - Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type service_bus_namespace: str - :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus - Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_name: str - :param shared_access_policy_key: The shared access policy key for the specified shared access - policy. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_key: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", - "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode - :param event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. 
- :type event_hub_name: str - """ - - _attribute_map = { - 'service_bus_namespace': {'key': 'serviceBusNamespace', 'type': 'str'}, - 'shared_access_policy_name': {'key': 'sharedAccessPolicyName', 'type': 'str'}, - 'shared_access_policy_key': {'key': 'sharedAccessPolicyKey', 'type': 'str'}, - 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, - 'event_hub_name': {'key': 'eventHubName', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(EventHubDataSourceProperties, self).__init__(**kwargs) - self.event_hub_name = kwargs.get('event_hub_name', None) - - -class EventHubOutputDataSource(OutputDataSource): - """Describes an Event Hub output data source. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of data source output will be written to. Required on - PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param service_bus_namespace: The namespace that is associated with the desired Event Hub, - Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type service_bus_namespace: str - :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus - Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_name: str - :param shared_access_policy_key: The shared access policy key for the specified shared access - policy. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_key: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", - "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode - :param event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. - :type event_hub_name: str - :param partition_key: The key/column that is used to determine to which partition to send event - data. 
- :type partition_key: str - :param property_columns: - :type property_columns: list[str] - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'}, - 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'}, - 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'}, - 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, - 'event_hub_name': {'key': 'properties.eventHubName', 'type': 'str'}, - 'partition_key': {'key': 'properties.partitionKey', 'type': 'str'}, - 'property_columns': {'key': 'properties.propertyColumns', 'type': '[str]'}, - } - - def __init__( - self, - **kwargs - ): - super(EventHubOutputDataSource, self).__init__(**kwargs) - self.type = 'Microsoft.ServiceBus/EventHub' # type: str - self.service_bus_namespace = kwargs.get('service_bus_namespace', None) - self.shared_access_policy_name = kwargs.get('shared_access_policy_name', None) - self.shared_access_policy_key = kwargs.get('shared_access_policy_key', None) - self.authentication_mode = kwargs.get('authentication_mode', None) - self.event_hub_name = kwargs.get('event_hub_name', None) - self.partition_key = kwargs.get('partition_key', None) - self.property_columns = kwargs.get('property_columns', None) - - -class EventHubOutputDataSourceProperties(EventHubDataSourceProperties): - """The properties that are associated with an Event Hub output. - - :param service_bus_namespace: The namespace that is associated with the desired Event Hub, - Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type service_bus_namespace: str - :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus - Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_name: str - :param shared_access_policy_key: The shared access policy key for the specified shared access - policy. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_key: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", - "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode - :param event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. - :type event_hub_name: str - :param partition_key: The key/column that is used to determine to which partition to send event - data. 
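
A sketch of an Event Hub output built from the model above (namespace, policy, and hub names are hypothetical):

    from azure.mgmt.streamanalytics.models import EventHubOutputDataSource

    eh_out = EventHubOutputDataSource(
        service_bus_namespace="myns",
        shared_access_policy_name="RootManageSharedAccessKey",
        shared_access_policy_key="<policy-key>",
        event_hub_name="myhub",
        partition_key="deviceId",
    )
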
- :type partition_key: str - :param property_columns: - :type property_columns: list[str] - """ - - _attribute_map = { - 'service_bus_namespace': {'key': 'serviceBusNamespace', 'type': 'str'}, - 'shared_access_policy_name': {'key': 'sharedAccessPolicyName', 'type': 'str'}, - 'shared_access_policy_key': {'key': 'sharedAccessPolicyKey', 'type': 'str'}, - 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, - 'event_hub_name': {'key': 'eventHubName', 'type': 'str'}, - 'partition_key': {'key': 'partitionKey', 'type': 'str'}, - 'property_columns': {'key': 'propertyColumns', 'type': '[str]'}, - } - - def __init__( - self, - **kwargs - ): - super(EventHubOutputDataSourceProperties, self).__init__(**kwargs) - self.partition_key = kwargs.get('partition_key', None) - self.property_columns = kwargs.get('property_columns', None) - - -class EventHubStreamInputDataSource(StreamInputDataSource): - """Describes an Event Hub input data source that contains stream data. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of input data source containing stream data. Required - on PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param service_bus_namespace: The namespace that is associated with the desired Event Hub, - Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type service_bus_namespace: str - :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus - Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_name: str - :param shared_access_policy_key: The shared access policy key for the specified shared access - policy. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_key: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", - "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode - :param event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. - :type event_hub_name: str - :param consumer_group_name: The name of an Event Hub Consumer Group that should be used to read - events from the Event Hub. Specifying distinct consumer group names for multiple inputs allows - each of those inputs to receive the same events from the Event Hub. If not specified, the input - uses the Event Hub’s default consumer group. 
- :type consumer_group_name: str
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'type': {'key': 'type', 'type': 'str'},
- 'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'},
- 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'},
- 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'},
- 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'},
- 'event_hub_name': {'key': 'properties.eventHubName', 'type': 'str'},
- 'consumer_group_name': {'key': 'properties.consumerGroupName', 'type': 'str'},
- }
-
- def __init__(
- self,
- **kwargs
- ):
- super(EventHubStreamInputDataSource, self).__init__(**kwargs)
- self.type = 'Microsoft.ServiceBus/EventHub' # type: str
- self.service_bus_namespace = kwargs.get('service_bus_namespace', None)
- self.shared_access_policy_name = kwargs.get('shared_access_policy_name', None)
- self.shared_access_policy_key = kwargs.get('shared_access_policy_key', None)
- self.authentication_mode = kwargs.get('authentication_mode', None)
- self.event_hub_name = kwargs.get('event_hub_name', None)
- self.consumer_group_name = kwargs.get('consumer_group_name', None)
-
-
- class EventHubStreamInputDataSourceProperties(EventHubDataSourceProperties):
- """The properties that are associated with an Event Hub input containing stream data.
-
- :param service_bus_namespace: The namespace that is associated with the desired Event Hub,
- Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests.
- :type service_bus_namespace: str
- :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus
- Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests.
- :type shared_access_policy_name: str
- :param shared_access_policy_key: The shared access policy key for the specified shared access
- policy. Required on PUT (CreateOrReplace) requests.
- :type shared_access_policy_key: str
- :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken",
- "ConnectionString".
- :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode
- :param event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests.
- :type event_hub_name: str
- :param consumer_group_name: The name of an Event Hub Consumer Group that should be used to read
- events from the Event Hub. Specifying distinct consumer group names for multiple inputs allows
- each of those inputs to receive the same events from the Event Hub. If not specified, the input
- uses the Event Hub’s default consumer group.
- :type consumer_group_name: str
- """
-
- _attribute_map = {
- 'service_bus_namespace': {'key': 'serviceBusNamespace', 'type': 'str'},
- 'shared_access_policy_name': {'key': 'sharedAccessPolicyName', 'type': 'str'},
- 'shared_access_policy_key': {'key': 'sharedAccessPolicyKey', 'type': 'str'},
- 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'},
- 'event_hub_name': {'key': 'eventHubName', 'type': 'str'},
- 'consumer_group_name': {'key': 'consumerGroupName', 'type': 'str'},
- }
-
- def __init__(
- self,
- **kwargs
- ):
- super(EventHubStreamInputDataSourceProperties, self).__init__(**kwargs)
- self.consumer_group_name = kwargs.get('consumer_group_name', None)
-
-
- class EventHubV2OutputDataSource(OutputDataSource):
- """Describes an Event Hub output data source.
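
The stream-input counterpart, sketched against the EventHubStreamInputDataSource model above (hypothetical names; the docstring recommends a distinct consumer group per input):

    from azure.mgmt.streamanalytics.models import EventHubStreamInputDataSource

    eh_in = EventHubStreamInputDataSource(
        service_bus_namespace="myns",
        shared_access_policy_name="RootManageSharedAccessKey",
        shared_access_policy_key="<policy-key>",
        event_hub_name="myhub",
        consumer_group_name="asa-input-1",
    )
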
- - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of data source output will be written to. Required on - PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param service_bus_namespace: The namespace that is associated with the desired Event Hub, - Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type service_bus_namespace: str - :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus - Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_name: str - :param shared_access_policy_key: The shared access policy key for the specified shared access - policy. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_key: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", - "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode - :param event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. - :type event_hub_name: str - :param partition_key: The key/column that is used to determine to which partition to send event - data. - :type partition_key: str - :param property_columns: - :type property_columns: list[str] - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'}, - 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'}, - 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'}, - 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, - 'event_hub_name': {'key': 'properties.eventHubName', 'type': 'str'}, - 'partition_key': {'key': 'properties.partitionKey', 'type': 'str'}, - 'property_columns': {'key': 'properties.propertyColumns', 'type': '[str]'}, - } - - def __init__( - self, - **kwargs - ): - super(EventHubV2OutputDataSource, self).__init__(**kwargs) - self.type = 'Microsoft.EventHub/EventHub' # type: str - self.service_bus_namespace = kwargs.get('service_bus_namespace', None) - self.shared_access_policy_name = kwargs.get('shared_access_policy_name', None) - self.shared_access_policy_key = kwargs.get('shared_access_policy_key', None) - self.authentication_mode = kwargs.get('authentication_mode', None) - self.event_hub_name = kwargs.get('event_hub_name', None) - self.partition_key = kwargs.get('partition_key', None) - self.property_columns = kwargs.get('property_columns', None) - - -class EventHubV2StreamInputDataSource(StreamInputDataSource): - """Describes an Event Hub input data source that contains stream data. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of input data source containing stream data. Required - on PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param service_bus_namespace: The namespace that is associated with the desired Event Hub, - Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type service_bus_namespace: str - :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus - Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. 
- :type shared_access_policy_name: str - :param shared_access_policy_key: The shared access policy key for the specified shared access - policy. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_key: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", - "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode - :param event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. - :type event_hub_name: str - :param consumer_group_name: The name of an Event Hub Consumer Group that should be used to read - events from the Event Hub. Specifying distinct consumer group names for multiple inputs allows - each of those inputs to receive the same events from the Event Hub. If not specified, the input - uses the Event Hub’s default consumer group. - :type consumer_group_name: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'}, - 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'}, - 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'}, - 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, - 'event_hub_name': {'key': 'properties.eventHubName', 'type': 'str'}, - 'consumer_group_name': {'key': 'properties.consumerGroupName', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(EventHubV2StreamInputDataSource, self).__init__(**kwargs) - self.type = 'Microsoft.EventHub/EventHub' # type: str - self.service_bus_namespace = kwargs.get('service_bus_namespace', None) - self.shared_access_policy_name = kwargs.get('shared_access_policy_name', None) - self.shared_access_policy_key = kwargs.get('shared_access_policy_key', None) - self.authentication_mode = kwargs.get('authentication_mode', None) - self.event_hub_name = kwargs.get('event_hub_name', None) - self.consumer_group_name = kwargs.get('consumer_group_name', None) - - -class External(msrest.serialization.Model): - """The storage account where the custom code artifacts are located. - - :param storage_account: The properties that are associated with an Azure Storage account. - :type storage_account: ~stream_analytics_management_client.models.StorageAccount - :param container: - :type container: str - :param path: - :type path: str - """ - - _attribute_map = { - 'storage_account': {'key': 'storageAccount', 'type': 'StorageAccount'}, - 'container': {'key': 'container', 'type': 'str'}, - 'path': {'key': 'path', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(External, self).__init__(**kwargs) - self.storage_account = kwargs.get('storage_account', None) - self.container = kwargs.get('container', None) - self.path = kwargs.get('path', None) - - -class SubResource(msrest.serialization.Model): - """The base sub-resource model definition. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Resource Id. - :vartype id: str - :param name: Resource name. - :type name: str - :ivar type: Resource type. 
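
A sketch of the External model above, which points a job at custom code artifacts in blob storage (account, container, and path values are hypothetical):

    from azure.mgmt.streamanalytics.models import External, StorageAccount

    external = External(
        storage_account=StorageAccount(account_name="mystorage", account_key="<key>"),
        container="artifacts",
        path="udfs/",
    )
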
- :vartype type: str - """ - - _validation = { - 'id': {'readonly': True}, - 'type': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SubResource, self).__init__(**kwargs) - self.id = None - self.name = kwargs.get('name', None) - self.type = None - - -class Function(SubResource): - """A function object, containing all information associated with the named function. All functions are contained under a streaming job. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Resource Id. - :vartype id: str - :param name: Resource name. - :type name: str - :ivar type: Resource type. - :vartype type: str - :param properties: The properties that are associated with a function. - :type properties: ~stream_analytics_management_client.models.FunctionProperties - """ - - _validation = { - 'id': {'readonly': True}, - 'type': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'FunctionProperties'}, - } - - def __init__( - self, - **kwargs - ): - super(Function, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - - -class FunctionInput(msrest.serialization.Model): - """Describes one input parameter of a function. - - :param data_type: The (Azure Stream Analytics supported) data type of the function input - parameter. A list of valid Azure Stream Analytics data types are described at - https://msdn.microsoft.com/en-us/library/azure/dn835065.aspx. - :type data_type: str - :param is_configuration_parameter: A flag indicating if the parameter is a configuration - parameter. True if this input parameter is expected to be a constant. Default is false. - :type is_configuration_parameter: bool - """ - - _attribute_map = { - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'is_configuration_parameter': {'key': 'isConfigurationParameter', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): - super(FunctionInput, self).__init__(**kwargs) - self.data_type = kwargs.get('data_type', None) - self.is_configuration_parameter = kwargs.get('is_configuration_parameter', None) - - -class FunctionListResult(msrest.serialization.Model): - """Object containing a list of functions under a streaming job. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: A list of functions under a streaming job. Populated by a 'List' operation. - :vartype value: list[~stream_analytics_management_client.models.Function] - :ivar next_link: The link (url) to the next page of results. - :vartype next_link: str - """ - - _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[Function]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(FunctionListResult, self).__init__(**kwargs) - self.value = None - self.next_link = None - - -class FunctionOutput(msrest.serialization.Model): - """Describes the output of a function. - - :param data_type: The (Azure Stream Analytics supported) data type of the function output. 
A - list of valid Azure Stream Analytics data types are described at https://msdn.microsoft.com/en- - us/library/azure/dn835065.aspx. - :type data_type: str - """ - - _attribute_map = { - 'data_type': {'key': 'dataType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(FunctionOutput, self).__init__(**kwargs) - self.data_type = kwargs.get('data_type', None) - - -class Identity(msrest.serialization.Model): - """Describes how identity is verified. - - :param tenant_id: - :type tenant_id: str - :param principal_id: - :type principal_id: str - :param type: - :type type: str - """ - - _attribute_map = { - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(Identity, self).__init__(**kwargs) - self.tenant_id = kwargs.get('tenant_id', None) - self.principal_id = kwargs.get('principal_id', None) - self.type = kwargs.get('type', None) - - -class Input(SubResource): - """An input object, containing all information associated with the named input. All inputs are contained under a streaming job. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Resource Id. - :vartype id: str - :param name: Resource name. - :type name: str - :ivar type: Resource type. - :vartype type: str - :param properties: The properties that are associated with an input. Required on PUT - (CreateOrReplace) requests. - :type properties: ~stream_analytics_management_client.models.InputProperties - """ - - _validation = { - 'id': {'readonly': True}, - 'type': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'InputProperties'}, - } - - def __init__( - self, - **kwargs - ): - super(Input, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - - -class InputListResult(msrest.serialization.Model): - """Object containing a list of inputs under a streaming job. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: A list of inputs under a streaming job. Populated by a 'List' operation. - :vartype value: list[~stream_analytics_management_client.models.Input] - :ivar next_link: The link (url) to the next page of results. - :vartype next_link: str - """ - - _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[Input]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(InputListResult, self).__init__(**kwargs) - self.value = None - self.next_link = None - - -class InputProperties(msrest.serialization.Model): - """The properties that are associated with an input. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ReferenceInputProperties, StreamInputProperties. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates whether the input is a source of reference data or stream - data. Required on PUT (CreateOrReplace) requests.Constant filled by server. 
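
A sketch of the function parameter models above ("bigint" is assumed here as one of the Azure Stream Analytics data types listed at the link in the docstring):

    from azure.mgmt.streamanalytics.models import FunctionInput, FunctionOutput

    fn_in = FunctionInput(data_type="bigint", is_configuration_parameter=False)
    fn_out = FunctionOutput(data_type="bigint")
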
- :type type: str - :param serialization: Describes how data from an input is serialized or how data is serialized - when written to an output. Required on PUT (CreateOrReplace) requests. - :type serialization: ~stream_analytics_management_client.models.Serialization - :ivar diagnostics: Describes conditions applicable to the Input, Output, or the job overall, - that warrant customer attention. - :vartype diagnostics: ~stream_analytics_management_client.models.Diagnostics - :ivar etag: The current entity tag for the input. This is an opaque string. You can use it to - detect whether the resource has changed between requests. You can also use it in the If-Match - or If-None-Match headers for write operations for optimistic concurrency. - :vartype etag: str - :param compression: Describes how input data is compressed. - :type compression: ~stream_analytics_management_client.models.Compression - :param partition_key: partitionKey Describes a key in the input data which is used for - partitioning the input data. - :type partition_key: str - """ - - _validation = { - 'type': {'required': True}, - 'diagnostics': {'readonly': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'serialization': {'key': 'serialization', 'type': 'Serialization'}, - 'diagnostics': {'key': 'diagnostics', 'type': 'Diagnostics'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'compression': {'key': 'compression', 'type': 'Compression'}, - 'partition_key': {'key': 'partitionKey', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'Reference': 'ReferenceInputProperties', 'Stream': 'StreamInputProperties'} - } - - def __init__( - self, - **kwargs - ): - super(InputProperties, self).__init__(**kwargs) - self.type = None # type: Optional[str] - self.serialization = kwargs.get('serialization', None) - self.diagnostics = None - self.etag = None - self.compression = kwargs.get('compression', None) - self.partition_key = kwargs.get('partition_key', None) - - -class IoTHubStreamInputDataSource(StreamInputDataSource): - """Describes an IoT Hub input data source that contains stream data. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of input data source containing stream data. Required - on PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param iot_hub_namespace: The name or the URI of the IoT Hub. Required on PUT (CreateOrReplace) - requests. - :type iot_hub_namespace: str - :param shared_access_policy_name: The shared access policy name for the IoT Hub. This policy - must contain at least the Service connect permission. Required on PUT (CreateOrReplace) - requests. - :type shared_access_policy_name: str - :param shared_access_policy_key: The shared access policy key for the specified shared access - policy. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_key: str - :param consumer_group_name: The name of an IoT Hub Consumer Group that should be used to read - events from the IoT Hub. If not specified, the input uses the Iot Hub’s default consumer group. - :type consumer_group_name: str - :param endpoint: The IoT Hub endpoint to connect to (ie. messages/events, - messages/operationsMonitoringEvents, etc.). 
- :type endpoint: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'iot_hub_namespace': {'key': 'properties.iotHubNamespace', 'type': 'str'}, - 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'}, - 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'}, - 'consumer_group_name': {'key': 'properties.consumerGroupName', 'type': 'str'}, - 'endpoint': {'key': 'properties.endpoint', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(IoTHubStreamInputDataSource, self).__init__(**kwargs) - self.type = 'Microsoft.Devices/IotHubs' # type: str - self.iot_hub_namespace = kwargs.get('iot_hub_namespace', None) - self.shared_access_policy_name = kwargs.get('shared_access_policy_name', None) - self.shared_access_policy_key = kwargs.get('shared_access_policy_key', None) - self.consumer_group_name = kwargs.get('consumer_group_name', None) - self.endpoint = kwargs.get('endpoint', None) - - -class JavaScriptFunctionBinding(FunctionBinding): - """The binding to a JavaScript function. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the function binding type.Constant filled by server. - :type type: str - :param script: The JavaScript code containing a single function definition. For example: - 'function (x, y) { return x + y; }'. - :type script: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'script': {'key': 'properties.script', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(JavaScriptFunctionBinding, self).__init__(**kwargs) - self.type = 'Microsoft.StreamAnalytics/JavascriptUdf' # type: str - self.script = kwargs.get('script', None) - - -class JavaScriptFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): - """The parameters needed to retrieve the default function definition for a JavaScript function. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param binding_type: Required. Indicates the function binding type.Constant filled by server. - :type binding_type: str - :param script: The JavaScript code containing a single function definition. For example: - 'function (x, y) { return x + y; }'. - :type script: str - :ivar udf_type: The function type. Default value: "Scalar". - :vartype udf_type: str - """ - - _validation = { - 'binding_type': {'required': True}, - 'udf_type': {'constant': True}, - } - - _attribute_map = { - 'binding_type': {'key': 'bindingType', 'type': 'str'}, - 'script': {'key': 'bindingRetrievalProperties.script', 'type': 'str'}, - 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'str'}, - } - - udf_type = "Scalar" - - def __init__( - self, - **kwargs - ): - super(JavaScriptFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) - self.binding_type = 'Microsoft.StreamAnalytics/JavascriptUdf' # type: str - self.script = kwargs.get('script', None) - - -class StorageAccount(msrest.serialization.Model): - """The properties that are associated with an Azure Storage account. - - :param account_name: The name of the Azure Storage account. Required on PUT (CreateOrReplace) - requests. - :type account_name: str - :param account_key: The account key for the Azure Storage account. 
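
A sketch of the JavaScript binding above, reusing the docstring's own example script:

    from azure.mgmt.streamanalytics.models import JavaScriptFunctionBinding

    js_binding = JavaScriptFunctionBinding(script="function (x, y) { return x + y; }")
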
Required on PUT - (CreateOrReplace) requests. - :type account_key: str - """ - - _attribute_map = { - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'account_key': {'key': 'accountKey', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(StorageAccount, self).__init__(**kwargs) - self.account_name = kwargs.get('account_name', None) - self.account_key = kwargs.get('account_key', None) - - -class JobStorageAccount(StorageAccount): - """The properties that are associated with an Azure Storage account with MSI. - - :param account_name: The name of the Azure Storage account. Required on PUT (CreateOrReplace) - requests. - :type account_name: str - :param account_key: The account key for the Azure Storage account. Required on PUT - (CreateOrReplace) requests. - :type account_key: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", - "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode - """ - - _attribute_map = { - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'account_key': {'key': 'accountKey', 'type': 'str'}, - 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(JobStorageAccount, self).__init__(**kwargs) - self.authentication_mode = kwargs.get('authentication_mode', None) - - -class JsonSerialization(Serialization): - """Describes how data from an input is serialized or how data is serialized when written to an output in JSON format. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of serialization that the input or output uses. - Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: - "Csv", "Avro", "Json", "CustomClr", "Parquet". - :type type: str or ~stream_analytics_management_client.models.EventSerializationType - :param encoding: Specifies the encoding of the incoming data in the case of input and the - encoding of outgoing data in the case of output. Required on PUT (CreateOrReplace) requests. - Possible values include: "UTF8". - :type encoding: str or ~stream_analytics_management_client.models.Encoding - :param format: This property only applies to JSON serialization of outputs only. It is not - applicable to inputs. This property specifies the format of the JSON the output will be written - in. The currently supported values are 'lineSeparated' indicating the output will be formatted - by having each JSON object separated by a new line and 'array' indicating the output will be - formatted as an array of JSON objects. Default value is 'lineSeparated' if left null. Possible - values include: "LineSeparated", "Array". - :type format: str or ~stream_analytics_management_client.models.JsonOutputSerializationFormat - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'encoding': {'key': 'properties.encoding', 'type': 'str'}, - 'format': {'key': 'properties.format', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(JsonSerialization, self).__init__(**kwargs) - self.type = 'Json' # type: str - self.encoding = kwargs.get('encoding', None) - self.format = kwargs.get('format', None) - - -class Operation(msrest.serialization.Model): - """A Stream Analytics REST API operation. - - Variables are only populated by the server, and will be ignored when sending a request. 
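
A sketch of the JSON serialization above (both values come from the enums documented in the docstring; "LineSeparated" is also the default when format is left null):

    from azure.mgmt.streamanalytics.models import JsonSerialization

    json_ser = JsonSerialization(encoding="UTF8", format="LineSeparated")
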
- - :ivar name: The name of the operation being performed on this particular object. - :vartype name: str - :ivar display: Contains the localized display information for this particular operation / - action. - :vartype display: ~stream_analytics_management_client.models.OperationDisplay - """ - - _validation = { - 'name': {'readonly': True}, - 'display': {'readonly': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display': {'key': 'display', 'type': 'OperationDisplay'}, - } - - def __init__( - self, - **kwargs - ): - super(Operation, self).__init__(**kwargs) - self.name = None - self.display = None - - -class OperationDisplay(msrest.serialization.Model): - """Contains the localized display information for this particular operation / action. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar provider: The localized friendly form of the resource provider name. - :vartype provider: str - :ivar resource: The localized friendly form of the resource type related to this - action/operation. - :vartype resource: str - :ivar operation: The localized friendly name for the operation. - :vartype operation: str - :ivar description: The localized friendly description for the operation. - :vartype description: str - """ - - _validation = { - 'provider': {'readonly': True}, - 'resource': {'readonly': True}, - 'operation': {'readonly': True}, - 'description': {'readonly': True}, - } - - _attribute_map = { - 'provider': {'key': 'provider', 'type': 'str'}, - 'resource': {'key': 'resource', 'type': 'str'}, - 'operation': {'key': 'operation', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(OperationDisplay, self).__init__(**kwargs) - self.provider = None - self.resource = None - self.operation = None - self.description = None - - -class OperationListResult(msrest.serialization.Model): - """Result of the request to list Stream Analytics operations. It contains a list of operations and a URL link to get the next set of results. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: List of Stream Analytics operations supported by the Microsoft.StreamAnalytics - resource provider. - :vartype value: list[~stream_analytics_management_client.models.Operation] - :ivar next_link: URL to get the next set of operation list results if there are any. - :vartype next_link: str - """ - - _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[Operation]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(OperationListResult, self).__init__(**kwargs) - self.value = None - self.next_link = None - - -class Output(SubResource): - """An output object, containing all information associated with the named output. All outputs are contained under a streaming job. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Resource Id. - :vartype id: str - :param name: Resource name. - :type name: str - :ivar type: Resource type. - :vartype type: str - :param datasource: Describes the data source that output will be written to. Required on PUT - (CreateOrReplace) requests. 
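
The Operation models above are read-only and only come back from the service; a sketch of listing them through the management client (assumes azure-identity for the credential and a placeholder subscription id):

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient

    client = StreamAnalyticsManagementClient(DefaultAzureCredential(), "<subscription-id>")
    for op in client.operations.list():
        print(op.name)
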
- :type datasource: ~stream_analytics_management_client.models.OutputDataSource - :param time_window: - :type time_window: str - :param size_window: - :type size_window: float - :param serialization: Describes how data from an input is serialized or how data is serialized - when written to an output. Required on PUT (CreateOrReplace) requests. - :type serialization: ~stream_analytics_management_client.models.Serialization - :ivar diagnostics: Describes conditions applicable to the Input, Output, or the job overall, - that warrant customer attention. - :vartype diagnostics: ~stream_analytics_management_client.models.Diagnostics - :ivar etag: The current entity tag for the output. This is an opaque string. You can use it to - detect whether the resource has changed between requests. You can also use it in the If-Match - or If-None-Match headers for write operations for optimistic concurrency. - :vartype etag: str - """ - - _validation = { - 'id': {'readonly': True}, - 'type': {'readonly': True}, - 'diagnostics': {'readonly': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'datasource': {'key': 'properties.datasource', 'type': 'OutputDataSource'}, - 'time_window': {'key': 'properties.timeWindow', 'type': 'str'}, - 'size_window': {'key': 'properties.sizeWindow', 'type': 'float'}, - 'serialization': {'key': 'properties.serialization', 'type': 'Serialization'}, - 'diagnostics': {'key': 'properties.diagnostics', 'type': 'Diagnostics'}, - 'etag': {'key': 'properties.etag', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(Output, self).__init__(**kwargs) - self.datasource = kwargs.get('datasource', None) - self.time_window = kwargs.get('time_window', None) - self.size_window = kwargs.get('size_window', None) - self.serialization = kwargs.get('serialization', None) - self.diagnostics = None - self.etag = None - - -class OutputListResult(msrest.serialization.Model): - """Object containing a list of outputs under a streaming job. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: A list of outputs under a streaming job. Populated by a 'List' operation. - :vartype value: list[~stream_analytics_management_client.models.Output] - :ivar next_link: The link (url) to the next page of results. - :vartype next_link: str - """ - - _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[Output]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(OutputListResult, self).__init__(**kwargs) - self.value = None - self.next_link = None - - -class ParquetSerialization(Serialization): - """Describes how data from an input is serialized or how data is serialized when written to an output in Parquet format. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of serialization that the input or output uses. - Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: - "Csv", "Avro", "Json", "CustomClr", "Parquet". - :type type: str or ~stream_analytics_management_client.models.EventSerializationType - :param properties: The properties that are associated with the Parquet serialization type. - Required on PUT (CreateOrReplace) requests. 
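# How the Output model above is typically assembled (a sketch: the
# BlobOutputDataSource parameter names are assumptions based on the blob output
# model referenced elsewhere in this package, and all names and keys are
# placeholders):
from azure.mgmt.streamanalytics.models import (
    BlobOutputDataSource,
    JsonSerialization,
    Output,
    StorageAccount,
)

blob_output = Output(
    name="example-output",  # hypothetical output name
    datasource=BlobOutputDataSource(
        storage_accounts=[StorageAccount(account_name="examplestorage", account_key="<key>")],
        container="results",
        path_pattern="{date}/{time}",
    ),
    serialization=JsonSerialization(encoding="UTF8", format="LineSeparated"),
)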
- :type properties: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(ParquetSerialization, self).__init__(**kwargs) - self.type = 'Parquet' # type: str - self.properties = kwargs.get('properties', None) - - -class PowerBIOutputDataSource(OutputDataSource): - """Describes a Power BI output data source. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of data source output will be written to. Required on - PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param refresh_token: A refresh token that can be used to obtain a valid access token that can - then be used to authenticate with the data source. A valid refresh token is currently only - obtainable via the Azure Portal. It is recommended to put a dummy string value here when - creating the data source and then going to the Azure Portal to authenticate the data source - which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) - requests. - :type refresh_token: str - :param token_user_principal_name: The user principal name (UPN) of the user that was used to - obtain the refresh token. Use this property to help remember which user was used to obtain the - refresh token. - :type token_user_principal_name: str - :param token_user_display_name: The user display name of the user that was used to obtain the - refresh token. Use this property to help remember which user was used to obtain the refresh - token. - :type token_user_display_name: str - :param dataset: The name of the Power BI dataset. Required on PUT (CreateOrReplace) requests. - :type dataset: str - :param table: The name of the Power BI table under the specified dataset. Required on PUT - (CreateOrReplace) requests. - :type table: str - :param group_id: The ID of the Power BI group. - :type group_id: str - :param group_name: The name of the Power BI group. Use this property to help remember which - specific Power BI group id was used. - :type group_name: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", - "ConnectionString". 
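# The dummy-token workflow described in the PowerBIOutputDataSource docstring
# above, as code (a sketch; the dataset, table and group values are
# placeholders):
from azure.mgmt.streamanalytics.models import PowerBIOutputDataSource

powerbi_output = PowerBIOutputDataSource(
    refresh_token="dummy",           # placeholder; re-authenticating in the Azure
                                     # Portal replaces it with a valid refresh token
    dataset="example-dataset",
    table="example-table",
    group_id="<power-bi-group-id>",
    group_name="example-workspace",  # kept so the group id stays identifiable
)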
- :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'refresh_token': {'key': 'properties.refreshToken', 'type': 'str'}, - 'token_user_principal_name': {'key': 'properties.tokenUserPrincipalName', 'type': 'str'}, - 'token_user_display_name': {'key': 'properties.tokenUserDisplayName', 'type': 'str'}, - 'dataset': {'key': 'properties.dataset', 'type': 'str'}, - 'table': {'key': 'properties.table', 'type': 'str'}, - 'group_id': {'key': 'properties.groupId', 'type': 'str'}, - 'group_name': {'key': 'properties.groupName', 'type': 'str'}, - 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PowerBIOutputDataSource, self).__init__(**kwargs) - self.type = 'PowerBI' # type: str - self.refresh_token = kwargs.get('refresh_token', None) - self.token_user_principal_name = kwargs.get('token_user_principal_name', None) - self.token_user_display_name = kwargs.get('token_user_display_name', None) - self.dataset = kwargs.get('dataset', None) - self.table = kwargs.get('table', None) - self.group_id = kwargs.get('group_id', None) - self.group_name = kwargs.get('group_name', None) - self.authentication_mode = kwargs.get('authentication_mode', None) - - -class PowerBIOutputDataSourceProperties(OAuthBasedDataSourceProperties): - """The properties that are associated with a Power BI output. - - :param refresh_token: A refresh token that can be used to obtain a valid access token that can - then be used to authenticate with the data source. A valid refresh token is currently only - obtainable via the Azure Portal. It is recommended to put a dummy string value here when - creating the data source and then going to the Azure Portal to authenticate the data source - which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) - requests. - :type refresh_token: str - :param token_user_principal_name: The user principal name (UPN) of the user that was used to - obtain the refresh token. Use this property to help remember which user was used to obtain the - refresh token. - :type token_user_principal_name: str - :param token_user_display_name: The user display name of the user that was used to obtain the - refresh token. Use this property to help remember which user was used to obtain the refresh - token. - :type token_user_display_name: str - :param dataset: The name of the Power BI dataset. Required on PUT (CreateOrReplace) requests. - :type dataset: str - :param table: The name of the Power BI table under the specified dataset. Required on PUT - (CreateOrReplace) requests. - :type table: str - :param group_id: The ID of the Power BI group. - :type group_id: str - :param group_name: The name of the Power BI group. Use this property to help remember which - specific Power BI group id was used. - :type group_name: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", - "ConnectionString". 
- :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode - """ - - _attribute_map = { - 'refresh_token': {'key': 'refreshToken', 'type': 'str'}, - 'token_user_principal_name': {'key': 'tokenUserPrincipalName', 'type': 'str'}, - 'token_user_display_name': {'key': 'tokenUserDisplayName', 'type': 'str'}, - 'dataset': {'key': 'dataset', 'type': 'str'}, - 'table': {'key': 'table', 'type': 'str'}, - 'group_id': {'key': 'groupId', 'type': 'str'}, - 'group_name': {'key': 'groupName', 'type': 'str'}, - 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PowerBIOutputDataSourceProperties, self).__init__(**kwargs) - self.dataset = kwargs.get('dataset', None) - self.table = kwargs.get('table', None) - self.group_id = kwargs.get('group_id', None) - self.group_name = kwargs.get('group_name', None) - self.authentication_mode = kwargs.get('authentication_mode', None) - - -class PrivateEndpoint(Resource): - """Complete information about the private endpoint. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource Id for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or - Microsoft.Storage/storageAccounts. - :vartype type: str - :param properties: The properties associated with a private endpoint. - :type properties: ~stream_analytics_management_client.models.PrivateEndpointProperties - :ivar etag: Unique opaque string (generally a GUID) that represents the metadata state of the - resource (private endpoint) and changes whenever the resource is updated. Required on PUT - (CreateOrUpdate) requests. - :vartype etag: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'PrivateEndpointProperties'}, - 'etag': {'key': 'etag', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PrivateEndpoint, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - self.etag = None - - -class PrivateEndpointListResult(msrest.serialization.Model): - """A list of private endpoints. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: A list of private endpoints. - :vartype value: list[~stream_analytics_management_client.models.PrivateEndpoint] - :ivar next_link: The URL to fetch the next set of private endpoints. - :vartype next_link: str - """ - - _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[PrivateEndpoint]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PrivateEndpointListResult, self).__init__(**kwargs) - self.value = None - self.next_link = None - - -class PrivateEndpointProperties(msrest.serialization.Model): - """The properties associated with a private endpoint. 
- - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar created_date: The date when this private endpoint was created. - :vartype created_date: str - :param manual_private_link_service_connections: A list of connections to the remote resource. - Immutable after it is set. - :type manual_private_link_service_connections: - list[~stream_analytics_management_client.models.PrivateLinkServiceConnection] - """ - - _validation = { - 'created_date': {'readonly': True}, - } - - _attribute_map = { - 'created_date': {'key': 'createdDate', 'type': 'str'}, - 'manual_private_link_service_connections': {'key': 'manualPrivateLinkServiceConnections', 'type': '[PrivateLinkServiceConnection]'}, - } - - def __init__( - self, - **kwargs - ): - super(PrivateEndpointProperties, self).__init__(**kwargs) - self.created_date = None - self.manual_private_link_service_connections = kwargs.get('manual_private_link_service_connections', None) - - -class PrivateLinkConnectionState(msrest.serialization.Model): - """A collection of read-only information about the state of the connection to the private remote resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar status: Indicates whether the connection has been Approved/Rejected/Removed by the owner - of the remote resource/service. - :vartype status: str - :ivar description: The reason for approval/rejection of the connection. - :vartype description: str - :ivar actions_required: A message indicating if changes on the service provider require any - updates on the consumer. - :vartype actions_required: str - """ - - _validation = { - 'status': {'readonly': True}, - 'description': {'readonly': True}, - 'actions_required': {'readonly': True}, - } - - _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PrivateLinkConnectionState, self).__init__(**kwargs) - self.status = None - self.description = None - self.actions_required = None - - -class PrivateLinkServiceConnection(msrest.serialization.Model): - """A grouping of information about the connection to the remote resource. - - :param private_link_service_id: The resource id of the private link service. Required on PUT - (CreateOrUpdate) requests. - :type private_link_service_id: str - :param group_ids: The ID(s) of the group(s) obtained from the remote resource that this private - endpoint should connect to. Required on PUT (CreateOrUpdate) requests. - :type group_ids: list[str] - :param request_message: A message passed to the owner of the remote resource with this - connection request. Restricted to 140 chars. - :type request_message: str - :param private_link_service_connection_state: A collection of read-only information about the - state of the connection to the private remote resource. 
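# Putting PrivateEndpointProperties and PrivateLinkServiceConnection together
# (a sketch; the resource id and group id are placeholders). Because
# manual_private_link_service_connections is immutable once set, it is supplied
# in full at creation time:
from azure.mgmt.streamanalytics.models import (
    PrivateEndpoint,
    PrivateEndpointProperties,
    PrivateLinkServiceConnection,
)

private_endpoint = PrivateEndpoint(
    properties=PrivateEndpointProperties(
        manual_private_link_service_connections=[
            PrivateLinkServiceConnection(
                private_link_service_id="<private-link-service-resource-id>",
                group_ids=["<group-id>"],
                request_message="Please approve this connection.",  # capped at 140 chars
            )
        ]
    )
)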
- :type private_link_service_connection_state: - ~stream_analytics_management_client.models.PrivateLinkConnectionState - """ - - _attribute_map = { - 'private_link_service_id': {'key': 'properties.privateLinkServiceId', 'type': 'str'}, - 'group_ids': {'key': 'properties.groupIds', 'type': '[str]'}, - 'request_message': {'key': 'properties.requestMessage', 'type': 'str'}, - 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkConnectionState'}, - } - - def __init__( - self, - **kwargs - ): - super(PrivateLinkServiceConnection, self).__init__(**kwargs) - self.private_link_service_id = kwargs.get('private_link_service_id', None) - self.group_ids = kwargs.get('group_ids', None) - self.request_message = kwargs.get('request_message', None) - self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None) - - -class ProxyResource(Resource): - """The resource model definition for an ARM proxy resource. It will have everything other than required location and tags. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource Id for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or - Microsoft.Storage/storageAccounts. - :vartype type: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ProxyResource, self).__init__(**kwargs) - - -class ReferenceInputProperties(InputProperties): - """The properties that are associated with an input containing reference data. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates whether the input is a source of reference data or stream - data. Required on PUT (CreateOrReplace) requests. Constant filled by server. - :type type: str - :param serialization: Describes how data from an input is serialized or how data is serialized - when written to an output. Required on PUT (CreateOrReplace) requests. - :type serialization: ~stream_analytics_management_client.models.Serialization - :ivar diagnostics: Describes conditions applicable to the Input, Output, or the job overall, - that warrant customer attention. - :vartype diagnostics: ~stream_analytics_management_client.models.Diagnostics - :ivar etag: The current entity tag for the input. This is an opaque string. You can use it to - detect whether the resource has changed between requests. You can also use it in the If-Match - or If-None-Match headers for write operations for optimistic concurrency. - :vartype etag: str - :param compression: Describes how input data is compressed. - :type compression: ~stream_analytics_management_client.models.Compression - :param partition_key: Describes a key in the input data which is used for - partitioning the input data. - :type partition_key: str - :param datasource: Describes an input data source that contains reference data.
Required on PUT - (CreateOrReplace) requests. - :type datasource: ~stream_analytics_management_client.models.ReferenceInputDataSource - """ - - _validation = { - 'type': {'required': True}, - 'diagnostics': {'readonly': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'serialization': {'key': 'serialization', 'type': 'Serialization'}, - 'diagnostics': {'key': 'diagnostics', 'type': 'Diagnostics'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'compression': {'key': 'compression', 'type': 'Compression'}, - 'partition_key': {'key': 'partitionKey', 'type': 'str'}, - 'datasource': {'key': 'datasource', 'type': 'ReferenceInputDataSource'}, - } - - def __init__( - self, - **kwargs - ): - super(ReferenceInputProperties, self).__init__(**kwargs) - self.type = 'Reference' # type: str - self.datasource = kwargs.get('datasource', None) - - -class ResourceTestStatus(msrest.serialization.Model): - """Describes the status of the test operation along with error information, if applicable. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar status: The status of the test operation. - :vartype status: str - :ivar error: Describes the error that occurred. - :vartype error: ~stream_analytics_management_client.models.ErrorResponse - """ - - _validation = { - 'status': {'readonly': True}, - 'error': {'readonly': True}, - } - - _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'ErrorResponse'}, - } - - def __init__( - self, - **kwargs - ): - super(ResourceTestStatus, self).__init__(**kwargs) - self.status = None - self.error = None - - -class ScalarFunctionProperties(FunctionProperties): - """The properties that are associated with a scalar function. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of function.Constant filled by server. - :type type: str - :ivar etag: The current entity tag for the function. This is an opaque string. You can use it - to detect whether the resource has changed between requests. You can also use it in the If- - Match or If-None-Match headers for write operations for optimistic concurrency. - :vartype etag: str - :param inputs: - :type inputs: list[~stream_analytics_management_client.models.FunctionInput] - :param output: Describes the output of a function. - :type output: ~stream_analytics_management_client.models.FunctionOutput - :param binding: The physical binding of the function. For example, in the Azure Machine - Learning web service’s case, this describes the endpoint. - :type binding: ~stream_analytics_management_client.models.FunctionBinding - """ - - _validation = { - 'type': {'required': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'inputs': {'key': 'properties.inputs', 'type': '[FunctionInput]'}, - 'output': {'key': 'properties.output', 'type': 'FunctionOutput'}, - 'binding': {'key': 'properties.binding', 'type': 'FunctionBinding'}, - } - - def __init__( - self, - **kwargs - ): - super(ScalarFunctionProperties, self).__init__(**kwargs) - self.type = 'Scalar' # type: str - - -class ServiceBusQueueOutputDataSource(OutputDataSource): - """Describes a Service Bus Queue output data source. 
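# Before the Service Bus outputs below, here is how the ReferenceInputProperties
# model above fits together (a sketch: BlobReferenceInputDataSource and its
# parameter names are assumptions based on the reference-input models in this
# package, and all names are placeholders):
from azure.mgmt.streamanalytics.models import (
    BlobReferenceInputDataSource,
    Input,
    JsonSerialization,
    ReferenceInputProperties,
    StorageAccount,
)

reference_input = Input(
    name="lookup-data",  # hypothetical input name
    properties=ReferenceInputProperties(
        datasource=BlobReferenceInputDataSource(
            storage_accounts=[StorageAccount(account_name="examplestorage", account_key="<key>")],
            container="reference",
            path_pattern="lookups/{date}",
        ),
        serialization=JsonSerialization(encoding="UTF8"),
    ),
)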
- - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of data source output will be written to. Required on - PUT (CreateOrReplace) requests. Constant filled by server. - :type type: str - :param service_bus_namespace: The namespace that is associated with the desired Event Hub, - Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type service_bus_namespace: str - :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus - Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_name: str - :param shared_access_policy_key: The shared access policy key for the specified shared access - policy. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_key: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", - "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode - :param queue_name: The name of the Service Bus Queue. Required on PUT (CreateOrReplace) - requests. - :type queue_name: str - :param property_columns: A string array of the names of output columns to be attached to - Service Bus messages as custom properties. - :type property_columns: list[str] - :param system_property_columns: Dictionary of :code:`<string>`. - :type system_property_columns: dict[str, str] - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'}, - 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'}, - 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'}, - 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, - 'queue_name': {'key': 'properties.queueName', 'type': 'str'}, - 'property_columns': {'key': 'properties.propertyColumns', 'type': '[str]'}, - 'system_property_columns': {'key': 'properties.systemPropertyColumns', 'type': '{str}'}, - } - - def __init__( - self, - **kwargs - ): - super(ServiceBusQueueOutputDataSource, self).__init__(**kwargs) - self.type = 'Microsoft.ServiceBus/Queue' # type: str - self.service_bus_namespace = kwargs.get('service_bus_namespace', None) - self.shared_access_policy_name = kwargs.get('shared_access_policy_name', None) - self.shared_access_policy_key = kwargs.get('shared_access_policy_key', None) - self.authentication_mode = kwargs.get('authentication_mode', None) - self.queue_name = kwargs.get('queue_name', None) - self.property_columns = kwargs.get('property_columns', None) - self.system_property_columns = kwargs.get('system_property_columns', None) - - -class ServiceBusQueueOutputDataSourceProperties(ServiceBusDataSourceProperties): - """The properties that are associated with a Service Bus Queue output. - - :param service_bus_namespace: The namespace that is associated with the desired Event Hub, - Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type service_bus_namespace: str - :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus - Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_name: str - :param shared_access_policy_key: The shared access policy key for the specified shared access - policy.
Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_key: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", - "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode - :param queue_name: The name of the Service Bus Queue. Required on PUT (CreateOrReplace) - requests. - :type queue_name: str - :param property_columns: A string array of the names of output columns to be attached to - Service Bus messages as custom properties. - :type property_columns: list[str] - :param system_property_columns: Dictionary of :code:`<string>`. - :type system_property_columns: dict[str, str] - """ - - _attribute_map = { - 'service_bus_namespace': {'key': 'serviceBusNamespace', 'type': 'str'}, - 'shared_access_policy_name': {'key': 'sharedAccessPolicyName', 'type': 'str'}, - 'shared_access_policy_key': {'key': 'sharedAccessPolicyKey', 'type': 'str'}, - 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, - 'queue_name': {'key': 'queueName', 'type': 'str'}, - 'property_columns': {'key': 'propertyColumns', 'type': '[str]'}, - 'system_property_columns': {'key': 'systemPropertyColumns', 'type': '{str}'}, - } - - def __init__( - self, - **kwargs - ): - super(ServiceBusQueueOutputDataSourceProperties, self).__init__(**kwargs) - self.queue_name = kwargs.get('queue_name', None) - self.property_columns = kwargs.get('property_columns', None) - self.system_property_columns = kwargs.get('system_property_columns', None) - - -class ServiceBusTopicOutputDataSource(OutputDataSource): - """Describes a Service Bus Topic output data source. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of data source output will be written to. Required on - PUT (CreateOrReplace) requests. Constant filled by server. - :type type: str - :param service_bus_namespace: The namespace that is associated with the desired Event Hub, - Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type service_bus_namespace: str - :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus - Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_name: str - :param shared_access_policy_key: The shared access policy key for the specified shared access - policy. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_key: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", - "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode - :param topic_name: The name of the Service Bus Topic. Required on PUT (CreateOrReplace) - requests. - :type topic_name: str - :param property_columns: A string array of the names of output columns to be attached to - Service Bus messages as custom properties. - :type property_columns: list[str] - :param system_property_columns: Dictionary of :code:`<string>`.
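# A construction sketch for the Service Bus Queue output above (the namespace,
# policy and column names are placeholders; the system-property mapping simply
# follows the dict[str, str] type shown in the docstring):
from azure.mgmt.streamanalytics.models import ServiceBusQueueOutputDataSource

queue_output = ServiceBusQueueOutputDataSource(
    service_bus_namespace="example-namespace",
    shared_access_policy_name="RootManageSharedAccessKey",
    shared_access_policy_key="<key>",
    queue_name="alerts",
    property_columns=["deviceId"],                   # attached as custom message properties
    system_property_columns={"MessageId": "msgId"},  # assumed system-property-to-column mapping
)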
- :type system_property_columns: dict[str, str] - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'}, - 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'}, - 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'}, - 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, - 'topic_name': {'key': 'properties.topicName', 'type': 'str'}, - 'property_columns': {'key': 'properties.propertyColumns', 'type': '[str]'}, - 'system_property_columns': {'key': 'properties.systemPropertyColumns', 'type': '{str}'}, - } - - def __init__( - self, - **kwargs - ): - super(ServiceBusTopicOutputDataSource, self).__init__(**kwargs) - self.type = 'Microsoft.ServiceBus/Topic' # type: str - self.service_bus_namespace = kwargs.get('service_bus_namespace', None) - self.shared_access_policy_name = kwargs.get('shared_access_policy_name', None) - self.shared_access_policy_key = kwargs.get('shared_access_policy_key', None) - self.authentication_mode = kwargs.get('authentication_mode', None) - self.topic_name = kwargs.get('topic_name', None) - self.property_columns = kwargs.get('property_columns', None) - self.system_property_columns = kwargs.get('system_property_columns', None) - - -class ServiceBusTopicOutputDataSourceProperties(ServiceBusDataSourceProperties): - """The properties that are associated with a Service Bus Topic output. - - :param service_bus_namespace: The namespace that is associated with the desired Event Hub, - Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type service_bus_namespace: str - :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus - Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_name: str - :param shared_access_policy_key: The shared access policy key for the specified shared access - policy. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_key: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", - "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode - :param topic_name: The name of the Service Bus Topic. Required on PUT (CreateOrReplace) - requests. - :type topic_name: str - :param property_columns: A string array of the names of output columns to be attached to - Service Bus messages as custom properties. - :type property_columns: list[str] - :param system_property_columns: Dictionary of :code:`<string>`.
- :type system_property_columns: dict[str, str] - """ - - _attribute_map = { - 'service_bus_namespace': {'key': 'serviceBusNamespace', 'type': 'str'}, - 'shared_access_policy_name': {'key': 'sharedAccessPolicyName', 'type': 'str'}, - 'shared_access_policy_key': {'key': 'sharedAccessPolicyKey', 'type': 'str'}, - 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, - 'topic_name': {'key': 'topicName', 'type': 'str'}, - 'property_columns': {'key': 'propertyColumns', 'type': '[str]'}, - 'system_property_columns': {'key': 'systemPropertyColumns', 'type': '{str}'}, - } - - def __init__( - self, - **kwargs - ): - super(ServiceBusTopicOutputDataSourceProperties, self).__init__(**kwargs) - self.topic_name = kwargs.get('topic_name', None) - self.property_columns = kwargs.get('property_columns', None) - self.system_property_columns = kwargs.get('system_property_columns', None) - - -class StartStreamingJobParameters(msrest.serialization.Model): - """Parameters supplied to the Start Streaming Job operation. - - :param output_start_mode: Value may be JobStartTime, CustomTime, or LastOutputEventTime to - indicate whether the starting point of the output event stream should start whenever the job is - started, start at a custom user time stamp specified via the outputStartTime property, or start - from the last event output time. Possible values include: "JobStartTime", "CustomTime", - "LastOutputEventTime". - :type output_start_mode: str or ~stream_analytics_management_client.models.OutputStartMode - :param output_start_time: Value is either an ISO-8601 formatted time stamp that indicates the - starting point of the output event stream, or null to indicate that the output event stream - will start whenever the streaming job is started. This property must have a value if - outputStartMode is set to CustomTime. - :type output_start_time: ~datetime.datetime - """ - - _attribute_map = { - 'output_start_mode': {'key': 'outputStartMode', 'type': 'str'}, - 'output_start_time': {'key': 'outputStartTime', 'type': 'iso-8601'}, - } - - def __init__( - self, - **kwargs - ): - super(StartStreamingJobParameters, self).__init__(**kwargs) - self.output_start_mode = kwargs.get('output_start_mode', None) - self.output_start_time = kwargs.get('output_start_time', None) - - -class StreamingJob(TrackedResource): - """A streaming job object, containing all information associated with the named streaming job. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource Id for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or - Microsoft.Storage/storageAccounts. - :vartype type: str - :param tags: A set of tags. Resource tags. - :type tags: dict[str, str] - :param location: The geo-location where the resource lives. - :type location: str - :param identity: Describes the system-assigned managed identity assigned to this job that can - be used to authenticate with inputs and outputs. - :type identity: ~stream_analytics_management_client.models.Identity - :param sku: Describes the SKU of the streaming job. Required on PUT (CreateOrReplace) requests. - :type sku: ~stream_analytics_management_client.models.StreamingJobSku - :ivar job_id: A GUID uniquely identifying the streaming job. 
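# Using the StartStreamingJobParameters model above with a CustomTime start (a
# sketch: the resource group, job name and subscription id are placeholders,
# and the begin_start call shape is an assumption based on this track-2 client):
import datetime

from azure.identity import DefaultAzureCredential
from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient
from azure.mgmt.streamanalytics.models import StartStreamingJobParameters

client = StreamAnalyticsManagementClient(DefaultAzureCredential(), "<subscription-id>")

# CustomTime requires output_start_time; JobStartTime and LastOutputEventTime do not.
start_parameters = StartStreamingJobParameters(
    output_start_mode="CustomTime",
    output_start_time=datetime.datetime(2022, 1, 1, tzinfo=datetime.timezone.utc),
)
client.streaming_jobs.begin_start("example-rg", "example-job", start_parameters).result()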
This GUID is generated upon - creation of the streaming job. - :vartype job_id: str - :ivar provisioning_state: Describes the provisioning status of the streaming job. - :vartype provisioning_state: str - :ivar job_state: Describes the state of the streaming job. - :vartype job_state: str - :param job_type: Describes the type of the job. Valid modes are ``Cloud`` and ``Edge``. Possible - values include: "Cloud", "Edge". - :type job_type: str or ~stream_analytics_management_client.models.JobType - :param output_start_mode: This property should only be utilized when it is desired that the job - be started immediately upon creation. Value may be JobStartTime, CustomTime, or - LastOutputEventTime to indicate whether the starting point of the output event stream should - start whenever the job is started, start at a custom user time stamp specified via the - outputStartTime property, or start from the last event output time. Possible values include: - "JobStartTime", "CustomTime", "LastOutputEventTime". - :type output_start_mode: str or ~stream_analytics_management_client.models.OutputStartMode - :param output_start_time: Value is either an ISO-8601 formatted time stamp that indicates the - starting point of the output event stream, or null to indicate that the output event stream - will start whenever the streaming job is started. This property must have a value if - outputStartMode is set to CustomTime. - :type output_start_time: ~datetime.datetime - :ivar last_output_event_time: Value is either an ISO-8601 formatted timestamp indicating the - last output event time of the streaming job or null indicating that output has not yet been - produced. In case of multiple outputs or multiple streams, this shows the latest value in that - set. - :vartype last_output_event_time: ~datetime.datetime - :param events_out_of_order_policy: Indicates the policy to apply to events that arrive out of - order in the input event stream. Possible values include: "Adjust", "Drop". - :type events_out_of_order_policy: str or - ~stream_analytics_management_client.models.EventsOutOfOrderPolicy - :param output_error_policy: Indicates the policy to apply to events that arrive at the output - and cannot be written to the external storage due to being malformed (missing column values, - column values of wrong type or size). Possible values include: "Stop", "Drop". - :type output_error_policy: str or ~stream_analytics_management_client.models.OutputErrorPolicy - :param events_out_of_order_max_delay_in_seconds: The maximum tolerable delay in seconds where - out-of-order events can be adjusted to be back in order. - :type events_out_of_order_max_delay_in_seconds: int - :param events_late_arrival_max_delay_in_seconds: The maximum tolerable delay in seconds where - events arriving late could be included. Supported range is -1 to 1814399 (20.23:59:59 days) - and -1 is used to specify wait indefinitely. If the property is absent, it is interpreted to - have a value of -1. - :type events_late_arrival_max_delay_in_seconds: int - :param data_locale: The data locale of the stream analytics job. Value should be the name of a - supported .NET Culture from the set https://msdn.microsoft.com/en-us/library/system.globalization.culturetypes(v=vs.110).aspx. Defaults to 'en-US' if none - specified. - :type data_locale: str - :param compatibility_level: Controls certain runtime behaviors of the streaming job. Possible - values include: "1.0".
- :type compatibility_level: str or ~stream_analytics_management_client.models.CompatibilityLevel - :ivar created_date: Value is an ISO-8601 formatted UTC timestamp indicating when the streaming - job was created. - :vartype created_date: ~datetime.datetime - :param inputs: A list of one or more inputs to the streaming job. The name property for each - input is required when specifying this property in a PUT request. This property cannot be - modified via a PATCH operation. You must use the PATCH API available for the individual input. - :type inputs: list[~stream_analytics_management_client.models.Input] - :param transformation: Indicates the query and the number of streaming units to use for the - streaming job. The name property of the transformation is required when specifying this - property in a PUT request. This property cannot be modified via a PATCH operation. You must use - the PATCH API available for the individual transformation. - :type transformation: ~stream_analytics_management_client.models.Transformation - :param outputs: A list of one or more outputs for the streaming job. The name property for each - output is required when specifying this property in a PUT request. This property cannot be - modified via a PATCH operation. You must use the PATCH API available for the individual output. - :type outputs: list[~stream_analytics_management_client.models.Output] - :param functions: A list of one or more functions for the streaming job. The name property for - each function is required when specifying this property in a PUT request. This property cannot - be modified via a PATCH operation. You must use the PATCH API available for the individual - function. - :type functions: list[~stream_analytics_management_client.models.Function] - :ivar etag: The current entity tag for the streaming job. This is an opaque string. You can use - it to detect whether the resource has changed between requests. You can also use it in the If- - Match or If-None-Match headers for write operations for optimistic concurrency. - :vartype etag: str - :param job_storage_account: The properties that are associated with an Azure Storage account - with MSI. - :type job_storage_account: ~stream_analytics_management_client.models.JobStorageAccount - :ivar content_storage_policy: Valid values are JobStorageAccount and SystemAccount. If set to - JobStorageAccount, this requires the user to also specify the jobStorageAccount property. - Possible values include: "SystemAccount", "JobStorageAccount". - :vartype content_storage_policy: str or - ~stream_analytics_management_client.models.ContentStoragePolicy - :param externals: The storage account where the custom code artifacts are located. - :type externals: ~stream_analytics_management_client.models.External - :param cluster: The cluster which streaming jobs will run on.
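# Pulling the event-ordering and policy settings above into one sketch (the
# location and all values are illustrative; the enum strings come from the
# docstrings):
from azure.mgmt.streamanalytics.models import StreamingJob, StreamingJobSku

streaming_job = StreamingJob(
    location="West US",
    sku=StreamingJobSku(name="Standard"),
    events_out_of_order_policy="Adjust",
    output_error_policy="Drop",
    events_out_of_order_max_delay_in_seconds=0,
    events_late_arrival_max_delay_in_seconds=5,
    data_locale="en-US",
    compatibility_level="1.0",
)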
- :type cluster: ~stream_analytics_management_client.models.ClusterInfo - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'job_id': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - 'job_state': {'readonly': True}, - 'last_output_event_time': {'readonly': True}, - 'created_date': {'readonly': True}, - 'etag': {'readonly': True}, - 'content_storage_policy': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'Identity'}, - 'sku': {'key': 'properties.sku', 'type': 'StreamingJobSku'}, - 'job_id': {'key': 'properties.jobId', 'type': 'str'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, - 'job_state': {'key': 'properties.jobState', 'type': 'str'}, - 'job_type': {'key': 'properties.jobType', 'type': 'str'}, - 'output_start_mode': {'key': 'properties.outputStartMode', 'type': 'str'}, - 'output_start_time': {'key': 'properties.outputStartTime', 'type': 'iso-8601'}, - 'last_output_event_time': {'key': 'properties.lastOutputEventTime', 'type': 'iso-8601'}, - 'events_out_of_order_policy': {'key': 'properties.eventsOutOfOrderPolicy', 'type': 'str'}, - 'output_error_policy': {'key': 'properties.outputErrorPolicy', 'type': 'str'}, - 'events_out_of_order_max_delay_in_seconds': {'key': 'properties.eventsOutOfOrderMaxDelayInSeconds', 'type': 'int'}, - 'events_late_arrival_max_delay_in_seconds': {'key': 'properties.eventsLateArrivalMaxDelayInSeconds', 'type': 'int'}, - 'data_locale': {'key': 'properties.dataLocale', 'type': 'str'}, - 'compatibility_level': {'key': 'properties.compatibilityLevel', 'type': 'str'}, - 'created_date': {'key': 'properties.createdDate', 'type': 'iso-8601'}, - 'inputs': {'key': 'properties.inputs', 'type': '[Input]'}, - 'transformation': {'key': 'properties.transformation', 'type': 'Transformation'}, - 'outputs': {'key': 'properties.outputs', 'type': '[Output]'}, - 'functions': {'key': 'properties.functions', 'type': '[Function]'}, - 'etag': {'key': 'properties.etag', 'type': 'str'}, - 'job_storage_account': {'key': 'properties.jobStorageAccount', 'type': 'JobStorageAccount'}, - 'content_storage_policy': {'key': 'properties.contentStoragePolicy', 'type': 'str'}, - 'externals': {'key': 'properties.externals', 'type': 'External'}, - 'cluster': {'key': 'properties.cluster', 'type': 'ClusterInfo'}, - } - - def __init__( - self, - **kwargs - ): - super(StreamingJob, self).__init__(**kwargs) - self.identity = kwargs.get('identity', None) - self.sku = kwargs.get('sku', None) - self.job_id = None - self.provisioning_state = None - self.job_state = None - self.job_type = kwargs.get('job_type', None) - self.output_start_mode = kwargs.get('output_start_mode', None) - self.output_start_time = kwargs.get('output_start_time', None) - self.last_output_event_time = None - self.events_out_of_order_policy = kwargs.get('events_out_of_order_policy', None) - self.output_error_policy = kwargs.get('output_error_policy', None) - self.events_out_of_order_max_delay_in_seconds = kwargs.get('events_out_of_order_max_delay_in_seconds', None) - self.events_late_arrival_max_delay_in_seconds = kwargs.get('events_late_arrival_max_delay_in_seconds', None) - self.data_locale = kwargs.get('data_locale', None) - self.compatibility_level = 
kwargs.get('compatibility_level', None) - self.created_date = None - self.inputs = kwargs.get('inputs', None) - self.transformation = kwargs.get('transformation', None) - self.outputs = kwargs.get('outputs', None) - self.functions = kwargs.get('functions', None) - self.etag = None - self.job_storage_account = kwargs.get('job_storage_account', None) - self.content_storage_policy = None - self.externals = kwargs.get('externals', None) - self.cluster = kwargs.get('cluster', None) - - -class StreamingJobListResult(msrest.serialization.Model): - """Object containing a list of streaming jobs. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: A list of streaming jobs. Populated by a 'List' operation. - :vartype value: list[~stream_analytics_management_client.models.StreamingJob] - :ivar next_link: The link (url) to the next page of results. - :vartype next_link: str - """ - - _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[StreamingJob]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(StreamingJobListResult, self).__init__(**kwargs) - self.value = None - self.next_link = None - - -class StreamingJobSku(msrest.serialization.Model): - """The properties that are associated with a SKU. - - :param name: The name of the SKU. Required on PUT (CreateOrReplace) requests. Possible values - include: "Standard". - :type name: str or ~stream_analytics_management_client.models.StreamingJobSkuName - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(StreamingJobSku, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - - -class StreamInputProperties(InputProperties): - """The properties that are associated with an input containing stream data. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates whether the input is a source of reference data or stream - data. Required on PUT (CreateOrReplace) requests. Constant filled by server. - :type type: str - :param serialization: Describes how data from an input is serialized or how data is serialized - when written to an output. Required on PUT (CreateOrReplace) requests. - :type serialization: ~stream_analytics_management_client.models.Serialization - :ivar diagnostics: Describes conditions applicable to the Input, Output, or the job overall, - that warrant customer attention. - :vartype diagnostics: ~stream_analytics_management_client.models.Diagnostics - :ivar etag: The current entity tag for the input. This is an opaque string. You can use it to - detect whether the resource has changed between requests. You can also use it in the If-Match - or If-None-Match headers for write operations for optimistic concurrency. - :vartype etag: str - :param compression: Describes how input data is compressed. - :type compression: ~stream_analytics_management_client.models.Compression - :param partition_key: Describes a key in the input data which is used for - partitioning the input data. - :type partition_key: str - :param datasource: Describes an input data source that contains stream data. Required on PUT - (CreateOrReplace) requests.
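# The stream-input counterpart of the reference-input sketch earlier
# (EventHubStreamInputDataSource and its parameter names are assumptions based
# on the Event Hub input models in this package; names are placeholders):
from azure.mgmt.streamanalytics.models import (
    EventHubStreamInputDataSource,
    Input,
    JsonSerialization,
    StreamInputProperties,
)

stream_input = Input(
    name="live-events",  # hypothetical input name
    properties=StreamInputProperties(
        datasource=EventHubStreamInputDataSource(
            service_bus_namespace="example-namespace",
            shared_access_policy_name="RootManageSharedAccessKey",
            shared_access_policy_key="<key>",
            event_hub_name="telemetry",
        ),
        serialization=JsonSerialization(encoding="UTF8"),
    ),
)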
- :type datasource: ~stream_analytics_management_client.models.StreamInputDataSource - """ - - _validation = { - 'type': {'required': True}, - 'diagnostics': {'readonly': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'serialization': {'key': 'serialization', 'type': 'Serialization'}, - 'diagnostics': {'key': 'diagnostics', 'type': 'Diagnostics'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'compression': {'key': 'compression', 'type': 'Compression'}, - 'partition_key': {'key': 'partitionKey', 'type': 'str'}, - 'datasource': {'key': 'datasource', 'type': 'StreamInputDataSource'}, - } - - def __init__( - self, - **kwargs - ): - super(StreamInputProperties, self).__init__(**kwargs) - self.type = 'Stream' # type: str - self.datasource = kwargs.get('datasource', None) - - -class SubscriptionQuota(SubResource): - """Describes the current quota for the subscription. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Resource Id. - :vartype id: str - :param name: Resource name. - :type name: str - :ivar type: Resource type. - :vartype type: str - :ivar max_count: The max permitted usage of this resource. - :vartype max_count: int - :ivar current_count: The current usage of this resource. - :vartype current_count: int - """ - - _validation = { - 'id': {'readonly': True}, - 'type': {'readonly': True}, - 'max_count': {'readonly': True}, - 'current_count': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_count': {'key': 'properties.maxCount', 'type': 'int'}, - 'current_count': {'key': 'properties.currentCount', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - super(SubscriptionQuota, self).__init__(**kwargs) - self.max_count = None - self.current_count = None - - -class SubscriptionQuotasListResult(msrest.serialization.Model): - """Result of the GetQuotas operation. It contains a list of quotas for the subscription in a particular region. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: List of quotas for the subscription in a particular region. - :vartype value: list[~stream_analytics_management_client.models.SubscriptionQuota] - """ - - _validation = { - 'value': {'readonly': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[SubscriptionQuota]'}, - } - - def __init__( - self, - **kwargs - ): - super(SubscriptionQuotasListResult, self).__init__(**kwargs) - self.value = None - - -class Transformation(SubResource): - """A transformation object, containing all information associated with the named transformation. All transformations are contained under a streaming job. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Resource Id. - :vartype id: str - :param name: Resource name. - :type name: str - :ivar type: Resource type. - :vartype type: str - :param streaming_units: Specifies the number of streaming units that the streaming job uses. - :type streaming_units: int - :param query: Specifies the query that will be run in the streaming job. You can learn more - about the Stream Analytics Query Language (SAQL) here: - https://msdn.microsoft.com/library/azure/dn834998 . Required on PUT (CreateOrReplace) requests. - :type query: str - :ivar etag: The current entity tag for the transformation. 
This is an opaque string. You can - use it to detect whether the resource has changed between requests. You can also use it in the - If-Match or If-None-Match headers for write operations for optimistic concurrency. - :vartype etag: str - """ - - _validation = { - 'id': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'streaming_units': {'key': 'properties.streamingUnits', 'type': 'int'}, - 'query': {'key': 'properties.query', 'type': 'str'}, - 'etag': {'key': 'properties.etag', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(Transformation, self).__init__(**kwargs) - self.streaming_units = kwargs.get('streaming_units', None) - self.query = kwargs.get('query', None) - self.etag = None diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models_py3.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models_py3.py index aa07f713986e..49628eb32af4 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models_py3.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models_py3.py @@ -7,7 +7,7 @@ # -------------------------------------------------------------------------- import datetime -from typing import Dict, List, Optional, Union +from typing import Any, Dict, List, Optional, Union from azure.core.exceptions import HttpResponseError import msrest.serialization @@ -15,123 +15,18 @@ from ._stream_analytics_management_client_enums import * -class FunctionProperties(msrest.serialization.Model): - """The properties that are associated with a function. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AggregateFunctionProperties, ScalarFunctionProperties. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of function.Constant filled by server. - :type type: str - :ivar etag: The current entity tag for the function. This is an opaque string. You can use it - to detect whether the resource has changed between requests. You can also use it in the If- - Match or If-None-Match headers for write operations for optimistic concurrency. - :vartype etag: str - :param inputs: - :type inputs: list[~stream_analytics_management_client.models.FunctionInput] - :param output: Describes the output of a function. - :type output: ~stream_analytics_management_client.models.FunctionOutput - :param binding: The physical binding of the function. For example, in the Azure Machine - Learning web service’s case, this describes the endpoint. 
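# A sketch of the Transformation model defined above; the query is a simple
# pass-through example in the SAQL dialect linked from the docstring, and the
# input/output names are placeholders:
from azure.mgmt.streamanalytics.models import Transformation

transformation = Transformation(
    name="Transformation",  # the name property is required on PUT requests
    streaming_units=3,
    query="SELECT * INTO [example-output] FROM [live-events]",
)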
- :type binding: ~stream_analytics_management_client.models.FunctionBinding - """ - - _validation = { - 'type': {'required': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'inputs': {'key': 'properties.inputs', 'type': '[FunctionInput]'}, - 'output': {'key': 'properties.output', 'type': 'FunctionOutput'}, - 'binding': {'key': 'properties.binding', 'type': 'FunctionBinding'}, - } - - _subtype_map = { - 'type': {'Aggregate': 'AggregateFunctionProperties', 'Scalar': 'ScalarFunctionProperties'} - } - - def __init__( - self, - *, - inputs: Optional[List["FunctionInput"]] = None, - output: Optional["FunctionOutput"] = None, - binding: Optional["FunctionBinding"] = None, - **kwargs - ): - super(FunctionProperties, self).__init__(**kwargs) - self.type = None # type: Optional[str] - self.etag = None - self.inputs = inputs - self.output = output - self.binding = binding - - -class AggregateFunctionProperties(FunctionProperties): - """The properties that are associated with an aggregate function. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of function.Constant filled by server. - :type type: str - :ivar etag: The current entity tag for the function. This is an opaque string. You can use it - to detect whether the resource has changed between requests. You can also use it in the If- - Match or If-None-Match headers for write operations for optimistic concurrency. - :vartype etag: str - :param inputs: - :type inputs: list[~stream_analytics_management_client.models.FunctionInput] - :param output: Describes the output of a function. - :type output: ~stream_analytics_management_client.models.FunctionOutput - :param binding: The physical binding of the function. For example, in the Azure Machine - Learning web service’s case, this describes the endpoint. - :type binding: ~stream_analytics_management_client.models.FunctionBinding - """ - - _validation = { - 'type': {'required': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'inputs': {'key': 'properties.inputs', 'type': '[FunctionInput]'}, - 'output': {'key': 'properties.output', 'type': 'FunctionOutput'}, - 'binding': {'key': 'properties.binding', 'type': 'FunctionBinding'}, - } - - def __init__( - self, - *, - inputs: Optional[List["FunctionInput"]] = None, - output: Optional["FunctionOutput"] = None, - binding: Optional["FunctionBinding"] = None, - **kwargs - ): - super(AggregateFunctionProperties, self).__init__(inputs=inputs, output=output, binding=binding, **kwargs) - self.type = 'Aggregate' # type: str - - class Serialization(msrest.serialization.Model): """Describes how data from an input is serialized or how data is serialized when written to an output. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSerialization, CsvSerialization, CustomClrSerialization, JsonSerialization, ParquetSerialization. + sub-classes are: AvroSerialization, CsvSerialization, JsonSerialization, ParquetSerialization. All required parameters must be populated in order to send to Azure. - :param type: Required. Indicates the type of serialization that the input or output uses. - Required on PUT (CreateOrReplace) requests.Constant filled by server. 
Possible values include: - "Csv", "Avro", "Json", "CustomClr", "Parquet". - :type type: str or ~stream_analytics_management_client.models.EventSerializationType + :ivar type: Required. Indicates the type of serialization that the input or output uses. + Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: + "Csv", "Avro", "Json", "Parquet". + :vartype type: str or ~stream_analytics_management_client.models.EventSerializationType """ _validation = { @@ -143,13 +38,15 @@ class Serialization(msrest.serialization.Model): } _subtype_map = { - 'type': {'Avro': 'AvroSerialization', 'Csv': 'CsvSerialization', 'CustomClr': 'CustomClrSerialization', 'Json': 'JsonSerialization', 'Parquet': 'ParquetSerialization'} + 'type': {'Avro': 'AvroSerialization', 'Csv': 'CsvSerialization', 'Json': 'JsonSerialization', 'Parquet': 'ParquetSerialization'} } def __init__( self, **kwargs ): + """ + """ super(Serialization, self).__init__(**kwargs) self.type = None # type: Optional[str] @@ -159,13 +56,13 @@ class AvroSerialization(Serialization): All required parameters must be populated in order to send to Azure. - :param type: Required. Indicates the type of serialization that the input or output uses. - Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: - "Csv", "Avro", "Json", "CustomClr", "Parquet". - :type type: str or ~stream_analytics_management_client.models.EventSerializationType - :param properties: The properties that are associated with the Avro serialization type. - Required on PUT (CreateOrReplace) requests. - :type properties: object + :ivar type: Required. Indicates the type of serialization that the input or output uses. + Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: + "Csv", "Avro", "Json", "Parquet". + :vartype type: str or ~stream_analytics_management_client.models.EventSerializationType + :ivar properties: The properties that are associated with the Avro serialization type. Required + on PUT (CreateOrReplace) requests. + :vartype properties: any """ _validation = { @@ -180,9 +77,14 @@ class AvroSerialization(Serialization): def __init__( self, *, - properties: Optional[object] = None, + properties: Optional[Any] = None, **kwargs ): + """ + :keyword properties: The properties that are associated with the Avro serialization type. + Required on PUT (CreateOrReplace) requests. + :paramtype properties: any + """ super(AvroSerialization, self).__init__(**kwargs) self.type = 'Avro' # type: str self.properties = properties @@ -192,13 +94,13 @@ class OutputDataSource(msrest.serialization.Model): """Describes the data source that output will be written to. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureFunctionOutputDataSource, AzureDataLakeStoreOutputDataSource, EventHubV2OutputDataSource, EventHubOutputDataSource, ServiceBusQueueOutputDataSource, ServiceBusTopicOutputDataSource, AzureSynapseOutputDataSource, AzureSqlDatabaseOutputDataSource, BlobOutputDataSource, DocumentDbOutputDataSource, AzureTableOutputDataSource, PowerBIOutputDataSource. + sub-classes are: AzureDataLakeStoreOutputDataSource, EventHubV2OutputDataSource, EventHubOutputDataSource, ServiceBusQueueOutputDataSource, ServiceBusTopicOutputDataSource, AzureSynapseOutputDataSource, AzureSqlDatabaseOutputDataSource, BlobOutputDataSource, DocumentDbOutputDataSource, AzureTableOutputDataSource, PowerBIOutputDataSource. 
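(Editorial aside on the `_subtype_map` rewrites above: msrest resolves the wire discriminator `type` against this table when deserializing, so a payload carrying a removed kind such as "CustomClr" no longer classifies to a model. The following is a minimal sketch, assuming azure-mgmt-streamanalytics 1.0.0 as generated in this diff; the payload values are made up.)

from azure.mgmt.streamanalytics.models import JsonSerialization, Serialization

# "Json" is still registered in Serialization._subtype_map, so deserializing
# through the base class yields the JsonSerialization subclass.
payload = {"type": "Json", "properties": {"encoding": "UTF8"}}
result = Serialization.deserialize(payload)
assert isinstance(result, JsonSerialization)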
All required parameters must be populated in order to send to Azure. - :param type: Required. Indicates the type of data source output will be written to. Required on + :ivar type: Required. Indicates the type of data source output will be written to. Required on PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str + :vartype type: str """ _validation = { @@ -210,13 +112,15 @@ class OutputDataSource(msrest.serialization.Model): } _subtype_map = { - 'type': {'Microsoft.AzureFunction': 'AzureFunctionOutputDataSource', 'Microsoft.DataLake/Accounts': 'AzureDataLakeStoreOutputDataSource', 'Microsoft.EventHub/EventHub': 'EventHubV2OutputDataSource', 'Microsoft.ServiceBus/EventHub': 'EventHubOutputDataSource', 'Microsoft.ServiceBus/Queue': 'ServiceBusQueueOutputDataSource', 'Microsoft.ServiceBus/Topic': 'ServiceBusTopicOutputDataSource', 'Microsoft.Sql/Server/DataWarehouse': 'AzureSynapseOutputDataSource', 'Microsoft.Sql/Server/Database': 'AzureSqlDatabaseOutputDataSource', 'Microsoft.Storage/Blob': 'BlobOutputDataSource', 'Microsoft.Storage/DocumentDB': 'DocumentDbOutputDataSource', 'Microsoft.Storage/Table': 'AzureTableOutputDataSource', 'PowerBI': 'PowerBIOutputDataSource'} + 'type': {'Microsoft.DataLake/Accounts': 'AzureDataLakeStoreOutputDataSource', 'Microsoft.EventHub/EventHub': 'EventHubV2OutputDataSource', 'Microsoft.ServiceBus/EventHub': 'EventHubOutputDataSource', 'Microsoft.ServiceBus/Queue': 'ServiceBusQueueOutputDataSource', 'Microsoft.ServiceBus/Topic': 'ServiceBusTopicOutputDataSource', 'Microsoft.Sql/Server/DataWarehouse': 'AzureSynapseOutputDataSource', 'Microsoft.Sql/Server/Database': 'AzureSqlDatabaseOutputDataSource', 'Microsoft.Storage/Blob': 'BlobOutputDataSource', 'Microsoft.Storage/DocumentDB': 'DocumentDbOutputDataSource', 'Microsoft.Storage/Table': 'AzureTableOutputDataSource', 'PowerBI': 'PowerBIOutputDataSource'} } def __init__( self, **kwargs ): + """ + """ super(OutputDataSource, self).__init__(**kwargs) self.type = None # type: Optional[str] @@ -226,42 +130,43 @@ class AzureDataLakeStoreOutputDataSource(OutputDataSource): All required parameters must be populated in order to send to Azure. - :param type: Required. Indicates the type of data source output will be written to. Required on + :ivar type: Required. Indicates the type of data source output will be written to. Required on PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param refresh_token: A refresh token that can be used to obtain a valid access token that can + :vartype type: str + :ivar refresh_token: A refresh token that can be used to obtain a valid access token that can then be used to authenticate with the data source. A valid refresh token is currently only obtainable via the Azure Portal. It is recommended to put a dummy string value here when creating the data source and then going to the Azure Portal to authenticate the data source which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) requests. - :type refresh_token: str - :param token_user_principal_name: The user principal name (UPN) of the user that was used to + :vartype refresh_token: str + :ivar token_user_principal_name: The user principal name (UPN) of the user that was used to obtain the refresh token. Use this property to help remember which user was used to obtain the refresh token. 
- :type token_user_principal_name: str - :param token_user_display_name: The user display name of the user that was used to obtain the + :vartype token_user_principal_name: str + :ivar token_user_display_name: The user display name of the user that was used to obtain the refresh token. Use this property to help remember which user was used to obtain the refresh token. - :type token_user_display_name: str - :param account_name: The name of the Azure Data Lake Store account. Required on PUT + :vartype token_user_display_name: str + :ivar account_name: The name of the Azure Data Lake Store account. Required on PUT (CreateOrReplace) requests. - :type account_name: str - :param tenant_id: The tenant id of the user used to obtain the refresh token. Required on PUT + :vartype account_name: str + :ivar tenant_id: The tenant id of the user used to obtain the refresh token. Required on PUT (CreateOrReplace) requests. - :type tenant_id: str - :param file_path_prefix: The location of the file to which the output should be written to. + :vartype tenant_id: str + :ivar file_path_prefix: The location of the file to which the output should be written to. Required on PUT (CreateOrReplace) requests. - :type file_path_prefix: str - :param date_format: The date format. Wherever {date} appears in filePathPrefix, the value of + :vartype file_path_prefix: str + :ivar date_format: The date format. Wherever {date} appears in filePathPrefix, the value of this property is used as the date format instead. - :type date_format: str - :param time_format: The time format. Wherever {time} appears in filePathPrefix, the value of + :vartype date_format: str + :ivar time_format: The time format. Wherever {time} appears in filePathPrefix, the value of this property is used as the time format instead. - :type time_format: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + :vartype time_format: str + :ivar authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + :vartype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode """ _validation = { @@ -295,6 +200,42 @@ def __init__( authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, **kwargs ): + """ + :keyword refresh_token: A refresh token that can be used to obtain a valid access token that + can then be used to authenticate with the data source. A valid refresh token is currently only + obtainable via the Azure Portal. It is recommended to put a dummy string value here when + creating the data source and then going to the Azure Portal to authenticate the data source + which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) + requests. + :paramtype refresh_token: str + :keyword token_user_principal_name: The user principal name (UPN) of the user that was used to + obtain the refresh token. Use this property to help remember which user was used to obtain the + refresh token. + :paramtype token_user_principal_name: str + :keyword token_user_display_name: The user display name of the user that was used to obtain the + refresh token. Use this property to help remember which user was used to obtain the refresh + token. + :paramtype token_user_display_name: str + :keyword account_name: The name of the Azure Data Lake Store account. Required on PUT + (CreateOrReplace) requests. 
+ :paramtype account_name: str + :keyword tenant_id: The tenant id of the user used to obtain the refresh token. Required on PUT + (CreateOrReplace) requests. + :paramtype tenant_id: str + :keyword file_path_prefix: The location of the file to which the output should be written to. + Required on PUT (CreateOrReplace) requests. + :paramtype file_path_prefix: str + :keyword date_format: The date format. Wherever {date} appears in filePathPrefix, the value of + this property is used as the date format instead. + :paramtype date_format: str + :keyword time_format: The time format. Wherever {time} appears in filePathPrefix, the value of + this property is used as the time format instead. + :paramtype time_format: str + :keyword authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :paramtype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + """ super(AzureDataLakeStoreOutputDataSource, self).__init__(**kwargs) self.type = 'Microsoft.DataLake/Accounts' # type: str self.refresh_token = refresh_token @@ -311,21 +252,21 @@ def __init__( class OAuthBasedDataSourceProperties(msrest.serialization.Model): """The properties that are associated with data sources that use OAuth as their authentication model. - :param refresh_token: A refresh token that can be used to obtain a valid access token that can + :ivar refresh_token: A refresh token that can be used to obtain a valid access token that can then be used to authenticate with the data source. A valid refresh token is currently only obtainable via the Azure Portal. It is recommended to put a dummy string value here when creating the data source and then going to the Azure Portal to authenticate the data source which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) requests. - :type refresh_token: str - :param token_user_principal_name: The user principal name (UPN) of the user that was used to + :vartype refresh_token: str + :ivar token_user_principal_name: The user principal name (UPN) of the user that was used to obtain the refresh token. Use this property to help remember which user was used to obtain the refresh token. - :type token_user_principal_name: str - :param token_user_display_name: The user display name of the user that was used to obtain the + :vartype token_user_principal_name: str + :ivar token_user_display_name: The user display name of the user that was used to obtain the refresh token. Use this property to help remember which user was used to obtain the refresh token. - :type token_user_display_name: str + :vartype token_user_display_name: str """ _attribute_map = { @@ -342,6 +283,23 @@ def __init__( token_user_display_name: Optional[str] = None, **kwargs ): + """ + :keyword refresh_token: A refresh token that can be used to obtain a valid access token that + can then be used to authenticate with the data source. A valid refresh token is currently only + obtainable via the Azure Portal. It is recommended to put a dummy string value here when + creating the data source and then going to the Azure Portal to authenticate the data source + which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) + requests. + :paramtype refresh_token: str + :keyword token_user_principal_name: The user principal name (UPN) of the user that was used to + obtain the refresh token. Use this property to help remember which user was used to obtain the + refresh token. 
+ :paramtype token_user_principal_name: str + :keyword token_user_display_name: The user display name of the user that was used to obtain the + refresh token. Use this property to help remember which user was used to obtain the refresh + token. + :paramtype token_user_display_name: str + """ super(OAuthBasedDataSourceProperties, self).__init__(**kwargs) self.refresh_token = refresh_token self.token_user_principal_name = token_user_principal_name @@ -351,39 +309,40 @@ def __init__( class AzureDataLakeStoreOutputDataSourceProperties(OAuthBasedDataSourceProperties): """The properties that are associated with an Azure Data Lake Store. - :param refresh_token: A refresh token that can be used to obtain a valid access token that can + :ivar refresh_token: A refresh token that can be used to obtain a valid access token that can then be used to authenticate with the data source. A valid refresh token is currently only obtainable via the Azure Portal. It is recommended to put a dummy string value here when creating the data source and then going to the Azure Portal to authenticate the data source which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) requests. - :type refresh_token: str - :param token_user_principal_name: The user principal name (UPN) of the user that was used to + :vartype refresh_token: str + :ivar token_user_principal_name: The user principal name (UPN) of the user that was used to obtain the refresh token. Use this property to help remember which user was used to obtain the refresh token. - :type token_user_principal_name: str - :param token_user_display_name: The user display name of the user that was used to obtain the + :vartype token_user_principal_name: str + :ivar token_user_display_name: The user display name of the user that was used to obtain the refresh token. Use this property to help remember which user was used to obtain the refresh token. - :type token_user_display_name: str - :param account_name: The name of the Azure Data Lake Store account. Required on PUT + :vartype token_user_display_name: str + :ivar account_name: The name of the Azure Data Lake Store account. Required on PUT (CreateOrReplace) requests. - :type account_name: str - :param tenant_id: The tenant id of the user used to obtain the refresh token. Required on PUT + :vartype account_name: str + :ivar tenant_id: The tenant id of the user used to obtain the refresh token. Required on PUT (CreateOrReplace) requests. - :type tenant_id: str - :param file_path_prefix: The location of the file to which the output should be written to. + :vartype tenant_id: str + :ivar file_path_prefix: The location of the file to which the output should be written to. Required on PUT (CreateOrReplace) requests. - :type file_path_prefix: str - :param date_format: The date format. Wherever {date} appears in filePathPrefix, the value of + :vartype file_path_prefix: str + :ivar date_format: The date format. Wherever {date} appears in filePathPrefix, the value of this property is used as the date format instead. - :type date_format: str - :param time_format: The time format. Wherever {time} appears in filePathPrefix, the value of + :vartype date_format: str + :ivar time_format: The time format. Wherever {time} appears in filePathPrefix, the value of this property is used as the time format instead. - :type time_format: str - :param authentication_mode: Authentication Mode. 
Possible values include: "Msi", "UserToken", + :vartype time_format: str + :ivar authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + :vartype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode """ _attribute_map = { @@ -412,6 +371,42 @@ def __init__( authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, **kwargs ): + """ + :keyword refresh_token: A refresh token that can be used to obtain a valid access token that + can then be used to authenticate with the data source. A valid refresh token is currently only + obtainable via the Azure Portal. It is recommended to put a dummy string value here when + creating the data source and then going to the Azure Portal to authenticate the data source + which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) + requests. + :paramtype refresh_token: str + :keyword token_user_principal_name: The user principal name (UPN) of the user that was used to + obtain the refresh token. Use this property to help remember which user was used to obtain the + refresh token. + :paramtype token_user_principal_name: str + :keyword token_user_display_name: The user display name of the user that was used to obtain the + refresh token. Use this property to help remember which user was used to obtain the refresh + token. + :paramtype token_user_display_name: str + :keyword account_name: The name of the Azure Data Lake Store account. Required on PUT + (CreateOrReplace) requests. + :paramtype account_name: str + :keyword tenant_id: The tenant id of the user used to obtain the refresh token. Required on PUT + (CreateOrReplace) requests. + :paramtype tenant_id: str + :keyword file_path_prefix: The location of the file to which the output should be written to. + Required on PUT (CreateOrReplace) requests. + :paramtype file_path_prefix: str + :keyword date_format: The date format. Wherever {date} appears in filePathPrefix, the value of + this property is used as the date format instead. + :paramtype date_format: str + :keyword time_format: The time format. Wherever {time} appears in filePathPrefix, the value of + this property is used as the time format instead. + :paramtype time_format: str + :keyword authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :paramtype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + """ super(AzureDataLakeStoreOutputDataSourceProperties, self).__init__(refresh_token=refresh_token, token_user_principal_name=token_user_principal_name, token_user_display_name=token_user_display_name, **kwargs) self.account_name = account_name self.tenant_id = tenant_id @@ -421,72 +416,16 @@ def __init__( self.authentication_mode = authentication_mode -class AzureFunctionOutputDataSource(OutputDataSource): - """Defines the metadata of AzureFunctionOutputDataSource. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of data source output will be written to. Required on - PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param function_app_name: The name of your Azure Functions app. - :type function_app_name: str - :param function_name: The name of the function in your Azure Functions app. 
- :type function_name: str - :param api_key: If you want to use an Azure Function from another subscription, you can do so - by providing the key to access your function. - :type api_key: str - :param max_batch_size: A property that lets you set the maximum size for each output batch - that's sent to your Azure function. The input unit is in bytes. By default, this value is - 262,144 bytes (256 KB). - :type max_batch_size: float - :param max_batch_count: A property that lets you specify the maximum number of events in each - batch that's sent to Azure Functions. The default value is 100. - :type max_batch_count: float - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'function_app_name': {'key': 'properties.functionAppName', 'type': 'str'}, - 'function_name': {'key': 'properties.functionName', 'type': 'str'}, - 'api_key': {'key': 'properties.apiKey', 'type': 'str'}, - 'max_batch_size': {'key': 'properties.maxBatchSize', 'type': 'float'}, - 'max_batch_count': {'key': 'properties.maxBatchCount', 'type': 'float'}, - } - - def __init__( - self, - *, - function_app_name: Optional[str] = None, - function_name: Optional[str] = None, - api_key: Optional[str] = None, - max_batch_size: Optional[float] = None, - max_batch_count: Optional[float] = None, - **kwargs - ): - super(AzureFunctionOutputDataSource, self).__init__(**kwargs) - self.type = 'Microsoft.AzureFunction' # type: str - self.function_app_name = function_app_name - self.function_name = function_name - self.api_key = api_key - self.max_batch_size = max_batch_size - self.max_batch_count = max_batch_count - - class FunctionBinding(msrest.serialization.Model): """The physical binding of the function. For example, in the Azure Machine Learning web service’s case, this describes the endpoint. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureMachineLearningStudioFunctionBinding, AzureMachineLearningServiceFunctionBinding, CSharpFunctionBinding, JavaScriptFunctionBinding. + sub-classes are: AzureMachineLearningWebServiceFunctionBinding, JavaScriptFunctionBinding. All required parameters must be populated in order to send to Azure. - :param type: Required. Indicates the function binding type.Constant filled by server. - :type type: str + :ivar type: Required. Indicates the function binding type.Constant filled by server. + :vartype type: str """ _validation = { @@ -498,42 +437,42 @@ class FunctionBinding(msrest.serialization.Model): } _subtype_map = { - 'type': {'Microsoft.MachineLearning/WebService': 'AzureMachineLearningStudioFunctionBinding', 'Microsoft.MachineLearningServices': 'AzureMachineLearningServiceFunctionBinding', 'Microsoft.StreamAnalytics/CLRUdf': 'CSharpFunctionBinding', 'Microsoft.StreamAnalytics/JavascriptUdf': 'JavaScriptFunctionBinding'} + 'type': {'Microsoft.MachineLearning/WebService': 'AzureMachineLearningWebServiceFunctionBinding', 'Microsoft.StreamAnalytics/JavascriptUdf': 'JavaScriptFunctionBinding'} } def __init__( self, **kwargs ): + """ + """ super(FunctionBinding, self).__init__(**kwargs) self.type = None # type: Optional[str] -class AzureMachineLearningServiceFunctionBinding(FunctionBinding): +class AzureMachineLearningWebServiceFunctionBinding(FunctionBinding): """The binding to an Azure Machine Learning web service. All required parameters must be populated in order to send to Azure. - :param type: Required. Indicates the function binding type.Constant filled by server. 
- :type type: str - :param endpoint: The Request-Response execute endpoint of the Azure Machine Learning web - service. - :type endpoint: str - :param api_key: The API key used to authenticate with Request-Response endpoint. - :type api_key: str - :param inputs: The inputs for the Azure Machine Learning web service endpoint. - :type inputs: - list[~stream_analytics_management_client.models.AzureMachineLearningServiceInputColumn] - :param outputs: A list of outputs from the Azure Machine Learning web service endpoint + :ivar type: Required. Indicates the function binding type.Constant filled by server. + :vartype type: str + :ivar endpoint: The Request-Response execute endpoint of the Azure Machine Learning web + service. Find out more here: + https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs. + :vartype endpoint: str + :ivar api_key: The API key used to authenticate with Request-Response endpoint. + :vartype api_key: str + :ivar inputs: The inputs for the Azure Machine Learning web service endpoint. + :vartype inputs: + ~stream_analytics_management_client.models.AzureMachineLearningWebServiceInputs + :ivar outputs: A list of outputs from the Azure Machine Learning web service endpoint execution. - :type outputs: - list[~stream_analytics_management_client.models.AzureMachineLearningServiceOutputColumn] - :param batch_size: Number between 1 and 10000 describing maximum number of rows for every Azure + :vartype outputs: + list[~stream_analytics_management_client.models.AzureMachineLearningWebServiceOutputColumn] + :ivar batch_size: Number between 1 and 10000 describing maximum number of rows for every Azure ML RRS execute request. Default is 1000. - :type batch_size: int - :param number_of_parallel_requests: The number of parallel requests that will be sent per - partition of your job to the machine learning service. Default is 1. 
- :type number_of_parallel_requests: int + :vartype batch_size: int """ _validation = { @@ -544,10 +483,9 @@ class AzureMachineLearningServiceFunctionBinding(FunctionBinding): 'type': {'key': 'type', 'type': 'str'}, 'endpoint': {'key': 'properties.endpoint', 'type': 'str'}, 'api_key': {'key': 'properties.apiKey', 'type': 'str'}, - 'inputs': {'key': 'properties.inputs', 'type': '[AzureMachineLearningServiceInputColumn]'}, - 'outputs': {'key': 'properties.outputs', 'type': '[AzureMachineLearningServiceOutputColumn]'}, + 'inputs': {'key': 'properties.inputs', 'type': 'AzureMachineLearningWebServiceInputs'}, + 'outputs': {'key': 'properties.outputs', 'type': '[AzureMachineLearningWebServiceOutputColumn]'}, 'batch_size': {'key': 'properties.batchSize', 'type': 'int'}, - 'number_of_parallel_requests': {'key': 'properties.numberOfParallelRequests', 'type': 'int'}, } def __init__( @@ -555,32 +493,48 @@ def __init__( *, endpoint: Optional[str] = None, api_key: Optional[str] = None, - inputs: Optional[List["AzureMachineLearningServiceInputColumn"]] = None, - outputs: Optional[List["AzureMachineLearningServiceOutputColumn"]] = None, + inputs: Optional["AzureMachineLearningWebServiceInputs"] = None, + outputs: Optional[List["AzureMachineLearningWebServiceOutputColumn"]] = None, batch_size: Optional[int] = None, - number_of_parallel_requests: Optional[int] = None, **kwargs ): - super(AzureMachineLearningServiceFunctionBinding, self).__init__(**kwargs) - self.type = 'Microsoft.MachineLearningServices' # type: str + """ + :keyword endpoint: The Request-Response execute endpoint of the Azure Machine Learning web + service. Find out more here: + https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs. + :paramtype endpoint: str + :keyword api_key: The API key used to authenticate with Request-Response endpoint. + :paramtype api_key: str + :keyword inputs: The inputs for the Azure Machine Learning web service endpoint. + :paramtype inputs: + ~stream_analytics_management_client.models.AzureMachineLearningWebServiceInputs + :keyword outputs: A list of outputs from the Azure Machine Learning web service endpoint + execution. + :paramtype outputs: + list[~stream_analytics_management_client.models.AzureMachineLearningWebServiceOutputColumn] + :keyword batch_size: Number between 1 and 10000 describing maximum number of rows for every + Azure ML RRS execute request. Default is 1000. + :paramtype batch_size: int + """ + super(AzureMachineLearningWebServiceFunctionBinding, self).__init__(**kwargs) + self.type = 'Microsoft.MachineLearning/WebService' # type: str self.endpoint = endpoint self.api_key = api_key self.inputs = inputs self.outputs = outputs self.batch_size = batch_size - self.number_of_parallel_requests = number_of_parallel_requests class FunctionRetrieveDefaultDefinitionParameters(msrest.serialization.Model): """Parameters used to specify the type of function to retrieve the default definition for. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters, AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters, CSharpFunctionRetrieveDefaultDefinitionParameters, JavaScriptFunctionRetrieveDefaultDefinitionParameters. + sub-classes are: AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters, JavaScriptFunctionRetrieveDefaultDefinitionParameters. 
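(Editorial aside on the class below: `udf_type` changes from a read-only class constant fixed to "Scalar" into an ordinary optional keyword. A minimal sketch of the new constructor, assuming azure-mgmt-streamanalytics 1.0.0 as generated in this diff; the endpoint URL is a placeholder.)

from azure.mgmt.streamanalytics.models import (
    AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters,
)

# udf_type is now passed explicitly instead of being fixed to "Scalar".
params = AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters(
    execute_endpoint="https://example.invalid/execute",  # placeholder endpoint
    udf_type="Scalar",
)
# The binding_type discriminator is constant-filled by the constructor.
assert params.binding_type == "Microsoft.MachineLearning/WebService"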
All required parameters must be populated in order to send to Azure. - :param binding_type: Required. Indicates the function binding type.Constant filled by server. - :type binding_type: str + :ivar binding_type: Required. Indicates the function binding type.Constant filled by server. + :vartype binding_type: str """ _validation = { @@ -592,36 +546,37 @@ class FunctionRetrieveDefaultDefinitionParameters(msrest.serialization.Model): } _subtype_map = { - 'binding_type': {'Microsoft.MachineLearning/WebService': 'AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters', 'Microsoft.MachineLearningServices': 'AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters', 'Microsoft.StreamAnalytics/CLRUdf': 'CSharpFunctionRetrieveDefaultDefinitionParameters', 'Microsoft.StreamAnalytics/JavascriptUdf': 'JavaScriptFunctionRetrieveDefaultDefinitionParameters'} + 'binding_type': {'Microsoft.MachineLearning/WebService': 'AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters', 'Microsoft.StreamAnalytics/JavascriptUdf': 'JavaScriptFunctionRetrieveDefaultDefinitionParameters'} } def __init__( self, **kwargs ): + """ + """ super(FunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) self.binding_type = None # type: Optional[str] -class AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): +class AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): """The parameters needed to retrieve the default function definition for an Azure Machine Learning web service function. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :param binding_type: Required. Indicates the function binding type.Constant filled by server. - :type binding_type: str - :param execute_endpoint: The Request-Response execute endpoint of the Azure Machine Learning - web service. - :type execute_endpoint: str - :ivar udf_type: The function type. Default value: "Scalar". + :ivar binding_type: Required. Indicates the function binding type.Constant filled by server. + :vartype binding_type: str + :ivar execute_endpoint: The Request-Response execute endpoint of the Azure Machine Learning web + service. Find out more here: + https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs. + :vartype execute_endpoint: str + :ivar udf_type: The function type. The only acceptable values to pass in are None and "Scalar". + The default value is None. :vartype udf_type: str """ _validation = { 'binding_type': {'required': True}, - 'udf_type': {'constant': True}, } _attribute_map = { @@ -630,28 +585,39 @@ class AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters(Fun 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'str'}, } - udf_type = "Scalar" - def __init__( self, *, execute_endpoint: Optional[str] = None, + udf_type: Optional[str] = None, **kwargs ): - super(AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) - self.binding_type = 'Microsoft.MachineLearningServices' # type: str + """ + :keyword execute_endpoint: The Request-Response execute endpoint of the Azure Machine Learning + web service. 
Find out more here: + https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs. + :paramtype execute_endpoint: str + :keyword udf_type: The function type. The only acceptable values to pass in are None and + "Scalar". The default value is None. + :paramtype udf_type: str + """ + super(AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) + self.binding_type = 'Microsoft.MachineLearning/WebService' # type: str self.execute_endpoint = execute_endpoint + self.udf_type = udf_type -class AzureMachineLearningServiceInputColumn(msrest.serialization.Model): +class AzureMachineLearningWebServiceInputColumn(msrest.serialization.Model): """Describes an input column for the Azure Machine Learning web service endpoint. - :param name: The name of the input column. - :type name: str - :param data_type: The (Azure Machine Learning supported) data type of the input column. - :type data_type: str - :param map_to: The zero based index of the function parameter this input maps to. - :type map_to: int + :ivar name: The name of the input column. + :vartype name: str + :ivar data_type: The (Azure Machine Learning supported) data type of the input column. A list + of valid Azure Machine Learning data types are described at + https://msdn.microsoft.com/en-us/library/azure/dn905923.aspx . + :vartype data_type: str + :ivar map_to: The zero based index of the function parameter this input maps to. + :vartype map_to: int """ _attribute_map = { @@ -668,236 +634,67 @@ def __init__( map_to: Optional[int] = None, **kwargs ): - super(AzureMachineLearningServiceInputColumn, self).__init__(**kwargs) + """ + :keyword name: The name of the input column. + :paramtype name: str + :keyword data_type: The (Azure Machine Learning supported) data type of the input column. A + list of valid Azure Machine Learning data types are described at + https://msdn.microsoft.com/en-us/library/azure/dn905923.aspx . + :paramtype data_type: str + :keyword map_to: The zero based index of the function parameter this input maps to. + :paramtype map_to: int + """ + super(AzureMachineLearningWebServiceInputColumn, self).__init__(**kwargs) self.name = name self.data_type = data_type self.map_to = map_to -class AzureMachineLearningServiceInputs(msrest.serialization.Model): +class AzureMachineLearningWebServiceInputs(msrest.serialization.Model): """The inputs for the Azure Machine Learning web service endpoint. - :param name: The name of the input. This is the name provided while authoring the endpoint. - :type name: str - :param column_names: A list of input columns for the Azure Machine Learning web service + :ivar name: The name of the input. This is the name provided while authoring the endpoint. + :vartype name: str + :ivar column_names: A list of input columns for the Azure Machine Learning web service endpoint. 
- :type column_names: - list[~stream_analytics_management_client.models.AzureMachineLearningServiceInputColumn] + :vartype column_names: + list[~stream_analytics_management_client.models.AzureMachineLearningWebServiceInputColumn] """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, - 'column_names': {'key': 'columnNames', 'type': '[AzureMachineLearningServiceInputColumn]'}, + 'column_names': {'key': 'columnNames', 'type': '[AzureMachineLearningWebServiceInputColumn]'}, } def __init__( self, *, name: Optional[str] = None, - column_names: Optional[List["AzureMachineLearningServiceInputColumn"]] = None, + column_names: Optional[List["AzureMachineLearningWebServiceInputColumn"]] = None, **kwargs ): - super(AzureMachineLearningServiceInputs, self).__init__(**kwargs) + """ + :keyword name: The name of the input. This is the name provided while authoring the endpoint. + :paramtype name: str + :keyword column_names: A list of input columns for the Azure Machine Learning web service + endpoint. + :paramtype column_names: + list[~stream_analytics_management_client.models.AzureMachineLearningWebServiceInputColumn] + """ + super(AzureMachineLearningWebServiceInputs, self).__init__(**kwargs) self.name = name self.column_names = column_names -class AzureMachineLearningServiceOutputColumn(msrest.serialization.Model): +class AzureMachineLearningWebServiceOutputColumn(msrest.serialization.Model): """Describes an output column for the Azure Machine Learning web service endpoint. - :param name: The name of the output column. - :type name: str - :param data_type: The (Azure Machine Learning supported) data type of the output column. - :type data_type: str - :param map_to: The zero based index of the function parameter this input maps to. - :type map_to: int - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'map_to': {'key': 'mapTo', 'type': 'int'}, - } - - def __init__( - self, - *, - name: Optional[str] = None, - data_type: Optional[str] = None, - map_to: Optional[int] = None, - **kwargs - ): - super(AzureMachineLearningServiceOutputColumn, self).__init__(**kwargs) - self.name = name - self.data_type = data_type - self.map_to = map_to - - -class AzureMachineLearningStudioFunctionBinding(FunctionBinding): - """The binding to an Azure Machine Learning Studio. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the function binding type.Constant filled by server. - :type type: str - :param endpoint: The Request-Response execute endpoint of the Azure Machine Learning Studio. - Find out more here: https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning- - consume-web-services#request-response-service-rrs. - :type endpoint: str - :param api_key: The API key used to authenticate with Request-Response endpoint. - :type api_key: str - :param inputs: The inputs for the Azure Machine Learning Studio endpoint. - :type inputs: ~stream_analytics_management_client.models.AzureMachineLearningStudioInputs - :param outputs: A list of outputs from the Azure Machine Learning Studio endpoint execution. - :type outputs: - list[~stream_analytics_management_client.models.AzureMachineLearningStudioOutputColumn] - :param batch_size: Number between 1 and 10000 describing maximum number of rows for every Azure - ML RRS execute request. Default is 1000. 
- :type batch_size: int - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'properties.endpoint', 'type': 'str'}, - 'api_key': {'key': 'properties.apiKey', 'type': 'str'}, - 'inputs': {'key': 'properties.inputs', 'type': 'AzureMachineLearningStudioInputs'}, - 'outputs': {'key': 'properties.outputs', 'type': '[AzureMachineLearningStudioOutputColumn]'}, - 'batch_size': {'key': 'properties.batchSize', 'type': 'int'}, - } - - def __init__( - self, - *, - endpoint: Optional[str] = None, - api_key: Optional[str] = None, - inputs: Optional["AzureMachineLearningStudioInputs"] = None, - outputs: Optional[List["AzureMachineLearningStudioOutputColumn"]] = None, - batch_size: Optional[int] = None, - **kwargs - ): - super(AzureMachineLearningStudioFunctionBinding, self).__init__(**kwargs) - self.type = 'Microsoft.MachineLearning/WebService' # type: str - self.endpoint = endpoint - self.api_key = api_key - self.inputs = inputs - self.outputs = outputs - self.batch_size = batch_size - - -class AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): - """The parameters needed to retrieve the default function definition for an Azure Machine Learning Studio function. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param binding_type: Required. Indicates the function binding type.Constant filled by server. - :type binding_type: str - :param execute_endpoint: The Request-Response execute endpoint of the Azure Machine Learning - Studio. Find out more here: https://docs.microsoft.com/en-us/azure/machine-learning/machine- - learning-consume-web-services#request-response-service-rrs. - :type execute_endpoint: str - :ivar udf_type: The function type. Default value: "Scalar". - :vartype udf_type: str - """ - - _validation = { - 'binding_type': {'required': True}, - 'udf_type': {'constant': True}, - } - - _attribute_map = { - 'binding_type': {'key': 'bindingType', 'type': 'str'}, - 'execute_endpoint': {'key': 'bindingRetrievalProperties.executeEndpoint', 'type': 'str'}, - 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'str'}, - } - - udf_type = "Scalar" - - def __init__( - self, - *, - execute_endpoint: Optional[str] = None, - **kwargs - ): - super(AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) - self.binding_type = 'Microsoft.MachineLearning/WebService' # type: str - self.execute_endpoint = execute_endpoint - - -class AzureMachineLearningStudioInputColumn(msrest.serialization.Model): - """Describes an input column for the Azure Machine Learning Studio endpoint. - - :param name: The name of the input column. - :type name: str - :param data_type: The (Azure Machine Learning supported) data type of the input column. A list - of valid Azure Machine Learning data types are described at https://msdn.microsoft.com/en- - us/library/azure/dn905923.aspx . - :type data_type: str - :param map_to: The zero based index of the function parameter this input maps to. 
- :type map_to: int - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'map_to': {'key': 'mapTo', 'type': 'int'}, - } - - def __init__( - self, - *, - name: Optional[str] = None, - data_type: Optional[str] = None, - map_to: Optional[int] = None, - **kwargs - ): - super(AzureMachineLearningStudioInputColumn, self).__init__(**kwargs) - self.name = name - self.data_type = data_type - self.map_to = map_to - - -class AzureMachineLearningStudioInputs(msrest.serialization.Model): - """The inputs for the Azure Machine Learning Studio endpoint. - - :param name: The name of the input. This is the name provided while authoring the endpoint. - :type name: str - :param column_names: A list of input columns for the Azure Machine Learning Studio endpoint. - :type column_names: - list[~stream_analytics_management_client.models.AzureMachineLearningStudioInputColumn] - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'column_names': {'key': 'columnNames', 'type': '[AzureMachineLearningStudioInputColumn]'}, - } - - def __init__( - self, - *, - name: Optional[str] = None, - column_names: Optional[List["AzureMachineLearningStudioInputColumn"]] = None, - **kwargs - ): - super(AzureMachineLearningStudioInputs, self).__init__(**kwargs) - self.name = name - self.column_names = column_names - - -class AzureMachineLearningStudioOutputColumn(msrest.serialization.Model): - """Describes an output column for the Azure Machine Learning Studio endpoint. - - :param name: The name of the output column. - :type name: str - :param data_type: The (Azure Machine Learning supported) data type of the output column. A list - of valid Azure Machine Learning data types are described at https://msdn.microsoft.com/en- - us/library/azure/dn905923.aspx . - :type data_type: str + :ivar name: The name of the output column. + :vartype name: str + :ivar data_type: The (Azure Machine Learning supported) data type of the output column. A list + of valid Azure Machine Learning data types are described at + https://msdn.microsoft.com/en-us/library/azure/dn905923.aspx . + :vartype data_type: str """ _attribute_map = { @@ -912,7 +709,15 @@ def __init__( data_type: Optional[str] = None, **kwargs ): - super(AzureMachineLearningStudioOutputColumn, self).__init__(**kwargs) + """ + :keyword name: The name of the output column. + :paramtype name: str + :keyword data_type: The (Azure Machine Learning supported) data type of the output column. A + list of valid Azure Machine Learning data types are described at + https://msdn.microsoft.com/en-us/library/azure/dn905923.aspx . + :paramtype data_type: str + """ + super(AzureMachineLearningWebServiceOutputColumn, self).__init__(**kwargs) self.name = name self.data_type = data_type @@ -920,30 +725,30 @@ def __init__( class AzureSqlDatabaseDataSourceProperties(msrest.serialization.Model): """The properties that are associated with an Azure SQL database data source. - :param server: The name of the SQL server containing the Azure SQL database. Required on PUT + :ivar server: The name of the SQL server containing the Azure SQL database. Required on PUT (CreateOrReplace) requests. - :type server: str - :param database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) - requests. - :type database: str - :param user: The user name that will be used to connect to the Azure SQL database. Required on + :vartype server: str + :ivar database: The name of the Azure SQL database. 
Required on PUT (CreateOrReplace) requests. + :vartype database: str + :ivar user: The user name that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. - :type user: str - :param password: The password that will be used to connect to the Azure SQL database. Required + :vartype user: str + :ivar password: The password that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. - :type password: str - :param table: The name of the table in the Azure SQL database. Required on PUT - (CreateOrReplace) requests. - :type table: str - :param max_batch_count: Max Batch count for write to Sql database, the default value is 10,000. + :vartype password: str + :ivar table: The name of the table in the Azure SQL database. Required on PUT (CreateOrReplace) + requests. + :vartype table: str + :ivar max_batch_count: Max Batch count for write to Sql database, the default value is 10,000. Optional on PUT requests. - :type max_batch_count: float - :param max_writer_count: Max Write r count, currently only 1(single writer) and 0(based on - query partition) are available. Optional on PUT requests. - :type max_writer_count: float - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + :vartype max_batch_count: float + :ivar max_writer_count: Max Writer count, currently only 1(single writer) and 0(based on query + partition) are available. Optional on PUT requests. + :vartype max_writer_count: float + :ivar authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + :vartype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode """ _attribute_map = { @@ -970,6 +775,33 @@ def __init__( authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, **kwargs ): + """ + :keyword server: The name of the SQL server containing the Azure SQL database. Required on PUT + (CreateOrReplace) requests. + :paramtype server: str + :keyword database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) + requests. + :paramtype database: str + :keyword user: The user name that will be used to connect to the Azure SQL database. Required + on PUT (CreateOrReplace) requests. + :paramtype user: str + :keyword password: The password that will be used to connect to the Azure SQL database. + Required on PUT (CreateOrReplace) requests. + :paramtype password: str + :keyword table: The name of the table in the Azure SQL database. Required on PUT + (CreateOrReplace) requests. + :paramtype table: str + :keyword max_batch_count: Max Batch count for write to Sql database, the default value is + 10,000. Optional on PUT requests. + :paramtype max_batch_count: float + :keyword max_writer_count: Max Writer count, currently only 1(single writer) and 0(based on + query partition) are available. Optional on PUT requests. + :paramtype max_writer_count: float + :keyword authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". 
+ :paramtype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + """ super(AzureSqlDatabaseDataSourceProperties, self).__init__(**kwargs) self.server = server self.database = database @@ -986,33 +818,33 @@ class AzureSqlDatabaseOutputDataSource(OutputDataSource): All required parameters must be populated in order to send to Azure. - :param type: Required. Indicates the type of data source output will be written to. Required on + :ivar type: Required. Indicates the type of data source output will be written to. Required on PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param server: The name of the SQL server containing the Azure SQL database. Required on PUT + :vartype type: str + :ivar server: The name of the SQL server containing the Azure SQL database. Required on PUT (CreateOrReplace) requests. - :type server: str - :param database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) - requests. - :type database: str - :param user: The user name that will be used to connect to the Azure SQL database. Required on + :vartype server: str + :ivar database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) requests. + :vartype database: str + :ivar user: The user name that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. - :type user: str - :param password: The password that will be used to connect to the Azure SQL database. Required + :vartype user: str + :ivar password: The password that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. - :type password: str - :param table: The name of the table in the Azure SQL database. Required on PUT - (CreateOrReplace) requests. - :type table: str - :param max_batch_count: Max Batch count for write to Sql database, the default value is 10,000. + :vartype password: str + :ivar table: The name of the table in the Azure SQL database. Required on PUT (CreateOrReplace) + requests. + :vartype table: str + :ivar max_batch_count: Max Batch count for write to Sql database, the default value is 10,000. Optional on PUT requests. - :type max_batch_count: float - :param max_writer_count: Max Write r count, currently only 1(single writer) and 0(based on - query partition) are available. Optional on PUT requests. - :type max_writer_count: float - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + :vartype max_batch_count: float + :ivar max_writer_count: Max Writer count, currently only 1(single writer) and 0(based on query + partition) are available. Optional on PUT requests. + :vartype max_writer_count: float + :ivar authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + :vartype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode """ _validation = { @@ -1044,6 +876,33 @@ def __init__( authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, **kwargs ): + """ + :keyword server: The name of the SQL server containing the Azure SQL database. Required on PUT + (CreateOrReplace) requests. + :paramtype server: str + :keyword database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) + requests. 
+ :paramtype database: str + :keyword user: The user name that will be used to connect to the Azure SQL database. Required + on PUT (CreateOrReplace) requests. + :paramtype user: str + :keyword password: The password that will be used to connect to the Azure SQL database. + Required on PUT (CreateOrReplace) requests. + :paramtype password: str + :keyword table: The name of the table in the Azure SQL database. Required on PUT + (CreateOrReplace) requests. + :paramtype table: str + :keyword max_batch_count: Max Batch count for write to Sql database, the default value is + 10,000. Optional on PUT requests. + :paramtype max_batch_count: float + :keyword max_writer_count: Max Writer count, currently only 1(single writer) and 0(based on + query partition) are available. Optional on PUT requests. + :paramtype max_writer_count: float + :keyword authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :paramtype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + """ super(AzureSqlDatabaseOutputDataSource, self).__init__(**kwargs) self.type = 'Microsoft.Sql/Server/Database' # type: str self.server = server @@ -1059,30 +918,30 @@ def __init__( class AzureSqlDatabaseOutputDataSourceProperties(AzureSqlDatabaseDataSourceProperties): """The properties that are associated with an Azure SQL database output. - :param server: The name of the SQL server containing the Azure SQL database. Required on PUT + :ivar server: The name of the SQL server containing the Azure SQL database. Required on PUT (CreateOrReplace) requests. - :type server: str - :param database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) - requests. - :type database: str - :param user: The user name that will be used to connect to the Azure SQL database. Required on + :vartype server: str + :ivar database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) requests. + :vartype database: str + :ivar user: The user name that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. - :type user: str - :param password: The password that will be used to connect to the Azure SQL database. Required + :vartype user: str + :ivar password: The password that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. - :type password: str - :param table: The name of the table in the Azure SQL database. Required on PUT - (CreateOrReplace) requests. - :type table: str - :param max_batch_count: Max Batch count for write to Sql database, the default value is 10,000. + :vartype password: str + :ivar table: The name of the table in the Azure SQL database. Required on PUT (CreateOrReplace) + requests. + :vartype table: str + :ivar max_batch_count: Max Batch count for write to Sql database, the default value is 10,000. Optional on PUT requests. - :type max_batch_count: float - :param max_writer_count: Max Write r count, currently only 1(single writer) and 0(based on - query partition) are available. Optional on PUT requests. - :type max_writer_count: float - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + :vartype max_batch_count: float + :ivar max_writer_count: Max Writer count, currently only 1(single writer) and 0(based on query + partition) are available. Optional on PUT requests. + :vartype max_writer_count: float + :ivar authentication_mode: Authentication Mode. 
Possible values include: "Msi", "UserToken", "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + :vartype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode """ _attribute_map = { @@ -1109,6 +968,33 @@ def __init__( authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, **kwargs ): + """ + :keyword server: The name of the SQL server containing the Azure SQL database. Required on PUT + (CreateOrReplace) requests. + :paramtype server: str + :keyword database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) + requests. + :paramtype database: str + :keyword user: The user name that will be used to connect to the Azure SQL database. Required + on PUT (CreateOrReplace) requests. + :paramtype user: str + :keyword password: The password that will be used to connect to the Azure SQL database. + Required on PUT (CreateOrReplace) requests. + :paramtype password: str + :keyword table: The name of the table in the Azure SQL database. Required on PUT + (CreateOrReplace) requests. + :paramtype table: str + :keyword max_batch_count: Max Batch count for write to Sql database, the default value is + 10,000. Optional on PUT requests. + :paramtype max_batch_count: float + :keyword max_writer_count: Max Writer count, currently only 1(single writer) and 0(based on + query partition) are available. Optional on PUT requests. + :paramtype max_writer_count: float + :keyword authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :paramtype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + """ super(AzureSqlDatabaseOutputDataSourceProperties, self).__init__(server=server, database=database, user=user, password=password, table=table, max_batch_count=max_batch_count, max_writer_count=max_writer_count, authentication_mode=authentication_mode, **kwargs) @@ -1120,9 +1006,9 @@ class ReferenceInputDataSource(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param type: Required. Indicates the type of input data source containing reference data. + :ivar type: Required. Indicates the type of input data source containing reference data. Required on PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str + :vartype type: str """ _validation = { @@ -1141,6 +1027,8 @@ def __init__( self, **kwargs ): + """ + """ super(ReferenceInputDataSource, self).__init__(**kwargs) self.type = None # type: Optional[str] @@ -1150,78 +1038,54 @@ class AzureSqlReferenceInputDataSource(ReferenceInputDataSource): All required parameters must be populated in order to send to Azure. - :param type: Required. Indicates the type of input data source containing reference data. + :ivar type: Required. Indicates the type of input data source containing reference data. Required on PUT (CreateOrReplace) requests.Constant filled by server. 
- :type type: str - :param properties: - :type properties: - ~stream_analytics_management_client.models.AzureSqlReferenceInputDataSourceProperties - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'AzureSqlReferenceInputDataSourceProperties'}, - } - - def __init__( - self, - *, - properties: Optional["AzureSqlReferenceInputDataSourceProperties"] = None, - **kwargs - ): - super(AzureSqlReferenceInputDataSource, self).__init__(**kwargs) - self.type = 'Microsoft.Sql/Server/Database' # type: str - self.properties = properties - - -class AzureSqlReferenceInputDataSourceProperties(msrest.serialization.Model): - """AzureSqlReferenceInputDataSourceProperties. - - :param server: This element is associated with the datasource element. This is the name of the + :vartype type: str + :ivar server: This element is associated with the datasource element. This is the name of the server that contains the database that will be written to. - :type server: str - :param database: This element is associated with the datasource element. This is the name of - the database that output will be written to. - :type database: str - :param user: This element is associated with the datasource element. This is the user name that + :vartype server: str + :ivar database: This element is associated with the datasource element. This is the name of the + database that output will be written to. + :vartype database: str + :ivar user: This element is associated with the datasource element. This is the user name that will be used to connect to the SQL Database instance. - :type user: str - :param password: This element is associated with the datasource element. This is the password + :vartype user: str + :ivar password: This element is associated with the datasource element. This is the password that will be used to connect to the SQL Database instance. - :type password: str - :param table: This element is associated with the datasource element. The name of the table in + :vartype password: str + :ivar table: This element is associated with the datasource element. The name of the table in the Azure SQL database.. - :type table: str - :param refresh_type: This element is associated with the datasource element. This element is of - enum type. It indicates what kind of data refresh option do we want to - use:Static/RefreshPeriodicallyWithFull/RefreshPeriodicallyWithDelta. - :type refresh_type: str - :param refresh_rate: This element is associated with the datasource element. This indicates how + :vartype table: str + :ivar refresh_type: Indicates the type of data refresh option. Possible values include: + "Static", "RefreshPeriodicallyWithFull", "RefreshPeriodicallyWithDelta". + :vartype refresh_type: str or ~stream_analytics_management_client.models.RefreshType + :ivar refresh_rate: This element is associated with the datasource element. This indicates how frequently the data will be fetched from the database. It is of DateTime format. - :type refresh_rate: str - :param full_snapshot_query: This element is associated with the datasource element. This query + :vartype refresh_rate: str + :ivar full_snapshot_query: This element is associated with the datasource element. This query is used to fetch data from the sql database. - :type full_snapshot_query: str - :param delta_snapshot_query: This element is associated with the datasource element. 
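With the `AzureSqlReferenceInputDataSourceProperties` wrapper removed, the reference-input fields now live directly on `AzureSqlReferenceInputDataSource`, and the `properties.server`-style keys in `_attribute_map` re-nest them on the wire. A sketch of the flattened construction, with placeholder values and a `refresh_rate` shaped per the docstring above:

```python
from azure.mgmt.streamanalytics.models import AzureSqlReferenceInputDataSource

ref_input = AzureSqlReferenceInputDataSource(
    server="myserver",
    database="refdb",
    user="sqluser",
    password="<password>",
    table="reference_table",
    refresh_type="RefreshPeriodicallyWithFull",  # str or RefreshType enum value
    refresh_rate="00:05:00",
    full_snapshot_query="SELECT * FROM reference_table",
)

# msrest folds the flattened attributes back under "properties" when
# serializing, because the attribute map uses 'properties.<name>' keys, e.g.
# {"type": "Microsoft.Sql/Server/Database",
#  "properties": {"server": "myserver", ...}}
payload = ref_input.serialize()
```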
This query + :vartype full_snapshot_query: str + :ivar delta_snapshot_query: This element is associated with the datasource element. This query is used to fetch incremental changes from the SQL database. To use this option, we recommend using temporal tables in Azure SQL Database. - :type delta_snapshot_query: str + :vartype delta_snapshot_query: str """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { - 'server': {'key': 'server', 'type': 'str'}, - 'database': {'key': 'database', 'type': 'str'}, - 'user': {'key': 'user', 'type': 'str'}, - 'password': {'key': 'password', 'type': 'str'}, - 'table': {'key': 'table', 'type': 'str'}, - 'refresh_type': {'key': 'refreshType', 'type': 'str'}, - 'refresh_rate': {'key': 'refreshRate', 'type': 'str'}, - 'full_snapshot_query': {'key': 'fullSnapshotQuery', 'type': 'str'}, - 'delta_snapshot_query': {'key': 'deltaSnapshotQuery', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'properties.server', 'type': 'str'}, + 'database': {'key': 'properties.database', 'type': 'str'}, + 'user': {'key': 'properties.user', 'type': 'str'}, + 'password': {'key': 'properties.password', 'type': 'str'}, + 'table': {'key': 'properties.table', 'type': 'str'}, + 'refresh_type': {'key': 'properties.refreshType', 'type': 'str'}, + 'refresh_rate': {'key': 'properties.refreshRate', 'type': 'str'}, + 'full_snapshot_query': {'key': 'properties.fullSnapshotQuery', 'type': 'str'}, + 'delta_snapshot_query': {'key': 'properties.deltaSnapshotQuery', 'type': 'str'}, } def __init__( @@ -1232,13 +1096,44 @@ def __init__( user: Optional[str] = None, password: Optional[str] = None, table: Optional[str] = None, - refresh_type: Optional[str] = None, + refresh_type: Optional[Union[str, "RefreshType"]] = None, refresh_rate: Optional[str] = None, full_snapshot_query: Optional[str] = None, delta_snapshot_query: Optional[str] = None, **kwargs ): - super(AzureSqlReferenceInputDataSourceProperties, self).__init__(**kwargs) + """ + :keyword server: This element is associated with the datasource element. This is the name of + the server that contains the database that will be written to. + :paramtype server: str + :keyword database: This element is associated with the datasource element. This is the name of + the database that output will be written to. + :paramtype database: str + :keyword user: This element is associated with the datasource element. This is the user name + that will be used to connect to the SQL Database instance. + :paramtype user: str + :keyword password: This element is associated with the datasource element. This is the password + that will be used to connect to the SQL Database instance. + :paramtype password: str + :keyword table: This element is associated with the datasource element. The name of the table + in the Azure SQL database.. + :paramtype table: str + :keyword refresh_type: Indicates the type of data refresh option. Possible values include: + "Static", "RefreshPeriodicallyWithFull", "RefreshPeriodicallyWithDelta". + :paramtype refresh_type: str or ~stream_analytics_management_client.models.RefreshType + :keyword refresh_rate: This element is associated with the datasource element. This indicates + how frequently the data will be fetched from the database. It is of DateTime format. + :paramtype refresh_rate: str + :keyword full_snapshot_query: This element is associated with the datasource element. This + query is used to fetch data from the sql database. 
+ :paramtype full_snapshot_query: str + :keyword delta_snapshot_query: This element is associated with the datasource element. This + query is used to fetch incremental changes from the SQL database. To use this option, we + recommend using temporal tables in Azure SQL Database. + :paramtype delta_snapshot_query: str + """ + super(AzureSqlReferenceInputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.Sql/Server/Database' # type: str self.server = server self.database = database self.user = user @@ -1253,21 +1148,20 @@ def __init__( class AzureSynapseDataSourceProperties(msrest.serialization.Model): """The properties that are associated with an Azure SQL database data source. - :param server: The name of the SQL server containing the Azure SQL database. Required on PUT + :ivar server: The name of the SQL server containing the Azure SQL database. Required on PUT (CreateOrReplace) requests. - :type server: str - :param database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) + :vartype server: str + :ivar database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) requests. + :vartype database: str + :ivar table: The name of the table in the Azure SQL database. Required on PUT (CreateOrReplace) requests. - :type database: str - :param table: The name of the table in the Azure SQL database. Required on PUT - (CreateOrReplace) requests. - :type table: str - :param user: The user name that will be used to connect to the Azure SQL database. Required on + :vartype table: str + :ivar user: The user name that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. - :type user: str - :param password: The password that will be used to connect to the Azure SQL database. Required + :vartype user: str + :ivar password: The password that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. - :type password: str + :vartype password: str """ _attribute_map = { @@ -1288,6 +1182,23 @@ def __init__( password: Optional[str] = None, **kwargs ): + """ + :keyword server: The name of the SQL server containing the Azure SQL database. Required on PUT + (CreateOrReplace) requests. + :paramtype server: str + :keyword database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) + requests. + :paramtype database: str + :keyword table: The name of the table in the Azure SQL database. Required on PUT + (CreateOrReplace) requests. + :paramtype table: str + :keyword user: The user name that will be used to connect to the Azure SQL database. Required + on PUT (CreateOrReplace) requests. + :paramtype user: str + :keyword password: The password that will be used to connect to the Azure SQL database. + Required on PUT (CreateOrReplace) requests. + :paramtype password: str + """ super(AzureSynapseDataSourceProperties, self).__init__(**kwargs) self.server = server self.database = database @@ -1301,24 +1212,23 @@ class AzureSynapseOutputDataSource(OutputDataSource): All required parameters must be populated in order to send to Azure. - :param type: Required. Indicates the type of data source output will be written to. Required on + :ivar type: Required. Indicates the type of data source output will be written to. Required on PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param server: The name of the SQL server containing the Azure SQL database. 
Required on PUT + :vartype type: str + :ivar server: The name of the SQL server containing the Azure SQL database. Required on PUT (CreateOrReplace) requests. - :type server: str - :param database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) + :vartype server: str + :ivar database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) requests. + :vartype database: str + :ivar table: The name of the table in the Azure SQL database. Required on PUT (CreateOrReplace) requests. - :type database: str - :param table: The name of the table in the Azure SQL database. Required on PUT - (CreateOrReplace) requests. - :type table: str - :param user: The user name that will be used to connect to the Azure SQL database. Required on + :vartype table: str + :ivar user: The user name that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. - :type user: str - :param password: The password that will be used to connect to the Azure SQL database. Required + :vartype user: str + :ivar password: The password that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. - :type password: str + :vartype password: str """ _validation = { @@ -1344,6 +1254,23 @@ def __init__( password: Optional[str] = None, **kwargs ): + """ + :keyword server: The name of the SQL server containing the Azure SQL database. Required on PUT + (CreateOrReplace) requests. + :paramtype server: str + :keyword database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) + requests. + :paramtype database: str + :keyword table: The name of the table in the Azure SQL database. Required on PUT + (CreateOrReplace) requests. + :paramtype table: str + :keyword user: The user name that will be used to connect to the Azure SQL database. Required + on PUT (CreateOrReplace) requests. + :paramtype user: str + :keyword password: The password that will be used to connect to the Azure SQL database. + Required on PUT (CreateOrReplace) requests. + :paramtype password: str + """ super(AzureSynapseOutputDataSource, self).__init__(**kwargs) self.type = 'Microsoft.Sql/Server/DataWarehouse' # type: str self.server = server @@ -1356,21 +1283,20 @@ def __init__( class AzureSynapseOutputDataSourceProperties(AzureSynapseDataSourceProperties): """The properties that are associated with an Azure Synapse output. - :param server: The name of the SQL server containing the Azure SQL database. Required on PUT + :ivar server: The name of the SQL server containing the Azure SQL database. Required on PUT (CreateOrReplace) requests. - :type server: str - :param database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) + :vartype server: str + :ivar database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) requests. + :vartype database: str + :ivar table: The name of the table in the Azure SQL database. Required on PUT (CreateOrReplace) requests. - :type database: str - :param table: The name of the table in the Azure SQL database. Required on PUT - (CreateOrReplace) requests. - :type table: str - :param user: The user name that will be used to connect to the Azure SQL database. Required on + :vartype table: str + :ivar user: The user name that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. - :type user: str - :param password: The password that will be used to connect to the Azure SQL database. 
Required + :vartype user: str + :ivar password: The password that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. - :type password: str + :vartype password: str """ _attribute_map = { @@ -1391,6 +1317,23 @@ def __init__( password: Optional[str] = None, **kwargs ): + """ + :keyword server: The name of the SQL server containing the Azure SQL database. Required on PUT + (CreateOrReplace) requests. + :paramtype server: str + :keyword database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) + requests. + :paramtype database: str + :keyword table: The name of the table in the Azure SQL database. Required on PUT + (CreateOrReplace) requests. + :paramtype table: str + :keyword user: The user name that will be used to connect to the Azure SQL database. Required + on PUT (CreateOrReplace) requests. + :paramtype user: str + :keyword password: The password that will be used to connect to the Azure SQL database. + Required on PUT (CreateOrReplace) requests. + :paramtype password: str + """ super(AzureSynapseOutputDataSourceProperties, self).__init__(server=server, database=database, table=table, user=user, password=password, **kwargs) @@ -1399,30 +1342,30 @@ class AzureTableOutputDataSource(OutputDataSource): All required parameters must be populated in order to send to Azure. - :param type: Required. Indicates the type of data source output will be written to. Required on + :ivar type: Required. Indicates the type of data source output will be written to. Required on PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param account_name: The name of the Azure Storage account. Required on PUT (CreateOrReplace) + :vartype type: str + :ivar account_name: The name of the Azure Storage account. Required on PUT (CreateOrReplace) requests. - :type account_name: str - :param account_key: The account key for the Azure Storage account. Required on PUT + :vartype account_name: str + :ivar account_key: The account key for the Azure Storage account. Required on PUT (CreateOrReplace) requests. - :type account_key: str - :param table: The name of the Azure Table. Required on PUT (CreateOrReplace) requests. - :type table: str - :param partition_key: This element indicates the name of a column from the SELECT statement in + :vartype account_key: str + :ivar table: The name of the Azure Table. Required on PUT (CreateOrReplace) requests. + :vartype table: str + :ivar partition_key: This element indicates the name of a column from the SELECT statement in the query that will be used as the partition key for the Azure Table. Required on PUT (CreateOrReplace) requests. - :type partition_key: str - :param row_key: This element indicates the name of a column from the SELECT statement in the + :vartype partition_key: str + :ivar row_key: This element indicates the name of a column from the SELECT statement in the query that will be used as the row key for the Azure Table. Required on PUT (CreateOrReplace) requests. - :type row_key: str - :param columns_to_remove: If specified, each item in the array is the name of a column to - remove (if present) from output event entities. - :type columns_to_remove: list[str] - :param batch_size: The number of rows to write to the Azure Table at a time. - :type batch_size: int + :vartype row_key: str + :ivar columns_to_remove: If specified, each item in the array is the name of a column to remove + (if present) from output event entities. 
+ :vartype columns_to_remove: list[str] + :ivar batch_size: The number of rows to write to the Azure Table at a time. + :vartype batch_size: int """ _validation = { @@ -1452,6 +1395,29 @@ def __init__( batch_size: Optional[int] = None, **kwargs ): + """ + :keyword account_name: The name of the Azure Storage account. Required on PUT (CreateOrReplace) + requests. + :paramtype account_name: str + :keyword account_key: The account key for the Azure Storage account. Required on PUT + (CreateOrReplace) requests. + :paramtype account_key: str + :keyword table: The name of the Azure Table. Required on PUT (CreateOrReplace) requests. + :paramtype table: str + :keyword partition_key: This element indicates the name of a column from the SELECT statement + in the query that will be used as the partition key for the Azure Table. Required on PUT + (CreateOrReplace) requests. + :paramtype partition_key: str + :keyword row_key: This element indicates the name of a column from the SELECT statement in the + query that will be used as the row key for the Azure Table. Required on PUT (CreateOrReplace) + requests. + :paramtype row_key: str + :keyword columns_to_remove: If specified, each item in the array is the name of a column to + remove (if present) from output event entities. + :paramtype columns_to_remove: list[str] + :keyword batch_size: The number of rows to write to the Azure Table at a time. + :paramtype batch_size: int + """ super(AzureTableOutputDataSource, self).__init__(**kwargs) self.type = 'Microsoft.Storage/Table' # type: str self.account_name = account_name @@ -1466,26 +1432,26 @@ def __init__( class BlobDataSourceProperties(msrest.serialization.Model): """The properties that are associated with a blob data source. - :param storage_accounts: A list of one or more Azure Storage accounts. Required on PUT + :ivar storage_accounts: A list of one or more Azure Storage accounts. Required on PUT (CreateOrReplace) requests. - :type storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] - :param container: The name of a container within the associated Storage account. This container + :vartype storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] + :ivar container: The name of a container within the associated Storage account. This container contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) requests. - :type container: str - :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern + :vartype container: str + :ivar path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. - :type path_pattern: str - :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. + :vartype path_pattern: str + :ivar date_format: The date format. 
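For the Azure Table output whose constructor completes above, `partition_key` and `row_key` name columns from the job query's SELECT statement. A minimal sketch with placeholder account and column names:

```python
from azure.mgmt.streamanalytics.models import AzureTableOutputDataSource

table_output = AzureTableOutputDataSource(
    account_name="mystorageaccount",
    account_key="<account-key>",
    table="outputtable",
    partition_key="deviceId",   # a column produced by the query's SELECT
    row_key="eventTime",        # likewise a column from the SELECT statement
    columns_to_remove=["deviceId", "eventTime"],
    batch_size=100,
)
```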
Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. - :type date_format: str - :param time_format: The time format. Wherever {time} appears in pathPattern, the value of this + :vartype date_format: str + :ivar time_format: The time format. Wherever {time} appears in pathPattern, the value of this property is used as the time format instead. - :type time_format: str + :vartype time_format: str """ _attribute_map = { @@ -1506,6 +1472,28 @@ def __init__( time_format: Optional[str] = None, **kwargs ): + """ + :keyword storage_accounts: A list of one or more Azure Storage accounts. Required on PUT + (CreateOrReplace) requests. + :paramtype storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] + :keyword container: The name of a container within the associated Storage account. This + container contains either the blob(s) to be read from or written to. Required on PUT + (CreateOrReplace) requests. + :paramtype container: str + :keyword path_pattern: The blob path pattern. Not a regular expression. It represents a pattern + against which blob names will be matched to determine whether or not they should be included as + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. + :paramtype path_pattern: str + :keyword date_format: The date format. Wherever {date} appears in pathPattern, the value of + this property is used as the date format instead. + :paramtype date_format: str + :keyword time_format: The time format. Wherever {time} appears in pathPattern, the value of + this property is used as the time format instead. + :paramtype time_format: str + """ super(BlobDataSourceProperties, self).__init__(**kwargs) self.storage_accounts = storage_accounts self.container = container @@ -1519,32 +1507,33 @@ class BlobOutputDataSource(OutputDataSource): All required parameters must be populated in order to send to Azure. - :param type: Required. Indicates the type of data source output will be written to. Required on + :ivar type: Required. Indicates the type of data source output will be written to. Required on PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param storage_accounts: A list of one or more Azure Storage accounts. Required on PUT + :vartype type: str + :ivar storage_accounts: A list of one or more Azure Storage accounts. Required on PUT (CreateOrReplace) requests. - :type storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] - :param container: The name of a container within the associated Storage account. This container + :vartype storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] + :ivar container: The name of a container within the associated Storage account. This container contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) requests. - :type container: str - :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern + :vartype container: str + :ivar path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. 
See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. - :type path_pattern: str - :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. + :vartype path_pattern: str + :ivar date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. - :type date_format: str - :param time_format: The time format. Wherever {time} appears in pathPattern, the value of this + :vartype date_format: str + :ivar time_format: The time format. Wherever {time} appears in pathPattern, the value of this property is used as the time format instead. - :type time_format: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + :vartype time_format: str + :ivar authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + :vartype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode """ _validation = { @@ -1572,6 +1561,32 @@ def __init__( authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, **kwargs ): + """ + :keyword storage_accounts: A list of one or more Azure Storage accounts. Required on PUT + (CreateOrReplace) requests. + :paramtype storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] + :keyword container: The name of a container within the associated Storage account. This + container contains either the blob(s) to be read from or written to. Required on PUT + (CreateOrReplace) requests. + :paramtype container: str + :keyword path_pattern: The blob path pattern. Not a regular expression. It represents a pattern + against which blob names will be matched to determine whether or not they should be included as + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. + :paramtype path_pattern: str + :keyword date_format: The date format. Wherever {date} appears in pathPattern, the value of + this property is used as the date format instead. + :paramtype date_format: str + :keyword time_format: The time format. Wherever {time} appears in pathPattern, the value of + this property is used as the time format instead. + :paramtype time_format: str + :keyword authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :paramtype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + """ super(BlobOutputDataSource, self).__init__(**kwargs) self.type = 'Microsoft.Storage/Blob' # type: str self.storage_accounts = storage_accounts @@ -1585,29 +1600,30 @@ def __init__( class BlobOutputDataSourceProperties(BlobDataSourceProperties): """The properties that are associated with a blob output. 
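The `{date}` and `{time}` tokens in `path_pattern` are expanded using `date_format` and `time_format`, so blob output can be laid out by ingestion time. A placeholder sketch, assuming the `StorageAccount` helper model referenced in these docstrings exposes `account_name`/`account_key` keywords:

```python
from azure.mgmt.streamanalytics.models import BlobOutputDataSource, StorageAccount

blob_output = BlobOutputDataSource(
    storage_accounts=[StorageAccount(account_name="mystorageaccount",
                                     account_key="<account-key>")],
    container="output",
    # {date}/{time} are substituted using the two format fields below.
    path_pattern="jobresults/{date}/{time}",
    date_format="yyyy/MM/dd",
    time_format="HH",
    authentication_mode="ConnectionString",
)
```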
- :param storage_accounts: A list of one or more Azure Storage accounts. Required on PUT + :ivar storage_accounts: A list of one or more Azure Storage accounts. Required on PUT (CreateOrReplace) requests. - :type storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] - :param container: The name of a container within the associated Storage account. This container + :vartype storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] + :ivar container: The name of a container within the associated Storage account. This container contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) requests. - :type container: str - :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern + :vartype container: str + :ivar path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. - :type path_pattern: str - :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. + :vartype path_pattern: str + :ivar date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. - :type date_format: str - :param time_format: The time format. Wherever {time} appears in pathPattern, the value of this + :vartype date_format: str + :ivar time_format: The time format. Wherever {time} appears in pathPattern, the value of this property is used as the time format instead. - :type time_format: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + :vartype time_format: str + :ivar authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + :vartype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode """ _attribute_map = { @@ -1630,6 +1646,32 @@ def __init__( authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, **kwargs ): + """ + :keyword storage_accounts: A list of one or more Azure Storage accounts. Required on PUT + (CreateOrReplace) requests. + :paramtype storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] + :keyword container: The name of a container within the associated Storage account. This + container contains either the blob(s) to be read from or written to. Required on PUT + (CreateOrReplace) requests. + :paramtype container: str + :keyword path_pattern: The blob path pattern. Not a regular expression. It represents a pattern + against which blob names will be matched to determine whether or not they should be included as + input or output to the job. 
See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. + :paramtype path_pattern: str + :keyword date_format: The date format. Wherever {date} appears in pathPattern, the value of + this property is used as the date format instead. + :paramtype date_format: str + :keyword time_format: The time format. Wherever {time} appears in pathPattern, the value of + this property is used as the time format instead. + :paramtype time_format: str + :keyword authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :paramtype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + """ super(BlobOutputDataSourceProperties, self).__init__(storage_accounts=storage_accounts, container=container, path_pattern=path_pattern, date_format=date_format, time_format=time_format, **kwargs) self.authentication_mode = authentication_mode @@ -1639,29 +1681,29 @@ class BlobReferenceInputDataSource(ReferenceInputDataSource): All required parameters must be populated in order to send to Azure. - :param type: Required. Indicates the type of input data source containing reference data. + :ivar type: Required. Indicates the type of input data source containing reference data. Required on PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param storage_accounts: A list of one or more Azure Storage accounts. Required on PUT + :vartype type: str + :ivar storage_accounts: A list of one or more Azure Storage accounts. Required on PUT (CreateOrReplace) requests. - :type storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] - :param container: The name of a container within the associated Storage account. This container + :vartype storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] + :ivar container: The name of a container within the associated Storage account. This container contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) requests. - :type container: str - :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern + :vartype container: str + :ivar path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. - :type path_pattern: str - :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. + :vartype path_pattern: str + :ivar date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. - :type date_format: str - :param time_format: The time format. Wherever {time} appears in pathPattern, the value of this + :vartype date_format: str + :ivar time_format: The time format. 
Wherever {time} appears in pathPattern, the value of this property is used as the time format instead. - :type time_format: str + :vartype time_format: str """ _validation = { @@ -1687,6 +1729,28 @@ def __init__( time_format: Optional[str] = None, **kwargs ): + """ + :keyword storage_accounts: A list of one or more Azure Storage accounts. Required on PUT + (CreateOrReplace) requests. + :paramtype storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] + :keyword container: The name of a container within the associated Storage account. This + container contains either the blob(s) to be read from or written to. Required on PUT + (CreateOrReplace) requests. + :paramtype container: str + :keyword path_pattern: The blob path pattern. Not a regular expression. It represents a pattern + against which blob names will be matched to determine whether or not they should be included as + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. + :paramtype path_pattern: str + :keyword date_format: The date format. Wherever {date} appears in pathPattern, the value of + this property is used as the date format instead. + :paramtype date_format: str + :keyword time_format: The time format. Wherever {time} appears in pathPattern, the value of + this property is used as the time format instead. + :paramtype time_format: str + """ super(BlobReferenceInputDataSource, self).__init__(**kwargs) self.type = 'Microsoft.Storage/Blob' # type: str self.storage_accounts = storage_accounts @@ -1699,26 +1763,26 @@ def __init__( class BlobReferenceInputDataSourceProperties(BlobDataSourceProperties): """The properties that are associated with a blob input containing reference data. - :param storage_accounts: A list of one or more Azure Storage accounts. Required on PUT + :ivar storage_accounts: A list of one or more Azure Storage accounts. Required on PUT (CreateOrReplace) requests. - :type storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] - :param container: The name of a container within the associated Storage account. This container + :vartype storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] + :ivar container: The name of a container within the associated Storage account. This container contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) requests. - :type container: str - :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern + :vartype container: str + :ivar path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. - :type path_pattern: str - :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. 
+ :vartype path_pattern: str + :ivar date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. - :type date_format: str - :param time_format: The time format. Wherever {time} appears in pathPattern, the value of this + :vartype date_format: str + :ivar time_format: The time format. Wherever {time} appears in pathPattern, the value of this property is used as the time format instead. - :type time_format: str + :vartype time_format: str """ _attribute_map = { @@ -1739,6 +1803,28 @@ def __init__( time_format: Optional[str] = None, **kwargs ): + """ + :keyword storage_accounts: A list of one or more Azure Storage accounts. Required on PUT + (CreateOrReplace) requests. + :paramtype storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] + :keyword container: The name of a container within the associated Storage account. This + container contains either the blob(s) to be read from or written to. Required on PUT + (CreateOrReplace) requests. + :paramtype container: str + :keyword path_pattern: The blob path pattern. Not a regular expression. It represents a pattern + against which blob names will be matched to determine whether or not they should be included as + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. + :paramtype path_pattern: str + :keyword date_format: The date format. Wherever {date} appears in pathPattern, the value of + this property is used as the date format instead. + :paramtype date_format: str + :keyword time_format: The time format. Wherever {time} appears in pathPattern, the value of + this property is used as the time format instead. + :paramtype time_format: str + """ super(BlobReferenceInputDataSourceProperties, self).__init__(storage_accounts=storage_accounts, container=container, path_pattern=path_pattern, date_format=date_format, time_format=time_format, **kwargs) @@ -1750,9 +1836,9 @@ class StreamInputDataSource(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param type: Required. Indicates the type of input data source containing stream data. Required + :ivar type: Required. Indicates the type of input data source containing stream data. Required on PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str + :vartype type: str """ _validation = { @@ -1771,6 +1857,8 @@ def __init__( self, **kwargs ): + """ + """ super(StreamInputDataSource, self).__init__(**kwargs) self.type = None # type: Optional[str] @@ -1780,32 +1868,31 @@ class BlobStreamInputDataSource(StreamInputDataSource): All required parameters must be populated in order to send to Azure. - :param type: Required. Indicates the type of input data source containing stream data. Required + :ivar type: Required. Indicates the type of input data source containing stream data. Required on PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param storage_accounts: A list of one or more Azure Storage accounts. Required on PUT + :vartype type: str + :ivar storage_accounts: A list of one or more Azure Storage accounts. Required on PUT (CreateOrReplace) requests. 
- :type storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] - :param container: The name of a container within the associated Storage account. This container + :vartype storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] + :ivar container: The name of a container within the associated Storage account. This container contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) requests. - :type container: str - :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern + :vartype container: str + :ivar path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. - :type path_pattern: str - :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. + :vartype path_pattern: str + :ivar date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. - :type date_format: str - :param time_format: The time format. Wherever {time} appears in pathPattern, the value of this + :vartype date_format: str + :ivar time_format: The time format. Wherever {time} appears in pathPattern, the value of this property is used as the time format instead. - :type time_format: str - :param source_partition_count: The partition count of the blob input data source. Range 1 - - 256. - :type source_partition_count: int + :vartype time_format: str + :ivar source_partition_count: The partition count of the blob input data source. Range 1 - 256. + :vartype source_partition_count: int """ _validation = { @@ -1833,6 +1920,31 @@ def __init__( source_partition_count: Optional[int] = None, **kwargs ): + """ + :keyword storage_accounts: A list of one or more Azure Storage accounts. Required on PUT + (CreateOrReplace) requests. + :paramtype storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] + :keyword container: The name of a container within the associated Storage account. This + container contains either the blob(s) to be read from or written to. Required on PUT + (CreateOrReplace) requests. + :paramtype container: str + :keyword path_pattern: The blob path pattern. Not a regular expression. It represents a pattern + against which blob names will be matched to determine whether or not they should be included as + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. + :paramtype path_pattern: str + :keyword date_format: The date format. Wherever {date} appears in pathPattern, the value of + this property is used as the date format instead. + :paramtype date_format: str + :keyword time_format: The time format. 
Wherever {time} appears in pathPattern, the value of + this property is used as the time format instead. + :paramtype time_format: str + :keyword source_partition_count: The partition count of the blob input data source. Range 1 - + 256. + :paramtype source_partition_count: int + """ super(BlobStreamInputDataSource, self).__init__(**kwargs) self.type = 'Microsoft.Storage/Blob' # type: str self.storage_accounts = storage_accounts @@ -1846,29 +1958,28 @@ def __init__( class BlobStreamInputDataSourceProperties(BlobDataSourceProperties): """The properties that are associated with a blob input containing stream data. - :param storage_accounts: A list of one or more Azure Storage accounts. Required on PUT + :ivar storage_accounts: A list of one or more Azure Storage accounts. Required on PUT (CreateOrReplace) requests. - :type storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] - :param container: The name of a container within the associated Storage account. This container + :vartype storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] + :ivar container: The name of a container within the associated Storage account. This container contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) requests. - :type container: str - :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern + :vartype container: str + :ivar path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. - :type path_pattern: str - :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. + :vartype path_pattern: str + :ivar date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. - :type date_format: str - :param time_format: The time format. Wherever {time} appears in pathPattern, the value of this + :vartype date_format: str + :ivar time_format: The time format. Wherever {time} appears in pathPattern, the value of this property is used as the time format instead. - :type time_format: str - :param source_partition_count: The partition count of the blob input data source. Range 1 - - 256. - :type source_partition_count: int + :vartype time_format: str + :ivar source_partition_count: The partition count of the blob input data source. Range 1 - 256. + :vartype source_partition_count: int """ _attribute_map = { @@ -1891,12 +2002,37 @@ def __init__( source_partition_count: Optional[int] = None, **kwargs ): + """ + :keyword storage_accounts: A list of one or more Azure Storage accounts. Required on PUT + (CreateOrReplace) requests. + :paramtype storage_accounts: list[~stream_analytics_management_client.models.StorageAccount] + :keyword container: The name of a container within the associated Storage account. 
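A stream input over the same blob layout differs from the output case mainly in the optional `source_partition_count`, which must fall in the documented 1 - 256 range. A placeholder sketch:

```python
from azure.mgmt.streamanalytics.models import (
    BlobStreamInputDataSource,
    StorageAccount,
)

blob_input = BlobStreamInputDataSource(
    storage_accounts=[StorageAccount(account_name="mystorageaccount",
                                     account_key="<account-key>")],
    container="input",
    path_pattern="telemetry/{date}/{time}",
    date_format="yyyy/MM/dd",
    time_format="HH",
    source_partition_count=16,  # must stay within the documented 1 - 256 range
)
```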
This + container contains either the blob(s) to be read from or written to. Required on PUT + (CreateOrReplace) requests. + :paramtype container: str + :keyword path_pattern: The blob path pattern. Not a regular expression. It represents a pattern + against which blob names will be matched to determine whether or not they should be included as + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. + :paramtype path_pattern: str + :keyword date_format: The date format. Wherever {date} appears in pathPattern, the value of + this property is used as the date format instead. + :paramtype date_format: str + :keyword time_format: The time format. Wherever {time} appears in pathPattern, the value of + this property is used as the time format instead. + :paramtype time_format: str + :keyword source_partition_count: The partition count of the blob input data source. Range 1 - + 256. + :paramtype source_partition_count: int + """ super(BlobStreamInputDataSourceProperties, self).__init__(storage_accounts=storage_accounts, container=container, path_pattern=path_pattern, date_format=date_format, time_format=time_format, **kwargs) self.source_partition_count = source_partition_count class Resource(msrest.serialization.Model): - """Resource. + """The base resource definition. Variables are only populated by the server, and will be ignored when sending a request. @@ -1926,6 +2062,8 @@ def __init__( self, **kwargs ): + """ + """ super(Resource, self).__init__(**kwargs) self.id = None self.name = None @@ -1945,10 +2083,10 @@ class TrackedResource(Resource): :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. :vartype type: str - :param tags: A set of tags. Resource tags. - :type tags: dict[str, str] - :param location: The geo-location where the resource lives. - :type location: str + :ivar tags: A set of tags. Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. + :vartype location: str """ _validation = { @@ -1972,6 +2110,12 @@ def __init__( location: Optional[str] = None, **kwargs ): + """ + :keyword tags: A set of tags. Resource tags. + :paramtype tags: dict[str, str] + :keyword location: The geo-location where the resource lives. + :paramtype location: str + """ super(TrackedResource, self).__init__(**kwargs) self.tags = tags self.location = location @@ -1990,19 +2134,32 @@ class Cluster(TrackedResource): :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. :vartype type: str - :param tags: A set of tags. Resource tags. - :type tags: dict[str, str] - :param location: The geo-location where the resource lives. - :type location: str - :param sku: The SKU of the cluster. This determines the size/capacity of the cluster. Required + :ivar tags: A set of tags. Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. + :vartype location: str + :ivar sku: The SKU of the cluster. This determines the size/capacity of the cluster. Required on PUT (CreateOrUpdate) requests. - :type sku: ~stream_analytics_management_client.models.ClusterSku + :vartype sku: ~stream_analytics_management_client.models.ClusterSku :ivar etag: The current entity tag for the cluster. This is an opaque string. 
You can use it to detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency. :vartype etag: str - :param properties: The properties associated with a Stream Analytics cluster. - :type properties: ~stream_analytics_management_client.models.ClusterProperties + :ivar created_date: The date this cluster was created. + :vartype created_date: ~datetime.datetime + :ivar cluster_id: Unique identifier for the cluster. + :vartype cluster_id: str + :ivar provisioning_state: The status of the cluster provisioning. The three terminal states + are: Succeeded, Failed and Canceled. Possible values include: "Succeeded", "Failed", + "Canceled", "InProgress". + :vartype provisioning_state: str or + ~stream_analytics_management_client.models.ClusterProvisioningState + :ivar capacity_allocated: Represents the number of streaming units currently being used on the + cluster. + :vartype capacity_allocated: int + :ivar capacity_assigned: Represents the sum of the SUs of all streaming jobs associated with + the cluster. If all of the jobs were running, this would be the capacity allocated. + :vartype capacity_assigned: int """ _validation = { @@ -2010,6 +2167,11 @@ class Cluster(TrackedResource): 'name': {'readonly': True}, 'type': {'readonly': True}, 'etag': {'readonly': True}, + 'created_date': {'readonly': True}, + 'cluster_id': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'capacity_allocated': {'readonly': True}, + 'capacity_assigned': {'readonly': True}, } _attribute_map = { @@ -2020,7 +2182,11 @@ class Cluster(TrackedResource): 'location': {'key': 'location', 'type': 'str'}, 'sku': {'key': 'sku', 'type': 'ClusterSku'}, 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'ClusterProperties'}, + 'created_date': {'key': 'properties.createdDate', 'type': 'iso-8601'}, + 'cluster_id': {'key': 'properties.clusterId', 'type': 'str'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'capacity_allocated': {'key': 'properties.capacityAllocated', 'type': 'int'}, + 'capacity_assigned': {'key': 'properties.capacityAssigned', 'type': 'int'}, } def __init__( @@ -2029,20 +2195,32 @@ def __init__( tags: Optional[Dict[str, str]] = None, location: Optional[str] = None, sku: Optional["ClusterSku"] = None, - properties: Optional["ClusterProperties"] = None, **kwargs ): + """ + :keyword tags: A set of tags. Resource tags. + :paramtype tags: dict[str, str] + :keyword location: The geo-location where the resource lives. + :paramtype location: str + :keyword sku: The SKU of the cluster. This determines the size/capacity of the cluster. + Required on PUT (CreateOrUpdate) requests. + :paramtype sku: ~stream_analytics_management_client.models.ClusterSku + """ super(Cluster, self).__init__(tags=tags, location=location, **kwargs) self.sku = sku self.etag = None - self.properties = properties + self.created_date = None + self.cluster_id = None + self.provisioning_state = None + self.capacity_allocated = None + self.capacity_assigned = None class ClusterInfo(msrest.serialization.Model): """The properties associated with a Stream Analytics cluster. - :param id: The resource id of cluster. - :type id: str + :ivar id: The resource id of cluster. + :vartype id: str """ _attribute_map = { @@ -2055,6 +2233,10 @@ def __init__( id: Optional[str] = None, **kwargs ): + """ + :keyword id: The resource id of cluster. 
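Because `Cluster` now flattens the former `ClusterProperties` fields as read-only attributes, callers read them straight off the model the service returns. A sketch, assuming `azure-identity` supplies the credential and that `ClustersOperations.get` takes `(resource_group_name, cluster_name)`; all identifiers are placeholders:

```python
from azure.identity import DefaultAzureCredential
from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient

client = StreamAnalyticsManagementClient(
    DefaultAzureCredential(), "<subscription-id>"
)
cluster = client.clusters.get("<resource-group>", "<cluster-name>")

# Formerly nested under cluster.properties; now flattened, server-populated,
# and read-only on the model.
print(cluster.provisioning_state, cluster.cluster_id)
print(cluster.capacity_allocated, "allocated /",
      cluster.capacity_assigned, "assigned streaming units")
```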
+ :paramtype id: str + """ super(ClusterInfo, self).__init__(**kwargs) self.id = id @@ -2090,6 +2272,8 @@ def __init__( self, **kwargs ): + """ + """ super(ClusterJob, self).__init__(**kwargs) self.id = None self.streaming_units = None @@ -2121,6 +2305,8 @@ def __init__( self, **kwargs ): + """ + """ super(ClusterJobListResult, self).__init__(**kwargs) self.value = None self.next_link = None @@ -2151,71 +2337,23 @@ def __init__( self, **kwargs ): + """ + """ super(ClusterListResult, self).__init__(**kwargs) self.value = None self.next_link = None -class ClusterProperties(msrest.serialization.Model): - """The properties associated with a Stream Analytics cluster. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar created_date: The date this cluster was created. - :vartype created_date: ~datetime.datetime - :ivar cluster_id: Unique identifier for the cluster. - :vartype cluster_id: str - :ivar provisioning_state: The status of the cluster provisioning. The three terminal states - are: Succeeded, Failed and Canceled. Possible values include: "Succeeded", "Failed", - "Canceled", "InProgress". - :vartype provisioning_state: str or - ~stream_analytics_management_client.models.ClusterProvisioningState - :ivar capacity_allocated: Represents the number of streaming units currently being used on the - cluster. - :vartype capacity_allocated: int - :ivar capacity_assigned: Represents the sum of the SUs of all streaming jobs associated with - the cluster. If all of the jobs were running, this would be the capacity allocated. - :vartype capacity_assigned: int - """ - - _validation = { - 'created_date': {'readonly': True}, - 'cluster_id': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - 'capacity_allocated': {'readonly': True}, - 'capacity_assigned': {'readonly': True}, - } - - _attribute_map = { - 'created_date': {'key': 'createdDate', 'type': 'iso-8601'}, - 'cluster_id': {'key': 'clusterId', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'capacity_allocated': {'key': 'capacityAllocated', 'type': 'int'}, - 'capacity_assigned': {'key': 'capacityAssigned', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - super(ClusterProperties, self).__init__(**kwargs) - self.created_date = None - self.cluster_id = None - self.provisioning_state = None - self.capacity_allocated = None - self.capacity_assigned = None - - class ClusterSku(msrest.serialization.Model): """The SKU of the cluster. This determines the size/capacity of the cluster. Required on PUT (CreateOrUpdate) requests. - :param name: Specifies the SKU name of the cluster. Required on PUT (CreateOrUpdate) requests. + :ivar name: Specifies the SKU name of the cluster. Required on PUT (CreateOrUpdate) requests. Possible values include: "Default". - :type name: str or ~stream_analytics_management_client.models.ClusterSkuName - :param capacity: Denotes the number of streaming units the cluster can support. Valid values - for this property are multiples of 36 with a minimum value of 36 and maximum value of 216. - Required on PUT (CreateOrUpdate) requests. - :type capacity: int + :vartype name: str or ~stream_analytics_management_client.models.ClusterSkuName + :ivar capacity: Denotes the number of streaming units the cluster can support. Valid values for + this property are multiples of 36 with a minimum value of 36 and maximum value of 216. Required + on PUT (CreateOrUpdate) requests. 
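On the write side, only `sku` (plus `tags` and `location`) remains caller-settable on `Cluster`. A construction sketch honoring the documented capacity rule (multiples of 36, from 36 through 216); location and tags are placeholders:

```python
from azure.mgmt.streamanalytics.models import Cluster, ClusterSku

# 36 is the smallest valid capacity; larger clusters use 72, 108, ... 216.
new_cluster = Cluster(
    location="westus",
    tags={"env": "dev"},
    sku=ClusterSku(name="Default", capacity=36),
)
```

A model built this way would be handed to the clusters create-or-update operation; the read-only fields shown earlier stay `None` until the service populates them.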
+ :vartype capacity: int """ _validation = { @@ -2234,6 +2372,15 @@ def __init__( capacity: Optional[int] = None, **kwargs ): + """ + :keyword name: Specifies the SKU name of the cluster. Required on PUT (CreateOrUpdate) + requests. Possible values include: "Default". + :paramtype name: str or ~stream_analytics_management_client.models.ClusterSkuName + :keyword capacity: Denotes the number of streaming units the cluster can support. Valid values + for this property are multiples of 36 with a minimum value of 36 and maximum value of 216. + Required on PUT (CreateOrUpdate) requests. + :paramtype capacity: int + """ super(ClusterSku, self).__init__(**kwargs) self.name = name self.capacity = capacity @@ -2244,8 +2391,9 @@ class Compression(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param type: Required. - :type type: str + :ivar type: Required. Indicates the type of compression that the input uses. Required on PUT + (CreateOrReplace) requests. Possible values include: "None", "GZip", "Deflate". + :vartype type: str or ~stream_analytics_management_client.models.CompressionType """ _validation = { @@ -2259,116 +2407,37 @@ class Compression(msrest.serialization.Model): def __init__( self, *, - type: str, + type: Union[str, "CompressionType"], **kwargs ): + """ + :keyword type: Required. Indicates the type of compression that the input uses. Required on PUT + (CreateOrReplace) requests. Possible values include: "None", "GZip", "Deflate". + :paramtype type: str or ~stream_analytics_management_client.models.CompressionType + """ super(Compression, self).__init__(**kwargs) self.type = type -class CSharpFunctionBinding(FunctionBinding): - """The binding to a CSharp function. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the function binding type.Constant filled by server. - :type type: str - :param script: The Csharp code containing a single function definition. - :type script: str - :param dll_path: The Csharp code containing a single function definition. - :type dll_path: str - :param class_property: The Csharp code containing a single function definition. - :type class_property: str - :param method: The Csharp code containing a single function definition. - :type method: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'script': {'key': 'properties.script', 'type': 'str'}, - 'dll_path': {'key': 'properties.dllPath', 'type': 'str'}, - 'class_property': {'key': 'properties.class', 'type': 'str'}, - 'method': {'key': 'properties.method', 'type': 'str'}, - } - - def __init__( - self, - *, - script: Optional[str] = None, - dll_path: Optional[str] = None, - class_property: Optional[str] = None, - method: Optional[str] = None, - **kwargs - ): - super(CSharpFunctionBinding, self).__init__(**kwargs) - self.type = 'Microsoft.StreamAnalytics/CLRUdf' # type: str - self.script = script - self.dll_path = dll_path - self.class_property = class_property - self.method = method - - -class CSharpFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): - """The parameters needed to retrieve the default function definition for a CSharp function. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param binding_type: Required. 
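With `type` now declared as `Union[str, "CompressionType"]`, the compression model accepts either the generated enum member or its string value. A short sketch using the string form, which matches the documented service values:

```python
from azure.mgmt.streamanalytics.models import Compression

# "None", "GZip", and "Deflate" are the documented values; the generated
# CompressionType enum may be passed in place of the raw string.
compression = Compression(type="GZip")
```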
Indicates the function binding type.Constant filled by server. - :type binding_type: str - :param script: The CSharp code containing a single function definition. - :type script: str - :ivar udf_type: The function type. Default value: "Scalar". - :vartype udf_type: str - """ - - _validation = { - 'binding_type': {'required': True}, - 'udf_type': {'constant': True}, - } - - _attribute_map = { - 'binding_type': {'key': 'bindingType', 'type': 'str'}, - 'script': {'key': 'bindingRetrievalProperties.script', 'type': 'str'}, - 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'str'}, - } - - udf_type = "Scalar" - - def __init__( - self, - *, - script: Optional[str] = None, - **kwargs - ): - super(CSharpFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) - self.binding_type = 'Microsoft.StreamAnalytics/CLRUdf' # type: str - self.script = script - - class CsvSerialization(Serialization): """Describes how data from an input is serialized or how data is serialized when written to an output in CSV format. All required parameters must be populated in order to send to Azure. - :param type: Required. Indicates the type of serialization that the input or output uses. - Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: - "Csv", "Avro", "Json", "CustomClr", "Parquet". - :type type: str or ~stream_analytics_management_client.models.EventSerializationType - :param field_delimiter: Specifies the delimiter that will be used to separate comma-separated - value (CSV) records. See https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream- - analytics-input or https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics- - output for a list of supported values. Required on PUT (CreateOrReplace) requests. - :type field_delimiter: str - :param encoding: Specifies the encoding of the incoming data in the case of input and the + :ivar type: Required. Indicates the type of serialization that the input or output uses. + Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: + "Csv", "Avro", "Json", "Parquet". + :vartype type: str or ~stream_analytics_management_client.models.EventSerializationType + :ivar field_delimiter: Specifies the delimiter that will be used to separate comma-separated + value (CSV) records. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a list of + supported values. Required on PUT (CreateOrReplace) requests. + :vartype field_delimiter: str + :ivar encoding: Specifies the encoding of the incoming data in the case of input and the encoding of outgoing data in the case of output. Required on PUT (CreateOrReplace) requests. Possible values include: "UTF8". - :type encoding: str or ~stream_analytics_management_client.models.Encoding + :vartype encoding: str or ~stream_analytics_management_client.models.Encoding """ _validation = { @@ -2388,50 +2457,24 @@ def __init__( encoding: Optional[Union[str, "Encoding"]] = None, **kwargs ): + """ + :keyword field_delimiter: Specifies the delimiter that will be used to separate comma-separated + value (CSV) records. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a list of + supported values. Required on PUT (CreateOrReplace) requests. 
+ :paramtype field_delimiter: str + :keyword encoding: Specifies the encoding of the incoming data in the case of input and the + encoding of outgoing data in the case of output. Required on PUT (CreateOrReplace) requests. + Possible values include: "UTF8". + :paramtype encoding: str or ~stream_analytics_management_client.models.Encoding + """ super(CsvSerialization, self).__init__(**kwargs) self.type = 'Csv' # type: str self.field_delimiter = field_delimiter self.encoding = encoding -class CustomClrSerialization(Serialization): - """Describes how data from an input is serialized or how data is serialized when written to an output in custom format. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of serialization that the input or output uses. - Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: - "Csv", "Avro", "Json", "CustomClr", "Parquet". - :type type: str or ~stream_analytics_management_client.models.EventSerializationType - :param serialization_dll_path: The serialization library path. - :type serialization_dll_path: str - :param serialization_class_name: The serialization class name. - :type serialization_class_name: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'serialization_dll_path': {'key': 'properties.serializationDllPath', 'type': 'str'}, - 'serialization_class_name': {'key': 'properties.serializationClassName', 'type': 'str'}, - } - - def __init__( - self, - *, - serialization_dll_path: Optional[str] = None, - serialization_class_name: Optional[str] = None, - **kwargs - ): - super(CustomClrSerialization, self).__init__(**kwargs) - self.type = 'CustomClr' # type: str - self.serialization_dll_path = serialization_dll_path - self.serialization_class_name = serialization_class_name - - class DiagnosticCondition(msrest.serialization.Model): """Condition applicable to the resource, or to the job overall, that warrant customer attention. @@ -2463,6 +2506,8 @@ def __init__( self, **kwargs ): + """ + """ super(DiagnosticCondition, self).__init__(**kwargs) self.since = None self.code = None @@ -2491,6 +2536,8 @@ def __init__( self, **kwargs ): + """ + """ super(Diagnostics, self).__init__(**kwargs) self.conditions = None @@ -2500,31 +2547,31 @@ class DocumentDbOutputDataSource(OutputDataSource): All required parameters must be populated in order to send to Azure. - :param type: Required. Indicates the type of data source output will be written to. Required on + :ivar type: Required. Indicates the type of data source output will be written to. Required on PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param account_id: The DocumentDB account name or ID. Required on PUT (CreateOrReplace) + :vartype type: str + :ivar account_id: The DocumentDB account name or ID. Required on PUT (CreateOrReplace) requests. - :type account_id: str - :param account_key: The account key for the DocumentDB account. Required on PUT + :vartype account_id: str + :ivar account_key: The account key for the DocumentDB account. Required on PUT (CreateOrReplace) requests. - :type account_key: str - :param database: The name of the DocumentDB database. Required on PUT (CreateOrReplace) + :vartype account_key: str + :ivar database: The name of the DocumentDB database. Required on PUT (CreateOrReplace) requests. 
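For the CSV serializer just shown, only the CSV-specific fields are supplied; the `type` discriminator is constant-filled as `'Csv'`. A minimal sketch:

```python
from azure.mgmt.streamanalytics.models import CsvSerialization

# Both fields are required on PUT (CreateOrReplace) requests.
csv_serialization = CsvSerialization(field_delimiter=",", encoding="UTF8")
```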
- :type database: str - :param collection_name_pattern: The collection name pattern for the collections to be used. The + :vartype database: str + :ivar collection_name_pattern: The collection name pattern for the collections to be used. The collection name format can be constructed using the optional {partition} token, where - partitions start from 0. See the DocumentDB section of https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for more information. Required on PUT - (CreateOrReplace) requests. - :type collection_name_pattern: str - :param partition_key: The name of the field in output events used to specify the key for + partitions start from 0. See the DocumentDB section of + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for more + information. Required on PUT (CreateOrReplace) requests. + :vartype collection_name_pattern: str + :ivar partition_key: The name of the field in output events used to specify the key for partitioning output across collections. If 'collectionNamePattern' contains the {partition} token, this property is required to be specified. - :type partition_key: str - :param document_id: The name of the field in output events used to specify the primary key - which insert or update operations are based on. - :type document_id: str + :vartype partition_key: str + :ivar document_id: The name of the field in output events used to specify the primary key which + insert or update operations are based on. + :vartype document_id: str """ _validation = { @@ -2552,6 +2599,30 @@ def __init__( document_id: Optional[str] = None, **kwargs ): + """ + :keyword account_id: The DocumentDB account name or ID. Required on PUT (CreateOrReplace) + requests. + :paramtype account_id: str + :keyword account_key: The account key for the DocumentDB account. Required on PUT + (CreateOrReplace) requests. + :paramtype account_key: str + :keyword database: The name of the DocumentDB database. Required on PUT (CreateOrReplace) + requests. + :paramtype database: str + :keyword collection_name_pattern: The collection name pattern for the collections to be used. + The collection name format can be constructed using the optional {partition} token, where + partitions start from 0. See the DocumentDB section of + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for more + information. Required on PUT (CreateOrReplace) requests. + :paramtype collection_name_pattern: str + :keyword partition_key: The name of the field in output events used to specify the key for + partitioning output across collections. If 'collectionNamePattern' contains the {partition} + token, this property is required to be specified. + :paramtype partition_key: str + :keyword document_id: The name of the field in output events used to specify the primary key + which insert or update operations are based on. + :paramtype document_id: str + """ super(DocumentDbOutputDataSource, self).__init__(**kwargs) self.type = 'Microsoft.Storage/DocumentDB' # type: str self.account_id = account_id @@ -2565,89 +2636,111 @@ def __init__( class Error(msrest.serialization.Model): """Common error representation. - :param error: Error definition properties. - :type error: ~stream_analytics_management_client.models.ErrorAutoGenerated + :ivar error: Error definition properties. 
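A sketch of constructing the DocumentDB output described above; every value is a hypothetical placeholder, and the `type` discriminator (`'Microsoft.Storage/DocumentDB'`) is filled in by the model:

```python
from azure.mgmt.streamanalytics.models import DocumentDbOutputDataSource

documentdb_output = DocumentDbOutputDataSource(
    account_id="my-cosmos-account",
    account_key="<account-key>",
    database="my-db",
    # {partition} expands to the 0-based partition number at runtime.
    collection_name_pattern="collection{partition}",
    partition_key="deviceId",  # required because the pattern uses {partition}
    document_id="eventId",
)
```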
+ :vartype error: ~stream_analytics_management_client.models.ErrorError """ _attribute_map = { - 'error': {'key': 'error', 'type': 'ErrorAutoGenerated'}, + 'error': {'key': 'error', 'type': 'ErrorError'}, } def __init__( self, *, - error: Optional["ErrorAutoGenerated"] = None, + error: Optional["ErrorError"] = None, **kwargs ): + """ + :keyword error: Error definition properties. + :paramtype error: ~stream_analytics_management_client.models.ErrorError + """ super(Error, self).__init__(**kwargs) self.error = error -class ErrorAutoGenerated(msrest.serialization.Model): - """Error definition properties. +class ErrorDetails(msrest.serialization.Model): + """Common error details representation. - :param code: Error code. - :type code: str - :param message: Error message. - :type message: str - :param target: Error target. - :type target: str - :param details: Error details. - :type details: list[~stream_analytics_management_client.models.ErrorDetails] + :ivar code: Error code. + :vartype code: str + :ivar target: Error target. + :vartype target: str + :ivar message: Error message. + :vartype message: str """ _attribute_map = { 'code': {'key': 'code', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, 'target': {'key': 'target', 'type': 'str'}, - 'details': {'key': 'details', 'type': '[ErrorDetails]'}, + 'message': {'key': 'message', 'type': 'str'}, } def __init__( self, *, code: Optional[str] = None, - message: Optional[str] = None, target: Optional[str] = None, - details: Optional[List["ErrorDetails"]] = None, + message: Optional[str] = None, **kwargs ): - super(ErrorAutoGenerated, self).__init__(**kwargs) + """ + :keyword code: Error code. + :paramtype code: str + :keyword target: Error target. + :paramtype target: str + :keyword message: Error message. + :paramtype message: str + """ + super(ErrorDetails, self).__init__(**kwargs) self.code = code - self.message = message self.target = target - self.details = details + self.message = message -class ErrorDetails(msrest.serialization.Model): - """Common error details representation. +class ErrorError(msrest.serialization.Model): + """Error definition properties. - :param code: Error code. - :type code: str - :param target: Error target. - :type target: str - :param message: Error message. - :type message: str + :ivar code: Error code. + :vartype code: str + :ivar message: Error message. + :vartype message: str + :ivar target: Error target. + :vartype target: str + :ivar details: Error details. + :vartype details: list[~stream_analytics_management_client.models.ErrorDetails] """ _attribute_map = { 'code': {'key': 'code', 'type': 'str'}, - 'target': {'key': 'target', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[ErrorDetails]'}, } def __init__( self, *, code: Optional[str] = None, - target: Optional[str] = None, message: Optional[str] = None, + target: Optional[str] = None, + details: Optional[List["ErrorDetails"]] = None, **kwargs ): - super(ErrorDetails, self).__init__(**kwargs) + """ + :keyword code: Error code. + :paramtype code: str + :keyword message: Error message. + :paramtype message: str + :keyword target: Error target. + :paramtype target: str + :keyword details: Error details. 
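The renamed wrapper nests as `Error.error` (an `ErrorError`) with `details` one level down. These models are normally deserialized from a failed response rather than built by hand; the construction below is only a shape sketch with made-up values:

```python
from azure.mgmt.streamanalytics.models import Error, ErrorDetails, ErrorError

err = Error(
    error=ErrorError(
        code="ValidationError",
        message="The request payload is invalid.",
        target="transformation",
        details=[ErrorDetails(code="InvalidQuery", target="query", message="Syntax error.")],
    )
)
print(err.error.code, [d.message for d in err.error.details])
```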
+ :paramtype details: list[~stream_analytics_management_client.models.ErrorDetails] + """ + super(ErrorError, self).__init__(**kwargs) self.code = code - self.target = target self.message = message + self.target = target + self.details = details class ErrorResponse(msrest.serialization.Model): @@ -2675,6 +2768,8 @@ def __init__( self, **kwargs ): + """ + """ super(ErrorResponse, self).__init__(**kwargs) self.code = None self.message = None @@ -2683,18 +2778,19 @@ def __init__( class ServiceBusDataSourceProperties(msrest.serialization.Model): """The common properties that are associated with Service Bus data sources (Queues, Topics, Event Hubs, etc.). - :param service_bus_namespace: The namespace that is associated with the desired Event Hub, + :ivar service_bus_namespace: The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type service_bus_namespace: str - :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus + :vartype service_bus_namespace: str + :ivar shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_name: str - :param shared_access_policy_key: The shared access policy key for the specified shared access + :vartype shared_access_policy_name: str + :ivar shared_access_policy_key: The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_key: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + :vartype shared_access_policy_key: str + :ivar authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + :vartype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode """ _attribute_map = { @@ -2713,6 +2809,21 @@ def __init__( authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, **kwargs ): + """ + :keyword service_bus_namespace: The namespace that is associated with the desired Event Hub, + Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :paramtype service_bus_namespace: str + :keyword shared_access_policy_name: The shared access policy name for the Event Hub, Service + Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :paramtype shared_access_policy_name: str + :keyword shared_access_policy_key: The shared access policy key for the specified shared access + policy. Required on PUT (CreateOrReplace) requests. + :paramtype shared_access_policy_key: str + :keyword authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :paramtype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + """ super(ServiceBusDataSourceProperties, self).__init__(**kwargs) self.service_bus_namespace = service_bus_namespace self.shared_access_policy_name = shared_access_policy_name @@ -2723,20 +2834,21 @@ def __init__( class EventHubDataSourceProperties(ServiceBusDataSourceProperties): """The common properties that are associated with Event Hub data sources. 
- :param service_bus_namespace: The namespace that is associated with the desired Event Hub, + :ivar service_bus_namespace: The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type service_bus_namespace: str - :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus + :vartype service_bus_namespace: str + :ivar shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_name: str - :param shared_access_policy_key: The shared access policy key for the specified shared access + :vartype shared_access_policy_name: str + :ivar shared_access_policy_key: The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_key: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + :vartype shared_access_policy_key: str + :ivar authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode - :param event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. - :type event_hub_name: str + :vartype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + :ivar event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. + :vartype event_hub_name: str """ _attribute_map = { @@ -2757,6 +2869,23 @@ def __init__( event_hub_name: Optional[str] = None, **kwargs ): + """ + :keyword service_bus_namespace: The namespace that is associated with the desired Event Hub, + Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :paramtype service_bus_namespace: str + :keyword shared_access_policy_name: The shared access policy name for the Event Hub, Service + Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :paramtype shared_access_policy_name: str + :keyword shared_access_policy_key: The shared access policy key for the specified shared access + policy. Required on PUT (CreateOrReplace) requests. + :paramtype shared_access_policy_key: str + :keyword authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :paramtype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + :keyword event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. + :paramtype event_hub_name: str + """ super(EventHubDataSourceProperties, self).__init__(service_bus_namespace=service_bus_namespace, shared_access_policy_name=shared_access_policy_name, shared_access_policy_key=shared_access_policy_key, authentication_mode=authentication_mode, **kwargs) self.event_hub_name = event_hub_name @@ -2766,28 +2895,29 @@ class EventHubOutputDataSource(OutputDataSource): All required parameters must be populated in order to send to Azure. - :param type: Required. Indicates the type of data source output will be written to. Required on + :ivar type: Required. Indicates the type of data source output will be written to. Required on PUT (CreateOrReplace) requests.Constant filled by server. 
- :type type: str - :param service_bus_namespace: The namespace that is associated with the desired Event Hub, + :vartype type: str + :ivar service_bus_namespace: The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type service_bus_namespace: str - :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus + :vartype service_bus_namespace: str + :ivar shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_name: str - :param shared_access_policy_key: The shared access policy key for the specified shared access + :vartype shared_access_policy_name: str + :ivar shared_access_policy_key: The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_key: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + :vartype shared_access_policy_key: str + :ivar authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode - :param event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. - :type event_hub_name: str - :param partition_key: The key/column that is used to determine to which partition to send event + :vartype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + :ivar event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. + :vartype event_hub_name: str + :ivar partition_key: The key/column that is used to determine to which partition to send event data. - :type partition_key: str - :param property_columns: - :type property_columns: list[str] + :vartype partition_key: str + :ivar property_columns: The properties associated with this Event Hub output. + :vartype property_columns: list[str] """ _validation = { @@ -2817,6 +2947,28 @@ def __init__( property_columns: Optional[List[str]] = None, **kwargs ): + """ + :keyword service_bus_namespace: The namespace that is associated with the desired Event Hub, + Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :paramtype service_bus_namespace: str + :keyword shared_access_policy_name: The shared access policy name for the Event Hub, Service + Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :paramtype shared_access_policy_name: str + :keyword shared_access_policy_key: The shared access policy key for the specified shared access + policy. Required on PUT (CreateOrReplace) requests. + :paramtype shared_access_policy_key: str + :keyword authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :paramtype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + :keyword event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. + :paramtype event_hub_name: str + :keyword partition_key: The key/column that is used to determine to which partition to send + event data. + :paramtype partition_key: str + :keyword property_columns: The properties associated with this Event Hub output. 
+ :paramtype property_columns: list[str] + """ super(EventHubOutputDataSource, self).__init__(**kwargs) self.type = 'Microsoft.ServiceBus/EventHub' # type: str self.service_bus_namespace = service_bus_namespace @@ -2831,25 +2983,26 @@ def __init__( class EventHubOutputDataSourceProperties(EventHubDataSourceProperties): """The properties that are associated with an Event Hub output. - :param service_bus_namespace: The namespace that is associated with the desired Event Hub, + :ivar service_bus_namespace: The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type service_bus_namespace: str - :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus + :vartype service_bus_namespace: str + :ivar shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_name: str - :param shared_access_policy_key: The shared access policy key for the specified shared access + :vartype shared_access_policy_name: str + :ivar shared_access_policy_key: The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_key: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + :vartype shared_access_policy_key: str + :ivar authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode - :param event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. - :type event_hub_name: str - :param partition_key: The key/column that is used to determine to which partition to send event + :vartype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + :ivar event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. + :vartype event_hub_name: str + :ivar partition_key: The key/column that is used to determine to which partition to send event data. - :type partition_key: str - :param property_columns: - :type property_columns: list[str] + :vartype partition_key: str + :ivar property_columns: The properties associated with this Event Hub output. + :vartype property_columns: list[str] """ _attribute_map = { @@ -2874,6 +3027,28 @@ def __init__( property_columns: Optional[List[str]] = None, **kwargs ): + """ + :keyword service_bus_namespace: The namespace that is associated with the desired Event Hub, + Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :paramtype service_bus_namespace: str + :keyword shared_access_policy_name: The shared access policy name for the Event Hub, Service + Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :paramtype shared_access_policy_name: str + :keyword shared_access_policy_key: The shared access policy key for the specified shared access + policy. Required on PUT (CreateOrReplace) requests. + :paramtype shared_access_policy_key: str + :keyword authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :paramtype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + :keyword event_hub_name: The name of the Event Hub. 
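A sketch of the Event Hub output datasource covered above; all names are hypothetical, and the `type` discriminator (`'Microsoft.ServiceBus/EventHub'`) is constant-filled by the model:

```python
from azure.mgmt.streamanalytics.models import EventHubOutputDataSource

eventhub_output = EventHubOutputDataSource(
    service_bus_namespace="my-namespace",
    shared_access_policy_name="send-policy",
    shared_access_policy_key="<policy-key>",
    event_hub_name="output-hub",
    partition_key="deviceId",                    # routes events to partitions
    property_columns=["deviceId", "eventTime"],  # columns surfaced on the event
)
```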
Required on PUT (CreateOrReplace) requests. + :paramtype event_hub_name: str + :keyword partition_key: The key/column that is used to determine to which partition to send + event data. + :paramtype partition_key: str + :keyword property_columns: The properties associated with this Event Hub output. + :paramtype property_columns: list[str] + """ super(EventHubOutputDataSourceProperties, self).__init__(service_bus_namespace=service_bus_namespace, shared_access_policy_name=shared_access_policy_name, shared_access_policy_key=shared_access_policy_key, authentication_mode=authentication_mode, event_hub_name=event_hub_name, **kwargs) self.partition_key = partition_key self.property_columns = property_columns @@ -2884,28 +3059,29 @@ class EventHubStreamInputDataSource(StreamInputDataSource): All required parameters must be populated in order to send to Azure. - :param type: Required. Indicates the type of input data source containing stream data. Required + :ivar type: Required. Indicates the type of input data source containing stream data. Required on PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param service_bus_namespace: The namespace that is associated with the desired Event Hub, + :vartype type: str + :ivar service_bus_namespace: The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type service_bus_namespace: str - :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus + :vartype service_bus_namespace: str + :ivar shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_name: str - :param shared_access_policy_key: The shared access policy key for the specified shared access + :vartype shared_access_policy_name: str + :ivar shared_access_policy_key: The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_key: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + :vartype shared_access_policy_key: str + :ivar authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode - :param event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. - :type event_hub_name: str - :param consumer_group_name: The name of an Event Hub Consumer Group that should be used to read + :vartype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + :ivar event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. + :vartype event_hub_name: str + :ivar consumer_group_name: The name of an Event Hub Consumer Group that should be used to read events from the Event Hub. Specifying distinct consumer group names for multiple inputs allows each of those inputs to receive the same events from the Event Hub. If not specified, the input uses the Event Hub’s default consumer group. 
- :type consumer_group_name: str + :vartype consumer_group_name: str """ _validation = { @@ -2933,6 +3109,28 @@ def __init__( consumer_group_name: Optional[str] = None, **kwargs ): + """ + :keyword service_bus_namespace: The namespace that is associated with the desired Event Hub, + Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :paramtype service_bus_namespace: str + :keyword shared_access_policy_name: The shared access policy name for the Event Hub, Service + Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :paramtype shared_access_policy_name: str + :keyword shared_access_policy_key: The shared access policy key for the specified shared access + policy. Required on PUT (CreateOrReplace) requests. + :paramtype shared_access_policy_key: str + :keyword authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :paramtype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + :keyword event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. + :paramtype event_hub_name: str + :keyword consumer_group_name: The name of an Event Hub Consumer Group that should be used to + read events from the Event Hub. Specifying distinct consumer group names for multiple inputs + allows each of those inputs to receive the same events from the Event Hub. If not specified, + the input uses the Event Hub’s default consumer group. + :paramtype consumer_group_name: str + """ super(EventHubStreamInputDataSource, self).__init__(**kwargs) self.type = 'Microsoft.ServiceBus/EventHub' # type: str self.service_bus_namespace = service_bus_namespace @@ -2946,25 +3144,26 @@ def __init__( class EventHubStreamInputDataSourceProperties(EventHubDataSourceProperties): """The properties that are associated with a Event Hub input containing stream data. - :param service_bus_namespace: The namespace that is associated with the desired Event Hub, + :ivar service_bus_namespace: The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type service_bus_namespace: str - :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus + :vartype service_bus_namespace: str + :ivar shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_name: str - :param shared_access_policy_key: The shared access policy key for the specified shared access + :vartype shared_access_policy_name: str + :ivar shared_access_policy_key: The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_key: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + :vartype shared_access_policy_key: str + :ivar authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode - :param event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. 
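The consumer-group note above is the operative constraint when one hub feeds several inputs: give each input its own consumer group so each receives a full copy of the stream. A sketch with hypothetical names:

```python
from azure.mgmt.streamanalytics.models import EventHubStreamInputDataSource

telemetry_input = EventHubStreamInputDataSource(
    service_bus_namespace="my-namespace",
    shared_access_policy_name="listen-policy",
    shared_access_policy_key="<policy-key>",
    event_hub_name="telemetry-hub",
    consumer_group_name="asa-job-1",  # omit to fall back to the hub's default group
)
```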
- :type event_hub_name: str - :param consumer_group_name: The name of an Event Hub Consumer Group that should be used to read + :vartype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + :ivar event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. + :vartype event_hub_name: str + :ivar consumer_group_name: The name of an Event Hub Consumer Group that should be used to read events from the Event Hub. Specifying distinct consumer group names for multiple inputs allows each of those inputs to receive the same events from the Event Hub. If not specified, the input uses the Event Hub’s default consumer group. - :type consumer_group_name: str + :vartype consumer_group_name: str """ _attribute_map = { @@ -2987,6 +3186,28 @@ def __init__( consumer_group_name: Optional[str] = None, **kwargs ): + """ + :keyword service_bus_namespace: The namespace that is associated with the desired Event Hub, + Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :paramtype service_bus_namespace: str + :keyword shared_access_policy_name: The shared access policy name for the Event Hub, Service + Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :paramtype shared_access_policy_name: str + :keyword shared_access_policy_key: The shared access policy key for the specified shared access + policy. Required on PUT (CreateOrReplace) requests. + :paramtype shared_access_policy_key: str + :keyword authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :paramtype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + :keyword event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. + :paramtype event_hub_name: str + :keyword consumer_group_name: The name of an Event Hub Consumer Group that should be used to + read events from the Event Hub. Specifying distinct consumer group names for multiple inputs + allows each of those inputs to receive the same events from the Event Hub. If not specified, + the input uses the Event Hub’s default consumer group. + :paramtype consumer_group_name: str + """ super(EventHubStreamInputDataSourceProperties, self).__init__(service_bus_namespace=service_bus_namespace, shared_access_policy_name=shared_access_policy_name, shared_access_policy_key=shared_access_policy_key, authentication_mode=authentication_mode, event_hub_name=event_hub_name, **kwargs) self.consumer_group_name = consumer_group_name @@ -2996,28 +3217,29 @@ class EventHubV2OutputDataSource(OutputDataSource): All required parameters must be populated in order to send to Azure. - :param type: Required. Indicates the type of data source output will be written to. Required on + :ivar type: Required. Indicates the type of data source output will be written to. Required on PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param service_bus_namespace: The namespace that is associated with the desired Event Hub, + :vartype type: str + :ivar service_bus_namespace: The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. 
- :type service_bus_namespace: str - :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus + :vartype service_bus_namespace: str + :ivar shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_name: str - :param shared_access_policy_key: The shared access policy key for the specified shared access + :vartype shared_access_policy_name: str + :ivar shared_access_policy_key: The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_key: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + :vartype shared_access_policy_key: str + :ivar authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode - :param event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. - :type event_hub_name: str - :param partition_key: The key/column that is used to determine to which partition to send event + :vartype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + :ivar event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. + :vartype event_hub_name: str + :ivar partition_key: The key/column that is used to determine to which partition to send event data. - :type partition_key: str - :param property_columns: - :type property_columns: list[str] + :vartype partition_key: str + :ivar property_columns: The properties associated with this Event Hub output. + :vartype property_columns: list[str] """ _validation = { @@ -3047,6 +3269,28 @@ def __init__( property_columns: Optional[List[str]] = None, **kwargs ): + """ + :keyword service_bus_namespace: The namespace that is associated with the desired Event Hub, + Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :paramtype service_bus_namespace: str + :keyword shared_access_policy_name: The shared access policy name for the Event Hub, Service + Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :paramtype shared_access_policy_name: str + :keyword shared_access_policy_key: The shared access policy key for the specified shared access + policy. Required on PUT (CreateOrReplace) requests. + :paramtype shared_access_policy_key: str + :keyword authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :paramtype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + :keyword event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. + :paramtype event_hub_name: str + :keyword partition_key: The key/column that is used to determine to which partition to send + event data. + :paramtype partition_key: str + :keyword property_columns: The properties associated with this Event Hub output. + :paramtype property_columns: list[str] + """ super(EventHubV2OutputDataSource, self).__init__(**kwargs) self.type = 'Microsoft.EventHub/EventHub' # type: str self.service_bus_namespace = service_bus_namespace @@ -3063,28 +3307,29 @@ class EventHubV2StreamInputDataSource(StreamInputDataSource): All required parameters must be populated in order to send to Azure. 
- :param type: Required. Indicates the type of input data source containing stream data. Required + :ivar type: Required. Indicates the type of input data source containing stream data. Required on PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param service_bus_namespace: The namespace that is associated with the desired Event Hub, + :vartype type: str + :ivar service_bus_namespace: The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type service_bus_namespace: str - :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus + :vartype service_bus_namespace: str + :ivar shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_name: str - :param shared_access_policy_key: The shared access policy key for the specified shared access + :vartype shared_access_policy_name: str + :ivar shared_access_policy_key: The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_key: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + :vartype shared_access_policy_key: str + :ivar authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode - :param event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. - :type event_hub_name: str - :param consumer_group_name: The name of an Event Hub Consumer Group that should be used to read + :vartype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + :ivar event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. + :vartype event_hub_name: str + :ivar consumer_group_name: The name of an Event Hub Consumer Group that should be used to read events from the Event Hub. Specifying distinct consumer group names for multiple inputs allows each of those inputs to receive the same events from the Event Hub. If not specified, the input uses the Event Hub’s default consumer group. - :type consumer_group_name: str + :vartype consumer_group_name: str """ _validation = { @@ -3112,6 +3357,28 @@ def __init__( consumer_group_name: Optional[str] = None, **kwargs ): + """ + :keyword service_bus_namespace: The namespace that is associated with the desired Event Hub, + Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :paramtype service_bus_namespace: str + :keyword shared_access_policy_name: The shared access policy name for the Event Hub, Service + Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :paramtype shared_access_policy_name: str + :keyword shared_access_policy_key: The shared access policy key for the specified shared access + policy. Required on PUT (CreateOrReplace) requests. + :paramtype shared_access_policy_key: str + :keyword authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :paramtype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + :keyword event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. 
+ :paramtype event_hub_name: str + :keyword consumer_group_name: The name of an Event Hub Consumer Group that should be used to + read events from the Event Hub. Specifying distinct consumer group names for multiple inputs + allows each of those inputs to receive the same events from the Event Hub. If not specified, + the input uses the Event Hub’s default consumer group. + :paramtype consumer_group_name: str + """ super(EventHubV2StreamInputDataSource, self).__init__(**kwargs) self.type = 'Microsoft.EventHub/EventHub' # type: str self.service_bus_namespace = service_bus_namespace @@ -3122,37 +3389,6 @@ def __init__( self.consumer_group_name = consumer_group_name -class External(msrest.serialization.Model): - """The storage account where the custom code artifacts are located. - - :param storage_account: The properties that are associated with an Azure Storage account. - :type storage_account: ~stream_analytics_management_client.models.StorageAccount - :param container: - :type container: str - :param path: - :type path: str - """ - - _attribute_map = { - 'storage_account': {'key': 'storageAccount', 'type': 'StorageAccount'}, - 'container': {'key': 'container', 'type': 'str'}, - 'path': {'key': 'path', 'type': 'str'}, - } - - def __init__( - self, - *, - storage_account: Optional["StorageAccount"] = None, - container: Optional[str] = None, - path: Optional[str] = None, - **kwargs - ): - super(External, self).__init__(**kwargs) - self.storage_account = storage_account - self.container = container - self.path = path - - class SubResource(msrest.serialization.Model): """The base sub-resource model definition. @@ -3160,8 +3396,8 @@ class SubResource(msrest.serialization.Model): :ivar id: Resource Id. :vartype id: str - :param name: Resource name. - :type name: str + :ivar name: Resource name. + :vartype name: str :ivar type: Resource type. :vartype type: str """ @@ -3183,6 +3419,10 @@ def __init__( name: Optional[str] = None, **kwargs ): + """ + :keyword name: Resource name. + :paramtype name: str + """ super(SubResource, self).__init__(**kwargs) self.id = None self.name = name @@ -3196,12 +3436,12 @@ class Function(SubResource): :ivar id: Resource Id. :vartype id: str - :param name: Resource name. - :type name: str + :ivar name: Resource name. + :vartype name: str :ivar type: Resource type. :vartype type: str - :param properties: The properties that are associated with a function. - :type properties: ~stream_analytics_management_client.models.FunctionProperties + :ivar properties: The properties that are associated with a function. + :vartype properties: ~stream_analytics_management_client.models.FunctionProperties """ _validation = { @@ -3223,6 +3463,12 @@ def __init__( properties: Optional["FunctionProperties"] = None, **kwargs ): + """ + :keyword name: Resource name. + :paramtype name: str + :keyword properties: The properties that are associated with a function. + :paramtype properties: ~stream_analytics_management_client.models.FunctionProperties + """ super(Function, self).__init__(name=name, **kwargs) self.properties = properties @@ -3230,13 +3476,13 @@ def __init__( class FunctionInput(msrest.serialization.Model): """Describes one input parameter of a function. - :param data_type: The (Azure Stream Analytics supported) data type of the function input + :ivar data_type: The (Azure Stream Analytics supported) data type of the function input parameter. A list of valid Azure Stream Analytics data types are described at https://msdn.microsoft.com/en-us/library/azure/dn835065.aspx. 
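The V2 classes mirror the classic Event Hub sources field for field; only the `type` discriminator differs, selecting the `Microsoft.EventHub` rather than the `Microsoft.ServiceBus` resource provider. A quick check of the constant-filled discriminators:

```python
from azure.mgmt.streamanalytics.models import (
    EventHubStreamInputDataSource,
    EventHubV2StreamInputDataSource,
)

classic = EventHubStreamInputDataSource(event_hub_name="hub")
v2 = EventHubV2StreamInputDataSource(event_hub_name="hub")
print(classic.type)  # Microsoft.ServiceBus/EventHub
print(v2.type)       # Microsoft.EventHub/EventHub
```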
- :type data_type: str - :param is_configuration_parameter: A flag indicating if the parameter is a configuration + :vartype data_type: str + :ivar is_configuration_parameter: A flag indicating if the parameter is a configuration parameter. True if this input parameter is expected to be a constant. Default is false. - :type is_configuration_parameter: bool + :vartype is_configuration_parameter: bool """ _attribute_map = { @@ -3251,6 +3497,15 @@ def __init__( is_configuration_parameter: Optional[bool] = None, **kwargs ): + """ + :keyword data_type: The (Azure Stream Analytics supported) data type of the function input + parameter. A list of valid Azure Stream Analytics data types are described at + https://msdn.microsoft.com/en-us/library/azure/dn835065.aspx. + :paramtype data_type: str + :keyword is_configuration_parameter: A flag indicating if the parameter is a configuration + parameter. True if this input parameter is expected to be a constant. Default is false. + :paramtype is_configuration_parameter: bool + """ super(FunctionInput, self).__init__(**kwargs) self.data_type = data_type self.is_configuration_parameter = is_configuration_parameter @@ -3281,6 +3536,8 @@ def __init__( self, **kwargs ): + """ + """ super(FunctionListResult, self).__init__(**kwargs) self.value = None self.next_link = None @@ -3289,10 +3546,10 @@ def __init__( class FunctionOutput(msrest.serialization.Model): """Describes the output of a function. - :param data_type: The (Azure Stream Analytics supported) data type of the function output. A - list of valid Azure Stream Analytics data types are described at https://msdn.microsoft.com/en- - us/library/azure/dn835065.aspx. - :type data_type: str + :ivar data_type: The (Azure Stream Analytics supported) data type of the function output. A + list of valid Azure Stream Analytics data types are described at + https://msdn.microsoft.com/en-us/library/azure/dn835065.aspx. + :vartype data_type: str """ _attribute_map = { @@ -3305,19 +3562,68 @@ def __init__( data_type: Optional[str] = None, **kwargs ): + """ + :keyword data_type: The (Azure Stream Analytics supported) data type of the function output. A + list of valid Azure Stream Analytics data types are described at + https://msdn.microsoft.com/en-us/library/azure/dn835065.aspx. + :paramtype data_type: str + """ super(FunctionOutput, self).__init__(**kwargs) self.data_type = data_type +class FunctionProperties(msrest.serialization.Model): + """The properties that are associated with a function. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ScalarFunctionProperties. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Indicates the type of function.Constant filled by server. + :vartype type: str + :ivar etag: The current entity tag for the function. This is an opaque string. You can use it + to detect whether the resource has changed between requests. You can also use it in the + If-Match or If-None-Match headers for write operations for optimistic concurrency. 
+ :vartype etag: str + """ + + _validation = { + 'type': {'required': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Scalar': 'ScalarFunctionProperties'} + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(FunctionProperties, self).__init__(**kwargs) + self.type = None # type: Optional[str] + self.etag = None + + class Identity(msrest.serialization.Model): """Describes how identity is verified. - :param tenant_id: - :type tenant_id: str - :param principal_id: - :type principal_id: str - :param type: - :type type: str + :ivar tenant_id: The identity tenantId. + :vartype tenant_id: str + :ivar principal_id: The identity principal ID. + :vartype principal_id: str + :ivar type: The identity type. + :vartype type: str """ _attribute_map = { @@ -3334,6 +3640,14 @@ def __init__( type: Optional[str] = None, **kwargs ): + """ + :keyword tenant_id: The identity tenantId. + :paramtype tenant_id: str + :keyword principal_id: The identity principal ID. + :paramtype principal_id: str + :keyword type: The identity type. + :paramtype type: str + """ super(Identity, self).__init__(**kwargs) self.tenant_id = tenant_id self.principal_id = principal_id @@ -3347,13 +3661,13 @@ class Input(SubResource): :ivar id: Resource Id. :vartype id: str - :param name: Resource name. - :type name: str + :ivar name: Resource name. + :vartype name: str :ivar type: Resource type. :vartype type: str - :param properties: The properties that are associated with an input. Required on PUT + :ivar properties: The properties that are associated with an input. Required on PUT (CreateOrReplace) requests. - :type properties: ~stream_analytics_management_client.models.InputProperties + :vartype properties: ~stream_analytics_management_client.models.InputProperties """ _validation = { @@ -3375,6 +3689,13 @@ def __init__( properties: Optional["InputProperties"] = None, **kwargs ): + """ + :keyword name: Resource name. + :paramtype name: str + :keyword properties: The properties that are associated with an input. Required on PUT + (CreateOrReplace) requests. + :paramtype properties: ~stream_analytics_management_client.models.InputProperties + """ super(Input, self).__init__(name=name, **kwargs) self.properties = properties @@ -3404,6 +3725,8 @@ def __init__( self, **kwargs ): + """ + """ super(InputListResult, self).__init__(**kwargs) self.value = None self.next_link = None @@ -3419,12 +3742,12 @@ class InputProperties(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param type: Required. Indicates whether the input is a source of reference data or stream - data. Required on PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param serialization: Describes how data from an input is serialized or how data is serialized + :ivar type: Required. Indicates whether the input is a source of reference data or stream data. + Required on PUT (CreateOrReplace) requests.Constant filled by server. + :vartype type: str + :ivar serialization: Describes how data from an input is serialized or how data is serialized when written to an output. Required on PUT (CreateOrReplace) requests. 
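`FunctionProperties` is now a pure polymorphic base: the subtype map sends the `'Scalar'` discriminator to `ScalarFunctionProperties`, and per the changelog the `binding`/`inputs`/`output` parameters moved off the base class. A sketch written under the assumption that they are accepted by the `'Scalar'` subtype:

```python
from azure.mgmt.streamanalytics.models import (
    Function,
    FunctionInput,
    FunctionOutput,
    JavaScriptFunctionBinding,
    ScalarFunctionProperties,
)

# Assumption: binding/inputs/output now live on ScalarFunctionProperties.
add_udf = Function(
    properties=ScalarFunctionProperties(
        inputs=[FunctionInput(data_type="any"), FunctionInput(data_type="any")],
        output=FunctionOutput(data_type="any"),
        binding=JavaScriptFunctionBinding(script="function (x, y) { return x + y; }"),
    )
)
```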
- :type serialization: ~stream_analytics_management_client.models.Serialization + :vartype serialization: ~stream_analytics_management_client.models.Serialization :ivar diagnostics: Describes conditions applicable to the Input, Output, or the job overall, that warrant customer attention. :vartype diagnostics: ~stream_analytics_management_client.models.Diagnostics @@ -3432,11 +3755,11 @@ class InputProperties(msrest.serialization.Model): detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency. :vartype etag: str - :param compression: Describes how input data is compressed. - :type compression: ~stream_analytics_management_client.models.Compression - :param partition_key: partitionKey Describes a key in the input data which is used for + :ivar compression: Describes how input data is compressed. + :vartype compression: ~stream_analytics_management_client.models.Compression + :ivar partition_key: partitionKey Describes a key in the input data which is used for partitioning the input data. - :type partition_key: str + :vartype partition_key: str """ _validation = { @@ -3466,6 +3789,16 @@ def __init__( partition_key: Optional[str] = None, **kwargs ): + """ + :keyword serialization: Describes how data from an input is serialized or how data is + serialized when written to an output. Required on PUT (CreateOrReplace) requests. + :paramtype serialization: ~stream_analytics_management_client.models.Serialization + :keyword compression: Describes how input data is compressed. + :paramtype compression: ~stream_analytics_management_client.models.Compression + :keyword partition_key: partitionKey Describes a key in the input data which is used for + partitioning the input data. + :paramtype partition_key: str + """ super(InputProperties, self).__init__(**kwargs) self.type = None # type: Optional[str] self.serialization = serialization @@ -3480,25 +3813,25 @@ class IoTHubStreamInputDataSource(StreamInputDataSource): All required parameters must be populated in order to send to Azure. - :param type: Required. Indicates the type of input data source containing stream data. Required + :ivar type: Required. Indicates the type of input data source containing stream data. Required on PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param iot_hub_namespace: The name or the URI of the IoT Hub. Required on PUT (CreateOrReplace) + :vartype type: str + :ivar iot_hub_namespace: The name or the URI of the IoT Hub. Required on PUT (CreateOrReplace) requests. - :type iot_hub_namespace: str - :param shared_access_policy_name: The shared access policy name for the IoT Hub. This policy + :vartype iot_hub_namespace: str + :ivar shared_access_policy_name: The shared access policy name for the IoT Hub. This policy must contain at least the Service connect permission. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_name: str - :param shared_access_policy_key: The shared access policy key for the specified shared access + :vartype shared_access_policy_name: str + :ivar shared_access_policy_key: The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests. 
- :type shared_access_policy_key: str - :param consumer_group_name: The name of an IoT Hub Consumer Group that should be used to read + :vartype shared_access_policy_key: str + :ivar consumer_group_name: The name of an IoT Hub Consumer Group that should be used to read events from the IoT Hub. If not specified, the input uses the Iot Hub’s default consumer group. - :type consumer_group_name: str - :param endpoint: The IoT Hub endpoint to connect to (ie. messages/events, + :vartype consumer_group_name: str + :ivar endpoint: The IoT Hub endpoint to connect to (ie. messages/events, messages/operationsMonitoringEvents, etc.). - :type endpoint: str + :vartype endpoint: str """ _validation = { @@ -3524,6 +3857,24 @@ def __init__( endpoint: Optional[str] = None, **kwargs ): + """ + :keyword iot_hub_namespace: The name or the URI of the IoT Hub. Required on PUT + (CreateOrReplace) requests. + :paramtype iot_hub_namespace: str + :keyword shared_access_policy_name: The shared access policy name for the IoT Hub. This policy + must contain at least the Service connect permission. Required on PUT (CreateOrReplace) + requests. + :paramtype shared_access_policy_name: str + :keyword shared_access_policy_key: The shared access policy key for the specified shared access + policy. Required on PUT (CreateOrReplace) requests. + :paramtype shared_access_policy_key: str + :keyword consumer_group_name: The name of an IoT Hub Consumer Group that should be used to read + events from the IoT Hub. If not specified, the input uses the Iot Hub’s default consumer group. + :paramtype consumer_group_name: str + :keyword endpoint: The IoT Hub endpoint to connect to (ie. messages/events, + messages/operationsMonitoringEvents, etc.). + :paramtype endpoint: str + """ super(IoTHubStreamInputDataSource, self).__init__(**kwargs) self.type = 'Microsoft.Devices/IotHubs' # type: str self.iot_hub_namespace = iot_hub_namespace @@ -3538,11 +3889,11 @@ class JavaScriptFunctionBinding(FunctionBinding): All required parameters must be populated in order to send to Azure. - :param type: Required. Indicates the function binding type.Constant filled by server. - :type type: str - :param script: The JavaScript code containing a single function definition. For example: + :ivar type: Required. Indicates the function binding type.Constant filled by server. + :vartype type: str + :ivar script: The JavaScript code containing a single function definition. For example: 'function (x, y) { return x + y; }'. - :type script: str + :vartype script: str """ _validation = { @@ -3560,6 +3911,11 @@ def __init__( script: Optional[str] = None, **kwargs ): + """ + :keyword script: The JavaScript code containing a single function definition. For example: + 'function (x, y) { return x + y; }'. + :paramtype script: str + """ super(JavaScriptFunctionBinding, self).__init__(**kwargs) self.type = 'Microsoft.StreamAnalytics/JavascriptUdf' # type: str self.script = script @@ -3568,22 +3924,20 @@ def __init__( class JavaScriptFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): """The parameters needed to retrieve the default function definition for a JavaScript function. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :param binding_type: Required. Indicates the function binding type.Constant filled by server. - :type binding_type: str - :param script: The JavaScript code containing a single function definition. 
For example: + :ivar binding_type: Required. Indicates the function binding type.Constant filled by server. + :vartype binding_type: str + :ivar script: The JavaScript code containing a single function definition. For example: 'function (x, y) { return x + y; }'. - :type script: str - :ivar udf_type: The function type. Default value: "Scalar". + :vartype script: str + :ivar udf_type: The function type. The only acceptable values to pass in are None and "Scalar". + The default value is None. :vartype udf_type: str """ _validation = { 'binding_type': {'required': True}, - 'udf_type': {'constant': True}, } _attribute_map = { @@ -3592,28 +3946,36 @@ class JavaScriptFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefa 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'str'}, } - udf_type = "Scalar" - def __init__( self, *, script: Optional[str] = None, + udf_type: Optional[str] = None, **kwargs ): + """ + :keyword script: The JavaScript code containing a single function definition. For example: + 'function (x, y) { return x + y; }'. + :paramtype script: str + :keyword udf_type: The function type. The only acceptable values to pass in are None and + "Scalar". The default value is None. + :paramtype udf_type: str + """ super(JavaScriptFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) self.binding_type = 'Microsoft.StreamAnalytics/JavascriptUdf' # type: str self.script = script + self.udf_type = udf_type class StorageAccount(msrest.serialization.Model): """The properties that are associated with an Azure Storage account. - :param account_name: The name of the Azure Storage account. Required on PUT (CreateOrReplace) + :ivar account_name: The name of the Azure Storage account. Required on PUT (CreateOrReplace) requests. - :type account_name: str - :param account_key: The account key for the Azure Storage account. Required on PUT + :vartype account_name: str + :ivar account_key: The account key for the Azure Storage account. Required on PUT (CreateOrReplace) requests. - :type account_key: str + :vartype account_key: str """ _attribute_map = { @@ -3628,6 +3990,14 @@ def __init__( account_key: Optional[str] = None, **kwargs ): + """ + :keyword account_name: The name of the Azure Storage account. Required on PUT (CreateOrReplace) + requests. + :paramtype account_name: str + :keyword account_key: The account key for the Azure Storage account. Required on PUT + (CreateOrReplace) requests. + :paramtype account_key: str + """ super(StorageAccount, self).__init__(**kwargs) self.account_name = account_name self.account_key = account_key @@ -3636,15 +4006,16 @@ def __init__( class JobStorageAccount(StorageAccount): """The properties that are associated with an Azure Storage account with MSI. - :param account_name: The name of the Azure Storage account. Required on PUT (CreateOrReplace) + :ivar account_name: The name of the Azure Storage account. Required on PUT (CreateOrReplace) requests. - :type account_name: str - :param account_key: The account key for the Azure Storage account. Required on PUT + :vartype account_name: str + :ivar account_key: The account key for the Azure Storage account. Required on PUT (CreateOrReplace) requests. - :type account_key: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + :vartype account_key: str + :ivar authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", "ConnectionString". 
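# Sketch of the retrieve-default-definition parameters after the change above: udf_type
# is no longer a class-level constant and may now be passed as None or "Scalar". The
# import path is assumed; the script is the docstring's own example.
from azure.mgmt.streamanalytics.models import JavaScriptFunctionRetrieveDefaultDefinitionParameters

params = JavaScriptFunctionRetrieveDefaultDefinitionParameters(
    script="function (x, y) { return x + y; }",
    udf_type="Scalar",   # or omit the keyword entirely to send None
)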
- :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + :vartype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode """ _attribute_map = { @@ -3661,6 +4032,18 @@ def __init__( authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, **kwargs ): + """ + :keyword account_name: The name of the Azure Storage account. Required on PUT (CreateOrReplace) + requests. + :paramtype account_name: str + :keyword account_key: The account key for the Azure Storage account. Required on PUT + (CreateOrReplace) requests. + :paramtype account_key: str + :keyword authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :paramtype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + """ super(JobStorageAccount, self).__init__(account_name=account_name, account_key=account_key, **kwargs) self.authentication_mode = authentication_mode @@ -3670,21 +4053,22 @@ class JsonSerialization(Serialization): All required parameters must be populated in order to send to Azure. - :param type: Required. Indicates the type of serialization that the input or output uses. - Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: - "Csv", "Avro", "Json", "CustomClr", "Parquet". - :type type: str or ~stream_analytics_management_client.models.EventSerializationType - :param encoding: Specifies the encoding of the incoming data in the case of input and the + :ivar type: Required. Indicates the type of serialization that the input or output uses. + Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: + "Csv", "Avro", "Json", "Parquet". + :vartype type: str or ~stream_analytics_management_client.models.EventSerializationType + :ivar encoding: Specifies the encoding of the incoming data in the case of input and the encoding of outgoing data in the case of output. Required on PUT (CreateOrReplace) requests. Possible values include: "UTF8". - :type encoding: str or ~stream_analytics_management_client.models.Encoding - :param format: This property only applies to JSON serialization of outputs only. It is not + :vartype encoding: str or ~stream_analytics_management_client.models.Encoding + :ivar format: This property only applies to JSON serialization of outputs only. It is not applicable to inputs. This property specifies the format of the JSON the output will be written in. The currently supported values are 'lineSeparated' indicating the output will be formatted by having each JSON object separated by a new line and 'array' indicating the output will be formatted as an array of JSON objects. Default value is 'lineSeparated' if left null. Possible values include: "LineSeparated", "Array". - :type format: str or ~stream_analytics_management_client.models.JsonOutputSerializationFormat + :vartype format: str or + ~stream_analytics_management_client.models.JsonOutputSerializationFormat """ _validation = { @@ -3704,6 +4088,20 @@ def __init__( format: Optional[Union[str, "JsonOutputSerializationFormat"]] = None, **kwargs ): + """ + :keyword encoding: Specifies the encoding of the incoming data in the case of input and the + encoding of outgoing data in the case of output. Required on PUT (CreateOrReplace) requests. + Possible values include: "UTF8". 
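# JobStorageAccount sketch, assuming the package-level models import. The account name
# and key are placeholders; authentication_mode accepts the AuthenticationMode enum or
# its string value.
from azure.mgmt.streamanalytics.models import JobStorageAccount

storage = JobStorageAccount(
    account_name="examplestorageacct",       # placeholder
    account_key="<account-key>",             # placeholder secret
    authentication_mode="ConnectionString",  # "Msi" and "UserToken" are also accepted
)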
+ :paramtype encoding: str or ~stream_analytics_management_client.models.Encoding + :keyword format: This property only applies to JSON serialization of outputs only. It is not + applicable to inputs. This property specifies the format of the JSON the output will be written + in. The currently supported values are 'lineSeparated' indicating the output will be formatted + by having each JSON object separated by a new line and 'array' indicating the output will be + formatted as an array of JSON objects. Default value is 'lineSeparated' if left null. Possible + values include: "LineSeparated", "Array". + :paramtype format: str or + ~stream_analytics_management_client.models.JsonOutputSerializationFormat + """ super(JsonSerialization, self).__init__(**kwargs) self.type = 'Json' # type: str self.encoding = encoding @@ -3717,6 +4115,8 @@ class Operation(msrest.serialization.Model): :ivar name: The name of the operation being performed on this particular object. :vartype name: str + :ivar is_data_action: Indicates whether the operation is a data action. + :vartype is_data_action: bool :ivar display: Contains the localized display information for this particular operation / action. :vartype display: ~stream_analytics_management_client.models.OperationDisplay @@ -3729,15 +4129,23 @@ class Operation(msrest.serialization.Model): _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, + 'is_data_action': {'key': 'isDataAction', 'type': 'bool'}, 'display': {'key': 'display', 'type': 'OperationDisplay'}, } def __init__( self, + *, + is_data_action: Optional[bool] = None, **kwargs ): + """ + :keyword is_data_action: Indicates whether the operation is a data action. + :paramtype is_data_action: bool + """ super(Operation, self).__init__(**kwargs) self.name = None + self.is_data_action = is_data_action self.display = None @@ -3775,6 +4183,8 @@ def __init__( self, **kwargs ): + """ + """ super(OperationDisplay, self).__init__(**kwargs) self.provider = None self.resource = None @@ -3808,6 +4218,8 @@ def __init__( self, **kwargs ): + """ + """ super(OperationListResult, self).__init__(**kwargs) self.value = None self.next_link = None @@ -3820,20 +4232,20 @@ class Output(SubResource): :ivar id: Resource Id. :vartype id: str - :param name: Resource name. - :type name: str + :ivar name: Resource name. + :vartype name: str :ivar type: Resource type. :vartype type: str - :param datasource: Describes the data source that output will be written to. Required on PUT + :ivar datasource: Describes the data source that output will be written to. Required on PUT (CreateOrReplace) requests. - :type datasource: ~stream_analytics_management_client.models.OutputDataSource - :param time_window: - :type time_window: str - :param size_window: - :type size_window: float - :param serialization: Describes how data from an input is serialized or how data is serialized + :vartype datasource: ~stream_analytics_management_client.models.OutputDataSource + :ivar time_window: The time frame for filtering Stream Analytics job outputs. + :vartype time_window: str + :ivar size_window: The size window to constrain a Stream Analytics output to. + :vartype size_window: float + :ivar serialization: Describes how data from an input is serialized or how data is serialized when written to an output. Required on PUT (CreateOrReplace) requests. 
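# JsonSerialization sketch: `type` is the server-filled constant 'Json', so only the
# encoding and format keywords are caller-supplied. Values below come straight from the
# documented enums; the import path is assumed.
from azure.mgmt.streamanalytics.models import JsonSerialization

serialization = JsonSerialization(
    encoding="UTF8",         # the only documented Encoding value
    format="LineSeparated",  # or "Array"
)
assert serialization.type == 'Json'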
- :type serialization: ~stream_analytics_management_client.models.Serialization + :vartype serialization: ~stream_analytics_management_client.models.Serialization :ivar diagnostics: Describes conditions applicable to the Input, Output, or the job overall, that warrant customer attention. :vartype diagnostics: ~stream_analytics_management_client.models.Diagnostics @@ -3872,6 +4284,20 @@ def __init__( serialization: Optional["Serialization"] = None, **kwargs ): + """ + :keyword name: Resource name. + :paramtype name: str + :keyword datasource: Describes the data source that output will be written to. Required on PUT + (CreateOrReplace) requests. + :paramtype datasource: ~stream_analytics_management_client.models.OutputDataSource + :keyword time_window: The time frame for filtering Stream Analytics job outputs. + :paramtype time_window: str + :keyword size_window: The size window to constrain a Stream Analytics output to. + :paramtype size_window: float + :keyword serialization: Describes how data from an input is serialized or how data is + serialized when written to an output. Required on PUT (CreateOrReplace) requests. + :paramtype serialization: ~stream_analytics_management_client.models.Serialization + """ super(Output, self).__init__(name=name, **kwargs) self.datasource = datasource self.time_window = time_window @@ -3906,6 +4332,8 @@ def __init__( self, **kwargs ): + """ + """ super(OutputListResult, self).__init__(**kwargs) self.value = None self.next_link = None @@ -3916,13 +4344,13 @@ class ParquetSerialization(Serialization): All required parameters must be populated in order to send to Azure. - :param type: Required. Indicates the type of serialization that the input or output uses. - Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: - "Csv", "Avro", "Json", "CustomClr", "Parquet". - :type type: str or ~stream_analytics_management_client.models.EventSerializationType - :param properties: The properties that are associated with the Parquet serialization type. + :ivar type: Required. Indicates the type of serialization that the input or output uses. + Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: + "Csv", "Avro", "Json", "Parquet". + :vartype type: str or ~stream_analytics_management_client.models.EventSerializationType + :ivar properties: The properties that are associated with the Parquet serialization type. Required on PUT (CreateOrReplace) requests. - :type properties: object + :vartype properties: any """ _validation = { @@ -3937,9 +4365,14 @@ class ParquetSerialization(Serialization): def __init__( self, *, - properties: Optional[object] = None, + properties: Optional[Any] = None, **kwargs ): + """ + :keyword properties: The properties that are associated with the Parquet serialization type. + Required on PUT (CreateOrReplace) requests. + :paramtype properties: any + """ super(ParquetSerialization, self).__init__(**kwargs) self.type = 'Parquet' # type: str self.properties = properties @@ -3950,37 +4383,38 @@ class PowerBIOutputDataSource(OutputDataSource): All required parameters must be populated in order to send to Azure. - :param type: Required. Indicates the type of data source output will be written to. Required on + :ivar type: Required. Indicates the type of data source output will be written to. Required on PUT (CreateOrReplace) requests.Constant filled by server. 
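# Output sketch showing the newly documented time_window/size_window fields. The
# datasource keyword (required on PUT CreateOrReplace) is omitted here to keep the
# sketch self-contained; all values are placeholders.
from azure.mgmt.streamanalytics.models import Output, JsonSerialization

output = Output(
    name="exampleOutput",     # placeholder
    time_window="00:10:00",   # placeholder: time frame for filtering job outputs
    size_window=1024.0,       # placeholder: size constraint on the output
    serialization=JsonSerialization(encoding="UTF8"),
)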
- :type type: str - :param refresh_token: A refresh token that can be used to obtain a valid access token that can + :vartype type: str + :ivar refresh_token: A refresh token that can be used to obtain a valid access token that can then be used to authenticate with the data source. A valid refresh token is currently only obtainable via the Azure Portal. It is recommended to put a dummy string value here when creating the data source and then going to the Azure Portal to authenticate the data source which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) requests. - :type refresh_token: str - :param token_user_principal_name: The user principal name (UPN) of the user that was used to + :vartype refresh_token: str + :ivar token_user_principal_name: The user principal name (UPN) of the user that was used to obtain the refresh token. Use this property to help remember which user was used to obtain the refresh token. - :type token_user_principal_name: str - :param token_user_display_name: The user display name of the user that was used to obtain the + :vartype token_user_principal_name: str + :ivar token_user_display_name: The user display name of the user that was used to obtain the refresh token. Use this property to help remember which user was used to obtain the refresh token. - :type token_user_display_name: str - :param dataset: The name of the Power BI dataset. Required on PUT (CreateOrReplace) requests. - :type dataset: str - :param table: The name of the Power BI table under the specified dataset. Required on PUT + :vartype token_user_display_name: str + :ivar dataset: The name of the Power BI dataset. Required on PUT (CreateOrReplace) requests. + :vartype dataset: str + :ivar table: The name of the Power BI table under the specified dataset. Required on PUT (CreateOrReplace) requests. - :type table: str - :param group_id: The ID of the Power BI group. - :type group_id: str - :param group_name: The name of the Power BI group. Use this property to help remember which + :vartype table: str + :ivar group_id: The ID of the Power BI group. + :vartype group_id: str + :ivar group_name: The name of the Power BI group. Use this property to help remember which specific Power BI group id was used. - :type group_name: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + :vartype group_name: str + :ivar authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + :vartype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode """ _validation = { @@ -4012,6 +4446,37 @@ def __init__( authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, **kwargs ): + """ + :keyword refresh_token: A refresh token that can be used to obtain a valid access token that + can then be used to authenticate with the data source. A valid refresh token is currently only + obtainable via the Azure Portal. It is recommended to put a dummy string value here when + creating the data source and then going to the Azure Portal to authenticate the data source + which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) + requests. + :paramtype refresh_token: str + :keyword token_user_principal_name: The user principal name (UPN) of the user that was used to + obtain the refresh token. 
Use this property to help remember which user was used to obtain the + refresh token. + :paramtype token_user_principal_name: str + :keyword token_user_display_name: The user display name of the user that was used to obtain the + refresh token. Use this property to help remember which user was used to obtain the refresh + token. + :paramtype token_user_display_name: str + :keyword dataset: The name of the Power BI dataset. Required on PUT (CreateOrReplace) requests. + :paramtype dataset: str + :keyword table: The name of the Power BI table under the specified dataset. Required on PUT + (CreateOrReplace) requests. + :paramtype table: str + :keyword group_id: The ID of the Power BI group. + :paramtype group_id: str + :keyword group_name: The name of the Power BI group. Use this property to help remember which + specific Power BI group id was used. + :paramtype group_name: str + :keyword authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :paramtype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + """ super(PowerBIOutputDataSource, self).__init__(**kwargs) self.type = 'PowerBI' # type: str self.refresh_token = refresh_token @@ -4027,34 +4492,35 @@ def __init__( class PowerBIOutputDataSourceProperties(OAuthBasedDataSourceProperties): """The properties that are associated with a Power BI output. - :param refresh_token: A refresh token that can be used to obtain a valid access token that can + :ivar refresh_token: A refresh token that can be used to obtain a valid access token that can then be used to authenticate with the data source. A valid refresh token is currently only obtainable via the Azure Portal. It is recommended to put a dummy string value here when creating the data source and then going to the Azure Portal to authenticate the data source which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) requests. - :type refresh_token: str - :param token_user_principal_name: The user principal name (UPN) of the user that was used to + :vartype refresh_token: str + :ivar token_user_principal_name: The user principal name (UPN) of the user that was used to obtain the refresh token. Use this property to help remember which user was used to obtain the refresh token. - :type token_user_principal_name: str - :param token_user_display_name: The user display name of the user that was used to obtain the + :vartype token_user_principal_name: str + :ivar token_user_display_name: The user display name of the user that was used to obtain the refresh token. Use this property to help remember which user was used to obtain the refresh token. - :type token_user_display_name: str - :param dataset: The name of the Power BI dataset. Required on PUT (CreateOrReplace) requests. - :type dataset: str - :param table: The name of the Power BI table under the specified dataset. Required on PUT + :vartype token_user_display_name: str + :ivar dataset: The name of the Power BI dataset. Required on PUT (CreateOrReplace) requests. + :vartype dataset: str + :ivar table: The name of the Power BI table under the specified dataset. Required on PUT (CreateOrReplace) requests. - :type table: str - :param group_id: The ID of the Power BI group. - :type group_id: str - :param group_name: The name of the Power BI group. Use this property to help remember which + :vartype table: str + :ivar group_id: The ID of the Power BI group. 
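# Power BI output sketch. Per the docstring above, refresh_token is typically a dummy
# value at creation time and later replaced by authenticating in the Azure Portal. All
# values are placeholders; the import path is assumed.
from azure.mgmt.streamanalytics.models import PowerBIOutputDataSource

powerbi = PowerBIOutputDataSource(
    refresh_token="dummy",            # replaced after authenticating in the portal
    dataset="exampleDataset",
    table="exampleTable",
    group_id="<power-bi-group-id>",   # placeholder
    group_name="Example Group",
    authentication_mode="UserToken",
)
assert powerbi.type == 'PowerBI'      # constant filled by the subclass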
+ :vartype group_id: str + :ivar group_name: The name of the Power BI group. Use this property to help remember which specific Power BI group id was used. - :type group_name: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + :vartype group_name: str + :ivar authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode + :vartype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode """ _attribute_map = { @@ -4081,6 +4547,37 @@ def __init__( authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, **kwargs ): + """ + :keyword refresh_token: A refresh token that can be used to obtain a valid access token that + can then be used to authenticate with the data source. A valid refresh token is currently only + obtainable via the Azure Portal. It is recommended to put a dummy string value here when + creating the data source and then going to the Azure Portal to authenticate the data source + which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) + requests. + :paramtype refresh_token: str + :keyword token_user_principal_name: The user principal name (UPN) of the user that was used to + obtain the refresh token. Use this property to help remember which user was used to obtain the + refresh token. + :paramtype token_user_principal_name: str + :keyword token_user_display_name: The user display name of the user that was used to obtain the + refresh token. Use this property to help remember which user was used to obtain the refresh + token. + :paramtype token_user_display_name: str + :keyword dataset: The name of the Power BI dataset. Required on PUT (CreateOrReplace) requests. + :paramtype dataset: str + :keyword table: The name of the Power BI table under the specified dataset. Required on PUT + (CreateOrReplace) requests. + :paramtype table: str + :keyword group_id: The ID of the Power BI group. + :paramtype group_id: str + :keyword group_name: The name of the Power BI group. Use this property to help remember which + specific Power BI group id was used. + :paramtype group_name: str + :keyword authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :paramtype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + """ super(PowerBIOutputDataSourceProperties, self).__init__(refresh_token=refresh_token, token_user_principal_name=token_user_principal_name, token_user_display_name=token_user_display_name, **kwargs) self.dataset = dataset self.table = table @@ -4089,7 +4586,43 @@ def __init__( self.authentication_mode = authentication_mode -class PrivateEndpoint(Resource): +class ProxyResource(Resource): + """The resource model definition for a ARM proxy resource. It will have everything other than required location and tags. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. 
+ :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(ProxyResource, self).__init__(**kwargs) + + +class PrivateEndpoint(ProxyResource): """Complete information about the private endpoint. Variables are only populated by the server, and will be ignored when sending a request. @@ -4102,12 +4635,16 @@ class PrivateEndpoint(Resource): :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. :vartype type: str - :param properties: The properties associated with a private endpoint. - :type properties: ~stream_analytics_management_client.models.PrivateEndpointProperties :ivar etag: Unique opaque string (generally a GUID) that represents the metadata state of the resource (private endpoint) and changes whenever the resource is updated. Required on PUT (CreateOrUpdate) requests. :vartype etag: str + :ivar created_date: The date when this private endpoint was created. + :vartype created_date: str + :ivar manual_private_link_service_connections: A list of connections to the remote resource. + Immutable after it is set. + :vartype manual_private_link_service_connections: + list[~stream_analytics_management_client.models.PrivateLinkServiceConnection] """ _validation = { @@ -4115,25 +4652,34 @@ class PrivateEndpoint(Resource): 'name': {'readonly': True}, 'type': {'readonly': True}, 'etag': {'readonly': True}, + 'created_date': {'readonly': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'PrivateEndpointProperties'}, 'etag': {'key': 'etag', 'type': 'str'}, + 'created_date': {'key': 'properties.createdDate', 'type': 'str'}, + 'manual_private_link_service_connections': {'key': 'properties.manualPrivateLinkServiceConnections', 'type': '[PrivateLinkServiceConnection]'}, } def __init__( self, *, - properties: Optional["PrivateEndpointProperties"] = None, + manual_private_link_service_connections: Optional[List["PrivateLinkServiceConnection"]] = None, **kwargs ): + """ + :keyword manual_private_link_service_connections: A list of connections to the remote resource. + Immutable after it is set. + :paramtype manual_private_link_service_connections: + list[~stream_analytics_management_client.models.PrivateLinkServiceConnection] + """ super(PrivateEndpoint, self).__init__(**kwargs) - self.properties = properties self.etag = None + self.created_date = None + self.manual_private_link_service_connections = manual_private_link_service_connections class PrivateEndpointListResult(msrest.serialization.Model): @@ -4161,44 +4707,13 @@ def __init__( self, **kwargs ): + """ + """ super(PrivateEndpointListResult, self).__init__(**kwargs) self.value = None self.next_link = None -class PrivateEndpointProperties(msrest.serialization.Model): - """The properties associated with a private endpoint. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar created_date: The date when this private endpoint was created. - :vartype created_date: str - :param manual_private_link_service_connections: A list of connections to the remote resource. - Immutable after it is set. 
- :type manual_private_link_service_connections: - list[~stream_analytics_management_client.models.PrivateLinkServiceConnection] - """ - - _validation = { - 'created_date': {'readonly': True}, - } - - _attribute_map = { - 'created_date': {'key': 'createdDate', 'type': 'str'}, - 'manual_private_link_service_connections': {'key': 'manualPrivateLinkServiceConnections', 'type': '[PrivateLinkServiceConnection]'}, - } - - def __init__( - self, - *, - manual_private_link_service_connections: Optional[List["PrivateLinkServiceConnection"]] = None, - **kwargs - ): - super(PrivateEndpointProperties, self).__init__(**kwargs) - self.created_date = None - self.manual_private_link_service_connections = manual_private_link_service_connections - - class PrivateLinkConnectionState(msrest.serialization.Model): """A collection of read-only information about the state of the connection to the private remote resource. @@ -4230,6 +4745,8 @@ def __init__( self, **kwargs ): + """ + """ super(PrivateLinkConnectionState, self).__init__(**kwargs) self.status = None self.description = None @@ -4239,21 +4756,27 @@ def __init__( class PrivateLinkServiceConnection(msrest.serialization.Model): """A grouping of information about the connection to the remote resource. - :param private_link_service_id: The resource id of the private link service. Required on PUT + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar private_link_service_id: The resource id of the private link service. Required on PUT (CreateOrUpdate) requests. - :type private_link_service_id: str - :param group_ids: The ID(s) of the group(s) obtained from the remote resource that this private + :vartype private_link_service_id: str + :ivar group_ids: The ID(s) of the group(s) obtained from the remote resource that this private endpoint should connect to. Required on PUT (CreateOrUpdate) requests. - :type group_ids: list[str] - :param request_message: A message passed to the owner of the remote resource with this + :vartype group_ids: list[str] + :ivar request_message: A message passed to the owner of the remote resource with this connection request. Restricted to 140 chars. - :type request_message: str - :param private_link_service_connection_state: A collection of read-only information about the + :vartype request_message: str + :ivar private_link_service_connection_state: A collection of read-only information about the state of the connection to the private remote resource. - :type private_link_service_connection_state: + :vartype private_link_service_connection_state: ~stream_analytics_management_client.models.PrivateLinkConnectionState """ + _validation = { + 'request_message': {'readonly': True}, + } + _attribute_map = { 'private_link_service_id': {'key': 'properties.privateLinkServiceId', 'type': 'str'}, 'group_ids': {'key': 'properties.groupIds', 'type': '[str]'}, @@ -4266,51 +4789,28 @@ def __init__( *, private_link_service_id: Optional[str] = None, group_ids: Optional[List[str]] = None, - request_message: Optional[str] = None, private_link_service_connection_state: Optional["PrivateLinkConnectionState"] = None, **kwargs ): + """ + :keyword private_link_service_id: The resource id of the private link service. Required on PUT + (CreateOrUpdate) requests. + :paramtype private_link_service_id: str + :keyword group_ids: The ID(s) of the group(s) obtained from the remote resource that this + private endpoint should connect to. Required on PUT (CreateOrUpdate) requests. 
+ :paramtype group_ids: list[str] + :keyword private_link_service_connection_state: A collection of read-only information about the + state of the connection to the private remote resource. + :paramtype private_link_service_connection_state: + ~stream_analytics_management_client.models.PrivateLinkConnectionState + """ super(PrivateLinkServiceConnection, self).__init__(**kwargs) self.private_link_service_id = private_link_service_id self.group_ids = group_ids - self.request_message = request_message + self.request_message = None self.private_link_service_connection_state = private_link_service_connection_state -class ProxyResource(Resource): - """The resource model definition for a ARM proxy resource. It will have everything other than required location and tags. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource Id for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or - Microsoft.Storage/storageAccounts. - :vartype type: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ProxyResource, self).__init__(**kwargs) - - class ReferenceInputProperties(InputProperties): """The properties that are associated with an input containing reference data. @@ -4318,12 +4818,12 @@ class ReferenceInputProperties(InputProperties): All required parameters must be populated in order to send to Azure. - :param type: Required. Indicates whether the input is a source of reference data or stream - data. Required on PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param serialization: Describes how data from an input is serialized or how data is serialized + :ivar type: Required. Indicates whether the input is a source of reference data or stream data. + Required on PUT (CreateOrReplace) requests.Constant filled by server. + :vartype type: str + :ivar serialization: Describes how data from an input is serialized or how data is serialized when written to an output. Required on PUT (CreateOrReplace) requests. - :type serialization: ~stream_analytics_management_client.models.Serialization + :vartype serialization: ~stream_analytics_management_client.models.Serialization :ivar diagnostics: Describes conditions applicable to the Input, Output, or the job overall, that warrant customer attention. :vartype diagnostics: ~stream_analytics_management_client.models.Diagnostics @@ -4331,14 +4831,14 @@ class ReferenceInputProperties(InputProperties): detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency. :vartype etag: str - :param compression: Describes how input data is compressed. - :type compression: ~stream_analytics_management_client.models.Compression - :param partition_key: partitionKey Describes a key in the input data which is used for + :ivar compression: Describes how input data is compressed. 
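# With the flattening above, the former PrivateEndpointProperties bag now lives directly
# on PrivateEndpoint, and request_message became read-only on the connection model. A
# sketch with placeholder resource ids:
from azure.mgmt.streamanalytics.models import PrivateEndpoint, PrivateLinkServiceConnection

endpoint = PrivateEndpoint(
    manual_private_link_service_connections=[
        PrivateLinkServiceConnection(
            private_link_service_id="/subscriptions/.../privateLinkServices/example",  # placeholder
            group_ids=["exampleGroupId"],
        )
    ]
)
# created_date, etag, and request_message are read-only and stay None until returned.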
+ :vartype compression: ~stream_analytics_management_client.models.Compression + :ivar partition_key: partitionKey Describes a key in the input data which is used for partitioning the input data. - :type partition_key: str - :param datasource: Describes an input data source that contains reference data. Required on PUT + :vartype partition_key: str + :ivar datasource: Describes an input data source that contains reference data. Required on PUT (CreateOrReplace) requests. - :type datasource: ~stream_analytics_management_client.models.ReferenceInputDataSource + :vartype datasource: ~stream_analytics_management_client.models.ReferenceInputDataSource """ _validation = { @@ -4366,6 +4866,19 @@ def __init__( datasource: Optional["ReferenceInputDataSource"] = None, **kwargs ): + """ + :keyword serialization: Describes how data from an input is serialized or how data is + serialized when written to an output. Required on PUT (CreateOrReplace) requests. + :paramtype serialization: ~stream_analytics_management_client.models.Serialization + :keyword compression: Describes how input data is compressed. + :paramtype compression: ~stream_analytics_management_client.models.Compression + :keyword partition_key: partitionKey Describes a key in the input data which is used for + partitioning the input data. + :paramtype partition_key: str + :keyword datasource: Describes an input data source that contains reference data. Required on + PUT (CreateOrReplace) requests. + :paramtype datasource: ~stream_analytics_management_client.models.ReferenceInputDataSource + """ super(ReferenceInputProperties, self).__init__(serialization=serialization, compression=compression, partition_key=partition_key, **kwargs) self.type = 'Reference' # type: str self.datasource = datasource @@ -4396,6 +4909,8 @@ def __init__( self, **kwargs ): + """ + """ super(ResourceTestStatus, self).__init__(**kwargs) self.status = None self.error = None @@ -4408,19 +4923,19 @@ class ScalarFunctionProperties(FunctionProperties): All required parameters must be populated in order to send to Azure. - :param type: Required. Indicates the type of function.Constant filled by server. - :type type: str + :ivar type: Required. Indicates the type of function.Constant filled by server. + :vartype type: str :ivar etag: The current entity tag for the function. This is an opaque string. You can use it - to detect whether the resource has changed between requests. You can also use it in the If- - Match or If-None-Match headers for write operations for optimistic concurrency. + to detect whether the resource has changed between requests. You can also use it in the + If-Match or If-None-Match headers for write operations for optimistic concurrency. :vartype etag: str - :param inputs: - :type inputs: list[~stream_analytics_management_client.models.FunctionInput] - :param output: Describes the output of a function. - :type output: ~stream_analytics_management_client.models.FunctionOutput - :param binding: The physical binding of the function. For example, in the Azure Machine - Learning web service’s case, this describes the endpoint. - :type binding: ~stream_analytics_management_client.models.FunctionBinding + :ivar inputs: A list of inputs describing the parameters of the function. + :vartype inputs: list[~stream_analytics_management_client.models.FunctionInput] + :ivar output: The output of the function. + :vartype output: ~stream_analytics_management_client.models.FunctionOutput + :ivar binding: The physical binding of the function. 
For example, in the Azure Machine Learning + web service’s case, this describes the endpoint. + :vartype binding: ~stream_analytics_management_client.models.FunctionBinding """ _validation = { @@ -4444,8 +4959,47 @@ def __init__( binding: Optional["FunctionBinding"] = None, **kwargs ): - super(ScalarFunctionProperties, self).__init__(inputs=inputs, output=output, binding=binding, **kwargs) + """ + :keyword inputs: A list of inputs describing the parameters of the function. + :paramtype inputs: list[~stream_analytics_management_client.models.FunctionInput] + :keyword output: The output of the function. + :paramtype output: ~stream_analytics_management_client.models.FunctionOutput + :keyword binding: The physical binding of the function. For example, in the Azure Machine + Learning web service’s case, this describes the endpoint. + :paramtype binding: ~stream_analytics_management_client.models.FunctionBinding + """ + super(ScalarFunctionProperties, self).__init__(**kwargs) self.type = 'Scalar' # type: str + self.inputs = inputs + self.output = output + self.binding = binding + + +class ScaleStreamingJobParameters(msrest.serialization.Model): + """Parameters supplied to the Scale Streaming Job operation. + + :ivar streaming_units: Specifies the number of streaming units that the streaming job will + scale to. + :vartype streaming_units: int + """ + + _attribute_map = { + 'streaming_units': {'key': 'streamingUnits', 'type': 'int'}, + } + + def __init__( + self, + *, + streaming_units: Optional[int] = None, + **kwargs + ): + """ + :keyword streaming_units: Specifies the number of streaming units that the streaming job will + scale to. + :paramtype streaming_units: int + """ + super(ScaleStreamingJobParameters, self).__init__(**kwargs) + self.streaming_units = streaming_units class ServiceBusQueueOutputDataSource(OutputDataSource): @@ -4453,29 +5007,33 @@ class ServiceBusQueueOutputDataSource(OutputDataSource): All required parameters must be populated in order to send to Azure. - :param type: Required. Indicates the type of data source output will be written to. Required on + :ivar type: Required. Indicates the type of data source output will be written to. Required on PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param service_bus_namespace: The namespace that is associated with the desired Event Hub, + :vartype type: str + :ivar service_bus_namespace: The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type service_bus_namespace: str - :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus + :vartype service_bus_namespace: str + :ivar shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_name: str - :param shared_access_policy_key: The shared access policy key for the specified shared access + :vartype shared_access_policy_name: str + :ivar shared_access_policy_key: The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_key: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + :vartype shared_access_policy_key: str + :ivar authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", "ConnectionString". 
- :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode - :param queue_name: The name of the Service Bus Queue. Required on PUT (CreateOrReplace) + :vartype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + :ivar queue_name: The name of the Service Bus Queue. Required on PUT (CreateOrReplace) requests. - :type queue_name: str - :param property_columns: A string array of the names of output columns to be attached to - Service Bus messages as custom properties. - :type property_columns: list[str] - :param system_property_columns: Dictionary of :code:``. - :type system_property_columns: dict[str, str] + :vartype queue_name: str + :ivar property_columns: A string array of the names of output columns to be attached to Service + Bus messages as custom properties. + :vartype property_columns: list[str] + :ivar system_property_columns: The system properties associated with the Service Bus Queue. The + following system properties are supported: ReplyToSessionId, ContentType, To, Subject, + CorrelationId, TimeToLive, PartitionKey, SessionId, ScheduledEnqueueTime, MessageId, ReplyTo, + Label, ScheduledEnqueueTimeUtc. + :vartype system_property_columns: any """ _validation = { @@ -4490,7 +5048,7 @@ class ServiceBusQueueOutputDataSource(OutputDataSource): 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, 'queue_name': {'key': 'properties.queueName', 'type': 'str'}, 'property_columns': {'key': 'properties.propertyColumns', 'type': '[str]'}, - 'system_property_columns': {'key': 'properties.systemPropertyColumns', 'type': '{str}'}, + 'system_property_columns': {'key': 'properties.systemPropertyColumns', 'type': 'object'}, } def __init__( @@ -4502,9 +5060,35 @@ def __init__( authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, queue_name: Optional[str] = None, property_columns: Optional[List[str]] = None, - system_property_columns: Optional[Dict[str, str]] = None, + system_property_columns: Optional[Any] = None, **kwargs ): + """ + :keyword service_bus_namespace: The namespace that is associated with the desired Event Hub, + Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :paramtype service_bus_namespace: str + :keyword shared_access_policy_name: The shared access policy name for the Event Hub, Service + Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :paramtype shared_access_policy_name: str + :keyword shared_access_policy_key: The shared access policy key for the specified shared access + policy. Required on PUT (CreateOrReplace) requests. + :paramtype shared_access_policy_key: str + :keyword authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :paramtype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + :keyword queue_name: The name of the Service Bus Queue. Required on PUT (CreateOrReplace) + requests. + :paramtype queue_name: str + :keyword property_columns: A string array of the names of output columns to be attached to + Service Bus messages as custom properties. + :paramtype property_columns: list[str] + :keyword system_property_columns: The system properties associated with the Service Bus Queue. 
+ The following system properties are supported: ReplyToSessionId, ContentType, To, Subject, + CorrelationId, TimeToLive, PartitionKey, SessionId, ScheduledEnqueueTime, MessageId, ReplyTo, + Label, ScheduledEnqueueTimeUtc. + :paramtype system_property_columns: any + """ super(ServiceBusQueueOutputDataSource, self).__init__(**kwargs) self.type = 'Microsoft.ServiceBus/Queue' # type: str self.service_bus_namespace = service_bus_namespace @@ -4519,26 +5103,30 @@ def __init__( class ServiceBusQueueOutputDataSourceProperties(ServiceBusDataSourceProperties): """The properties that are associated with a Service Bus Queue output. - :param service_bus_namespace: The namespace that is associated with the desired Event Hub, + :ivar service_bus_namespace: The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type service_bus_namespace: str - :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus + :vartype service_bus_namespace: str + :ivar shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_name: str - :param shared_access_policy_key: The shared access policy key for the specified shared access + :vartype shared_access_policy_name: str + :ivar shared_access_policy_key: The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_key: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + :vartype shared_access_policy_key: str + :ivar authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode - :param queue_name: The name of the Service Bus Queue. Required on PUT (CreateOrReplace) + :vartype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + :ivar queue_name: The name of the Service Bus Queue. Required on PUT (CreateOrReplace) requests. - :type queue_name: str - :param property_columns: A string array of the names of output columns to be attached to - Service Bus messages as custom properties. - :type property_columns: list[str] - :param system_property_columns: Dictionary of :code:``. - :type system_property_columns: dict[str, str] + :vartype queue_name: str + :ivar property_columns: A string array of the names of output columns to be attached to Service + Bus messages as custom properties. + :vartype property_columns: list[str] + :ivar system_property_columns: The system properties associated with the Service Bus Queue. The + following system properties are supported: ReplyToSessionId, ContentType, To, Subject, + CorrelationId, TimeToLive, PartitionKey, SessionId, ScheduledEnqueueTime, MessageId, ReplyTo, + Label, ScheduledEnqueueTimeUtc. 
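# With the change above, system_property_columns is typed as an untyped payload
# ('object' on the wire) instead of dict[str, str], though a mapping of the supported
# system properties still serializes as before. Placeholder values throughout:
from azure.mgmt.streamanalytics.models import ServiceBusQueueOutputDataSource

queue_output = ServiceBusQueueOutputDataSource(
    service_bus_namespace="example-namespace",              # placeholder
    shared_access_policy_name="RootManageSharedAccessKey",  # placeholder policy
    shared_access_policy_key="<policy-key>",                # placeholder secret
    queue_name="examplequeue",
    system_property_columns={"MessageId": "col1", "PartitionKey": "col2"},
)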
+ :vartype system_property_columns: any """ _attribute_map = { @@ -4548,7 +5136,7 @@ class ServiceBusQueueOutputDataSourceProperties(ServiceBusDataSourceProperties): 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, 'queue_name': {'key': 'queueName', 'type': 'str'}, 'property_columns': {'key': 'propertyColumns', 'type': '[str]'}, - 'system_property_columns': {'key': 'systemPropertyColumns', 'type': '{str}'}, + 'system_property_columns': {'key': 'systemPropertyColumns', 'type': 'object'}, } def __init__( @@ -4560,9 +5148,35 @@ def __init__( authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, queue_name: Optional[str] = None, property_columns: Optional[List[str]] = None, - system_property_columns: Optional[Dict[str, str]] = None, + system_property_columns: Optional[Any] = None, **kwargs ): + """ + :keyword service_bus_namespace: The namespace that is associated with the desired Event Hub, + Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :paramtype service_bus_namespace: str + :keyword shared_access_policy_name: The shared access policy name for the Event Hub, Service + Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :paramtype shared_access_policy_name: str + :keyword shared_access_policy_key: The shared access policy key for the specified shared access + policy. Required on PUT (CreateOrReplace) requests. + :paramtype shared_access_policy_key: str + :keyword authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :paramtype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + :keyword queue_name: The name of the Service Bus Queue. Required on PUT (CreateOrReplace) + requests. + :paramtype queue_name: str + :keyword property_columns: A string array of the names of output columns to be attached to + Service Bus messages as custom properties. + :paramtype property_columns: list[str] + :keyword system_property_columns: The system properties associated with the Service Bus Queue. + The following system properties are supported: ReplyToSessionId, ContentType, To, Subject, + CorrelationId, TimeToLive, PartitionKey, SessionId, ScheduledEnqueueTime, MessageId, ReplyTo, + Label, ScheduledEnqueueTimeUtc. + :paramtype system_property_columns: any + """ super(ServiceBusQueueOutputDataSourceProperties, self).__init__(service_bus_namespace=service_bus_namespace, shared_access_policy_name=shared_access_policy_name, shared_access_policy_key=shared_access_policy_key, authentication_mode=authentication_mode, **kwargs) self.queue_name = queue_name self.property_columns = property_columns @@ -4574,29 +5188,33 @@ class ServiceBusTopicOutputDataSource(OutputDataSource): All required parameters must be populated in order to send to Azure. - :param type: Required. Indicates the type of data source output will be written to. Required on + :ivar type: Required. Indicates the type of data source output will be written to. Required on PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param service_bus_namespace: The namespace that is associated with the desired Event Hub, + :vartype type: str + :ivar service_bus_namespace: The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. 
- :type service_bus_namespace: str - :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus + :vartype service_bus_namespace: str + :ivar shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_name: str - :param shared_access_policy_key: The shared access policy key for the specified shared access + :vartype shared_access_policy_name: str + :ivar shared_access_policy_key: The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_key: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + :vartype shared_access_policy_key: str + :ivar authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode - :param topic_name: The name of the Service Bus Topic. Required on PUT (CreateOrReplace) + :vartype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + :ivar topic_name: The name of the Service Bus Topic. Required on PUT (CreateOrReplace) requests. - :type topic_name: str - :param property_columns: A string array of the names of output columns to be attached to - Service Bus messages as custom properties. - :type property_columns: list[str] - :param system_property_columns: Dictionary of :code:``. - :type system_property_columns: dict[str, str] + :vartype topic_name: str + :ivar property_columns: A string array of the names of output columns to be attached to Service + Bus messages as custom properties. + :vartype property_columns: list[str] + :ivar system_property_columns: The system properties associated with the Service Bus Topic + Output. The following system properties are supported: ReplyToSessionId, ContentType, To, + Subject, CorrelationId, TimeToLive, PartitionKey, SessionId, ScheduledEnqueueTime, MessageId, + ReplyTo, Label, ScheduledEnqueueTimeUtc. + :vartype system_property_columns: dict[str, str] """ _validation = { @@ -4626,6 +5244,32 @@ def __init__( system_property_columns: Optional[Dict[str, str]] = None, **kwargs ): + """ + :keyword service_bus_namespace: The namespace that is associated with the desired Event Hub, + Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :paramtype service_bus_namespace: str + :keyword shared_access_policy_name: The shared access policy name for the Event Hub, Service + Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :paramtype shared_access_policy_name: str + :keyword shared_access_policy_key: The shared access policy key for the specified shared access + policy. Required on PUT (CreateOrReplace) requests. + :paramtype shared_access_policy_key: str + :keyword authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :paramtype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + :keyword topic_name: The name of the Service Bus Topic. Required on PUT (CreateOrReplace) + requests. + :paramtype topic_name: str + :keyword property_columns: A string array of the names of output columns to be attached to + Service Bus messages as custom properties. 
+ :paramtype property_columns: list[str] + :keyword system_property_columns: The system properties associated with the Service Bus Topic + Output. The following system properties are supported: ReplyToSessionId, ContentType, To, + Subject, CorrelationId, TimeToLive, PartitionKey, SessionId, ScheduledEnqueueTime, MessageId, + ReplyTo, Label, ScheduledEnqueueTimeUtc. + :paramtype system_property_columns: dict[str, str] + """ super(ServiceBusTopicOutputDataSource, self).__init__(**kwargs) self.type = 'Microsoft.ServiceBus/Topic' # type: str self.service_bus_namespace = service_bus_namespace @@ -4640,26 +5284,30 @@ def __init__( class ServiceBusTopicOutputDataSourceProperties(ServiceBusDataSourceProperties): """The properties that are associated with a Service Bus Topic output. - :param service_bus_namespace: The namespace that is associated with the desired Event Hub, + :ivar service_bus_namespace: The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type service_bus_namespace: str - :param shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus + :vartype service_bus_namespace: str + :ivar shared_access_policy_name: The shared access policy name for the Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_name: str - :param shared_access_policy_key: The shared access policy key for the specified shared access + :vartype shared_access_policy_name: str + :ivar shared_access_policy_key: The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests. - :type shared_access_policy_key: str - :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + :vartype shared_access_policy_key: str + :ivar authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", "ConnectionString". - :type authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode - :param topic_name: The name of the Service Bus Topic. Required on PUT (CreateOrReplace) + :vartype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + :ivar topic_name: The name of the Service Bus Topic. Required on PUT (CreateOrReplace) requests. - :type topic_name: str - :param property_columns: A string array of the names of output columns to be attached to - Service Bus messages as custom properties. - :type property_columns: list[str] - :param system_property_columns: Dictionary of :code:``. - :type system_property_columns: dict[str, str] + :vartype topic_name: str + :ivar property_columns: A string array of the names of output columns to be attached to Service + Bus messages as custom properties. + :vartype property_columns: list[str] + :ivar system_property_columns: The system properties associated with the Service Bus Topic + Output. The following system properties are supported: ReplyToSessionId, ContentType, To, + Subject, CorrelationId, TimeToLive, PartitionKey, SessionId, ScheduledEnqueueTime, MessageId, + ReplyTo, Label, ScheduledEnqueueTimeUtc. 
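# In contrast to the queue output sketched earlier, the topic output keeps
# system_property_columns as a plain dict[str, str]; a minimal sketch with
# assumed names:
from azure.mgmt.streamanalytics.models import ServiceBusTopicOutputDataSource

topic_output = ServiceBusTopicOutputDataSource(
    service_bus_namespace="my-namespace",  # assumed namespace
    topic_name="my-topic",                 # assumed topic
    system_property_columns={"MessageId": "messageIdColumn"},
)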
+ :vartype system_property_columns: dict[str, str] """ _attribute_map = { @@ -4684,26 +5332,79 @@ def __init__( system_property_columns: Optional[Dict[str, str]] = None, **kwargs ): + """ + :keyword service_bus_namespace: The namespace that is associated with the desired Event Hub, + Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :paramtype service_bus_namespace: str + :keyword shared_access_policy_name: The shared access policy name for the Event Hub, Service + Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + :paramtype shared_access_policy_name: str + :keyword shared_access_policy_key: The shared access policy key for the specified shared access + policy. Required on PUT (CreateOrReplace) requests. + :paramtype shared_access_policy_key: str + :keyword authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :paramtype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + :keyword topic_name: The name of the Service Bus Topic. Required on PUT (CreateOrReplace) + requests. + :paramtype topic_name: str + :keyword property_columns: A string array of the names of output columns to be attached to + Service Bus messages as custom properties. + :paramtype property_columns: list[str] + :keyword system_property_columns: The system properties associated with the Service Bus Topic + Output. The following system properties are supported: ReplyToSessionId, ContentType, To, + Subject, CorrelationId, TimeToLive, PartitionKey, SessionId, ScheduledEnqueueTime, MessageId, + ReplyTo, Label, ScheduledEnqueueTimeUtc. + :paramtype system_property_columns: dict[str, str] + """ super(ServiceBusTopicOutputDataSourceProperties, self).__init__(service_bus_namespace=service_bus_namespace, shared_access_policy_name=shared_access_policy_name, shared_access_policy_key=shared_access_policy_key, authentication_mode=authentication_mode, **kwargs) self.topic_name = topic_name self.property_columns = property_columns self.system_property_columns = system_property_columns +class Sku(msrest.serialization.Model): + """The properties that are associated with a SKU. + + :ivar name: The name of the SKU. Required on PUT (CreateOrReplace) requests. Possible values + include: "Standard". + :vartype name: str or ~stream_analytics_management_client.models.SkuName + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Optional[Union[str, "SkuName"]] = None, + **kwargs + ): + """ + :keyword name: The name of the SKU. Required on PUT (CreateOrReplace) requests. Possible values + include: "Standard". + :paramtype name: str or ~stream_analytics_management_client.models.SkuName + """ + super(Sku, self).__init__(**kwargs) + self.name = name + + class StartStreamingJobParameters(msrest.serialization.Model): """Parameters supplied to the Start Streaming Job operation. - :param output_start_mode: Value may be JobStartTime, CustomTime, or LastOutputEventTime to + :ivar output_start_mode: Value may be JobStartTime, CustomTime, or LastOutputEventTime to indicate whether the starting point of the output event stream should start whenever the job is started, start at a custom user time stamp specified via the outputStartTime property, or start from the last event output time. Possible values include: "JobStartTime", "CustomTime", "LastOutputEventTime". 
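# The Sku model added above supersedes StreamingJobSku, which is removed later
# in this diff; "Standard" is the only documented SkuName value. Minimal sketch:
from azure.mgmt.streamanalytics.models import Sku

sku = Sku(name="Standard")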
- :type output_start_mode: str or ~stream_analytics_management_client.models.OutputStartMode - :param output_start_time: Value is either an ISO-8601 formatted time stamp that indicates the + :vartype output_start_mode: str or ~stream_analytics_management_client.models.OutputStartMode + :ivar output_start_time: Value is either an ISO-8601 formatted time stamp that indicates the starting point of the output event stream, or null to indicate that the output event stream will start whenever the streaming job is started. This property must have a value if outputStartMode is set to CustomTime. - :type output_start_time: ~datetime.datetime + :vartype output_start_time: ~datetime.datetime """ _attribute_map = { @@ -4718,6 +5419,19 @@ def __init__( output_start_time: Optional[datetime.datetime] = None, **kwargs ): + """ + :keyword output_start_mode: Value may be JobStartTime, CustomTime, or LastOutputEventTime to + indicate whether the starting point of the output event stream should start whenever the job is + started, start at a custom user time stamp specified via the outputStartTime property, or start + from the last event output time. Possible values include: "JobStartTime", "CustomTime", + "LastOutputEventTime". + :paramtype output_start_mode: str or ~stream_analytics_management_client.models.OutputStartMode + :keyword output_start_time: Value is either an ISO-8601 formatted time stamp that indicates the + starting point of the output event stream, or null to indicate that the output event stream + will start whenever the streaming job is started. This property must have a value if + outputStartMode is set to CustomTime. + :paramtype output_start_time: ~datetime.datetime + """ super(StartStreamingJobParameters, self).__init__(**kwargs) self.output_start_mode = output_start_mode self.output_start_time = output_start_time @@ -4736,15 +5450,15 @@ class StreamingJob(TrackedResource): :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. :vartype type: str - :param tags: A set of tags. Resource tags. - :type tags: dict[str, str] - :param location: The geo-location where the resource lives. - :type location: str - :param identity: Describes the system-assigned managed identity assigned to this job that can - be used to authenticate with inputs and outputs. - :type identity: ~stream_analytics_management_client.models.Identity - :param sku: Describes the SKU of the streaming job. Required on PUT (CreateOrReplace) requests. - :type sku: ~stream_analytics_management_client.models.StreamingJobSku + :ivar tags: A set of tags. Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. + :vartype location: str + :ivar identity: Describes the system-assigned managed identity assigned to this job that can be + used to authenticate with inputs and outputs. + :vartype identity: ~stream_analytics_management_client.models.Identity + :ivar sku: Describes the SKU of the streaming job. Required on PUT (CreateOrReplace) requests. + :vartype sku: ~stream_analytics_management_client.models.Sku :ivar job_id: A GUID uniquely identifying the streaming job. This GUID is generated upon creation of the streaming job. :vartype job_id: str @@ -4752,87 +5466,87 @@ class StreamingJob(TrackedResource): :vartype provisioning_state: str :ivar job_state: Describes the state of the streaming job. :vartype job_state: str - :param job_type: Describes the type of the job. Valid modes are ``Cloud`` and 'Edge'. 
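# A sketch of starting a job with the StartStreamingJobParameters model
# documented above. The client setup and resource names are assumptions for
# illustration; begin_start is the long-running start operation on
# StreamingJobsOperations.
import datetime

from azure.identity import DefaultAzureCredential
from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient
from azure.mgmt.streamanalytics.models import StartStreamingJobParameters

client = StreamAnalyticsManagementClient(DefaultAzureCredential(), "<subscription-id>")
params = StartStreamingJobParameters(
    output_start_mode="CustomTime",  # output_start_time is required for CustomTime
    output_start_time=datetime.datetime(2022, 1, 1, tzinfo=datetime.timezone.utc),
)
client.streaming_jobs.begin_start("my-rg", "my-job", params).result()  # assumed names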
Possible + :ivar job_type: Describes the type of the job. Valid modes are ``Cloud`` and 'Edge'. Possible values include: "Cloud", "Edge". - :type job_type: str or ~stream_analytics_management_client.models.JobType - :param output_start_mode: This property should only be utilized when it is desired that the job + :vartype job_type: str or ~stream_analytics_management_client.models.JobType + :ivar output_start_mode: This property should only be utilized when it is desired that the job be started immediately upon creation. Value may be JobStartTime, CustomTime, or LastOutputEventTime to indicate whether the starting point of the output event stream should start whenever the job is started, start at a custom user time stamp specified via the outputStartTime property, or start from the last event output time. Possible values include: "JobStartTime", "CustomTime", "LastOutputEventTime". - :type output_start_mode: str or ~stream_analytics_management_client.models.OutputStartMode - :param output_start_time: Value is either an ISO-8601 formatted time stamp that indicates the + :vartype output_start_mode: str or ~stream_analytics_management_client.models.OutputStartMode + :ivar output_start_time: Value is either an ISO-8601 formatted time stamp that indicates the starting point of the output event stream, or null to indicate that the output event stream will start whenever the streaming job is started. This property must have a value if outputStartMode is set to CustomTime. - :type output_start_time: ~datetime.datetime + :vartype output_start_time: ~datetime.datetime :ivar last_output_event_time: Value is either an ISO-8601 formatted timestamp indicating the last output event time of the streaming job or null indicating that output has not yet been produced. In case of multiple outputs or multiple streams, this shows the latest value in that set. :vartype last_output_event_time: ~datetime.datetime - :param events_out_of_order_policy: Indicates the policy to apply to events that arrive out of + :ivar events_out_of_order_policy: Indicates the policy to apply to events that arrive out of order in the input event stream. Possible values include: "Adjust", "Drop". - :type events_out_of_order_policy: str or + :vartype events_out_of_order_policy: str or ~stream_analytics_management_client.models.EventsOutOfOrderPolicy - :param output_error_policy: Indicates the policy to apply to events that arrive at the output + :ivar output_error_policy: Indicates the policy to apply to events that arrive at the output and cannot be written to the external storage due to being malformed (missing column values, column values of wrong type or size). Possible values include: "Stop", "Drop". - :type output_error_policy: str or ~stream_analytics_management_client.models.OutputErrorPolicy - :param events_out_of_order_max_delay_in_seconds: The maximum tolerable delay in seconds where + :vartype output_error_policy: str or + ~stream_analytics_management_client.models.OutputErrorPolicy + :ivar events_out_of_order_max_delay_in_seconds: The maximum tolerable delay in seconds where out-of-order events can be adjusted to be back in order. - :type events_out_of_order_max_delay_in_seconds: int - :param events_late_arrival_max_delay_in_seconds: The maximum tolerable delay in seconds where + :vartype events_out_of_order_max_delay_in_seconds: int + :ivar events_late_arrival_max_delay_in_seconds: The maximum tolerable delay in seconds where events arriving late could be included. 
Supported range is -1 to 1814399 (20.23:59:59 days) and -1 is used to specify wait indefinitely. If the property is absent, it is interpreted to have a value of -1. - :type events_late_arrival_max_delay_in_seconds: int - :param data_locale: The data locale of the stream analytics job. Value should be the name of a - supported .NET Culture from the set https://msdn.microsoft.com/en- - us/library/system.globalization.culturetypes(v=vs.110).aspx. Defaults to 'en-US' if none - specified. - :type data_locale: str - :param compatibility_level: Controls certain runtime behaviors of the streaming job. Possible - values include: "1.0". - :type compatibility_level: str or ~stream_analytics_management_client.models.CompatibilityLevel + :vartype events_late_arrival_max_delay_in_seconds: int + :ivar data_locale: The data locale of the stream analytics job. Value should be the name of a + supported .NET Culture from the set + https://msdn.microsoft.com/en-us/library/system.globalization.culturetypes(v=vs.110).aspx. + Defaults to 'en-US' if none specified. + :vartype data_locale: str + :ivar compatibility_level: Controls certain runtime behaviors of the streaming job. Possible + values include: "1.0", "1.2". + :vartype compatibility_level: str or + ~stream_analytics_management_client.models.CompatibilityLevel :ivar created_date: Value is an ISO-8601 formatted UTC timestamp indicating when the streaming job was created. :vartype created_date: ~datetime.datetime - :param inputs: A list of one or more inputs to the streaming job. The name property for each + :ivar inputs: A list of one or more inputs to the streaming job. The name property for each input is required when specifying this property in a PUT request. This property cannot be modified via a PATCH operation. You must use the PATCH API available for the individual input. - :type inputs: list[~stream_analytics_management_client.models.Input] - :param transformation: Indicates the query and the number of streaming units to use for the + :vartype inputs: list[~stream_analytics_management_client.models.Input] + :ivar transformation: Indicates the query and the number of streaming units to use for the streaming job. The name property of the transformation is required when specifying this property in a PUT request. This property cannot be modified via a PATCH operation. You must use the PATCH API available for the individual transformation. - :type transformation: ~stream_analytics_management_client.models.Transformation - :param outputs: A list of one or more outputs for the streaming job. The name property for each + :vartype transformation: ~stream_analytics_management_client.models.Transformation + :ivar outputs: A list of one or more outputs for the streaming job. The name property for each output is required when specifying this property in a PUT request. This property cannot be modified via a PATCH operation. You must use the PATCH API available for the individual output. - :type outputs: list[~stream_analytics_management_client.models.Output] - :param functions: A list of one or more functions for the streaming job. The name property for + :vartype outputs: list[~stream_analytics_management_client.models.Output] + :ivar functions: A list of one or more functions for the streaming job. The name property for each function is required when specifying this property in a PUT request. This property cannot be modified via a PATCH operation. You must use the PATCH API available for the individual function.
- :type functions: list[~stream_analytics_management_client.models.Function] + :vartype functions: list[~stream_analytics_management_client.models.Function] :ivar etag: The current entity tag for the streaming job. This is an opaque string. You can use - it to detect whether the resource has changed between requests. You can also use it in the If- - Match or If-None-Match headers for write operations for optimistic concurrency. + it to detect whether the resource has changed between requests. You can also use it in the + If-Match or If-None-Match headers for write operations for optimistic concurrency. :vartype etag: str - :param job_storage_account: The properties that are associated with an Azure Storage account + :ivar job_storage_account: The properties that are associated with an Azure Storage account with MSI. - :type job_storage_account: ~stream_analytics_management_client.models.JobStorageAccount + :vartype job_storage_account: ~stream_analytics_management_client.models.JobStorageAccount :ivar content_storage_policy: Valid values are JobStorageAccount and SystemAccount. If set to JobStorageAccount, this requires the user to also specify jobStorageAccount property. . Possible values include: "SystemAccount", "JobStorageAccount". :vartype content_storage_policy: str or ~stream_analytics_management_client.models.ContentStoragePolicy - :param externals: The storage account where the custom code artifacts are located. - :type externals: ~stream_analytics_management_client.models.External - :param cluster: The cluster which streaming jobs will run on. - :type cluster: ~stream_analytics_management_client.models.ClusterInfo + :ivar cluster: The cluster which streaming jobs will run on. + :vartype cluster: ~stream_analytics_management_client.models.ClusterInfo """ _validation = { @@ -4845,7 +5559,6 @@ class StreamingJob(TrackedResource): 'last_output_event_time': {'readonly': True}, 'created_date': {'readonly': True}, 'etag': {'readonly': True}, - 'content_storage_policy': {'readonly': True}, } _attribute_map = { @@ -4855,7 +5568,7 @@ class StreamingJob(TrackedResource): 'tags': {'key': 'tags', 'type': '{str}'}, 'location': {'key': 'location', 'type': 'str'}, 'identity': {'key': 'identity', 'type': 'Identity'}, - 'sku': {'key': 'properties.sku', 'type': 'StreamingJobSku'}, + 'sku': {'key': 'properties.sku', 'type': 'Sku'}, 'job_id': {'key': 'properties.jobId', 'type': 'str'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, 'job_state': {'key': 'properties.jobState', 'type': 'str'}, @@ -4877,7 +5590,6 @@ class StreamingJob(TrackedResource): 'etag': {'key': 'properties.etag', 'type': 'str'}, 'job_storage_account': {'key': 'properties.jobStorageAccount', 'type': 'JobStorageAccount'}, 'content_storage_policy': {'key': 'properties.contentStoragePolicy', 'type': 'str'}, - 'externals': {'key': 'properties.externals', 'type': 'External'}, 'cluster': {'key': 'properties.cluster', 'type': 'ClusterInfo'}, } @@ -4887,7 +5599,7 @@ def __init__( tags: Optional[Dict[str, str]] = None, location: Optional[str] = None, identity: Optional["Identity"] = None, - sku: Optional["StreamingJobSku"] = None, + sku: Optional["Sku"] = None, job_type: Optional[Union[str, "JobType"]] = None, output_start_mode: Optional[Union[str, "OutputStartMode"]] = None, output_start_time: Optional[datetime.datetime] = None, @@ -4902,10 +5614,91 @@ def __init__( outputs: Optional[List["Output"]] = None, functions: Optional[List["Function"]] = None, job_storage_account: Optional["JobStorageAccount"] = 
None, - externals: Optional["External"] = None, + content_storage_policy: Optional[Union[str, "ContentStoragePolicy"]] = None, cluster: Optional["ClusterInfo"] = None, **kwargs ): + """ + :keyword tags: A set of tags. Resource tags. + :paramtype tags: dict[str, str] + :keyword location: The geo-location where the resource lives. + :paramtype location: str + :keyword identity: Describes the system-assigned managed identity assigned to this job that can + be used to authenticate with inputs and outputs. + :paramtype identity: ~stream_analytics_management_client.models.Identity + :keyword sku: Describes the SKU of the streaming job. Required on PUT (CreateOrReplace) + requests. + :paramtype sku: ~stream_analytics_management_client.models.Sku + :keyword job_type: Describes the type of the job. Valid modes are ``Cloud`` and 'Edge'. + Possible values include: "Cloud", "Edge". + :paramtype job_type: str or ~stream_analytics_management_client.models.JobType + :keyword output_start_mode: This property should only be utilized when it is desired that the + job be started immediately upon creation. Value may be JobStartTime, CustomTime, or + LastOutputEventTime to indicate whether the starting point of the output event stream should + start whenever the job is started, start at a custom user time stamp specified via the + outputStartTime property, or start from the last event output time. Possible values include: + "JobStartTime", "CustomTime", "LastOutputEventTime". + :paramtype output_start_mode: str or ~stream_analytics_management_client.models.OutputStartMode + :keyword output_start_time: Value is either an ISO-8601 formatted time stamp that indicates the + starting point of the output event stream, or null to indicate that the output event stream + will start whenever the streaming job is started. This property must have a value if + outputStartMode is set to CustomTime. + :paramtype output_start_time: ~datetime.datetime + :keyword events_out_of_order_policy: Indicates the policy to apply to events that arrive out of + order in the input event stream. Possible values include: "Adjust", "Drop". + :paramtype events_out_of_order_policy: str or + ~stream_analytics_management_client.models.EventsOutOfOrderPolicy + :keyword output_error_policy: Indicates the policy to apply to events that arrive at the output + and cannot be written to the external storage due to being malformed (missing column values, + column values of wrong type or size). Possible values include: "Stop", "Drop". + :paramtype output_error_policy: str or + ~stream_analytics_management_client.models.OutputErrorPolicy + :keyword events_out_of_order_max_delay_in_seconds: The maximum tolerable delay in seconds where + out-of-order events can be adjusted to be back in order. + :paramtype events_out_of_order_max_delay_in_seconds: int + :keyword events_late_arrival_max_delay_in_seconds: The maximum tolerable delay in seconds where + events arriving late could be included. Supported range is -1 to 1814399 (20.23:59:59 days) + and -1 is used to specify wait indefinitely. If the property is absent, it is interpreted to + have a value of -1. + :paramtype events_late_arrival_max_delay_in_seconds: int + :keyword data_locale: The data locale of the stream analytics job. Value should be the name of + a supported .NET Culture from the set + https://msdn.microsoft.com/en-us/library/system.globalization.culturetypes(v=vs.110).aspx. + Defaults to 'en-US' if none specified. 
+ :paramtype data_locale: str + :keyword compatibility_level: Controls certain runtime behaviors of the streaming job. Possible + values include: "1.0", "1.2". + :paramtype compatibility_level: str or + ~stream_analytics_management_client.models.CompatibilityLevel + :keyword inputs: A list of one or more inputs to the streaming job. The name property for each + input is required when specifying this property in a PUT request. This property cannot be + modified via a PATCH operation. You must use the PATCH API available for the individual input. + :paramtype inputs: list[~stream_analytics_management_client.models.Input] + :keyword transformation: Indicates the query and the number of streaming units to use for the + streaming job. The name property of the transformation is required when specifying this + property in a PUT request. This property cannot be modified via a PATCH operation. You must use + the PATCH API available for the individual transformation. + :paramtype transformation: ~stream_analytics_management_client.models.Transformation + :keyword outputs: A list of one or more outputs for the streaming job. The name property for + each output is required when specifying this property in a PUT request. This property cannot be + modified via a PATCH operation. You must use the PATCH API available for the individual output. + :paramtype outputs: list[~stream_analytics_management_client.models.Output] + :keyword functions: A list of one or more functions for the streaming job. The name property + for each function is required when specifying this property in a PUT request. This property + cannot be modified via a PATCH operation. You must use the PATCH API available for the individual + function. + :paramtype functions: list[~stream_analytics_management_client.models.Function] + :keyword job_storage_account: The properties that are associated with an Azure Storage account + with MSI. + :paramtype job_storage_account: ~stream_analytics_management_client.models.JobStorageAccount + :keyword content_storage_policy: Valid values are JobStorageAccount and SystemAccount. If set + to JobStorageAccount, this requires the user to also specify jobStorageAccount property. + Possible values include: "SystemAccount", "JobStorageAccount". + :paramtype content_storage_policy: str or + ~stream_analytics_management_client.models.ContentStoragePolicy + :keyword cluster: The cluster which streaming jobs will run on. + :paramtype cluster: ~stream_analytics_management_client.models.ClusterInfo + """ super(StreamingJob, self).__init__(tags=tags, location=location, **kwargs) self.identity = identity self.sku = sku @@ -4929,8 +5722,7 @@ def __init__( self.functions = functions self.etag = None self.job_storage_account = job_storage_account - self.content_storage_policy = None - self.externals = externals + self.content_storage_policy = content_storage_policy self.cluster = cluster @@ -4959,33 +5751,13 @@ def __init__( self, **kwargs ): + """ + """ super(StreamingJobListResult, self).__init__(**kwargs) self.value = None self.next_link = None -class StreamingJobSku(msrest.serialization.Model): - """The properties that are associated with a SKU. - - :param name: The name of the SKU. Required on PUT (CreateOrReplace) requests. Possible values - include: "Standard".
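# With the changes above, content_storage_policy becomes a writable constructor
# argument and the externals parameter is gone. A sketch under those
# assumptions; every concrete value below is illustrative:
from azure.mgmt.streamanalytics.models import JobStorageAccount, Sku, StreamingJob

job = StreamingJob(
    location="westus",                         # assumed region
    sku=Sku(name="Standard"),                  # Sku replaces StreamingJobSku
    content_storage_policy="JobStorageAccount",
    job_storage_account=JobStorageAccount(
        authentication_mode="ConnectionString",
        account_name="mystorageaccount",       # assumed account
        account_key="<account-key>",           # placeholder secret
    ),
)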
- :type name: str or ~stream_analytics_management_client.models.StreamingJobSkuName - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - } - - def __init__( - self, - *, - name: Optional[Union[str, "StreamingJobSkuName"]] = None, - **kwargs - ): - super(StreamingJobSku, self).__init__(**kwargs) - self.name = name - - class StreamInputProperties(InputProperties): """The properties that are associated with an input containing stream data. @@ -4993,12 +5765,12 @@ class StreamInputProperties(InputProperties): All required parameters must be populated in order to send to Azure. - :param type: Required. Indicates whether the input is a source of reference data or stream - data. Required on PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param serialization: Describes how data from an input is serialized or how data is serialized + :ivar type: Required. Indicates whether the input is a source of reference data or stream data. + Required on PUT (CreateOrReplace) requests.Constant filled by server. + :vartype type: str + :ivar serialization: Describes how data from an input is serialized or how data is serialized when written to an output. Required on PUT (CreateOrReplace) requests. - :type serialization: ~stream_analytics_management_client.models.Serialization + :vartype serialization: ~stream_analytics_management_client.models.Serialization :ivar diagnostics: Describes conditions applicable to the Input, Output, or the job overall, that warrant customer attention. :vartype diagnostics: ~stream_analytics_management_client.models.Diagnostics @@ -5006,14 +5778,14 @@ class StreamInputProperties(InputProperties): detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency. :vartype etag: str - :param compression: Describes how input data is compressed. - :type compression: ~stream_analytics_management_client.models.Compression - :param partition_key: partitionKey Describes a key in the input data which is used for + :ivar compression: Describes how input data is compressed. + :vartype compression: ~stream_analytics_management_client.models.Compression + :ivar partition_key: partitionKey Describes a key in the input data which is used for partitioning the input data. - :type partition_key: str - :param datasource: Describes an input data source that contains stream data. Required on PUT + :vartype partition_key: str + :ivar datasource: Describes an input data source that contains stream data. Required on PUT (CreateOrReplace) requests. - :type datasource: ~stream_analytics_management_client.models.StreamInputDataSource + :vartype datasource: ~stream_analytics_management_client.models.StreamInputDataSource """ _validation = { @@ -5041,6 +5813,19 @@ def __init__( datasource: Optional["StreamInputDataSource"] = None, **kwargs ): + """ + :keyword serialization: Describes how data from an input is serialized or how data is + serialized when written to an output. Required on PUT (CreateOrReplace) requests. + :paramtype serialization: ~stream_analytics_management_client.models.Serialization + :keyword compression: Describes how input data is compressed. + :paramtype compression: ~stream_analytics_management_client.models.Compression + :keyword partition_key: partitionKey Describes a key in the input data which is used for + partitioning the input data. 
+ :paramtype partition_key: str + :keyword datasource: Describes an input data source that contains stream data. Required on PUT + (CreateOrReplace) requests. + :paramtype datasource: ~stream_analytics_management_client.models.StreamInputDataSource + """ super(StreamInputProperties, self).__init__(serialization=serialization, compression=compression, partition_key=partition_key, **kwargs) self.type = 'Stream' # type: str self.datasource = datasource @@ -5053,8 +5838,8 @@ class SubscriptionQuota(SubResource): :ivar id: Resource Id. :vartype id: str - :param name: Resource name. - :type name: str + :ivar name: Resource name. + :vartype name: str :ivar type: Resource type. :vartype type: str :ivar max_count: The max permitted usage of this resource. @@ -5084,6 +5869,10 @@ def __init__( name: Optional[str] = None, **kwargs ): + """ + :keyword name: Resource name. + :paramtype name: str + """ super(SubscriptionQuota, self).__init__(name=name, **kwargs) self.max_count = None self.current_count = None @@ -5110,6 +5899,8 @@ def __init__( self, **kwargs ): + """ + """ super(SubscriptionQuotasListResult, self).__init__(**kwargs) self.value = None @@ -5121,16 +5912,18 @@ class Transformation(SubResource): :ivar id: Resource Id. :vartype id: str - :param name: Resource name. - :type name: str + :ivar name: Resource name. + :vartype name: str :ivar type: Resource type. :vartype type: str - :param streaming_units: Specifies the number of streaming units that the streaming job uses. - :type streaming_units: int - :param query: Specifies the query that will be run in the streaming job. You can learn more + :ivar streaming_units: Specifies the number of streaming units that the streaming job uses. + :vartype streaming_units: int + :ivar valid_streaming_units: Specifies the valid streaming units a streaming job can scale to. + :vartype valid_streaming_units: list[int] + :ivar query: Specifies the query that will be run in the streaming job. You can learn more about the Stream Analytics Query Language (SAQL) here: https://msdn.microsoft.com/library/azure/dn834998 . Required on PUT (CreateOrReplace) requests. - :type query: str + :vartype query: str :ivar etag: The current entity tag for the transformation. This is an opaque string. You can use it to detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency. @@ -5148,6 +5941,7 @@ class Transformation(SubResource): 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'streaming_units': {'key': 'properties.streamingUnits', 'type': 'int'}, + 'valid_streaming_units': {'key': 'properties.validStreamingUnits', 'type': '[int]'}, 'query': {'key': 'properties.query', 'type': 'str'}, 'etag': {'key': 'properties.etag', 'type': 'str'}, } @@ -5157,10 +5951,25 @@ def __init__( *, name: Optional[str] = None, streaming_units: Optional[int] = None, + valid_streaming_units: Optional[List[int]] = None, query: Optional[str] = None, **kwargs ): + """ + :keyword name: Resource name. + :paramtype name: str + :keyword streaming_units: Specifies the number of streaming units that the streaming job uses. + :paramtype streaming_units: int + :keyword valid_streaming_units: Specifies the valid streaming units a streaming job can scale + to. + :paramtype valid_streaming_units: list[int] + :keyword query: Specifies the query that will be run in the streaming job. 
You can learn more + about the Stream Analytics Query Language (SAQL) here: + https://msdn.microsoft.com/library/azure/dn834998 . Required on PUT (CreateOrReplace) requests. + :paramtype query: str + """ super(Transformation, self).__init__(name=name, **kwargs) self.streaming_units = streaming_units + self.valid_streaming_units = valid_streaming_units self.query = query self.etag = None diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_stream_analytics_management_client_enums.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_stream_analytics_management_client_enums.py index 4ddfdea9c290..289651ecbed5 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_stream_analytics_management_client_enums.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_stream_analytics_management_client_enums.py @@ -6,27 +6,12 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from enum import Enum, EnumMeta +from enum import Enum from six import with_metaclass +from azure.core import CaseInsensitiveEnumMeta -class _CaseInsensitiveEnumMeta(EnumMeta): - def __getitem__(self, name): - return super().__getitem__(name.upper()) - def __getattr__(cls, name): - """Return the enum member matching `name` - We use __getattr__ instead of descriptors or inserting into the enum - class' __dict__ in order to support `name` and `value` being both - properties for enum members (which live in the class' __dict__) and - enum members themselves. - """ - try: - return cls._member_map_[name.upper()] - except KeyError: - raise AttributeError(name) - - -class AuthenticationMode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class AuthenticationMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Authentication Mode. Valid modes are ``ConnectionString``\ , ``Msi`` and 'UserToken'. """ @@ -34,29 +19,44 @@ class AuthenticationMode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): USER_TOKEN = "UserToken" CONNECTION_STRING = "ConnectionString" -class ClusterProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class ClusterProvisioningState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The status of the cluster provisioning. The three terminal states are: Succeeded, Failed and Canceled """ - SUCCEEDED = "Succeeded" #: The cluster provisioning succeeded. - FAILED = "Failed" #: The cluster provisioning failed. - CANCELED = "Canceled" #: The cluster provisioning was canceled. - IN_PROGRESS = "InProgress" #: The cluster provisioning was inprogress. + #: The cluster provisioning succeeded. + SUCCEEDED = "Succeeded" + #: The cluster provisioning failed. + FAILED = "Failed" + #: The cluster provisioning was canceled. + CANCELED = "Canceled" + #: The cluster provisioning was inprogress. + IN_PROGRESS = "InProgress" -class ClusterSkuName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class ClusterSkuName(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Specifies the SKU name of the cluster. Required on PUT (CreateOrUpdate) requests. """ - DEFAULT = "Default" #: The default SKU. + #: The default SKU. 
+ DEFAULT = "Default" -class CompatibilityLevel(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class CompatibilityLevel(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Controls certain runtime behaviors of the streaming job. """ ONE0 = "1.0" + ONE2 = "1.2" + +class CompressionType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): + """Indicates the type of compression that the input uses. Required on PUT (CreateOrReplace) + requests. + """ + + NONE = "None" + G_ZIP = "GZip" + DEFLATE = "Deflate" -class ContentStoragePolicy(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class ContentStoragePolicy(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Valid values are JobStorageAccount and SystemAccount. If set to JobStorageAccount, this requires the user to also specify jobStorageAccount property. . """ @@ -64,14 +64,14 @@ class ContentStoragePolicy(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): SYSTEM_ACCOUNT = "SystemAccount" JOB_STORAGE_ACCOUNT = "JobStorageAccount" -class Encoding(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class Encoding(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Specifies the encoding of the incoming data in the case of input and the encoding of outgoing data in the case of output. """ UTF8 = "UTF8" -class EventSerializationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class EventSerializationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Indicates the type of serialization that the input or output uses. Required on PUT (CreateOrReplace) requests. """ @@ -79,39 +79,48 @@ class EventSerializationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum) CSV = "Csv" AVRO = "Avro" JSON = "Json" - CUSTOM_CLR = "CustomClr" PARQUET = "Parquet" -class EventsOutOfOrderPolicy(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class EventsOutOfOrderPolicy(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Indicates the policy to apply to events that arrive out of order in the input event stream. """ ADJUST = "Adjust" DROP = "Drop" -class JobState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class JobState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The current execution state of the streaming job. """ - CREATED = "Created" #: The job is currently in the Created state. - STARTING = "Starting" #: The job is currently in the Starting state. - RUNNING = "Running" #: The job is currently in the Running state. - STOPPING = "Stopping" #: The job is currently in the Stopping state. - STOPPED = "Stopped" #: The job is currently in the Stopped state. - DELETING = "Deleting" #: The job is currently in the Deleting state. - FAILED = "Failed" #: The job is currently in the Failed state. - DEGRADED = "Degraded" #: The job is currently in the Degraded state. - RESTARTING = "Restarting" #: The job is currently in the Restarting state. - SCALING = "Scaling" #: The job is currently in the Scaling state. - -class JobType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + #: The job is currently in the Created state. + CREATED = "Created" + #: The job is currently in the Starting state. + STARTING = "Starting" + #: The job is currently in the Running state. + RUNNING = "Running" + #: The job is currently in the Stopping state. + STOPPING = "Stopping" + #: The job is currently in the Stopped state. + STOPPED = "Stopped" + #: The job is currently in the Deleting state. + DELETING = "Deleting" + #: The job is currently in the Failed state. 
+ FAILED = "Failed" + #: The job is currently in the Degraded state. + DEGRADED = "Degraded" + #: The job is currently in the Restarting state. + RESTARTING = "Restarting" + #: The job is currently in the Scaling state. + SCALING = "Scaling" + +class JobType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Describes the type of the job. Valid modes are ``Cloud`` and 'Edge'. """ CLOUD = "Cloud" EDGE = "Edge" -class JsonOutputSerializationFormat(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class JsonOutputSerializationFormat(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Specifies the format of the JSON the output will be written in. The currently supported values are 'lineSeparated' indicating the output will be formatted by having each JSON object separated by a new line and 'array' indicating the output will be formatted as an array of JSON @@ -121,7 +130,7 @@ class JsonOutputSerializationFormat(with_metaclass(_CaseInsensitiveEnumMeta, str LINE_SEPARATED = "LineSeparated" ARRAY = "Array" -class OutputErrorPolicy(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class OutputErrorPolicy(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Indicates the policy to apply to events that arrive at the output and cannot be written to the external storage due to being malformed (missing column values, column values of wrong type or size). @@ -130,7 +139,7 @@ class OutputErrorPolicy(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): STOP = "Stop" DROP = "Drop" -class OutputStartMode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class OutputStartMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Value may be JobStartTime, CustomTime, or LastOutputEventTime to indicate whether the starting point of the output event stream should start whenever the job is started, start at a custom user time stamp specified via the outputStartTime property, or start from the last event output @@ -141,7 +150,15 @@ class OutputStartMode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): CUSTOM_TIME = "CustomTime" LAST_OUTPUT_EVENT_TIME = "LastOutputEventTime" -class StreamingJobSkuName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class RefreshType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): + """Indicates the type of data refresh option. + """ + + STATIC = "Static" + REFRESH_PERIODICALLY_WITH_FULL = "RefreshPeriodicallyWithFull" + REFRESH_PERIODICALLY_WITH_DELTA = "RefreshPeriodicallyWithDelta" + +class SkuName(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The name of the SKU. Required on PUT (CreateOrReplace) requests. """ diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/__init__.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/__init__.py index a247559efb05..72cfdc41ec92 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/__init__.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/__init__.py @@ -6,24 +6,24 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from ._functions_operations import FunctionsOperations +from ._operations import Operations +from ._streaming_jobs_operations import StreamingJobsOperations from ._inputs_operations import InputsOperations from ._outputs_operations import OutputsOperations -from ._streaming_jobs_operations import StreamingJobsOperations -from ._subscriptions_operations import SubscriptionsOperations from ._transformations_operations import TransformationsOperations -from ._operations import Operations +from ._functions_operations import FunctionsOperations +from ._subscriptions_operations import SubscriptionsOperations from ._clusters_operations import ClustersOperations from ._private_endpoints_operations import PrivateEndpointsOperations __all__ = [ - 'FunctionsOperations', + 'Operations', + 'StreamingJobsOperations', 'InputsOperations', 'OutputsOperations', - 'StreamingJobsOperations', - 'SubscriptionsOperations', 'TransformationsOperations', - 'Operations', + 'FunctionsOperations', + 'SubscriptionsOperations', 'ClustersOperations', 'PrivateEndpointsOperations', ] diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_clusters_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_clusters_operations.py index 2b0d23f60677..9928bd14e86b 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_clusters_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_clusters_operations.py @@ -5,25 +5,294 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING +import functools +from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +from msrest import Serializer + +from .. 
import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +JSONType = Any +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_create_or_update_request_initial( + subscription_id: str, + resource_group_name: str, + cluster_name: str, + *, + json: JSONType = None, + content: Any = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=url, + params=query_parameters, + headers=header_parameters, + json=json, + content=content, + **kwargs + ) + + +def build_update_request_initial( + subscription_id: str, + resource_group_name: str, + cluster_name: str, + *, + json: JSONType = None, + content: Any = None, + if_match: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if content_type is not None: + 
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PATCH", + url=url, + params=query_parameters, + headers=header_parameters, + json=json, + content=content, + **kwargs + ) + + +def build_get_request( + subscription_id: str, + resource_group_name: str, + cluster_name: str, + **kwargs: Any +) -> HttpRequest: + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_delete_request_initial( + subscription_id: str, + resource_group_name: str, + cluster_name: str, + **kwargs: Any +) -> HttpRequest: + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_list_by_subscription_request( + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/clusters') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + 
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_list_by_resource_group_request( + subscription_id: str, + resource_group_name: str, + **kwargs: Any +) -> HttpRequest: + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_list_streaming_jobs_request( + subscription_id: str, + resource_group_name: str, + cluster_name: str, + **kwargs: Any +) -> HttpRequest: + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/listStreamingJobs') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) class ClustersOperations(object): """ClustersOperations operations. @@ -39,7 +308,7 @@ class ClustersOperations(object): :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -49,56 +318,42 @@ def __init__(self, client, config, serializer, deserializer): def _create_or_update_initial( self, - resource_group_name, # type: str - cluster_name, # type: str - cluster, # type: "models.Cluster" - if_match=None, # type: Optional[str] - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) 
-> "models.Cluster" - cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + resource_group_name: str, + cluster_name: str, + cluster: "_models.Cluster", + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + **kwargs: Any + ) -> "_models.Cluster": + cls = kwargs.pop('cls', None) # type: ClsType["_models.Cluster"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._create_or_update_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(cluster, 'Cluster') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(cluster, 'Cluster') + + request = build_create_or_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_name=cluster_name, + content_type=content_type, + json=_json, + if_match=if_match, + if_none_match=if_none_match, + template_url=self._create_or_update_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) if response.status_code == 200: deserialized = self._deserialize('Cluster', pipeline_response) @@ -110,18 +365,20 @@ def _create_or_update_initial( return cls(pipeline_response, deserialized, {}) return deserialized + _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}'} # type: ignore + + @distributed_trace def 
begin_create_or_update( self, - resource_group_name, # type: str - cluster_name, # type: str - cluster, # type: "models.Cluster" - if_match=None, # type: Optional[str] - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> LROPoller["models.Cluster"] + resource_group_name: str, + cluster_name: str, + cluster: "_models.Cluster", + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + **kwargs: Any + ) -> LROPoller["_models.Cluster"]: """Creates a Stream Analytics Cluster or replaces an already existing cluster. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -140,16 +397,19 @@ def begin_create_or_update( :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. :return: An instance of LROPoller that returns either Cluster or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~stream_analytics_management_client.models.Cluster] - :raises ~azure.core.exceptions.HttpResponseError: + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Cluster"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -162,21 +422,21 @@ def begin_create_or_update( cluster=cluster, if_match=if_match, if_none_match=if_none_match, + content_type=content_type, cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): + response = pipeline_response.http_response deserialized = self._deserialize('Cluster', pipeline_response) - if cls: return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -188,57 +448,45 @@ def get_long_running_output(pipeline_response): ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}'} # type: ignore def _update_initial( self, - resource_group_name, # type: str - cluster_name, # type: str - cluster, # type: "models.Cluster" - if_match=None, # type: Optional[str] - **kwargs 
# type: Any - ): - # type: (...) -> Optional["models.Cluster"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Cluster"]] + resource_group_name: str, + cluster_name: str, + cluster: "_models.Cluster", + if_match: Optional[str] = None, + **kwargs: Any + ) -> Optional["_models.Cluster"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.Cluster"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._update_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(cluster, 'Cluster') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(cluster, 'Cluster') + + request = build_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_name=cluster_name, + content_type=content_type, + json=_json, + if_match=if_match, + template_url=self._update_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: @@ -248,17 +496,19 @@ def _update_initial( return cls(pipeline_response, deserialized, {}) return deserialized + _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}'} # type: ignore + + @distributed_trace def begin_update( self, - resource_group_name, # type: str - cluster_name, # type: str - cluster, # type: "models.Cluster" - if_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) 
-> LROPoller["models.Cluster"] + resource_group_name: str, + cluster_name: str, + cluster: "_models.Cluster", + if_match: Optional[str] = None, + **kwargs: Any + ) -> LROPoller["_models.Cluster"]: """Updates an existing cluster. This can be used to partially update (ie. update one or two properties) a cluster without affecting the rest of the cluster definition. @@ -275,16 +525,19 @@ def begin_update( :type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. :return: An instance of LROPoller that returns either Cluster or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~stream_analytics_management_client.models.Cluster] - :raises ~azure.core.exceptions.HttpResponseError: + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Cluster"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -296,21 +549,21 @@ def begin_update( cluster_name=cluster_name, cluster=cluster, if_match=if_match, + content_type=content_type, cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): + response = pipeline_response.http_response deserialized = self._deserialize('Cluster', pipeline_response) - if cls: return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -322,15 +575,16 @@ def get_long_running_output(pipeline_response): ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}'} # type: ignore + @distributed_trace def get( self, - resource_group_name, # type: str - cluster_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "models.Cluster" + resource_group_name: str, + cluster_name: str, + **kwargs: Any + ) -> "_models.Cluster": """Gets information about the specified cluster. :param resource_group_name: The name of the resource group. The name is case insensitive. 
@@ -342,38 +596,28 @@ def get( :rtype: ~stream_analytics_management_client.models.Cluster :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Cluster"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_name=cluster_name, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('Cluster', pipeline_response) @@ -382,61 +626,52 @@ def get( return cls(pipeline_response, deserialized, {}) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}'} # type: ignore + def _delete_initial( self, - resource_group_name, # type: str - cluster_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> None + resource_group_name: str, + cluster_name: str, + **kwargs: Any + ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" - accept = "application/json" - - # Construct URL - url = self._delete_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_delete_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_name=cluster_name, + template_url=self._delete_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}'} # type: ignore + + @distributed_trace def begin_delete( self, - resource_group_name, # type: str - cluster_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + resource_group_name: str, + cluster_name: str, + **kwargs: Any + ) -> LROPoller[None]: """Deletes the specified cluster. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -445,15 +680,17 @@ def begin_delete( :type cluster_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType[None] lro_delay = kwargs.pop( 'polling_interval', @@ -467,15 +704,14 @@ def begin_delete( cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -487,53 +723,50 @@ def get_long_running_output(pipeline_response): ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}'} # type: ignore + @distributed_trace def list_by_subscription( self, - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.ClusterListResult"] + **kwargs: Any + ) -> Iterable["_models.ClusterListResult"]: """Lists all of the clusters in the given subscription. :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ClusterListResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.ClusterListResult] + :rtype: + ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.ClusterListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ClusterListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ClusterListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_subscription.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_subscription_request( + subscription_id=self._config.subscription_id, + template_url=self.list_by_subscription.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = 
build_list_by_subscription_request( + subscription_id=self._config.subscription_id, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('ClusterListResult', pipeline_response) + deserialized = self._deserialize("ClusterListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -546,66 +779,64 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response + return ItemPaged( get_next, extract_data ) list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/clusters'} # type: ignore + @distributed_trace def list_by_resource_group( self, - resource_group_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.ClusterListResult"] + resource_group_name: str, + **kwargs: Any + ) -> Iterable["_models.ClusterListResult"]: """Lists all of the clusters in the given resource group. :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ClusterListResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.ClusterListResult] + :rtype: + ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.ClusterListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ClusterListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ClusterListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_resource_group.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_resource_group_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + template_url=self.list_by_resource_group.metadata['url'], + ) + request = _convert_request(request) + 
request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_resource_group_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('ClusterListResult', pipeline_response) + deserialized = self._deserialize("ClusterListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -618,24 +849,25 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response + return ItemPaged( get_next, extract_data ) list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters'} # type: ignore + @distributed_trace def list_streaming_jobs( self, - resource_group_name, # type: str - cluster_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.ClusterJobListResult"] + resource_group_name: str, + cluster_name: str, + **kwargs: Any + ) -> Iterable["_models.ClusterJobListResult"]: """Lists all of the streaming jobs in the given cluster. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -643,45 +875,44 @@ def list_streaming_jobs( :param cluster_name: The name of the cluster. 
:type cluster_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ClusterJobListResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.ClusterJobListResult] + :return: An iterator like instance of either ClusterJobListResult or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.ClusterJobListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ClusterJobListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ClusterJobListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_streaming_jobs.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.post(url, query_parameters, header_parameters) + + request = build_list_streaming_jobs_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_name=cluster_name, + template_url=self.list_streaming_jobs.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_streaming_jobs_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_name=cluster_name, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('ClusterJobListResult', pipeline_response) + deserialized = self._deserialize("ClusterJobListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -694,12 +925,13 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response + return ItemPaged( get_next, extract_data ) diff --git 
a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_functions_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_functions_operations.py index 31063c85850b..8e88d2424a6a 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_functions_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_functions_operations.py @@ -5,25 +5,334 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING +import functools +from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +from msrest import Serializer + +from .. 
import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +JSONType = Any +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_create_or_replace_request( + subscription_id: str, + resource_group_name: str, + job_name: str, + function_name: str, + *, + json: JSONType = None, + content: Any = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "jobName": _SERIALIZER.url("job_name", job_name, 'str'), + "functionName": _SERIALIZER.url("function_name", function_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=url, + params=query_parameters, + headers=header_parameters, + json=json, + content=content, + **kwargs + ) + + +def build_update_request( + subscription_id: str, + resource_group_name: str, + job_name: str, + function_name: str, + *, + json: JSONType = None, + content: Any = None, + if_match: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "jobName": _SERIALIZER.url("job_name", job_name, 'str'), + "functionName": _SERIALIZER.url("function_name", function_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = 
kwargs.pop("headers", {}) # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PATCH", + url=url, + params=query_parameters, + headers=header_parameters, + json=json, + content=content, + **kwargs + ) + + +def build_delete_request( + subscription_id: str, + resource_group_name: str, + job_name: str, + function_name: str, + **kwargs: Any +) -> HttpRequest: + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "jobName": _SERIALIZER.url("job_name", job_name, 'str'), + "functionName": _SERIALIZER.url("function_name", function_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_get_request( + subscription_id: str, + resource_group_name: str, + job_name: str, + function_name: str, + **kwargs: Any +) -> HttpRequest: + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "jobName": _SERIALIZER.url("job_name", job_name, 'str'), + "functionName": _SERIALIZER.url("function_name", function_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_list_by_streaming_job_request( + subscription_id: str, + resource_group_name: str, + job_name: str, + *, + select: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", 
'/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "jobName": _SERIALIZER.url("job_name", job_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + if select is not None: + query_parameters['$select'] = _SERIALIZER.query("select", select, 'str') + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_test_request_initial( + subscription_id: str, + resource_group_name: str, + job_name: str, + function_name: str, + *, + json: JSONType = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/test') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "jobName": _SERIALIZER.url("job_name", job_name, 'str'), + "functionName": _SERIALIZER.url("function_name", function_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + json=json, + content=content, + **kwargs + ) + + +def build_retrieve_default_definition_request( + subscription_id: str, + resource_group_name: str, + job_name: str, + function_name: str, + *, + json: JSONType = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/retrieveDefaultDefinition') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": 
_SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "jobName": _SERIALIZER.url("job_name", job_name, 'str'), + "functionName": _SERIALIZER.url("function_name", function_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + json=json, + content=content, + **kwargs + ) class FunctionsOperations(object): """FunctionsOperations operations. @@ -39,7 +348,7 @@ class FunctionsOperations(object): :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -47,17 +356,17 @@ def __init__(self, client, config, serializer, deserializer): self._deserialize = deserializer self._config = config + @distributed_trace def create_or_replace( self, - resource_group_name, # type: str - job_name, # type: str - function_name, # type: str - function, # type: "models.Function" - if_match=None, # type: Optional[str] - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.Function" + resource_group_name: str, + job_name: str, + function_name: str, + function: "_models.Function", + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + **kwargs: Any + ) -> "_models.Function": """Creates a function or replaces an already existing function under an existing streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. 
@@ -81,74 +390,67 @@ def create_or_replace( :rtype: ~stream_analytics_management_client.models.Function :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Function"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Function"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create_or_replace.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'functionName': self._serialize.url("function_name", function_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(function, 'Function') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(function, 'Function') + + request = build_create_or_replace_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + function_name=function_name, + content_type=content_type, + json=_json, + if_match=if_match, + if_none_match=if_none_match, + template_url=self.create_or_replace.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Function', pipeline_response) if response.status_code == 201: response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Function', pipeline_response) if cls: return 
cls(pipeline_response, deserialized, response_headers) return deserialized + create_or_replace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}'} # type: ignore + + @distributed_trace def update( self, - resource_group_name, # type: str - job_name, # type: str - function_name, # type: str - function, # type: "models.Function" - if_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.Function" + resource_group_name: str, + job_name: str, + function_name: str, + function: "_models.Function", + if_match: Optional[str] = None, + **kwargs: Any + ) -> "_models.Function": """Updates an existing function under an existing streaming job. This can be used to partially update (ie. update one or two properties) a function without affecting the rest the job or function definition. @@ -173,65 +475,58 @@ def update( :rtype: ~stream_analytics_management_client.models.Function :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Function"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Function"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'functionName': self._serialize.url("function_name", function_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(function, 'Function') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(function, 'Function') + + request = build_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + function_name=function_name, + content_type=content_type, + json=_json, + if_match=if_match, + template_url=self.update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: 
map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Function', pipeline_response) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}'} # type: ignore + + @distributed_trace def delete( self, - resource_group_name, # type: str - job_name, # type: str - function_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None + resource_group_name: str, + job_name: str, + function_name: str, + **kwargs: Any + ) -> None: """Deletes a function from the streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -250,46 +545,40 @@ def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'functionName': self._serialize.url("function_name", function_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + function_name=function_name, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}'} # type: ignore + + @distributed_trace def get( self, - resource_group_name, # type: str - job_name, # type: str - function_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> "models.Function" + resource_group_name: str, + job_name: str, + function_name: str, + **kwargs: Any + ) -> "_models.Function": """Gets details about the specified function. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -303,58 +592,52 @@ def get( :rtype: ~stream_analytics_management_client.models.Function :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Function"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Function"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'functionName': self._serialize.url("function_name", function_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + function_name=function_name, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Function', pipeline_response) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}'} # type: ignore + + @distributed_trace def list_by_streaming_job( self, - resource_group_name, # type: str - job_name, # type: str - select=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.FunctionListResult"] + resource_group_name: str, + job_name: str, + select: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.FunctionListResult"]: """Lists all of the functions under the specified streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. 
@@ -367,46 +650,44 @@ def list_by_streaming_job( :type select: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either FunctionListResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.FunctionListResult] + :rtype: + ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.FunctionListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.FunctionListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.FunctionListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_streaming_job.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if select is not None: - query_parameters['$select'] = self._serialize.query("select", select, 'str') - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_streaming_job_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + select=select, + template_url=self.list_by_streaming_job.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_streaming_job_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + select=select, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('FunctionListResult', pipeline_response) + deserialized = self._deserialize("FunctionListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -420,10 +701,12 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response + return ItemPaged( get_next, extract_data ) @@ -431,48 +714,37 @@ def 
get_next(next_link=None): def _test_initial( self, - resource_group_name, # type: str - job_name, # type: str - function_name, # type: str - function=None, # type: Optional["models.Function"] - **kwargs # type: Any - ): - # type: (...) -> Optional["models.ResourceTestStatus"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ResourceTestStatus"]] + resource_group_name: str, + job_name: str, + function_name: str, + function: Optional["_models.Function"] = None, + **kwargs: Any + ) -> Optional["_models.ResourceTestStatus"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ResourceTestStatus"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._test_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'functionName': self._serialize.url("function_name", function_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - body_content_kwargs = {} # type: Dict[str, Any] if function is not None: - body_content = self._serialize.body(function, 'Function') + _json = self._serialize.body(function, 'Function') else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + _json = None + + request = build_test_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + function_name=function_name, + content_type=content_type, + json=_json, + template_url=self._test_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -488,17 +760,19 @@ def _test_initial( return cls(pipeline_response, deserialized, {}) return deserialized + _test_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/test'} # type: ignore + + @distributed_trace def begin_test( self, - resource_group_name, # type: str - job_name, # type: str - function_name, # type: str - function=None, # type: Optional["models.Function"] - **kwargs # type: Any - ): - # type: (...) 
-> LROPoller["models.ResourceTestStatus"] + resource_group_name: str, + job_name: str, + function_name: str, + function: Optional["_models.Function"] = None, + **kwargs: Any + ) -> LROPoller["_models.ResourceTestStatus"]: """Tests if the information provided for a function is valid. This can range from testing the connection to the underlying web service behind the function or making sure the function code provided is syntactically correct. @@ -517,16 +791,21 @@ def begin_test( :type function: ~stream_analytics_management_client.models.Function :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of LROPoller that returns either ResourceTestStatus or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~stream_analytics_management_client.models.ResourceTestStatus] - :raises ~azure.core.exceptions.HttpResponseError: + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ResourceTestStatus or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~stream_analytics_management_client.models.ResourceTestStatus] + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.ResourceTestStatus"] + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ResourceTestStatus"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -538,21 +817,21 @@ def begin_test( job_name=job_name, function_name=function_name, function=function, + content_type=content_type, cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): + response = pipeline_response.http_response deserialized = self._deserialize('ResourceTestStatus', pipeline_response) - if cls: return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -564,17 +843,18 @@ def get_long_running_output(pipeline_response): ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_test.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/test'} # type: ignore + @distributed_trace def retrieve_default_definition( self, - 
resource_group_name, # type: str - job_name, # type: str - function_name, # type: str - function_retrieve_default_definition_parameters=None, # type: Optional["models.FunctionRetrieveDefaultDefinitionParameters"] - **kwargs # type: Any - ): - # type: (...) -> "models.Function" + resource_group_name: str, + job_name: str, + function_name: str, + function_retrieve_default_definition_parameters: Optional["_models.FunctionRetrieveDefaultDefinitionParameters"] = None, + **kwargs: Any + ) -> "_models.Function": """Retrieves the default definition of a function based on the parameters specified. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -585,53 +865,45 @@ def retrieve_default_definition( :type function_name: str :param function_retrieve_default_definition_parameters: Parameters used to specify the type of function to retrieve the default definition for. - :type function_retrieve_default_definition_parameters: ~stream_analytics_management_client.models.FunctionRetrieveDefaultDefinitionParameters + :type function_retrieve_default_definition_parameters: + ~stream_analytics_management_client.models.FunctionRetrieveDefaultDefinitionParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: Function, or the result of cls(response) :rtype: ~stream_analytics_management_client.models.Function :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Function"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Function"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.retrieve_default_definition.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'functionName': self._serialize.url("function_name", function_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - body_content_kwargs = {} # type: Dict[str, Any] if function_retrieve_default_definition_parameters is not None: - body_content = self._serialize.body(function_retrieve_default_definition_parameters, 'FunctionRetrieveDefaultDefinitionParameters') + _json = self._serialize.body(function_retrieve_default_definition_parameters, 'FunctionRetrieveDefaultDefinitionParameters') else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + _json = None + + request = 
build_retrieve_default_definition_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + function_name=function_name, + content_type=content_type, + json=_json, + template_url=self.retrieve_default_definition.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('Function', pipeline_response) @@ -639,4 +911,6 @@ def retrieve_default_definition( return cls(pipeline_response, deserialized, {}) return deserialized - retrieve_default_definition.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/RetrieveDefaultDefinition'} # type: ignore + + retrieve_default_definition.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/retrieveDefaultDefinition'} # type: ignore + diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_inputs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_inputs_operations.py index 890d33f1b8b1..e38cfdfe309e 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_inputs_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_inputs_operations.py @@ -5,25 +5,288 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING +import functools +from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +from msrest import Serializer + +from .. 
import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +JSONType = Any +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_create_or_replace_request( + subscription_id: str, + resource_group_name: str, + job_name: str, + input_name: str, + *, + json: JSONType = None, + content: Any = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "jobName": _SERIALIZER.url("job_name", job_name, 'str'), + "inputName": _SERIALIZER.url("input_name", input_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=url, + params=query_parameters, + headers=header_parameters, + json=json, + content=content, + **kwargs + ) + + +def build_update_request( + subscription_id: str, + resource_group_name: str, + job_name: str, + input_name: str, + *, + json: JSONType = None, + content: Any = None, + if_match: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "jobName": _SERIALIZER.url("job_name", job_name, 'str'), + "inputName": _SERIALIZER.url("input_name", input_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: 
Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PATCH", + url=url, + params=query_parameters, + headers=header_parameters, + json=json, + content=content, + **kwargs + ) + + +def build_delete_request( + subscription_id: str, + resource_group_name: str, + job_name: str, + input_name: str, + **kwargs: Any +) -> HttpRequest: + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "jobName": _SERIALIZER.url("job_name", job_name, 'str'), + "inputName": _SERIALIZER.url("input_name", input_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_get_request( + subscription_id: str, + resource_group_name: str, + job_name: str, + input_name: str, + **kwargs: Any +) -> HttpRequest: + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "jobName": _SERIALIZER.url("job_name", job_name, 'str'), + "inputName": _SERIALIZER.url("input_name", input_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_list_by_streaming_job_request( + subscription_id: str, + resource_group_name: str, + job_name: str, + *, + select: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", 
'/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "jobName": _SERIALIZER.url("job_name", job_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + if select is not None: + query_parameters['$select'] = _SERIALIZER.query("select", select, 'str') + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_test_request_initial( + subscription_id: str, + resource_group_name: str, + job_name: str, + input_name: str, + *, + json: JSONType = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}/test') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "jobName": _SERIALIZER.url("job_name", job_name, 'str'), + "inputName": _SERIALIZER.url("input_name", input_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + json=json, + content=content, + **kwargs + ) class InputsOperations(object): """InputsOperations operations. @@ -39,7 +302,7 @@ class InputsOperations(object): :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -47,17 +310,17 @@ def __init__(self, client, config, serializer, deserializer): self._deserialize = deserializer self._config = config + @distributed_trace def create_or_replace( self, - resource_group_name, # type: str - job_name, # type: str - input_name, # type: str - input, # type: "models.Input" - if_match=None, # type: Optional[str] - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) 
-> "models.Input" + resource_group_name: str, + job_name: str, + input_name: str, + input: "_models.Input", + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + **kwargs: Any + ) -> "_models.Input": """Creates an input or replaces an already existing input under an existing streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -80,74 +343,67 @@ def create_or_replace( :rtype: ~stream_analytics_management_client.models.Input :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Input"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Input"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create_or_replace.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'inputName': self._serialize.url("input_name", input_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(input, 'Input') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(input, 'Input') + + request = build_create_or_replace_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + input_name=input_name, + content_type=content_type, + json=_json, + if_match=if_match, + if_none_match=if_none_match, + template_url=self.create_or_replace.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if 
response.status_code == 200: response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Input', pipeline_response) if response.status_code == 201: response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Input', pipeline_response) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized + create_or_replace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}'} # type: ignore + + @distributed_trace def update( self, - resource_group_name, # type: str - job_name, # type: str - input_name, # type: str - input, # type: "models.Input" - if_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.Input" + resource_group_name: str, + job_name: str, + input_name: str, + input: "_models.Input", + if_match: Optional[str] = None, + **kwargs: Any + ) -> "_models.Input": """Updates an existing input under an existing streaming job. This can be used to partially update (i.e. update one or two properties) an input without affecting the rest of the job or input definition. @@ -171,65 +427,58 @@ def update( :rtype: ~stream_analytics_management_client.models.Input :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Input"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Input"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'inputName': self._serialize.url("input_name", input_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(input, 'Input') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(input, 'Input') + + request = build_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + input_name=input_name, + content_type=content_type, + json=_json, + if_match=if_match, +
template_url=self.update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Input', pipeline_response) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}'} # type: ignore + + @distributed_trace def delete( self, - resource_group_name, # type: str - job_name, # type: str - input_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None + resource_group_name: str, + job_name: str, + input_name: str, + **kwargs: Any + ) -> None: """Deletes an input from the streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -248,46 +497,40 @@ def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'inputName': self._serialize.url("input_name", input_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + input_name=input_name, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) delete.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}'} # type: ignore + + @distributed_trace def get( self, - resource_group_name, # type: str - job_name, # type: str - input_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "models.Input" + resource_group_name: str, + job_name: str, + input_name: str, + **kwargs: Any + ) -> "_models.Input": """Gets details about the specified input. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -301,58 +544,52 @@ def get( :rtype: ~stream_analytics_management_client.models.Input :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Input"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Input"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'inputName': self._serialize.url("input_name", input_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + input_name=input_name, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Input', pipeline_response) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}'} # type: ignore + + @distributed_trace def list_by_streaming_job( self, - resource_group_name, # type: str - job_name, # type: str - select=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) 
-> Iterable["models.InputListResult"] + resource_group_name: str, + job_name: str, + select: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.InputListResult"]: """Lists all of the inputs under the specified streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -365,46 +602,44 @@ def list_by_streaming_job( :type select: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either InputListResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.InputListResult] + :rtype: + ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.InputListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.InputListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.InputListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_streaming_job.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if select is not None: - query_parameters['$select'] = self._serialize.query("select", select, 'str') - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_streaming_job_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + select=select, + template_url=self.list_by_streaming_job.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_streaming_job_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + select=select, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('InputListResult', pipeline_response) + deserialized = self._deserialize("InputListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -418,10 +653,12 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise 
HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response + return ItemPaged( get_next, extract_data ) @@ -429,48 +666,37 @@ def get_next(next_link=None): def _test_initial( self, - resource_group_name, # type: str - job_name, # type: str - input_name, # type: str - input=None, # type: Optional["models.Input"] - **kwargs # type: Any - ): - # type: (...) -> Optional["models.ResourceTestStatus"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ResourceTestStatus"]] + resource_group_name: str, + job_name: str, + input_name: str, + input: Optional["_models.Input"] = None, + **kwargs: Any + ) -> Optional["_models.ResourceTestStatus"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ResourceTestStatus"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._test_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'inputName': self._serialize.url("input_name", input_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - body_content_kwargs = {} # type: Dict[str, Any] if input is not None: - body_content = self._serialize.body(input, 'Input') + _json = self._serialize.body(input, 'Input') else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + _json = None + + request = build_test_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + input_name=input_name, + content_type=content_type, + json=_json, + template_url=self._test_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -486,17 +712,19 @@ def _test_initial( return cls(pipeline_response, deserialized, {}) return deserialized + _test_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}/test'} # type: ignore + + @distributed_trace def begin_test( self, - resource_group_name, # type: str - 
job_name, # type: str - input_name, # type: str - input=None, # type: Optional["models.Input"] - **kwargs # type: Any - ): - # type: (...) -> LROPoller["models.ResourceTestStatus"] + resource_group_name: str, + job_name: str, + input_name: str, + input: Optional["_models.Input"] = None, + **kwargs: Any + ) -> LROPoller["_models.ResourceTestStatus"]: """Tests whether an input’s datasource is reachable and usable by the Azure Stream Analytics service. @@ -514,16 +742,21 @@ def begin_test( :type input: ~stream_analytics_management_client.models.Input :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of LROPoller that returns either ResourceTestStatus or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~stream_analytics_management_client.models.ResourceTestStatus] - :raises ~azure.core.exceptions.HttpResponseError: + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ResourceTestStatus or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~stream_analytics_management_client.models.ResourceTestStatus] + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.ResourceTestStatus"] + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ResourceTestStatus"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -535,21 +768,21 @@ def begin_test( job_name=job_name, input_name=input_name, input=input, + content_type=content_type, cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): + response = pipeline_response.http_response deserialized = self._deserialize('ResourceTestStatus', pipeline_response) - if cls: return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -561,4 +794,5 @@ def get_long_running_output(pipeline_response): ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_test.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}/test'} # type: ignore diff --git 
a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_operations.py index 1a63db586859..b9c24b592305 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_operations.py @@ -5,23 +5,50 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING +import functools +from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +from msrest import Serializer + +from .. import models as _models +from .._vendor import _convert_request +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_list_request( + **kwargs: Any +) -> HttpRequest: + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/providers/Microsoft.StreamAnalytics/operations') + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) class Operations(object): """Operations operations. @@ -37,7 +64,7 @@ class Operations(object): :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -45,47 +72,45 @@ def __init__(self, client, config, serializer, deserializer): self._deserialize = deserializer self._config = config + @distributed_trace def list( self, - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.OperationListResult"] + **kwargs: Any + ) -> Iterable["_models.OperationListResult"]: """Lists all of the available Stream Analytics related operations. 
:keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either OperationListResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.OperationListResult] + :rtype: + ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.OperationListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = build_list_request( + template_url=self.list.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('OperationListResult', pipeline_response) + deserialized = self._deserialize("OperationListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -99,10 +124,12 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response + return ItemPaged( get_next, extract_data ) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_outputs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_outputs_operations.py index a18f17686979..8a05a16e4404 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_outputs_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_outputs_operations.py @@ -5,25 +5,288 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING +import functools +from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +from msrest import Serializer + +from .. import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +JSONType = Any +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_create_or_replace_request( + subscription_id: str, + resource_group_name: str, + job_name: str, + output_name: str, + *, + json: JSONType = None, + content: Any = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "jobName": _SERIALIZER.url("job_name", job_name, 'str'), + "outputName": _SERIALIZER.url("output_name", output_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=url, + params=query_parameters, + headers=header_parameters, + json=json, + content=content, + **kwargs + ) + + +def build_update_request( + subscription_id: str, + resource_group_name: str, + 
job_name: str, + output_name: str, + *, + json: JSONType = None, + content: Any = None, + if_match: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "jobName": _SERIALIZER.url("job_name", job_name, 'str'), + "outputName": _SERIALIZER.url("output_name", output_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PATCH", + url=url, + params=query_parameters, + headers=header_parameters, + json=json, + content=content, + **kwargs + ) + + +def build_delete_request( + subscription_id: str, + resource_group_name: str, + job_name: str, + output_name: str, + **kwargs: Any +) -> HttpRequest: + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "jobName": _SERIALIZER.url("job_name", job_name, 'str'), + "outputName": _SERIALIZER.url("output_name", output_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_get_request( + subscription_id: str, + resource_group_name: str, + job_name: str, + output_name: str, + **kwargs: Any +) -> HttpRequest: + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}') + path_format_arguments = { + "subscriptionId": 
_SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "jobName": _SERIALIZER.url("job_name", job_name, 'str'), + "outputName": _SERIALIZER.url("output_name", output_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_list_by_streaming_job_request( + subscription_id: str, + resource_group_name: str, + job_name: str, + *, + select: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "jobName": _SERIALIZER.url("job_name", job_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + if select is not None: + query_parameters['$select'] = _SERIALIZER.query("select", select, 'str') + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_test_request_initial( + subscription_id: str, + resource_group_name: str, + job_name: str, + output_name: str, + *, + json: JSONType = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}/test') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "jobName": _SERIALIZER.url("job_name", job_name, 'str'), + "outputName": _SERIALIZER.url("output_name", output_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = 
kwargs.pop("headers", {}) # type: Dict[str, Any] + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + json=json, + content=content, + **kwargs + ) class OutputsOperations(object): """OutputsOperations operations. @@ -39,7 +302,7 @@ class OutputsOperations(object): :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -47,17 +310,17 @@ def __init__(self, client, config, serializer, deserializer): self._deserialize = deserializer self._config = config + @distributed_trace def create_or_replace( self, - resource_group_name, # type: str - job_name, # type: str - output_name, # type: str - output, # type: "models.Output" - if_match=None, # type: Optional[str] - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.Output" + resource_group_name: str, + job_name: str, + output_name: str, + output: "_models.Output", + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + **kwargs: Any + ) -> "_models.Output": """Creates an output or replaces an already existing output under an existing streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -81,74 +344,67 @@ def create_or_replace( :rtype: ~stream_analytics_management_client.models.Output :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Output"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Output"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create_or_replace.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'outputName': self._serialize.url("output_name", output_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(output, 'Output') - body_content_kwargs['content'] = body_content - request = self._client.put(url, 
query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(output, 'Output') + + request = build_create_or_replace_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + output_name=output_name, + content_type=content_type, + json=_json, + if_match=if_match, + if_none_match=if_none_match, + template_url=self.create_or_replace.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Output', pipeline_response) if response.status_code == 201: response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Output', pipeline_response) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized + create_or_replace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}'} # type: ignore + + @distributed_trace def update( self, - resource_group_name, # type: str - job_name, # type: str - output_name, # type: str - output, # type: "models.Output" - if_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.Output" + resource_group_name: str, + job_name: str, + output_name: str, + output: "_models.Output", + if_match: Optional[str] = None, + **kwargs: Any + ) -> "_models.Output": """Updates an existing output under an existing streaming job. This can be used to partially update (i.e. update one or two properties) an output without affecting the rest of the job or output definition.
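For orientation, a minimal usage sketch of the regenerated create_or_replace/update path. This is illustrative only: the angle-bracketed names are placeholders, azure-identity is assumed to be installed for the credential, and the etag attribute read off the returned model is assumed from the generated models.

from azure.identity import DefaultAzureCredential
from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient

client = StreamAnalyticsManagementClient(DefaultAzureCredential(), "<subscription-id>")

# Read the current definition; the ETag response header deserialized above is
# surfaced on the returned model (attribute name assumed from the generated models).
current = client.outputs.get("<resource-group>", "<job-name>", "<output-name>")

# if_match makes the PATCH conditional, so the call fails with 412
# Precondition Failed if the output was modified after the read above.
updated = client.outputs.update(
    "<resource-group>",
    "<job-name>",
    "<output-name>",
    output=current,
    if_match=current.etag,
)
print(updated.name)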
@@ -173,65 +429,58 @@ def update( :rtype: ~stream_analytics_management_client.models.Output :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Output"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Output"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'outputName': self._serialize.url("output_name", output_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(output, 'Output') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(output, 'Output') + + request = build_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + output_name=output_name, + content_type=content_type, + json=_json, + if_match=if_match, + template_url=self.update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Output', pipeline_response) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}'} # type: ignore + + @distributed_trace def delete( self, - resource_group_name, # type: str - job_name, # type: str - output_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> None + resource_group_name: str, + job_name: str, + output_name: str, + **kwargs: Any + ) -> None: """Deletes an output from the streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -250,46 +499,40 @@ def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'outputName': self._serialize.url("output_name", output_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + output_name=output_name, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}'} # type: ignore + + @distributed_trace def get( self, - resource_group_name, # type: str - job_name, # type: str - output_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "models.Output" + resource_group_name: str, + job_name: str, + output_name: str, + **kwargs: Any + ) -> "_models.Output": """Gets details about the specified output. :param resource_group_name: The name of the resource group. The name is case insensitive. 
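A similar sketch for the get/delete pair, under the same placeholder assumptions. get issues the GET built by build_get_request; delete returns None and raises HttpResponseError on anything other than the 200/204 statuses checked above.

from azure.identity import DefaultAzureCredential
from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient

client = StreamAnalyticsManagementClient(DefaultAzureCredential(), "<subscription-id>")

# Fetch one output, then remove it from the streaming job.
output = client.outputs.get("<resource-group>", "<job-name>", "<output-name>")
print(output.name)
client.outputs.delete("<resource-group>", "<job-name>", "<output-name>")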
@@ -303,58 +546,52 @@ def get( :rtype: ~stream_analytics_management_client.models.Output :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Output"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Output"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'outputName': self._serialize.url("output_name", output_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + output_name=output_name, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Output', pipeline_response) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}'} # type: ignore + + @distributed_trace def list_by_streaming_job( self, - resource_group_name, # type: str - job_name, # type: str - select=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.OutputListResult"] + resource_group_name: str, + job_name: str, + select: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.OutputListResult"]: """Lists all of the outputs under the specified streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. 
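And a sketch of consuming the pager returned by list_by_streaming_job (placeholders as before). Iterating the ItemPaged calls get_next/extract_data above until next_link is exhausted; select="*" simply forwards the $select OData parameter wired up in build_list_by_streaming_job_request.

from azure.identity import DefaultAzureCredential
from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient

client = StreamAnalyticsManagementClient(DefaultAzureCredential(), "<subscription-id>")

# Pages are fetched lazily as the loop advances.
for output in client.outputs.list_by_streaming_job(
    "<resource-group>", "<job-name>", select="*"
):
    print(output.name)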
@@ -367,46 +604,44 @@ def list_by_streaming_job( :type select: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either OutputListResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.OutputListResult] + :rtype: + ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.OutputListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.OutputListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.OutputListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_streaming_job.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if select is not None: - query_parameters['$select'] = self._serialize.query("select", select, 'str') - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_streaming_job_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + select=select, + template_url=self.list_by_streaming_job.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_streaming_job_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + select=select, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('OutputListResult', pipeline_response) + deserialized = self._deserialize("OutputListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -420,10 +655,12 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response + return ItemPaged( get_next, extract_data ) @@ -431,48 +668,37 @@ def 
get_next(next_link=None): def _test_initial( self, - resource_group_name, # type: str - job_name, # type: str - output_name, # type: str - output=None, # type: Optional["models.Output"] - **kwargs # type: Any - ): - # type: (...) -> Optional["models.ResourceTestStatus"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ResourceTestStatus"]] + resource_group_name: str, + job_name: str, + output_name: str, + output: Optional["_models.Output"] = None, + **kwargs: Any + ) -> Optional["_models.ResourceTestStatus"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ResourceTestStatus"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._test_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'outputName': self._serialize.url("output_name", output_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - body_content_kwargs = {} # type: Dict[str, Any] if output is not None: - body_content = self._serialize.body(output, 'Output') + _json = self._serialize.body(output, 'Output') else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + _json = None + + request = build_test_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + output_name=output_name, + content_type=content_type, + json=_json, + template_url=self._test_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -488,17 +714,19 @@ def _test_initial( return cls(pipeline_response, deserialized, {}) return deserialized + _test_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}/test'} # type: ignore + + @distributed_trace def begin_test( self, - resource_group_name, # type: str - job_name, # type: str - output_name, # type: str - output=None, # type: Optional["models.Output"] - **kwargs # type: Any - ): - # type: (...) 
-> LROPoller["models.ResourceTestStatus"] + resource_group_name: str, + job_name: str, + output_name: str, + output: Optional["_models.Output"] = None, + **kwargs: Any + ) -> LROPoller["_models.ResourceTestStatus"]: """Tests whether an output’s datasource is reachable and usable by the Azure Stream Analytics service. @@ -516,16 +744,21 @@ def begin_test( :type output: ~stream_analytics_management_client.models.Output :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of LROPoller that returns either ResourceTestStatus or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~stream_analytics_management_client.models.ResourceTestStatus] - :raises ~azure.core.exceptions.HttpResponseError: + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ResourceTestStatus or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~stream_analytics_management_client.models.ResourceTestStatus] + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.ResourceTestStatus"] + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ResourceTestStatus"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -537,21 +770,21 @@ def begin_test( job_name=job_name, output_name=output_name, output=output, + content_type=content_type, cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): + response = pipeline_response.http_response deserialized = self._deserialize('ResourceTestStatus', pipeline_response) - if cls: return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -563,4 +796,5 @@ def get_long_running_output(pipeline_response): ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_test.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}/test'} # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_private_endpoints_operations.py 
b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_private_endpoints_operations.py index e194d816d90c..b81ea7d6cc1b 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_private_endpoints_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_private_endpoints_operations.py @@ -5,25 +5,189 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING +import functools +from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +from msrest import Serializer + +from .. import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +JSONType = Any +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_create_or_update_request( + subscription_id: str, + resource_group_name: str, + cluster_name: str, + private_endpoint_name: str, + *, + json: JSONType = None, + content: Any = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'), + "privateEndpointName": _SERIALIZER.url("private_endpoint_name", private_endpoint_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if if_match is not None: + 
header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=url, + params=query_parameters, + headers=header_parameters, + json=json, + content=content, + **kwargs + ) + + +def build_get_request( + subscription_id: str, + resource_group_name: str, + cluster_name: str, + private_endpoint_name: str, + **kwargs: Any +) -> HttpRequest: + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'), + "privateEndpointName": _SERIALIZER.url("private_endpoint_name", private_endpoint_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_delete_request_initial( + subscription_id: str, + resource_group_name: str, + cluster_name: str, + private_endpoint_name: str, + **kwargs: Any +) -> HttpRequest: + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'), + "privateEndpointName": _SERIALIZER.url("private_endpoint_name", private_endpoint_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_list_by_cluster_request( + subscription_id: str, + resource_group_name: str, + cluster_name: str, + **kwargs: 
Any +) -> HttpRequest: + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) class PrivateEndpointsOperations(object): """PrivateEndpointsOperations operations. @@ -39,7 +203,7 @@ class PrivateEndpointsOperations(object): :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -47,17 +211,17 @@ def __init__(self, client, config, serializer, deserializer): self._deserialize = deserializer self._config = config + @distributed_trace def create_or_update( self, - resource_group_name, # type: str - cluster_name, # type: str - private_endpoint_name, # type: str - private_endpoint, # type: "models.PrivateEndpoint" - if_match=None, # type: Optional[str] - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.PrivateEndpoint" + resource_group_name: str, + cluster_name: str, + private_endpoint_name: str, + private_endpoint: "_models.PrivateEndpoint", + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + **kwargs: Any + ) -> "_models.PrivateEndpoint": """Creates a Stream Analytics Private Endpoint or replaces an already existing Private Endpoint. :param resource_group_name: The name of the resource group. The name is case insensitive. 
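A hedged sketch of the private-endpoint create path. The empty PrivateEndpoint() payload is an assumption for illustration (real calls would populate the endpoint's connection properties), and if_none_match="*" turns the PUT into a create-only call that fails with 412 if the endpoint already exists.

from azure.identity import DefaultAzureCredential
from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient
from azure.mgmt.streamanalytics.models import PrivateEndpoint

client = StreamAnalyticsManagementClient(DefaultAzureCredential(), "<subscription-id>")

# Create-only semantics via the If-None-Match header set in
# build_create_or_update_request above.
endpoint = client.private_endpoints.create_or_update(
    "<resource-group>",
    "<cluster-name>",
    "<endpoint-name>",
    private_endpoint=PrivateEndpoint(),
    if_none_match="*",
)
print(endpoint.name)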
@@ -81,48 +245,36 @@ def create_or_update( :rtype: ~stream_analytics_management_client.models.PrivateEndpoint :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpoint"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpoint"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), - 'privateEndpointName': self._serialize.url("private_endpoint_name", private_endpoint_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(private_endpoint, 'PrivateEndpoint') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(private_endpoint, 'PrivateEndpoint') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_name=cluster_name, + private_endpoint_name=private_endpoint_name, + content_type=content_type, + json=_json, + if_match=if_match, + if_none_match=if_none_match, + template_url=self.create_or_update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -135,16 +287,18 @@ def create_or_update( return cls(pipeline_response, deserialized, {}) return deserialized + create_or_update.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}'} # type: ignore + + @distributed_trace def get( self, - resource_group_name, # type: str - cluster_name, # type: str - private_endpoint_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "models.PrivateEndpoint" + resource_group_name: str, + cluster_name: str, + private_endpoint_name: str, + **kwargs: Any + ) -> "_models.PrivateEndpoint": """Gets information about the specified Private Endpoint. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -158,39 +312,29 @@ def get( :rtype: ~stream_analytics_management_client.models.PrivateEndpoint :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpoint"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpoint"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), - 'privateEndpointName': self._serialize.url("private_endpoint_name", private_endpoint_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_name=cluster_name, + private_endpoint_name=private_endpoint_name, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('PrivateEndpoint', pipeline_response) @@ -199,64 +343,55 @@ def get( return cls(pipeline_response, deserialized, {}) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}'} # type: ignore + def _delete_initial( self, - resource_group_name, # type: str - cluster_name, # type: str - private_endpoint_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> None + resource_group_name: str, + cluster_name: str, + private_endpoint_name: str, + **kwargs: Any + ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" - accept = "application/json" - - # Construct URL - url = self._delete_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), - 'privateEndpointName': self._serialize.url("private_endpoint_name", private_endpoint_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_delete_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_name=cluster_name, + private_endpoint_name=private_endpoint_name, + template_url=self._delete_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}'} # type: ignore + + @distributed_trace def begin_delete( self, - resource_group_name, # type: str - cluster_name, # type: str - private_endpoint_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + resource_group_name: str, + cluster_name: str, + private_endpoint_name: str, + **kwargs: Any + ) -> LROPoller[None]: """Delete the specified private endpoint. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -267,15 +402,17 @@ def begin_delete( :type private_endpoint_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
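As a rough usage sketch of the polling options described above (assuming azure-identity is installed, that the operations group is exposed as client.private_endpoints, and placeholder resource names):

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient

    client = StreamAnalyticsManagementClient(
        credential=DefaultAzureCredential(),
        subscription_id="<subscription-id>",  # placeholder
    )

    # Default: ARMPolling drives the LRO until the service reports completion.
    poller = client.private_endpoints.begin_delete(
        resource_group_name="<resource-group>",
        cluster_name="<cluster-name>",
        private_endpoint_name="<endpoint-name>",
    )
    poller.result()  # blocks until the delete finishes

    # polling=False sends the initial DELETE and skips status polling entirely;
    # result() then returns as soon as the initial response is in.
    poller = client.private_endpoints.begin_delete(
        resource_group_name="<resource-group>",
        cluster_name="<cluster-name>",
        private_endpoint_name="<endpoint-name>",
        polling=False,
    )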
:paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType[None] lro_delay = kwargs.pop( 'polling_interval', @@ -290,15 +427,14 @@ def begin_delete( cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -310,15 +446,16 @@ def get_long_running_output(pipeline_response): ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}'} # type: ignore + @distributed_trace def list_by_cluster( self, - resource_group_name, # type: str - cluster_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.PrivateEndpointListResult"] + resource_group_name: str, + cluster_name: str, + **kwargs: Any + ) -> Iterable["_models.PrivateEndpointListResult"]: """Lists the private endpoints in the cluster. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -326,45 +463,44 @@ def list_by_cluster( :param cluster_name: The name of the cluster. 
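The pager returned by list_by_cluster can be consumed with a plain for-loop; ItemPaged follows the service's next_link transparently. A minimal sketch with placeholder names:

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient

    client = StreamAnalyticsManagementClient(
        credential=DefaultAzureCredential(),
        subscription_id="<subscription-id>",
    )

    # Each iteration may trigger a further GET once the current page is exhausted.
    for endpoint in client.private_endpoints.list_by_cluster(
        resource_group_name="<resource-group>",
        cluster_name="<cluster-name>",
    ):
        print(endpoint.name)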
:type cluster_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either PrivateEndpointListResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.PrivateEndpointListResult] + :return: An iterator like instance of either PrivateEndpointListResult or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.PrivateEndpointListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_cluster.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_cluster_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_name=cluster_name, + template_url=self.list_by_cluster.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_cluster_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + cluster_name=cluster_name, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('PrivateEndpointListResult', pipeline_response) + deserialized = self._deserialize("PrivateEndpointListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -377,12 +513,13 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response + return ItemPaged( get_next, extract_data ) diff --git 
a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_streaming_jobs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_streaming_jobs_operations.py index f3228537a7ed..881f5c78632c 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_streaming_jobs_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_streaming_jobs_operations.py @@ -5,25 +5,394 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING +import functools +from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +from msrest import Serializer + +from .. 
import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +JSONType = Any +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_create_or_replace_request_initial( + subscription_id: str, + resource_group_name: str, + job_name: str, + *, + json: JSONType = None, + content: Any = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "jobName": _SERIALIZER.url("job_name", job_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=url, + params=query_parameters, + headers=header_parameters, + json=json, + content=content, + **kwargs + ) + + +def build_update_request( + subscription_id: str, + resource_group_name: str, + job_name: str, + *, + json: JSONType = None, + content: Any = None, + if_match: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "jobName": _SERIALIZER.url("job_name", job_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if content_type is not None: + header_parameters['Content-Type'] = 
_SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PATCH", + url=url, + params=query_parameters, + headers=header_parameters, + json=json, + content=content, + **kwargs + ) + + +def build_delete_request_initial( + subscription_id: str, + resource_group_name: str, + job_name: str, + **kwargs: Any +) -> HttpRequest: + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "jobName": _SERIALIZER.url("job_name", job_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_get_request( + subscription_id: str, + resource_group_name: str, + job_name: str, + *, + expand: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "jobName": _SERIALIZER.url("job_name", job_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + if expand is not None: + query_parameters['$expand'] = _SERIALIZER.query("expand", expand, 'str') + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_list_by_resource_group_request( + subscription_id: str, + resource_group_name: str, + *, + expand: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', 
max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + if expand is not None: + query_parameters['$expand'] = _SERIALIZER.query("expand", expand, 'str') + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_list_request( + subscription_id: str, + *, + expand: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/streamingjobs') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + if expand is not None: + query_parameters['$expand'] = _SERIALIZER.query("expand", expand, 'str') + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_start_request_initial( + subscription_id: str, + resource_group_name: str, + job_name: str, + *, + json: JSONType = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/start') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "jobName": _SERIALIZER.url("job_name", job_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + json=json, + content=content, + **kwargs + ) + + +def build_stop_request_initial( + subscription_id: str, + resource_group_name: str, + job_name: str, + **kwargs: Any +) -> HttpRequest: + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + 
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/stop') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "jobName": _SERIALIZER.url("job_name", job_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_scale_request_initial( + subscription_id: str, + resource_group_name: str, + job_name: str, + *, + json: JSONType = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/scale') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "jobName": _SERIALIZER.url("job_name", job_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + json=json, + content=content, + **kwargs + ) class StreamingJobsOperations(object): """StreamingJobsOperations operations. @@ -39,7 +408,7 @@ class StreamingJobsOperations(object): :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -49,49 +418,36 @@ def __init__(self, client, config, serializer, deserializer): def _create_or_replace_initial( self, - resource_group_name, # type: str - job_name, # type: str - streaming_job, # type: "models.StreamingJob" - if_match=None, # type: Optional[str] - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) 
-> "models.StreamingJob" - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJob"] + resource_group_name: str, + job_name: str, + streaming_job: "_models.StreamingJob", + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + **kwargs: Any + ) -> "_models.StreamingJob": + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJob"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._create_or_replace_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(streaming_job, 'StreamingJob') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(streaming_job, 'StreamingJob') + + request = build_create_or_replace_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + content_type=content_type, + json=_json, + if_match=if_match, + if_none_match=if_none_match, + template_url=self._create_or_replace_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -102,28 +458,32 @@ def _create_or_replace_initial( response_headers = {} if response.status_code == 200: response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('StreamingJob', pipeline_response) if response.status_code == 201: response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('StreamingJob', pipeline_response) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized + _create_or_replace_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}'} # type: ignore + + @distributed_trace def 
begin_create_or_replace( self, - resource_group_name, # type: str - job_name, # type: str - streaming_job, # type: "models.StreamingJob" - if_match=None, # type: Optional[str] - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> LROPoller["models.StreamingJob"] + resource_group_name: str, + job_name: str, + streaming_job: "_models.StreamingJob", + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + **kwargs: Any + ) -> LROPoller["_models.StreamingJob"]: """Creates a streaming job or replaces an already existing streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -143,16 +503,20 @@ def begin_create_or_replace( :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of LROPoller that returns either StreamingJob or the result of cls(response) + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either StreamingJob or the result of + cls(response) :rtype: ~azure.core.polling.LROPoller[~stream_analytics_management_client.models.StreamingJob] - :raises ~azure.core.exceptions.HttpResponseError: + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJob"] + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJob"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -165,24 +529,24 @@ def begin_create_or_replace( streaming_job=streaming_job, if_match=if_match, if_none_match=if_none_match, + content_type=content_type, cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): response_headers = {} response = pipeline_response.http_response response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('StreamingJob', pipeline_response) - if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -194,17 +558,18 @@ def get_long_running_output(pipeline_response): ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_replace.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}'} # type: ignore + @distributed_trace def update( self, - resource_group_name, # type: str - job_name, # type: str - streaming_job, # type: "models.StreamingJob" - if_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.StreamingJob" + resource_group_name: str, + job_name: str, + streaming_job: "_models.StreamingJob", + if_match: Optional[str] = None, + **kwargs: Any + ) -> "_models.StreamingJob": """Updates an existing streaming job. This can be used to partially update (i.e. update one or two properties) a streaming job without affecting the rest of the job definition. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -226,87 +591,71 @@ def update( :type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :rtype: ~stream_analytics_management_client.models.StreamingJob :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJob"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJob"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(streaming_job, 'StreamingJob') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(streaming_job, 'StreamingJob') + + request = build_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + content_type=content_type, + json=_json, + if_match=if_match, + template_url=self.update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise
HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('StreamingJob', pipeline_response) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}'} # type: ignore + def _delete_initial( self, - resource_group_name, # type: str - job_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None + resource_group_name: str, + job_name: str, + **kwargs: Any + ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - - # Construct URL - url = self._delete_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] + + request = build_delete_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + template_url=self._delete_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -319,13 +668,14 @@ def _delete_initial( _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}'} # type: ignore + + @distributed_trace def begin_delete( self, - resource_group_name, # type: str - job_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + resource_group_name: str, + job_name: str, + **kwargs: Any + ) -> LROPoller[None]: """Deletes a streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -334,15 +684,17 @@ def begin_delete( :type job_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
:paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType[None] lro_delay = kwargs.pop( 'polling_interval', @@ -356,15 +708,14 @@ def begin_delete( cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -376,16 +727,17 @@ def get_long_running_output(pipeline_response): ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}'} # type: ignore + @distributed_trace def get( self, - resource_group_name, # type: str - job_name, # type: str - expand=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.StreamingJob" + resource_group_name: str, + job_name: str, + expand: Optional[str] = None, + **kwargs: Any + ) -> "_models.StreamingJob": """Gets details about the specified streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. 
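Since get surfaces the ETag response header and update accepts if_match, the two can be paired for optimistic concurrency; a rough sketch, assuming placeholder names and that the cls callback receives (pipeline_response, deserialized, response_headers) as documented:

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient
    from azure.mgmt.streamanalytics.models import StreamingJob

    client = StreamAnalyticsManagementClient(
        credential=DefaultAzureCredential(),
        subscription_id="<subscription-id>",
    )

    # Capture the ETag alongside the deserialized job via the cls callback.
    job, etag = client.streaming_jobs.get(
        resource_group_name="<resource-group>",
        job_name="<job-name>",
        cls=lambda pipeline_response, deserialized, headers: (deserialized, headers["ETag"]),
    )

    # PATCH only if the job is unchanged since the read; otherwise the service
    # rejects the request with a precondition failure.
    client.streaming_jobs.update(
        resource_group_name="<resource-group>",
        job_name="<job-name>",
        streaming_job=StreamingJob(tags={"owner": "data-team"}),  # partial update
        if_match=etag,
    )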
@@ -402,58 +754,51 @@ def get( :rtype: ~stream_analytics_management_client.models.StreamingJob :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJob"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJob"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if expand is not None: - query_parameters['$expand'] = self._serialize.query("expand", expand, 'str') - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + expand=expand, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('StreamingJob', pipeline_response) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}'} # type: ignore + + @distributed_trace def list_by_resource_group( self, - resource_group_name, # type: str - expand=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.StreamingJobListResult"] + resource_group_name: str, + expand: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.StreamingJobListResult"]: """Lists all of the streaming jobs in the specified resource group. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -464,46 +809,44 @@ def list_by_resource_group( 'transformation', 'outputs', and 'functions'. 
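A sketch of the $expand parameter in practice; the expand string is a comma-separated subset of the property names listed above, and resource names are placeholders:

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient

    client = StreamAnalyticsManagementClient(
        credential=DefaultAzureCredential(),
        subscription_id="<subscription-id>",
    )

    # Ask the service to inline inputs, outputs, and the transformation
    # for every job in the resource group instead of returning stubs.
    for job in client.streaming_jobs.list_by_resource_group(
        resource_group_name="<resource-group>",
        expand="inputs,outputs,transformation",
    ):
        print(job.name, job.job_state)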
:type expand: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either StreamingJobListResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.StreamingJobListResult] + :return: An iterator like instance of either StreamingJobListResult or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.StreamingJobListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJobListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJobListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_resource_group.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if expand is not None: - query_parameters['$expand'] = self._serialize.query("expand", expand, 'str') - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_resource_group_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + expand=expand, + template_url=self.list_by_resource_group.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_resource_group_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + expand=expand, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('StreamingJobListResult', pipeline_response) + deserialized = self._deserialize("StreamingJobListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -517,21 +860,23 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response + return ItemPaged( get_next, extract_data ) list_by_resource_group.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs'} # type: ignore + @distributed_trace def list( self, - expand=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.StreamingJobListResult"] + expand: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.StreamingJobListResult"]: """Lists all of the streaming jobs in the given subscription. :param expand: The $expand OData query parameter. This is a comma-separated list of additional @@ -540,45 +885,42 @@ def list( 'transformation', 'outputs', and 'functions'. :type expand: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either StreamingJobListResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.StreamingJobListResult] + :return: An iterator like instance of either StreamingJobListResult or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.StreamingJobListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJobListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJobListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if expand is not None: - query_parameters['$expand'] = self._serialize.query("expand", expand, 'str') - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + subscription_id=self._config.subscription_id, + expand=expand, + template_url=self.list.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + subscription_id=self._config.subscription_id, + expand=expand, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('StreamingJobListResult', pipeline_response) + deserialized = self._deserialize("StreamingJobListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -592,10 +934,12 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, 
error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response + return ItemPaged( get_next, extract_data ) @@ -603,44 +947,35 @@ def get_next(next_link=None): def _start_initial( self, - resource_group_name, # type: str - job_name, # type: str - start_job_parameters=None, # type: Optional["models.StartStreamingJobParameters"] - **kwargs # type: Any - ): - # type: (...) -> None + resource_group_name: str, + job_name: str, + start_job_parameters: Optional["_models.StartStreamingJobParameters"] = None, + **kwargs: Any + ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self._start_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - body_content_kwargs = {} # type: Dict[str, Any] if start_job_parameters is not None: - body_content = self._serialize.body(start_job_parameters, 'StartStreamingJobParameters') + _json = self._serialize.body(start_job_parameters, 'StartStreamingJobParameters') else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + _json = None + + request = build_start_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + content_type=content_type, + json=_json, + template_url=self._start_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -653,14 +988,15 @@ def _start_initial( _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/start'} # type: ignore + + @distributed_trace def begin_start( self, - resource_group_name, # type: str - job_name, # type: str - start_job_parameters=None, # type: Optional["models.StartStreamingJobParameters"] - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + resource_group_name: str, + job_name: str, + start_job_parameters: Optional["_models.StartStreamingJobParameters"] = None, + **kwargs: Any + ) -> LROPoller[None]: """Starts a streaming job. 
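A hedged sketch of starting a job with explicit output-start semantics; StartStreamingJobParameters and the "JobStartTime" output_start_mode value come from the models package, and resource names are placeholders:

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient
    from azure.mgmt.streamanalytics.models import StartStreamingJobParameters

    client = StreamAnalyticsManagementClient(
        credential=DefaultAzureCredential(),
        subscription_id="<subscription-id>",
    )

    # Begin reading input from the moment the job starts.
    poller = client.streaming_jobs.begin_start(
        resource_group_name="<resource-group>",
        job_name="<job-name>",
        start_job_parameters=StartStreamingJobParameters(output_start_mode="JobStartTime"),
    )
    poller.result()  # completes once the job reaches a running state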
Once a job is started it will start processing input events and produce output. @@ -669,18 +1005,22 @@ def begin_start( :param job_name: The name of the streaming job. :type job_name: str :param start_job_parameters: Parameters applicable to a start streaming job operation. - :type start_job_parameters: ~stream_analytics_management_client.models.StartStreamingJobParameters + :type start_job_parameters: + ~stream_analytics_management_client.models.StartStreamingJobParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType[None] lro_delay = kwargs.pop( 'polling_interval', @@ -692,18 +1032,18 @@ def begin_start( resource_group_name=resource_group_name, job_name=job_name, start_job_parameters=start_job_parameters, + content_type=content_type, cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -715,39 +1055,31 @@ def get_long_running_output(pipeline_response): ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/start'} # type: ignore def _stop_initial( self, - resource_group_name, # type: str - job_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> None + resource_group_name: str, + job_name: str, + **kwargs: Any + ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - - # Construct URL - url = self._stop_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] + + request = build_stop_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + template_url=self._stop_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -760,13 +1092,14 @@ def _stop_initial( _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/stop'} # type: ignore + + @distributed_trace def begin_stop( self, - resource_group_name, # type: str - job_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + resource_group_name: str, + job_name: str, + **kwargs: Any + ) -> LROPoller[None]: """Stops a running streaming job. This will cause a running streaming job to stop processing input events and producing output. @@ -776,15 +1109,17 @@ def begin_stop( :type job_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType[None] lro_delay = kwargs.pop( 'polling_interval', @@ -798,15 +1133,14 @@ def begin_stop( cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -818,4 +1152,117 @@ def get_long_running_output(pipeline_response): ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/stop'} # type: ignore + + def _scale_initial( + self, + resource_group_name: str, + job_name: str, + scale_job_parameters: Optional["_models.ScaleStreamingJobParameters"] = None, + **kwargs: Any + ) -> None: + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + if scale_job_parameters is not None: + _json = self._serialize.body(scale_job_parameters, 'ScaleStreamingJobParameters') + else: + _json = None + + request = build_scale_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + content_type=content_type, + json=_json, + template_url=self._scale_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _scale_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/scale'} # type: ignore + + + @distributed_trace + def begin_scale( + self, + resource_group_name: str, + job_name: str, + scale_job_parameters: Optional["_models.ScaleStreamingJobParameters"] = None, + **kwargs: Any + ) -> LROPoller[None]: + """Scales a streaming job when the job is running. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param scale_job_parameters: Parameters applicable to a scale streaming job operation. 
+ :type scale_job_parameters: + ~stream_analytics_management_client.models.ScaleStreamingJobParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._scale_initial( + resource_group_name=resource_group_name, + job_name=job_name, + scale_job_parameters=scale_job_parameters, + content_type=content_type, + cls=lambda x,y,z: x, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_scale.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/scale'} # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_subscriptions_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_subscriptions_operations.py index 81d2bbf70cdb..ad9588e88b47 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_subscriptions_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_subscriptions_operations.py @@ -5,22 +5,57 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING +import functools +from typing import Any, Callable, Dict, Generic, Optional, TypeVar import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +from msrest import Serializer + +from .. import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_list_quotas_request( + location: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/quotas') + path_format_arguments = { + "location": _SERIALIZER.url("location", location, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) class SubscriptionsOperations(object): """SubscriptionsOperations operations. @@ -36,7 +71,7 @@ class SubscriptionsOperations(object): :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -44,12 +79,12 @@ def __init__(self, client, config, serializer, deserializer): self._deserialize = deserializer self._config = config + @distributed_trace def list_quotas( self, - location, # type: str - **kwargs # type: Any - ): - # type: (...) -> "models.SubscriptionQuotasListResult" + location: str, + **kwargs: Any + ) -> "_models.SubscriptionQuotasListResult": """Retrieves the subscription's current quota information in a particular region. :param location: The region in which to retrieve the subscription's quota information. 
You can @@ -61,37 +96,28 @@ def list_quotas( :rtype: ~stream_analytics_management_client.models.SubscriptionQuotasListResult :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.SubscriptionQuotasListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.SubscriptionQuotasListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - accept = "application/json" - - # Construct URL - url = self.list_quotas.metadata['url'] # type: ignore - path_format_arguments = { - 'location': self._serialize.url("location", location, 'str'), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = build_list_quotas_request( + location=location, + subscription_id=self._config.subscription_id, + template_url=self.list_quotas.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('SubscriptionQuotasListResult', pipeline_response) @@ -99,4 +125,6 @@ def list_quotas( return cls(pipeline_response, deserialized, {}) return deserialized + list_quotas.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/quotas'} # type: ignore + diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_transformations_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_transformations_operations.py index 49f318e3a748..267cff4f5416 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_transformations_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_transformations_operations.py @@ -5,22 +5,163 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING +import functools +from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +from msrest import Serializer + +from .. import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +JSONType = Any +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_create_or_replace_request( + subscription_id: str, + resource_group_name: str, + job_name: str, + transformation_name: str, + *, + json: JSONType = None, + content: Any = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "jobName": _SERIALIZER.url("job_name", job_name, 'str'), + "transformationName": _SERIALIZER.url("transformation_name", transformation_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=url, + params=query_parameters, + headers=header_parameters, + json=json, + content=content, + **kwargs + ) + + +def build_update_request( + subscription_id: str, + resource_group_name: str, + job_name: str, + transformation_name: str, + *, + json: JSONType = None, + content: Any = None, + if_match: Optional[str] = None, + **kwargs: 
Any +) -> HttpRequest: + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "jobName": _SERIALIZER.url("job_name", job_name, 'str'), + "transformationName": _SERIALIZER.url("transformation_name", transformation_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PATCH", + url=url, + params=query_parameters, + headers=header_parameters, + json=json, + content=content, + **kwargs + ) + + +def build_get_request( + subscription_id: str, + resource_group_name: str, + job_name: str, + transformation_name: str, + **kwargs: Any +) -> HttpRequest: + api_version = "2020-03-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "jobName": _SERIALIZER.url("job_name", job_name, 'str'), + "transformationName": _SERIALIZER.url("transformation_name", transformation_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) class TransformationsOperations(object): """TransformationsOperations operations. @@ -36,7 +177,7 @@ class TransformationsOperations(object): :param deserializer: An object model deserializer. 
""" - models = models + models = _models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -44,17 +185,17 @@ def __init__(self, client, config, serializer, deserializer): self._deserialize = deserializer self._config = config + @distributed_trace def create_or_replace( self, - resource_group_name, # type: str - job_name, # type: str - transformation_name, # type: str - transformation, # type: "models.Transformation" - if_match=None, # type: Optional[str] - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.Transformation" + resource_group_name: str, + job_name: str, + transformation_name: str, + transformation: "_models.Transformation", + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + **kwargs: Any + ) -> "_models.Transformation": """Creates a transformation or replaces an already existing transformation under an existing streaming job. @@ -80,74 +221,67 @@ def create_or_replace( :rtype: ~stream_analytics_management_client.models.Transformation :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Transformation"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Transformation"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create_or_replace.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'transformationName': self._serialize.url("transformation_name", transformation_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(transformation, 'Transformation') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(transformation, 'Transformation') + + request = build_create_or_replace_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + transformation_name=transformation_name, + content_type=content_type, + json=_json, + if_match=if_match, + if_none_match=if_none_match, + 
template_url=self.create_or_replace.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Transformation', pipeline_response) if response.status_code == 201: response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Transformation', pipeline_response) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized + create_or_replace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}'} # type: ignore + + @distributed_trace def update( self, - resource_group_name, # type: str - job_name, # type: str - transformation_name, # type: str - transformation, # type: "models.Transformation" - if_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.Transformation" + resource_group_name: str, + job_name: str, + transformation_name: str, + transformation: "_models.Transformation", + if_match: Optional[str] = None, + **kwargs: Any + ) -> "_models.Transformation": """Updates an existing transformation under an existing streaming job. This can be used to partially update (i.e. update one or two properties) a transformation without affecting the rest of the job or transformation definition.
@@ -173,65 +307,58 @@ def update( :rtype: ~stream_analytics_management_client.models.Transformation :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Transformation"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Transformation"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'transformationName': self._serialize.url("transformation_name", transformation_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(transformation, 'Transformation') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(transformation, 'Transformation') + + request = build_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + transformation_name=transformation_name, + content_type=content_type, + json=_json, + if_match=if_match, + template_url=self.update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Transformation', pipeline_response) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}'} # type: ignore + + @distributed_trace def get( self, - resource_group_name, # 
type: str - job_name, # type: str - transformation_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "models.Transformation" + resource_group_name: str, + job_name: str, + transformation_name: str, + **kwargs: Any + ) -> "_models.Transformation": """Gets details about the specified transformation. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -245,46 +372,40 @@ def get( :rtype: ~stream_analytics_management_client.models.Transformation :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Transformation"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Transformation"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str'), - 'transformationName': self._serialize.url("transformation_name", transformation_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + job_name=job_name, + transformation_name=transformation_name, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('Transformation', pipeline_response) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}'} # type: ignore + diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/setup.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/setup.py index 646534c52381..44fbd571bfe6 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/setup.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/setup.py @@ -67,7 +67,7 @@ install_requires=[ 'msrest>=0.6.21', 'azure-common~=1.1', 
- 'azure-mgmt-core>=1.2.0,<2.0.0', + 'azure-mgmt-core>=1.3.0,<2.0.0', ], python_requires=">=3.7", ) diff --git a/shared_requirements.txt b/shared_requirements.txt index b3c249edb1d0..bf82911604ea 100644 --- a/shared_requirements.txt +++ b/shared_requirements.txt @@ -237,5 +237,7 @@ opentelemetry-sdk<2.0.0,>=1.5.0,!=1.10a0 #override azure-mgmt-redis azure-mgmt-core>=1.3.0,<2.0.0 #override azure-mgmt-recoveryservicesbackup msrest>=0.6.21 #override azure-mgmt-recoveryservicesbackup azure-mgmt-core>=1.3.0,<2.0.0 +#override azure-mgmt-streamanalytics msrest>=0.6.21 +#override azure-mgmt-streamanalytics azure-mgmt-core>=1.3.0,<2.0.0 #override azure-mgmt-consumption msrest>=0.6.21 #override azure-mgmt-consumption azure-mgmt-core>=1.3.0,<2.0.0
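
A minimal usage sketch of the begin_scale long-running operation defined in this diff. The credential class comes from the separate azure-identity package, the resource names are placeholders, and the streaming_units field on ScaleStreamingJobParameters is assumed from the service API rather than guaranteed by the diff itself:

from azure.identity import DefaultAzureCredential
from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient
from azure.mgmt.streamanalytics.models import ScaleStreamingJobParameters

# Placeholder identifiers; substitute real values before running.
credential = DefaultAzureCredential()
client = StreamAnalyticsManagementClient(credential, "<subscription-id>")

# begin_scale returns an LROPoller[None]; result() blocks until the service
# has finished scaling the running job.
poller = client.streaming_jobs.begin_scale(
    resource_group_name="my-resource-group",
    job_name="my-streaming-job",
    scale_job_parameters=ScaleStreamingJobParameters(streaming_units=6),  # assumed field
)
poller.result()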
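The polling and continuation_token keywords documented on begin_start, begin_stop, and begin_scale let a caller detach from a poller and resume it later. A sketch under the same placeholder assumptions as above:

from azure.identity import DefaultAzureCredential
from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient

client = StreamAnalyticsManagementClient(DefaultAzureCredential(), "<subscription-id>")

poller = client.streaming_jobs.begin_stop(
    resource_group_name="my-resource-group",
    job_name="my-streaming-job",
)
# continuation_token() captures the poller's saved state as a string.
token = poller.continuation_token()

# Later, possibly from another process: resume from the saved state instead
# of re-issuing the stop request.
resumed = client.streaming_jobs.begin_stop(
    resource_group_name="my-resource-group",
    job_name="my-streaming-job",
    continuation_token=token,
)
resumed.result()

# polling=False disables client-side polling; the call returns as soon as the
# initial response is accepted.
client.streaming_jobs.begin_stop(
    resource_group_name="my-resource-group",
    job_name="my-streaming-job",
    polling=False,
)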
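The If-Match/If-None-Match headers wired up in build_create_or_replace_request surface as the if_match and if_none_match parameters on TransformationsOperations. A sketch of optimistic-concurrency usage, where the Transformation model fields (query, streaming_units) are assumed from the service API:

from azure.identity import DefaultAzureCredential
from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient
from azure.mgmt.streamanalytics.models import Transformation

client = StreamAnalyticsManagementClient(DefaultAzureCredential(), "<subscription-id>")

# if_none_match="*" makes the PUT fail if the transformation already exists,
# so this call is create-only rather than create-or-replace.
created = client.transformations.create_or_replace(
    resource_group_name="my-resource-group",
    job_name="my-streaming-job",
    transformation_name="Transformation",
    transformation=Transformation(
        streaming_units=1,  # assumed model field
        query="SELECT * INTO [output] FROM [input]",  # assumed model field
    ),
    if_none_match="*",
)

fetched = client.transformations.get(
    resource_group_name="my-resource-group",
    job_name="my-streaming-job",
    transformation_name="Transformation",
)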
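The vendored request builders introduced in this diff (build_list_quotas_request, build_get_request, and the rest) are plain module-level functions returning azure.core.rest.HttpRequest objects. A sketch that inspects one directly; the import reaches into an internal module and is for illustration only:

from azure.mgmt.streamanalytics.operations._subscriptions_operations import (
    build_list_quotas_request,
)

request = build_list_quotas_request(
    location="westus",
    subscription_id="<subscription-id>",
)
# The builder has already applied api-version 2020-03-01 and formatted the
# /subscriptions/{subscriptionId}/providers/.../locations/{location}/quotas URL.
print(request.method, request.url)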