diff --git a/sdk/datafactory/azure-mgmt-datafactory/CHANGELOG.md b/sdk/datafactory/azure-mgmt-datafactory/CHANGELOG.md
index 148d57669138..ca38fd8447d6 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/CHANGELOG.md
+++ b/sdk/datafactory/azure-mgmt-datafactory/CHANGELOG.md
@@ -1,5 +1,37 @@
# Release History
+## 9.1.0 (2024-12-16)
+
+### Features Added
+
+ - Model `AzurePostgreSqlLinkedService` added property `server`
+ - Model `AzurePostgreSqlLinkedService` added property `port`
+ - Model `AzurePostgreSqlLinkedService` added property `username`
+ - Model `AzurePostgreSqlLinkedService` added property `database`
+ - Model `AzurePostgreSqlLinkedService` added property `ssl_mode`
+ - Model `AzurePostgreSqlLinkedService` added property `timeout`
+ - Model `AzurePostgreSqlLinkedService` added property `command_timeout`
+ - Model `AzurePostgreSqlLinkedService` added property `trust_server_certificate`
+ - Model `AzurePostgreSqlLinkedService` added property `read_buffer_size`
+ - Model `AzurePostgreSqlLinkedService` added property `timezone`
+ - Model `AzurePostgreSqlLinkedService` added property `encoding`
+ - Model `MariaDBLinkedService` added property `ssl_mode`
+ - Model `MariaDBLinkedService` added property `use_system_trust_store`
+ - Model `MySqlLinkedService` added property `allow_zero_date_time`
+ - Model `MySqlLinkedService` added property `connection_timeout`
+ - Model `MySqlLinkedService` added property `convert_zero_date_time`
+ - Model `MySqlLinkedService` added property `guid_format`
+ - Model `MySqlLinkedService` added property `ssl_cert`
+ - Model `MySqlLinkedService` added property `ssl_key`
+ - Model `MySqlLinkedService` added property `treat_tiny_as_boolean`
+ - Model `PostgreSqlV2LinkedService` added property `authentication_type`
+ - Model `SalesforceV2Source` added property `page_size`
+ - Model `ServiceNowV2Source` added property `page_size`
+ - Model `SnowflakeV2LinkedService` added property `host`
+ - Added model `IcebergDataset`
+ - Added model `IcebergSink`
+ - Added model `IcebergWriteSettings`
+
## 9.0.0 (2024-08-19)
### Features Added
diff --git a/sdk/datafactory/azure-mgmt-datafactory/_meta.json b/sdk/datafactory/azure-mgmt-datafactory/_meta.json
index 7a98149e5eed..bf776bd01373 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/_meta.json
+++ b/sdk/datafactory/azure-mgmt-datafactory/_meta.json
@@ -1,11 +1,11 @@
{
- "commit": "471fbc404548c3c6611833680dbbeefcc010e201",
+ "commit": "f06cffbda682a8cd225a8b16bc6f000d26d01612",
"repository_url": "https://github.com/Azure/azure-rest-api-specs",
"autorest": "3.10.2",
"use": [
- "@autorest/python@6.17.0",
+ "@autorest/python@6.26.4",
"@autorest/modelerfour@4.27.0"
],
- "autorest_command": "autorest specification/datafactory/resource-manager/readme.md --generate-sample=True --generate-test=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/home/vsts/work/1/azure-sdk-for-python/sdk --use=@autorest/python@6.17.0 --use=@autorest/modelerfour@4.27.0 --version=3.10.2 --version-tolerant=False",
+ "autorest_command": "autorest specification/datafactory/resource-manager/readme.md --generate-sample=True --generate-test=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/mnt/vss/_work/1/azure-sdk-for-python/sdk --use=@autorest/python@6.26.4 --use=@autorest/modelerfour@4.27.0 --version=3.10.2 --version-tolerant=False",
"readme": "specification/datafactory/resource-manager/readme.md"
}
\ No newline at end of file
diff --git a/sdk/datafactory/azure-mgmt-datafactory/assets.json b/sdk/datafactory/azure-mgmt-datafactory/assets.json
deleted file mode 100644
index cc8a6a706d1b..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/assets.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "AssetsRepo": "Azure/azure-sdk-assets",
- "AssetsRepoPrefixPath": "python",
- "TagPrefix": "python/datafactory/azure-mgmt-datafactory",
- "Tag": "python/datafactory/azure-mgmt-datafactory_bd262e788d"
-}
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/__init__.py
index e3b2bfa87ed4..3306f38dc916 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/__init__.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/__init__.py
@@ -5,15 +5,21 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._data_factory_management_client import DataFactoryManagementClient
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._data_factory_management_client import DataFactoryManagementClient # type: ignore
from ._version import VERSION
__version__ = VERSION
try:
from ._patch import __all__ as _patch_all
- from ._patch import * # pylint: disable=unused-wildcard-import
+ from ._patch import *
except ImportError:
_patch_all = []
from ._patch import patch_sdk as _patch_sdk
@@ -21,6 +27,6 @@
__all__ = [
"DataFactoryManagementClient",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_configuration.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_configuration.py
index ed3c1a32555f..d55e4896f4d0 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_configuration.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_configuration.py
@@ -14,11 +14,10 @@
from ._version import VERSION
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
-class DataFactoryManagementClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long
+class DataFactoryManagementClientConfiguration: # pylint: disable=too-many-instance-attributes
"""Configuration for DataFactoryManagementClient.
Note that all parameters used to create this instance are saved as instance
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py
index 127c83a86a1f..b8f83bfb18b9 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py
@@ -45,11 +45,10 @@
)
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
-class DataFactoryManagementClient: # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes
+class DataFactoryManagementClient: # pylint: disable=too-many-instance-attributes
"""The Azure Data Factory V2 management API provides a RESTful set of web services that interact
with Azure Data Factory V2 services.
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_serialization.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_serialization.py
index 8139854b97bb..ce17d1798ce7 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_serialization.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_serialization.py
@@ -1,3 +1,4 @@
+# pylint: disable=too-many-lines
# --------------------------------------------------------------------------
#
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -24,7 +25,6 @@
#
# --------------------------------------------------------------------------
-# pylint: skip-file
# pyright: reportUnnecessaryTypeIgnoreComment=false
from base64 import b64decode, b64encode
@@ -52,7 +52,6 @@
MutableMapping,
Type,
List,
- Mapping,
)
try:
@@ -91,6 +90,8 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type:
:param data: Input, could be bytes or stream (will be decoded with UTF8) or text
:type data: str or bytes or IO
:param str content_type: The content type.
+ :return: The deserialized data.
+ :rtype: object
"""
if hasattr(data, "read"):
# Assume a stream
@@ -112,7 +113,7 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type:
try:
return json.loads(data_as_str)
except ValueError as err:
- raise DeserializationError("JSON is invalid: {}".format(err), err)
+ raise DeserializationError("JSON is invalid: {}".format(err), err) from err
elif "xml" in (content_type or []):
try:
@@ -155,6 +156,11 @@ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]],
Use bytes and headers to NOT use any requests/aiohttp or whatever
specific implementation.
Headers will tested for "content-type"
+
+ :param bytes body_bytes: The body of the response.
+ :param dict headers: The headers of the response.
+ :returns: The deserialized data.
+ :rtype: object
"""
# Try to use content-type from headers if available
content_type = None
@@ -184,15 +190,30 @@ class UTC(datetime.tzinfo):
"""Time Zone info for handling UTC"""
def utcoffset(self, dt):
- """UTF offset for UTC is 0."""
+        """UTC offset for UTC is 0.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The offset
+ :rtype: datetime.timedelta
+ """
return datetime.timedelta(0)
def tzname(self, dt):
- """Timestamp representation."""
+ """Timestamp representation.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The timestamp representation
+ :rtype: str
+ """
return "Z"
def dst(self, dt):
- """No daylight saving for UTC."""
+ """No daylight saving for UTC.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The daylight saving time
+ :rtype: datetime.timedelta
+ """
return datetime.timedelta(hours=1)
@@ -206,7 +227,7 @@ class _FixedOffset(datetime.tzinfo): # type: ignore
:param datetime.timedelta offset: offset in timedelta format
"""
- def __init__(self, offset):
+ def __init__(self, offset) -> None:
self.__offset = offset
def utcoffset(self, dt):
@@ -235,24 +256,26 @@ def __getinitargs__(self):
_FLATTEN = re.compile(r"(? None:
self.additional_properties: Optional[Dict[str, Any]] = {}
- for k in kwargs:
+ for k in kwargs: # pylint: disable=consider-using-dict-items
if k not in self._attribute_map:
_LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__)
elif k in self._validation and self._validation[k].get("readonly", False):
@@ -300,13 +330,23 @@ def __init__(self, **kwargs: Any) -> None:
setattr(self, k, kwargs[k])
def __eq__(self, other: Any) -> bool:
- """Compare objects by comparing all attributes."""
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are equal
+ :rtype: bool
+ """
if isinstance(other, self.__class__):
return self.__dict__ == other.__dict__
return False
def __ne__(self, other: Any) -> bool:
- """Compare objects by comparing all attributes."""
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are not equal
+ :rtype: bool
+ """
return not self.__eq__(other)
def __str__(self) -> str:
@@ -326,7 +366,11 @@ def is_xml_model(cls) -> bool:
@classmethod
def _create_xml_node(cls):
- """Create XML node."""
+ """Create XML node.
+
+ :returns: The XML node
+ :rtype: xml.etree.ElementTree.Element
+ """
try:
xml_map = cls._xml_map # type: ignore
except AttributeError:
@@ -346,7 +390,9 @@ def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON:
:rtype: dict
"""
serializer = Serializer(self._infer_class_models())
- return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs) # type: ignore
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, keep_readonly=keep_readonly, **kwargs
+ )
def as_dict(
self,
@@ -380,12 +426,15 @@ def my_key_transformer(key, attr_desc, value):
If you want XML serialization, you can pass the kwargs is_xml=True.
+ :param bool keep_readonly: If you want to serialize the readonly attributes
:param function key_transformer: A key transformer function.
:returns: A dict JSON compatible object
:rtype: dict
"""
serializer = Serializer(self._infer_class_models())
- return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs) # type: ignore
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs
+ )
@classmethod
def _infer_class_models(cls):
@@ -395,7 +444,7 @@ def _infer_class_models(cls):
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
if cls.__name__ not in client_models:
raise ValueError("Not Autorest generated code")
- except Exception:
+ except Exception: # pylint: disable=broad-exception-caught
# Assume it's not Autorest generated (tests?). Add ourselves as dependencies.
client_models = {cls.__name__: cls}
return client_models
@@ -408,6 +457,7 @@ def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = N
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong
+ :rtype: ModelType
"""
deserializer = Deserializer(cls._infer_class_models())
return deserializer(cls.__name__, data, content_type=content_type) # type: ignore
@@ -426,9 +476,11 @@ def from_dict(
and last_rest_key_case_insensitive_extractor)
:param dict data: A dict using RestAPI structure
+ :param function key_extractors: A key extractor function.
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong
+ :rtype: ModelType
"""
deserializer = Deserializer(cls._infer_class_models())
deserializer.key_extractors = ( # type: ignore
@@ -448,21 +500,25 @@ def _flatten_subtype(cls, key, objects):
return {}
result = dict(cls._subtype_map[key])
for valuetype in cls._subtype_map[key].values():
- result.update(objects[valuetype]._flatten_subtype(key, objects))
+ result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access
return result
@classmethod
def _classify(cls, response, objects):
"""Check the class _subtype_map for any child classes.
We want to ignore any inherited _subtype_maps.
- Remove the polymorphic key from the initial data.
+
+ :param dict response: The initial data
+ :param dict objects: The class objects
+ :returns: The class to be used
+ :rtype: class
"""
for subtype_key in cls.__dict__.get("_subtype_map", {}).keys():
subtype_value = None
if not isinstance(response, ET.Element):
rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1]
- subtype_value = response.pop(rest_api_response_key, None) or response.pop(subtype_key, None)
+ subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None)
else:
subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response)
if subtype_value:
@@ -501,11 +557,13 @@ def _decode_attribute_map_key(key):
inside the received data.
:param str key: A key string from the generated code
+ :returns: The decoded key
+ :rtype: str
"""
return key.replace("\\.", ".")
-class Serializer(object):
+class Serializer(object): # pylint: disable=too-many-public-methods
"""Request object model serializer."""
basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
@@ -540,7 +598,7 @@ class Serializer(object):
"multiple": lambda x, y: x % y != 0,
}
- def __init__(self, classes: Optional[Mapping[str, type]] = None):
+ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
self.serialize_type = {
"iso-8601": Serializer.serialize_iso,
"rfc-1123": Serializer.serialize_rfc,
@@ -560,13 +618,16 @@ def __init__(self, classes: Optional[Mapping[str, type]] = None):
self.key_transformer = full_restapi_key_transformer
self.client_side_validation = True
- def _serialize(self, target_obj, data_type=None, **kwargs):
+ def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
+ self, target_obj, data_type=None, **kwargs
+ ):
"""Serialize data into a string according to type.
- :param target_obj: The data to be serialized.
+ :param object target_obj: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str, dict
:raises: SerializationError if serialization fails.
+ :returns: The serialized data.
"""
key_transformer = kwargs.get("key_transformer", self.key_transformer)
keep_readonly = kwargs.get("keep_readonly", False)
@@ -592,12 +653,14 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
serialized = {}
if is_xml_model_serialization:
- serialized = target_obj._create_xml_node()
+ serialized = target_obj._create_xml_node() # pylint: disable=protected-access
try:
- attributes = target_obj._attribute_map
+ attributes = target_obj._attribute_map # pylint: disable=protected-access
for attr, attr_desc in attributes.items():
attr_name = attr
- if not keep_readonly and target_obj._validation.get(attr_name, {}).get("readonly", False):
+ if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access
+ attr_name, {}
+ ).get("readonly", False):
continue
if attr_name == "additional_properties" and attr_desc["key"] == "":
@@ -633,7 +696,8 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
if isinstance(new_attr, list):
serialized.extend(new_attr) # type: ignore
elif isinstance(new_attr, ET.Element):
- # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces.
+ # If the down XML has no XML/Name,
+ # we MUST replace the tag with the local tag. But keeping the namespaces.
if "name" not in getattr(orig_attr, "_xml_map", {}):
splitted_tag = new_attr.tag.split("}")
if len(splitted_tag) == 2: # Namespace
@@ -664,17 +728,17 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
except (AttributeError, KeyError, TypeError) as err:
msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj))
raise SerializationError(msg) from err
- else:
- return serialized
+ return serialized
def body(self, data, data_type, **kwargs):
"""Serialize data intended for a request body.
- :param data: The data to be serialized.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: dict
:raises: SerializationError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized request body
"""
# Just in case this is a dict
@@ -703,7 +767,7 @@ def body(self, data, data_type, **kwargs):
attribute_key_case_insensitive_extractor,
last_rest_key_case_insensitive_extractor,
]
- data = deserializer._deserialize(data_type, data)
+ data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access
except DeserializationError as err:
raise SerializationError("Unable to build a model: " + str(err)) from err
@@ -712,9 +776,11 @@ def body(self, data, data_type, **kwargs):
def url(self, name, data, data_type, **kwargs):
"""Serialize data intended for a URL path.
- :param data: The data to be serialized.
+ :param str name: The name of the URL path parameter.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str
+ :returns: The serialized URL path
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
"""
@@ -728,21 +794,20 @@ def url(self, name, data, data_type, **kwargs):
output = output.replace("{", quote("{")).replace("}", quote("}"))
else:
output = quote(str(output), safe="")
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return output
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return output
def query(self, name, data, data_type, **kwargs):
"""Serialize data intended for a URL query.
- :param data: The data to be serialized.
+ :param str name: The name of the query parameter.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
- :keyword bool skip_quote: Whether to skip quote the serialized result.
- Defaults to False.
:rtype: str, list
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized query parameter
"""
try:
# Treat the list aside, since we don't want to encode the div separator
@@ -759,19 +824,20 @@ def query(self, name, data, data_type, **kwargs):
output = str(output)
else:
output = quote(str(output), safe="")
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return str(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
def header(self, name, data, data_type, **kwargs):
"""Serialize data intended for a request header.
- :param data: The data to be serialized.
+ :param str name: The name of the header.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized header
"""
try:
if data_type in ["[str]"]:
@@ -780,21 +846,20 @@ def header(self, name, data, data_type, **kwargs):
output = self.serialize_data(data, data_type, **kwargs)
if data_type == "bool":
output = json.dumps(output)
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return str(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
def serialize_data(self, data, data_type, **kwargs):
"""Serialize generic data according to supplied data type.
- :param data: The data to be serialized.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
- :param bool required: Whether it's essential that the data not be
- empty or None
:raises: AttributeError if required data is None.
:raises: ValueError if data is None
:raises: SerializationError if serialization fails.
+ :returns: The serialized data.
+ :rtype: str, int, float, bool, dict, list
"""
if data is None:
raise ValueError("No value for given attribute")
@@ -805,7 +870,7 @@ def serialize_data(self, data, data_type, **kwargs):
if data_type in self.basic_types.values():
return self.serialize_basic(data, data_type, **kwargs)
- elif data_type in self.serialize_type:
+ if data_type in self.serialize_type:
return self.serialize_type[data_type](data, **kwargs)
# If dependencies is empty, try with current data class
@@ -821,11 +886,10 @@ def serialize_data(self, data, data_type, **kwargs):
except (ValueError, TypeError) as err:
msg = "Unable to serialize value: {!r} as type: {!r}."
raise SerializationError(msg.format(data, data_type)) from err
- else:
- return self._serialize(data, **kwargs)
+ return self._serialize(data, **kwargs)
@classmethod
- def _get_custom_serializers(cls, data_type, **kwargs):
+ def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements
custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type)
if custom_serializer:
return custom_serializer
@@ -841,23 +905,26 @@ def serialize_basic(cls, data, data_type, **kwargs):
- basic_types_serializers dict[str, callable] : If set, use the callable as serializer
- is_xml bool : If set, use xml_basic_types_serializers
- :param data: Object to be serialized.
+        :param object data: Object to be serialized.
:param str data_type: Type of object in the iterable.
+ :rtype: str, int, float, bool
+ :return: serialized object
"""
custom_serializer = cls._get_custom_serializers(data_type, **kwargs)
if custom_serializer:
return custom_serializer(data)
if data_type == "str":
return cls.serialize_unicode(data)
- return eval(data_type)(data) # nosec
+ return eval(data_type)(data) # nosec # pylint: disable=eval-used
@classmethod
def serialize_unicode(cls, data):
"""Special handling for serializing unicode strings in Py2.
Encode to UTF-8 if unicode, otherwise handle as a str.
- :param data: Object to be serialized.
+ :param str data: Object to be serialized.
:rtype: str
+ :return: serialized object
"""
try: # If I received an enum, return its value
return data.value
@@ -871,8 +938,7 @@ def serialize_unicode(cls, data):
return data
except NameError:
return str(data)
- else:
- return str(data)
+ return str(data)
def serialize_iter(self, data, iter_type, div=None, **kwargs):
"""Serialize iterable.
@@ -882,15 +948,13 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs):
serialization_ctxt['type'] should be same as data_type.
- is_xml bool : If set, serialize as XML
- :param list attr: Object to be serialized.
+ :param list data: Object to be serialized.
:param str iter_type: Type of object in the iterable.
- :param bool required: Whether the objects in the iterable must
- not be None or empty.
:param str div: If set, this str will be used to combine the elements
in the iterable into a combined string. Default is 'None'.
- :keyword bool do_quote: Whether to quote the serialized result of each iterable element.
Defaults to False.
:rtype: list, str
+ :return: serialized iterable
"""
if isinstance(data, str):
raise SerializationError("Refuse str type as a valid iter type.")
@@ -945,9 +1009,8 @@ def serialize_dict(self, attr, dict_type, **kwargs):
:param dict attr: Object to be serialized.
:param str dict_type: Type of object in the dictionary.
- :param bool required: Whether the objects in the dictionary must
- not be None or empty.
:rtype: dict
+ :return: serialized dictionary
"""
serialization_ctxt = kwargs.get("serialization_ctxt", {})
serialized = {}
@@ -971,7 +1034,7 @@ def serialize_dict(self, attr, dict_type, **kwargs):
return serialized
- def serialize_object(self, attr, **kwargs):
+ def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
"""Serialize a generic object.
This will be handled as a dictionary. If object passed in is not
a basic type (str, int, float, dict, list) it will simply be
@@ -979,6 +1042,7 @@ def serialize_object(self, attr, **kwargs):
:param dict attr: Object to be serialized.
:rtype: dict or str
+ :return: serialized object
"""
if attr is None:
return None
@@ -1003,7 +1067,7 @@ def serialize_object(self, attr, **kwargs):
return self.serialize_decimal(attr)
# If it's a model or I know this dependency, serialize as a Model
- elif obj_type in self.dependencies.values() or isinstance(attr, Model):
+ if obj_type in self.dependencies.values() or isinstance(attr, Model):
return self._serialize(attr)
if obj_type == dict:
@@ -1034,56 +1098,61 @@ def serialize_enum(attr, enum_obj=None):
try:
enum_obj(result) # type: ignore
return result
- except ValueError:
+ except ValueError as exc:
for enum_value in enum_obj: # type: ignore
if enum_value.value.lower() == str(attr).lower():
return enum_value.value
error = "{!r} is not valid value for enum {!r}"
- raise SerializationError(error.format(attr, enum_obj))
+ raise SerializationError(error.format(attr, enum_obj)) from exc
@staticmethod
- def serialize_bytearray(attr, **kwargs):
+ def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize bytearray into base-64 string.
- :param attr: Object to be serialized.
+ :param str attr: Object to be serialized.
:rtype: str
+ :return: serialized base64
"""
return b64encode(attr).decode()
@staticmethod
- def serialize_base64(attr, **kwargs):
+ def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize str into base-64 string.
- :param attr: Object to be serialized.
+ :param str attr: Object to be serialized.
:rtype: str
+ :return: serialized base64
"""
encoded = b64encode(attr).decode("ascii")
return encoded.strip("=").replace("+", "-").replace("/", "_")
@staticmethod
- def serialize_decimal(attr, **kwargs):
+ def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Decimal object to float.
- :param attr: Object to be serialized.
+ :param decimal attr: Object to be serialized.
:rtype: float
+ :return: serialized decimal
"""
return float(attr)
@staticmethod
- def serialize_long(attr, **kwargs):
+ def serialize_long(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize long (Py2) or int (Py3).
- :param attr: Object to be serialized.
+ :param int attr: Object to be serialized.
:rtype: int/long
+ :return: serialized long
"""
return _long_type(attr)
@staticmethod
- def serialize_date(attr, **kwargs):
+ def serialize_date(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Date object into ISO-8601 formatted string.
:param Date attr: Object to be serialized.
:rtype: str
+ :return: serialized date
"""
if isinstance(attr, str):
attr = isodate.parse_date(attr)
@@ -1091,11 +1160,12 @@ def serialize_date(attr, **kwargs):
return t
@staticmethod
- def serialize_time(attr, **kwargs):
+ def serialize_time(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Time object into ISO-8601 formatted string.
:param datetime.time attr: Object to be serialized.
:rtype: str
+ :return: serialized time
"""
if isinstance(attr, str):
attr = isodate.parse_time(attr)
@@ -1105,30 +1175,32 @@ def serialize_time(attr, **kwargs):
return t
@staticmethod
- def serialize_duration(attr, **kwargs):
+ def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize TimeDelta object into ISO-8601 formatted string.
:param TimeDelta attr: Object to be serialized.
:rtype: str
+ :return: serialized duration
"""
if isinstance(attr, str):
attr = isodate.parse_duration(attr)
return isodate.duration_isoformat(attr)
@staticmethod
- def serialize_rfc(attr, **kwargs):
+ def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into RFC-1123 formatted string.
:param Datetime attr: Object to be serialized.
:rtype: str
:raises: TypeError if format invalid.
+ :return: serialized rfc
"""
try:
if not attr.tzinfo:
_LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
utc = attr.utctimetuple()
- except AttributeError:
- raise TypeError("RFC1123 object must be valid Datetime object.")
+ except AttributeError as exc:
+ raise TypeError("RFC1123 object must be valid Datetime object.") from exc
return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format(
Serializer.days[utc.tm_wday],
@@ -1141,12 +1213,13 @@ def serialize_rfc(attr, **kwargs):
)
@staticmethod
- def serialize_iso(attr, **kwargs):
+ def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into ISO-8601 formatted string.
:param Datetime attr: Object to be serialized.
:rtype: str
:raises: SerializationError if format invalid.
+ :return: serialized iso
"""
if isinstance(attr, str):
attr = isodate.parse_datetime(attr)
@@ -1172,13 +1245,14 @@ def serialize_iso(attr, **kwargs):
raise TypeError(msg) from err
@staticmethod
- def serialize_unix(attr, **kwargs):
+ def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into IntTime format.
This is represented as seconds.
:param Datetime attr: Object to be serialized.
:rtype: int
:raises: SerializationError if format invalid
+        :return: serialized unix
"""
if isinstance(attr, int):
return attr
@@ -1186,11 +1260,11 @@ def serialize_unix(attr, **kwargs):
if not attr.tzinfo:
_LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
return int(calendar.timegm(attr.utctimetuple()))
- except AttributeError:
- raise TypeError("Unix time object must be valid Datetime object.")
+ except AttributeError as exc:
+ raise TypeError("Unix time object must be valid Datetime object.") from exc
-def rest_key_extractor(attr, attr_desc, data):
+def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
key = attr_desc["key"]
working_data = data
@@ -1211,7 +1285,9 @@ def rest_key_extractor(attr, attr_desc, data):
return working_data.get(key)
-def rest_key_case_insensitive_extractor(attr, attr_desc, data):
+def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements
+ attr, attr_desc, data
+):
key = attr_desc["key"]
working_data = data
@@ -1232,17 +1308,29 @@ def rest_key_case_insensitive_extractor(attr, attr_desc, data):
return attribute_key_case_insensitive_extractor(key, None, working_data)
-def last_rest_key_extractor(attr, attr_desc, data):
- """Extract the attribute in "data" based on the last part of the JSON path key."""
+def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
+ """Extract the attribute in "data" based on the last part of the JSON path key.
+
+ :param str attr: The attribute to extract
+ :param dict attr_desc: The attribute description
+ :param dict data: The data to extract from
+ :rtype: object
+ :returns: The extracted attribute
+ """
key = attr_desc["key"]
dict_keys = _FLATTEN.split(key)
return attribute_key_extractor(dict_keys[-1], None, data)
-def last_rest_key_case_insensitive_extractor(attr, attr_desc, data):
+def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
"""Extract the attribute in "data" based on the last part of the JSON path key.
This is the case insensitive version of "last_rest_key_extractor"
+ :param str attr: The attribute to extract
+ :param dict attr_desc: The attribute description
+ :param dict data: The data to extract from
+ :rtype: object
+ :returns: The extracted attribute
"""
key = attr_desc["key"]
dict_keys = _FLATTEN.split(key)
@@ -1279,7 +1367,7 @@ def _extract_name_from_internal_type(internal_type):
return xml_name
-def xml_key_extractor(attr, attr_desc, data):
+def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements
if isinstance(data, dict):
return None
@@ -1331,22 +1419,21 @@ def xml_key_extractor(attr, attr_desc, data):
if is_iter_type:
if is_wrapped:
return None # is_wrapped no node, we want None
- else:
- return [] # not wrapped, assume empty list
+ return [] # not wrapped, assume empty list
return None # Assume it's not there, maybe an optional node.
# If is_iter_type and not wrapped, return all found children
if is_iter_type:
if not is_wrapped:
return children
- else: # Iter and wrapped, should have found one node only (the wrap one)
- if len(children) != 1:
- raise DeserializationError(
- "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format(
- xml_name
- )
+ # Iter and wrapped, should have found one node only (the wrap one)
+ if len(children) != 1:
+ raise DeserializationError(
+ "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( # pylint: disable=line-too-long
+ xml_name
)
- return list(children[0]) # Might be empty list and that's ok.
+ )
+ return list(children[0]) # Might be empty list and that's ok.
# Here it's not a itertype, we should have found one element only or empty
if len(children) > 1:
@@ -1363,9 +1450,9 @@ class Deserializer(object):
basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
- valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
+ valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
- def __init__(self, classes: Optional[Mapping[str, type]] = None):
+ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
self.deserialize_type = {
"iso-8601": Deserializer.deserialize_iso,
"rfc-1123": Deserializer.deserialize_rfc,
@@ -1403,11 +1490,12 @@ def __call__(self, target_obj, response_data, content_type=None):
:param str content_type: Swagger "produces" if available.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
data = self._unpack_content(response_data, content_type)
return self._deserialize(target_obj, data)
- def _deserialize(self, target_obj, data):
+ def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements
"""Call the deserializer on a model.
Data needs to be already deserialized as JSON or XML ElementTree
@@ -1416,12 +1504,13 @@ def _deserialize(self, target_obj, data):
:param object data: Object to deserialize.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
# This is already a model, go recursive just in case
if hasattr(data, "_attribute_map"):
constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")]
try:
- for attr, mapconfig in data._attribute_map.items():
+ for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access
if attr in constants:
continue
value = getattr(data, attr)
@@ -1440,13 +1529,13 @@ def _deserialize(self, target_obj, data):
if isinstance(response, str):
return self.deserialize_data(data, response)
- elif isinstance(response, type) and issubclass(response, Enum):
+ if isinstance(response, type) and issubclass(response, Enum):
return self.deserialize_enum(data, response)
if data is None or data is CoreNull:
return data
try:
- attributes = response._attribute_map # type: ignore
+ attributes = response._attribute_map # type: ignore # pylint: disable=protected-access
d_attrs = {}
for attr, attr_desc in attributes.items():
# Check empty string. If it's not empty, someone has a real "additionalProperties"...
@@ -1476,9 +1565,8 @@ def _deserialize(self, target_obj, data):
except (AttributeError, TypeError, KeyError) as err:
msg = "Unable to deserialize to object: " + class_name # type: ignore
raise DeserializationError(msg) from err
- else:
- additional_properties = self._build_additional_properties(attributes, data)
- return self._instantiate_model(response, d_attrs, additional_properties)
+ additional_properties = self._build_additional_properties(attributes, data)
+ return self._instantiate_model(response, d_attrs, additional_properties)
def _build_additional_properties(self, attribute_map, data):
if not self.additional_properties_detection:
@@ -1505,6 +1593,8 @@ def _classify_target(self, target, data):
:param str target: The target object type to deserialize to.
:param str/dict data: The response data to deserialize.
+ :return: The classified target object and its class name.
+ :rtype: tuple
"""
if target is None:
return None, None
@@ -1516,7 +1606,7 @@ def _classify_target(self, target, data):
return target, target
try:
- target = target._classify(data, self.dependencies) # type: ignore
+ target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access
except AttributeError:
pass # Target is not a Model, no classify
return target, target.__class__.__name__ # type: ignore
@@ -1531,10 +1621,12 @@ def failsafe_deserialize(self, target_obj, data, content_type=None):
:param str target_obj: The target object type to deserialize to.
:param str/dict data: The response data to deserialize.
:param str content_type: Swagger "produces" if available.
+ :return: Deserialized object.
+ :rtype: object
"""
try:
return self(target_obj, data, content_type=content_type)
- except:
+ except: # pylint: disable=bare-except
_LOGGER.debug(
"Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
)
@@ -1552,10 +1644,12 @@ def _unpack_content(raw_data, content_type=None):
If raw_data is something else, bypass all logic and return it directly.
- :param raw_data: Data to be processed.
- :param content_type: How to parse if raw_data is a string/bytes.
+ :param obj raw_data: Data to be processed.
+ :param str content_type: How to parse if raw_data is a string/bytes.
:raises JSONDecodeError: If JSON is requested and parsing is impossible.
:raises UnicodeDecodeError: If bytes is not UTF8
+ :rtype: object
+ :return: Unpacked content.
"""
# Assume this is enough to detect a Pipeline Response without importing it
context = getattr(raw_data, "context", {})
@@ -1579,14 +1673,21 @@ def _unpack_content(raw_data, content_type=None):
def _instantiate_model(self, response, attrs, additional_properties=None):
"""Instantiate a response model passing in deserialized args.
- :param response: The response model class.
- :param d_attrs: The deserialized response attributes.
+ :param Response response: The response model class.
+ :param dict attrs: The deserialized response attributes.
+ :param dict additional_properties: Additional properties to be set.
+ :rtype: Response
+ :return: The instantiated response model.
"""
if callable(response):
subtype = getattr(response, "_subtype_map", {})
try:
- readonly = [k for k, v in response._validation.items() if v.get("readonly")]
- const = [k for k, v in response._validation.items() if v.get("constant")]
+ readonly = [
+ k for k, v in response._validation.items() if v.get("readonly") # pylint: disable=protected-access
+ ]
+ const = [
+ k for k, v in response._validation.items() if v.get("constant") # pylint: disable=protected-access
+ ]
kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const}
response_obj = response(**kwargs)
for attr in readonly:
@@ -1596,7 +1697,7 @@ def _instantiate_model(self, response, attrs, additional_properties=None):
return response_obj
except TypeError as err:
msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore
- raise DeserializationError(msg + str(err))
+ raise DeserializationError(msg + str(err)) from err
else:
try:
for attr, value in attrs.items():
@@ -1605,15 +1706,16 @@ def _instantiate_model(self, response, attrs, additional_properties=None):
except Exception as exp:
msg = "Unable to populate response model. "
msg += "Type: {}, Error: {}".format(type(response), exp)
- raise DeserializationError(msg)
+ raise DeserializationError(msg) from exp
- def deserialize_data(self, data, data_type):
+ def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements
"""Process data for deserialization according to data type.
:param str data: The response string to be deserialized.
:param str data_type: The type to deserialize to.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
if data is None:
return data
@@ -1627,7 +1729,11 @@ def deserialize_data(self, data, data_type):
if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())):
return data
- is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"]
+ is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment
+ "object",
+ "[]",
+ r"{}",
+ ]
if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text:
return None
data_val = self.deserialize_type[data_type](data)
@@ -1647,14 +1753,14 @@ def deserialize_data(self, data, data_type):
msg = "Unable to deserialize response data."
msg += " Data: {}, {}".format(data, data_type)
raise DeserializationError(msg) from err
- else:
- return self._deserialize(obj_type, data)
+ return self._deserialize(obj_type, data)
def deserialize_iter(self, attr, iter_type):
"""Deserialize an iterable.
:param list attr: Iterable to be deserialized.
:param str iter_type: The type of object in the iterable.
+ :return: Deserialized iterable.
:rtype: list
"""
if attr is None:
@@ -1671,6 +1777,7 @@ def deserialize_dict(self, attr, dict_type):
:param dict/list attr: Dictionary to be deserialized. Also accepts
a list of key, value pairs.
:param str dict_type: The object type of the items in the dictionary.
+ :return: Deserialized dictionary.
:rtype: dict
"""
if isinstance(attr, list):
@@ -1681,11 +1788,12 @@ def deserialize_dict(self, attr, dict_type):
attr = {el.tag: el.text for el in attr}
return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()}
- def deserialize_object(self, attr, **kwargs):
+ def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
"""Deserialize a generic object.
This will be handled as a dictionary.
:param dict attr: Dictionary to be deserialized.
+ :return: Deserialized object.
:rtype: dict
:raises: TypeError if non-builtin datatype encountered.
"""
@@ -1720,11 +1828,10 @@ def deserialize_object(self, attr, **kwargs):
pass
return deserialized
- else:
- error = "Cannot deserialize generic object with type: "
- raise TypeError(error + str(obj_type))
+ error = "Cannot deserialize generic object with type: "
+ raise TypeError(error + str(obj_type))
- def deserialize_basic(self, attr, data_type):
+ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements
"""Deserialize basic builtin data type from string.
Will attempt to convert to str, int, float and bool.
This function will also accept '1', '0', 'true' and 'false' as
@@ -1732,6 +1839,7 @@ def deserialize_basic(self, attr, data_type):
:param str attr: response string to be deserialized.
:param str data_type: deserialization data type.
+ :return: Deserialized basic type.
:rtype: str, int, float or bool
:raises: TypeError if string format is not valid.
"""
@@ -1743,24 +1851,23 @@ def deserialize_basic(self, attr, data_type):
if data_type == "str":
# None or '', node is empty string.
return ""
- else:
- # None or '', node with a strong type is None.
- # Don't try to model "empty bool" or "empty int"
- return None
+ # None or '', node with a strong type is None.
+ # Don't try to model "empty bool" or "empty int"
+ return None
if data_type == "bool":
if attr in [True, False, 1, 0]:
return bool(attr)
- elif isinstance(attr, str):
+ if isinstance(attr, str):
if attr.lower() in ["true", "1"]:
return True
- elif attr.lower() in ["false", "0"]:
+ if attr.lower() in ["false", "0"]:
return False
raise TypeError("Invalid boolean value: {}".format(attr))
if data_type == "str":
return self.deserialize_unicode(attr)
- return eval(data_type)(attr) # nosec
+ return eval(data_type)(attr) # nosec # pylint: disable=eval-used
@staticmethod
def deserialize_unicode(data):
@@ -1768,6 +1875,7 @@ def deserialize_unicode(data):
as a string.
:param str data: response string to be deserialized.
+ :return: Deserialized string.
:rtype: str or unicode
"""
# We might be here because we have an enum modeled as string,
@@ -1781,8 +1889,7 @@ def deserialize_unicode(data):
return data
except NameError:
return str(data)
- else:
- return str(data)
+ return str(data)
@staticmethod
def deserialize_enum(data, enum_obj):
@@ -1794,6 +1901,7 @@ def deserialize_enum(data, enum_obj):
:param str data: Response string to be deserialized. If this value is
None or invalid it will be returned as-is.
:param Enum enum_obj: Enum object to deserialize to.
+ :return: Deserialized enum object.
:rtype: Enum
"""
if isinstance(data, enum_obj) or data is None:
@@ -1804,9 +1912,9 @@ def deserialize_enum(data, enum_obj):
# Workaround. We might consider remove it in the future.
try:
return list(enum_obj.__members__.values())[data]
- except IndexError:
+ except IndexError as exc:
error = "{!r} is not a valid index for enum {!r}"
- raise DeserializationError(error.format(data, enum_obj))
+ raise DeserializationError(error.format(data, enum_obj)) from exc
try:
return enum_obj(str(data))
except ValueError:
@@ -1822,6 +1930,7 @@ def deserialize_bytearray(attr):
"""Deserialize string into bytearray.
:param str attr: response string to be deserialized.
+ :return: Deserialized bytearray
:rtype: bytearray
:raises: TypeError if string format invalid.
"""
@@ -1834,6 +1943,7 @@ def deserialize_base64(attr):
"""Deserialize base64 encoded string into string.
:param str attr: response string to be deserialized.
+ :return: Deserialized base64 string
:rtype: bytearray
:raises: TypeError if string format invalid.
"""
@@ -1849,8 +1959,9 @@ def deserialize_decimal(attr):
"""Deserialize string into Decimal object.
:param str attr: response string to be deserialized.
- :rtype: Decimal
+ :return: Deserialized decimal
:raises: DeserializationError if string format invalid.
+ :rtype: decimal
"""
if isinstance(attr, ET.Element):
attr = attr.text
@@ -1865,6 +1976,7 @@ def deserialize_long(attr):
"""Deserialize string into long (Py2) or int (Py3).
:param str attr: response string to be deserialized.
+ :return: Deserialized int
:rtype: long or int
:raises: ValueError if string format invalid.
"""
@@ -1877,6 +1989,7 @@ def deserialize_duration(attr):
"""Deserialize ISO-8601 formatted string into TimeDelta object.
:param str attr: response string to be deserialized.
+ :return: Deserialized duration
:rtype: TimeDelta
:raises: DeserializationError if string format invalid.
"""
@@ -1887,14 +2000,14 @@ def deserialize_duration(attr):
except (ValueError, OverflowError, AttributeError) as err:
msg = "Cannot deserialize duration object."
raise DeserializationError(msg) from err
- else:
- return duration
+ return duration
@staticmethod
def deserialize_date(attr):
"""Deserialize ISO-8601 formatted string into Date object.
:param str attr: response string to be deserialized.
+ :return: Deserialized date
:rtype: Date
:raises: DeserializationError if string format invalid.
"""
@@ -1910,6 +2023,7 @@ def deserialize_time(attr):
"""Deserialize ISO-8601 formatted string into time object.
:param str attr: response string to be deserialized.
+ :return: Deserialized time
:rtype: datetime.time
:raises: DeserializationError if string format invalid.
"""
@@ -1924,6 +2038,7 @@ def deserialize_rfc(attr):
"""Deserialize RFC-1123 formatted string into Datetime object.
:param str attr: response string to be deserialized.
+ :return: Deserialized RFC datetime
:rtype: Datetime
:raises: DeserializationError if string format invalid.
"""
@@ -1939,14 +2054,14 @@ def deserialize_rfc(attr):
except ValueError as err:
msg = "Cannot deserialize to rfc datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
@staticmethod
def deserialize_iso(attr):
"""Deserialize ISO-8601 formatted string into Datetime object.
:param str attr: response string to be deserialized.
+ :return: Deserialized ISO datetime
:rtype: Datetime
:raises: DeserializationError if string format invalid.
"""
@@ -1976,8 +2091,7 @@ def deserialize_iso(attr):
except (ValueError, OverflowError, AttributeError) as err:
msg = "Cannot deserialize datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
@staticmethod
def deserialize_unix(attr):
@@ -1985,6 +2099,7 @@ def deserialize_unix(attr):
This is represented as seconds.
:param int attr: Object to be serialized.
+ :return: Deserialized datetime
:rtype: Datetime
:raises: DeserializationError if format invalid
"""
@@ -1996,5 +2111,4 @@ def deserialize_unix(attr):
except ValueError as err:
msg = "Cannot deserialize to unix datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py
index b77ac9246082..f89ed38360ab 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py
@@ -6,4 +6,4 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-VERSION = "9.0.0"
+VERSION = "9.1.0"
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/__init__.py
index 18f7efaa68a7..0fc55d836054 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/__init__.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/__init__.py
@@ -5,12 +5,18 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._data_factory_management_client import DataFactoryManagementClient
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._data_factory_management_client import DataFactoryManagementClient # type: ignore
try:
from ._patch import __all__ as _patch_all
- from ._patch import * # pylint: disable=unused-wildcard-import
+ from ._patch import *
except ImportError:
_patch_all = []
from ._patch import patch_sdk as _patch_sdk
@@ -18,6 +24,6 @@
__all__ = [
"DataFactoryManagementClient",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_configuration.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_configuration.py
index 0eefe72711ac..fc96ba0c3140 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_configuration.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_configuration.py
@@ -14,11 +14,10 @@
from .._version import VERSION
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
-class DataFactoryManagementClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long
+class DataFactoryManagementClientConfiguration: # pylint: disable=too-many-instance-attributes
"""Configuration for DataFactoryManagementClient.
Note that all parameters used to create this instance are saved as instance
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_data_factory_management_client.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_data_factory_management_client.py
index 597d4988e2b8..3bfae1c79ed5 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_data_factory_management_client.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_data_factory_management_client.py
@@ -45,11 +45,10 @@
)
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
-class DataFactoryManagementClient: # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes
+class DataFactoryManagementClient: # pylint: disable=too-many-instance-attributes
"""The Azure Data Factory V2 management API provides a RESTful set of web services that interact
with Azure Data Factory V2 services.
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/__init__.py
index 668131aae0e5..f42af8783eb3 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/__init__.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/__init__.py
@@ -5,33 +5,39 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._operations import Operations
-from ._factories_operations import FactoriesOperations
-from ._exposure_control_operations import ExposureControlOperations
-from ._integration_runtimes_operations import IntegrationRuntimesOperations
-from ._integration_runtime_object_metadata_operations import IntegrationRuntimeObjectMetadataOperations
-from ._integration_runtime_nodes_operations import IntegrationRuntimeNodesOperations
-from ._linked_services_operations import LinkedServicesOperations
-from ._datasets_operations import DatasetsOperations
-from ._pipelines_operations import PipelinesOperations
-from ._pipeline_runs_operations import PipelineRunsOperations
-from ._activity_runs_operations import ActivityRunsOperations
-from ._triggers_operations import TriggersOperations
-from ._trigger_runs_operations import TriggerRunsOperations
-from ._data_flows_operations import DataFlowsOperations
-from ._data_flow_debug_session_operations import DataFlowDebugSessionOperations
-from ._managed_virtual_networks_operations import ManagedVirtualNetworksOperations
-from ._managed_private_endpoints_operations import ManagedPrivateEndpointsOperations
-from ._credential_operations_operations import CredentialOperationsOperations
-from ._private_end_point_connections_operations import PrivateEndPointConnectionsOperations
-from ._private_endpoint_connection_operations import PrivateEndpointConnectionOperations
-from ._private_link_resources_operations import PrivateLinkResourcesOperations
-from ._global_parameters_operations import GlobalParametersOperations
-from ._change_data_capture_operations import ChangeDataCaptureOperations
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._operations import Operations # type: ignore
+from ._factories_operations import FactoriesOperations # type: ignore
+from ._exposure_control_operations import ExposureControlOperations # type: ignore
+from ._integration_runtimes_operations import IntegrationRuntimesOperations # type: ignore
+from ._integration_runtime_object_metadata_operations import IntegrationRuntimeObjectMetadataOperations # type: ignore
+from ._integration_runtime_nodes_operations import IntegrationRuntimeNodesOperations # type: ignore
+from ._linked_services_operations import LinkedServicesOperations # type: ignore
+from ._datasets_operations import DatasetsOperations # type: ignore
+from ._pipelines_operations import PipelinesOperations # type: ignore
+from ._pipeline_runs_operations import PipelineRunsOperations # type: ignore
+from ._activity_runs_operations import ActivityRunsOperations # type: ignore
+from ._triggers_operations import TriggersOperations # type: ignore
+from ._trigger_runs_operations import TriggerRunsOperations # type: ignore
+from ._data_flows_operations import DataFlowsOperations # type: ignore
+from ._data_flow_debug_session_operations import DataFlowDebugSessionOperations # type: ignore
+from ._managed_virtual_networks_operations import ManagedVirtualNetworksOperations # type: ignore
+from ._managed_private_endpoints_operations import ManagedPrivateEndpointsOperations # type: ignore
+from ._credential_operations_operations import CredentialOperationsOperations # type: ignore
+from ._private_end_point_connections_operations import PrivateEndPointConnectionsOperations # type: ignore
+from ._private_endpoint_connection_operations import PrivateEndpointConnectionOperations # type: ignore
+from ._private_link_resources_operations import PrivateLinkResourcesOperations # type: ignore
+from ._global_parameters_operations import GlobalParametersOperations # type: ignore
+from ._change_data_capture_operations import ChangeDataCaptureOperations # type: ignore
from ._patch import __all__ as _patch_all
-from ._patch import * # pylint: disable=unused-wildcard-import
+from ._patch import *
from ._patch import patch_sdk as _patch_sdk
__all__ = [
@@ -59,5 +65,5 @@
"GlobalParametersOperations",
"ChangeDataCaptureOperations",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_activity_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_activity_runs_operations.py
index ccfa35e15c73..5cf6c0a80101 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_activity_runs_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_activity_runs_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload
+from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -30,7 +29,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -136,7 +135,7 @@ async def query_by_pipeline_run(
:rtype: ~azure.mgmt.datafactory.models.ActivityRunsQueryResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_change_data_capture_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_change_data_capture_operations.py
index 8b8fabbfbe20..3d7198fef8af 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_change_data_capture_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_change_data_capture_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload
+from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
@@ -41,7 +40,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -87,7 +86,7 @@ def list_by_factory(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ChangeDataCaptureListResponse] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -247,7 +246,7 @@ async def create_or_update(
:rtype: ~azure.mgmt.datafactory.models.ChangeDataCaptureResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -328,7 +327,7 @@ async def get(
:rtype: ~azure.mgmt.datafactory.models.ChangeDataCaptureResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -373,7 +372,7 @@ async def get(
return deserialized # type: ignore
@distributed_trace_async
- async def delete( # pylint: disable=inconsistent-return-statements
+ async def delete(
self, resource_group_name: str, factory_name: str, change_data_capture_name: str, **kwargs: Any
) -> None:
"""Deletes a change data capture.
@@ -388,7 +387,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -428,7 +427,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
return cls(pipeline_response, None, {}) # type: ignore
@distributed_trace_async
- async def start( # pylint: disable=inconsistent-return-statements
+ async def start(
self, resource_group_name: str, factory_name: str, change_data_capture_name: str, **kwargs: Any
) -> None:
"""Starts a change data capture.
@@ -443,7 +442,7 @@ async def start( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -483,7 +482,7 @@ async def start( # pylint: disable=inconsistent-return-statements
return cls(pipeline_response, None, {}) # type: ignore
@distributed_trace_async
- async def stop( # pylint: disable=inconsistent-return-statements
+ async def stop(
self, resource_group_name: str, factory_name: str, change_data_capture_name: str, **kwargs: Any
) -> None:
"""Stops a change data capture.
@@ -498,7 +497,7 @@ async def stop( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -553,7 +552,7 @@ async def status(
:rtype: str
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_credential_operations_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_credential_operations_operations.py
index 894ca06edda9..4499622ecf7c 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_credential_operations_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_credential_operations_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload
+from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
@@ -38,7 +37,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -83,7 +82,7 @@ def list_by_factory(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.CredentialListResponse] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -239,7 +238,7 @@ async def create_or_update(
:rtype: ~azure.mgmt.datafactory.models.CredentialResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -320,7 +319,7 @@ async def get(
:rtype: ~azure.mgmt.datafactory.models.CredentialResource or None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -367,9 +366,7 @@ async def get(
return deserialized # type: ignore
@distributed_trace_async
- async def delete( # pylint: disable=inconsistent-return-statements
- self, resource_group_name: str, factory_name: str, credential_name: str, **kwargs: Any
- ) -> None:
+ async def delete(self, resource_group_name: str, factory_name: str, credential_name: str, **kwargs: Any) -> None:
"""Deletes a credential.
:param resource_group_name: The resource group name. Required.
@@ -382,7 +379,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flow_debug_session_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flow_debug_session_operations.py
index 7ac3b03f24e1..55d20fe3c21b 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flow_debug_session_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flow_debug_session_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload
+from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
@@ -43,7 +42,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -74,7 +73,7 @@ async def _create_initial(
request: Union[_models.CreateDataFlowDebugSessionRequest, IO[bytes]],
**kwargs: Any
) -> AsyncIterator[bytes]:
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -285,7 +284,7 @@ def query_by_factory(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.QueryDataFlowDebugSessionsResponse] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -420,7 +419,7 @@ async def add_data_flow(
:rtype: ~azure.mgmt.datafactory.models.AddDataFlowToDebugSessionResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -475,7 +474,7 @@ async def add_data_flow(
return deserialized # type: ignore
@overload
- async def delete( # pylint: disable=inconsistent-return-statements
+ async def delete(
self,
resource_group_name: str,
factory_name: str,
@@ -501,7 +500,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
"""
@overload
- async def delete( # pylint: disable=inconsistent-return-statements
+ async def delete(
self,
resource_group_name: str,
factory_name: str,
@@ -527,7 +526,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
"""
@distributed_trace_async
- async def delete( # pylint: disable=inconsistent-return-statements
+ async def delete(
self,
resource_group_name: str,
factory_name: str,
@@ -547,7 +546,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -604,7 +603,7 @@ async def _execute_command_initial(
request: Union[_models.DataFlowDebugCommandRequest, IO[bytes]],
**kwargs: Any
) -> AsyncIterator[bytes]:
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flows_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flows_operations.py
index e884f876bb44..e7b774c3580a 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flows_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flows_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload
+from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
@@ -38,7 +37,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -156,7 +155,7 @@ async def create_or_update(
:rtype: ~azure.mgmt.datafactory.models.DataFlowResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -237,7 +236,7 @@ async def get(
:rtype: ~azure.mgmt.datafactory.models.DataFlowResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -282,9 +281,7 @@ async def get(
return deserialized # type: ignore
@distributed_trace_async
- async def delete( # pylint: disable=inconsistent-return-statements
- self, resource_group_name: str, factory_name: str, data_flow_name: str, **kwargs: Any
- ) -> None:
+ async def delete(self, resource_group_name: str, factory_name: str, data_flow_name: str, **kwargs: Any) -> None:
"""Deletes a data flow.
:param resource_group_name: The resource group name. Required.
@@ -297,7 +294,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -357,7 +354,7 @@ def list_by_factory(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.DataFlowListResponse] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_datasets_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_datasets_operations.py
index 9877c2586452..64df91a56cbf 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_datasets_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_datasets_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload
+from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
@@ -38,7 +37,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -82,7 +81,7 @@ def list_by_factory(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.DatasetListResponse] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -238,7 +237,7 @@ async def create_or_update(
:rtype: ~azure.mgmt.datafactory.models.DatasetResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -319,7 +318,7 @@ async def get(
:rtype: ~azure.mgmt.datafactory.models.DatasetResource or None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -366,9 +365,7 @@ async def get(
return deserialized # type: ignore
@distributed_trace_async
- async def delete( # pylint: disable=inconsistent-return-statements
- self, resource_group_name: str, factory_name: str, dataset_name: str, **kwargs: Any
- ) -> None:
+ async def delete(self, resource_group_name: str, factory_name: str, dataset_name: str, **kwargs: Any) -> None:
"""Deletes a dataset.
:param resource_group_name: The resource group name. Required.
@@ -381,7 +378,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_exposure_control_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_exposure_control_operations.py
index 9378b4cbc2db..6d450ba379c8 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_exposure_control_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_exposure_control_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload
+from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -34,7 +33,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -123,7 +122,7 @@ async def get_feature_value(
:rtype: ~azure.mgmt.datafactory.models.ExposureControlResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -250,7 +249,7 @@ async def get_feature_value_by_factory(
:rtype: ~azure.mgmt.datafactory.models.ExposureControlResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -381,7 +380,7 @@ async def query_feature_values_by_factory(
:rtype: ~azure.mgmt.datafactory.models.ExposureControlBatchResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_factories_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_factories_operations.py
index 909d7e58b3fc..392de8b7be89 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_factories_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_factories_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload
+from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
@@ -43,7 +42,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -81,7 +80,7 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.Factory"]:
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.FactoryListResponse] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -197,7 +196,7 @@ async def configure_factory_repo(
:rtype: ~azure.mgmt.datafactory.models.Factory
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -266,7 +265,7 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Asy
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.FactoryListResponse] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -412,7 +411,7 @@ async def create_or_update(
:rtype: ~azure.mgmt.datafactory.models.Factory
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -541,7 +540,7 @@ async def update(
:rtype: ~azure.mgmt.datafactory.models.Factory
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -613,7 +612,7 @@ async def get(
:rtype: ~azure.mgmt.datafactory.models.Factory or None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -659,9 +658,7 @@ async def get(
return deserialized # type: ignore
@distributed_trace_async
- async def delete( # pylint: disable=inconsistent-return-statements
- self, resource_group_name: str, factory_name: str, **kwargs: Any
- ) -> None:
+ async def delete(self, resource_group_name: str, factory_name: str, **kwargs: Any) -> None:
"""Deletes a factory.
:param resource_group_name: The resource group name. Required.
@@ -672,7 +669,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -784,7 +781,7 @@ async def get_git_hub_access_token(
:rtype: ~azure.mgmt.datafactory.models.GitHubAccessTokenResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -911,7 +908,7 @@ async def get_data_plane_access(
:rtype: ~azure.mgmt.datafactory.models.AccessPolicyResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_global_parameters_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_global_parameters_operations.py
index e65591c4497e..7f5d233eccf9 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_global_parameters_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_global_parameters_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload
+from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
@@ -38,7 +37,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -84,7 +83,7 @@ def list_by_factory(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.GlobalParameterListResponse] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -162,7 +161,7 @@ async def get(
:rtype: ~azure.mgmt.datafactory.models.GlobalParameterResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -287,7 +286,7 @@ async def create_or_update(
:rtype: ~azure.mgmt.datafactory.models.GlobalParameterResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -343,7 +342,7 @@ async def create_or_update(
return deserialized # type: ignore
@distributed_trace_async
- async def delete( # pylint: disable=inconsistent-return-statements
+ async def delete(
self, resource_group_name: str, factory_name: str, global_parameter_name: str, **kwargs: Any
) -> None:
"""Deletes a Global parameter.
@@ -358,7 +357,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_nodes_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_nodes_operations.py
index 491a0c279007..a08698044486 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_nodes_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_nodes_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload
+from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -35,7 +34,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -77,7 +76,7 @@ async def get(
:rtype: ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -122,7 +121,7 @@ async def get(
return deserialized # type: ignore
@distributed_trace_async
- async def delete( # pylint: disable=inconsistent-return-statements
+ async def delete(
self, resource_group_name: str, factory_name: str, integration_runtime_name: str, node_name: str, **kwargs: Any
) -> None:
"""Deletes a self-hosted integration runtime node.
@@ -139,7 +138,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -275,7 +274,7 @@ async def update(
:rtype: ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -349,7 +348,7 @@ async def get_ip_address(
:rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeNodeIpAddress
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py
index 12eae02fd3a0..684f4906787c 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload
+from typing import Any, AsyncIterator, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -34,7 +33,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -61,7 +60,7 @@ def __init__(self, *args, **kwargs) -> None:
async def _refresh_initial(
self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any
) -> AsyncIterator[bytes]:
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -260,7 +259,7 @@ async def get(
:rtype: ~azure.mgmt.datafactory.models.SsisObjectMetadataListResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtimes_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtimes_operations.py
index 1c5791abd67e..6857c87ae32a 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtimes_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtimes_operations.py
@@ -1,4 +1,4 @@
-# pylint: disable=too-many-lines,too-many-statements
+# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +8,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload
+from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
@@ -55,7 +55,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -101,7 +101,7 @@ def list_by_factory(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.IntegrationRuntimeListResponse] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -261,7 +261,7 @@ async def create_or_update(
:rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -342,7 +342,7 @@ async def get(
:rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource or None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -474,7 +474,7 @@ async def update(
:rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -530,7 +530,7 @@ async def update(
return deserialized # type: ignore
@distributed_trace_async
- async def delete( # pylint: disable=inconsistent-return-statements
+ async def delete(
self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any
) -> None:
"""Deletes an integration runtime.
@@ -545,7 +545,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -600,7 +600,7 @@ async def get_status(
:rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -661,7 +661,7 @@ async def list_outbound_network_dependencies_endpoints( # pylint: disable=name-
~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -723,7 +723,7 @@ async def get_connection_info(
:rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeConnectionInfo
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -853,7 +853,7 @@ async def regenerate_auth_key(
:rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeys
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -924,7 +924,7 @@ async def list_auth_keys(
:rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeys
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -970,7 +970,7 @@ async def list_auth_keys(
async def _start_initial(
self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any
) -> AsyncIterator[bytes]:
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1084,7 +1084,7 @@ def get_long_running_output(pipeline_response):
async def _stop_initial(
self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any
) -> AsyncIterator[bytes]:
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1190,7 +1190,7 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
@distributed_trace_async
- async def sync_credentials( # pylint: disable=inconsistent-return-statements
+ async def sync_credentials(
self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any
) -> None:
"""Force the integration runtime to synchronize credentials across integration runtime nodes, and
@@ -1208,7 +1208,7 @@ async def sync_credentials( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1264,7 +1264,7 @@ async def get_monitoring_data(
:rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeMonitoringData
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1308,7 +1308,7 @@ async def get_monitoring_data(
return deserialized # type: ignore
@distributed_trace_async
- async def upgrade( # pylint: disable=inconsistent-return-statements
+ async def upgrade(
self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any
) -> None:
"""Upgrade self-hosted integration runtime to latest version if availability.
@@ -1323,7 +1323,7 @@ async def upgrade( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1363,7 +1363,7 @@ async def upgrade( # pylint: disable=inconsistent-return-statements
return cls(pipeline_response, None, {}) # type: ignore
@overload
- async def remove_links( # pylint: disable=inconsistent-return-statements
+ async def remove_links(
self,
resource_group_name: str,
factory_name: str,
@@ -1395,7 +1395,7 @@ async def remove_links( # pylint: disable=inconsistent-return-statements
"""
@overload
- async def remove_links( # pylint: disable=inconsistent-return-statements
+ async def remove_links(
self,
resource_group_name: str,
factory_name: str,
@@ -1426,7 +1426,7 @@ async def remove_links( # pylint: disable=inconsistent-return-statements
"""
@distributed_trace_async
- async def remove_links( # pylint: disable=inconsistent-return-statements
+ async def remove_links(
self,
resource_group_name: str,
factory_name: str,
@@ -1451,7 +1451,7 @@ async def remove_links( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1588,7 +1588,7 @@ async def create_linked_integration_runtime(
:rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_linked_services_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_linked_services_operations.py
index 6f20219541a7..263f1d1cda26 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_linked_services_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_linked_services_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload
+from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
@@ -38,7 +37,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -84,7 +83,7 @@ def list_by_factory(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.LinkedServiceListResponse] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -243,7 +242,7 @@ async def create_or_update(
:rtype: ~azure.mgmt.datafactory.models.LinkedServiceResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -324,7 +323,7 @@ async def get(
:rtype: ~azure.mgmt.datafactory.models.LinkedServiceResource or None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -371,7 +370,7 @@ async def get(
return deserialized # type: ignore
@distributed_trace_async
- async def delete( # pylint: disable=inconsistent-return-statements
+ async def delete(
self, resource_group_name: str, factory_name: str, linked_service_name: str, **kwargs: Any
) -> None:
"""Deletes a linked service.
@@ -386,7 +385,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_private_endpoints_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_private_endpoints_operations.py
index da3e687c4de1..fa0bf85d1738 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_private_endpoints_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_private_endpoints_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload
+from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
@@ -38,7 +37,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -86,7 +85,7 @@ def list_by_factory(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ManagedPrivateEndpointListResponse] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -256,7 +255,7 @@ async def create_or_update(
:rtype: ~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -341,7 +340,7 @@ async def get(
:rtype: ~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -387,7 +386,7 @@ async def get(
return deserialized # type: ignore
@distributed_trace_async
- async def delete( # pylint: disable=inconsistent-return-statements
+ async def delete(
self,
resource_group_name: str,
factory_name: str,
@@ -409,7 +408,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_virtual_networks_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_virtual_networks_operations.py
index d39327039222..735bfd3619f8 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_virtual_networks_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_virtual_networks_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload
+from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
@@ -37,7 +36,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -83,7 +82,7 @@ def list_by_factory(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ManagedVirtualNetworkListResponse] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -243,7 +242,7 @@ async def create_or_update(
:rtype: ~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -324,7 +323,7 @@ async def get(
:rtype: ~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_operations.py
index d5b7a449d084..a2b09223e1b9 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -7,7 +6,7 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import sys
-from typing import Any, AsyncIterable, Callable, Dict, Optional, Type, TypeVar
+from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
@@ -31,7 +30,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -69,7 +68,7 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.Operation"]:
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.OperationListResponse] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipeline_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipeline_runs_operations.py
index 136dd4e28d06..d19b35f9badd 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipeline_runs_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipeline_runs_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload
+from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -34,7 +33,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -131,7 +130,7 @@ async def query_by_factory(
:rtype: ~azure.mgmt.datafactory.models.PipelineRunsQueryResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -199,7 +198,7 @@ async def get(self, resource_group_name: str, factory_name: str, run_id: str, **
:rtype: ~azure.mgmt.datafactory.models.PipelineRun
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -243,7 +242,7 @@ async def get(self, resource_group_name: str, factory_name: str, run_id: str, **
return deserialized # type: ignore
@distributed_trace_async
- async def cancel( # pylint: disable=inconsistent-return-statements
+ async def cancel(
self,
resource_group_name: str,
factory_name: str,
@@ -266,7 +265,7 @@ async def cancel( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipelines_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipelines_operations.py
index 1d7c423bdfff..69531d9b481f 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipelines_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipelines_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload
+from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
@@ -39,7 +38,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -85,7 +84,7 @@ def list_by_factory(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.PipelineListResponse] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -241,7 +240,7 @@ async def create_or_update(
:rtype: ~azure.mgmt.datafactory.models.PipelineResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -322,7 +321,7 @@ async def get(
:rtype: ~azure.mgmt.datafactory.models.PipelineResource or None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -369,9 +368,7 @@ async def get(
return deserialized # type: ignore
@distributed_trace_async
- async def delete( # pylint: disable=inconsistent-return-statements
- self, resource_group_name: str, factory_name: str, pipeline_name: str, **kwargs: Any
- ) -> None:
+ async def delete(self, resource_group_name: str, factory_name: str, pipeline_name: str, **kwargs: Any) -> None:
"""Deletes a pipeline.
:param resource_group_name: The resource group name. Required.
@@ -384,7 +381,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -562,7 +559,7 @@ async def create_run(
:rtype: ~azure.mgmt.datafactory.models.CreateRunResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_end_point_connections_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_end_point_connections_operations.py
index 4448a257ebc3..055a8db26e7f 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_end_point_connections_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_end_point_connections_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -7,7 +6,7 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import sys
-from typing import Any, AsyncIterable, Callable, Dict, Optional, Type, TypeVar
+from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
@@ -31,7 +30,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -77,7 +76,7 @@ def list_by_factory(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.PrivateEndpointConnectionListResponse] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_endpoint_connection_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_endpoint_connection_operations.py
index 0708068b34b3..dab2d4e5c224 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_endpoint_connection_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_endpoint_connection_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload
+from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -34,7 +33,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -157,7 +156,7 @@ async def create_or_update(
:rtype: ~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -238,7 +237,7 @@ async def get(
:rtype: ~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -283,7 +282,7 @@ async def get(
return deserialized # type: ignore
@distributed_trace_async
- async def delete( # pylint: disable=inconsistent-return-statements
+ async def delete(
self, resource_group_name: str, factory_name: str, private_endpoint_connection_name: str, **kwargs: Any
) -> None:
"""Deletes a private endpoint connection.
@@ -298,7 +297,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_link_resources_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_link_resources_operations.py
index 0a374306c498..f8177c380808 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_link_resources_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_link_resources_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -7,7 +6,7 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import sys
-from typing import Any, Callable, Dict, Optional, Type, TypeVar
+from typing import Any, Callable, Dict, Optional, TypeVar
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -29,7 +28,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -67,7 +66,7 @@ async def get(
:rtype: ~azure.mgmt.datafactory.models.PrivateLinkResourcesWrapper
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_trigger_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_trigger_runs_operations.py
index 83963ccb5212..f8b4754db30b 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_trigger_runs_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_trigger_runs_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload
+from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -34,7 +33,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -59,7 +58,7 @@ def __init__(self, *args, **kwargs) -> None:
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace_async
- async def rerun( # pylint: disable=inconsistent-return-statements
+ async def rerun(
self, resource_group_name: str, factory_name: str, trigger_name: str, run_id: str, **kwargs: Any
) -> None:
"""Rerun single trigger instance by runId.
@@ -76,7 +75,7 @@ async def rerun( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -117,7 +116,7 @@ async def rerun( # pylint: disable=inconsistent-return-statements
return cls(pipeline_response, None, {}) # type: ignore
@distributed_trace_async
- async def cancel( # pylint: disable=inconsistent-return-statements
+ async def cancel(
self, resource_group_name: str, factory_name: str, trigger_name: str, run_id: str, **kwargs: Any
) -> None:
"""Cancel a single trigger instance by runId.
@@ -134,7 +133,7 @@ async def cancel( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -247,7 +246,7 @@ async def query_by_factory(
:rtype: ~azure.mgmt.datafactory.models.TriggerRunsQueryResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_triggers_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_triggers_operations.py
index a5f24c346224..5fd27fa5b7d0 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_triggers_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_triggers_operations.py
@@ -1,4 +1,4 @@
-# pylint: disable=too-many-lines,too-many-statements
+# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +8,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload
+from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
@@ -48,7 +48,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -92,7 +92,7 @@ def list_by_factory(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.TriggerListResponse] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -227,7 +227,7 @@ async def query_by_factory(
:rtype: ~azure.mgmt.datafactory.models.TriggerQueryResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -375,7 +375,7 @@ async def create_or_update(
:rtype: ~azure.mgmt.datafactory.models.TriggerResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -456,7 +456,7 @@ async def get(
:rtype: ~azure.mgmt.datafactory.models.TriggerResource or None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -503,9 +503,7 @@ async def get(
return deserialized # type: ignore
@distributed_trace_async
- async def delete( # pylint: disable=inconsistent-return-statements
- self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any
- ) -> None:
+ async def delete(self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any) -> None:
"""Deletes a trigger.
:param resource_group_name: The resource group name. Required.
@@ -518,7 +516,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -560,7 +558,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
async def _subscribe_to_events_initial(
self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any
) -> AsyncIterator[bytes]:
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -687,7 +685,7 @@ async def get_event_subscription_status(
:rtype: ~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -733,7 +731,7 @@ async def get_event_subscription_status(
async def _unsubscribe_from_events_initial(
self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any
) -> AsyncIterator[bytes]:
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -847,7 +845,7 @@ def get_long_running_output(pipeline_response):
async def _start_initial(
self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any
) -> AsyncIterator[bytes]:
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -955,7 +953,7 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
async def _stop_initial(
self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any
) -> AsyncIterator[bytes]:
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
index 5616a16fc7fb..ee44f781e28c 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
@@ -1,3 +1,4 @@
+# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -5,925 +6,939 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._models_py3 import AccessPolicyResponse
-from ._models_py3 import Activity
-from ._models_py3 import ActivityDependency
-from ._models_py3 import ActivityPolicy
-from ._models_py3 import ActivityRun
-from ._models_py3 import ActivityRunsQueryResponse
-from ._models_py3 import AddDataFlowToDebugSessionResponse
-from ._models_py3 import AdditionalColumns
-from ._models_py3 import AmazonMWSLinkedService
-from ._models_py3 import AmazonMWSObjectDataset
-from ._models_py3 import AmazonMWSSource
-from ._models_py3 import AmazonRdsForOracleLinkedService
-from ._models_py3 import AmazonRdsForOraclePartitionSettings
-from ._models_py3 import AmazonRdsForOracleSource
-from ._models_py3 import AmazonRdsForOracleTableDataset
-from ._models_py3 import AmazonRdsForSqlServerLinkedService
-from ._models_py3 import AmazonRdsForSqlServerLinkedServiceTypeProperties
-from ._models_py3 import AmazonRdsForSqlServerSource
-from ._models_py3 import AmazonRdsForSqlServerTableDataset
-from ._models_py3 import AmazonRedshiftLinkedService
-from ._models_py3 import AmazonRedshiftSource
-from ._models_py3 import AmazonRedshiftTableDataset
-from ._models_py3 import AmazonS3CompatibleLinkedService
-from ._models_py3 import AmazonS3CompatibleLocation
-from ._models_py3 import AmazonS3CompatibleReadSettings
-from ._models_py3 import AmazonS3Dataset
-from ._models_py3 import AmazonS3LinkedService
-from ._models_py3 import AmazonS3Location
-from ._models_py3 import AmazonS3ReadSettings
-from ._models_py3 import AppFiguresLinkedService
-from ._models_py3 import AppendVariableActivity
-from ._models_py3 import ArmIdWrapper
-from ._models_py3 import AsanaLinkedService
-from ._models_py3 import AvroDataset
-from ._models_py3 import AvroFormat
-from ._models_py3 import AvroSink
-from ._models_py3 import AvroSource
-from ._models_py3 import AvroWriteSettings
-from ._models_py3 import AzPowerShellSetup
-from ._models_py3 import AzureBatchLinkedService
-from ._models_py3 import AzureBlobDataset
-from ._models_py3 import AzureBlobFSDataset
-from ._models_py3 import AzureBlobFSLinkedService
-from ._models_py3 import AzureBlobFSLocation
-from ._models_py3 import AzureBlobFSReadSettings
-from ._models_py3 import AzureBlobFSSink
-from ._models_py3 import AzureBlobFSSource
-from ._models_py3 import AzureBlobFSWriteSettings
-from ._models_py3 import AzureBlobStorageLinkedService
-from ._models_py3 import AzureBlobStorageLocation
-from ._models_py3 import AzureBlobStorageReadSettings
-from ._models_py3 import AzureBlobStorageWriteSettings
-from ._models_py3 import AzureDataExplorerCommandActivity
-from ._models_py3 import AzureDataExplorerLinkedService
-from ._models_py3 import AzureDataExplorerSink
-from ._models_py3 import AzureDataExplorerSource
-from ._models_py3 import AzureDataExplorerTableDataset
-from ._models_py3 import AzureDataLakeAnalyticsLinkedService
-from ._models_py3 import AzureDataLakeStoreDataset
-from ._models_py3 import AzureDataLakeStoreLinkedService
-from ._models_py3 import AzureDataLakeStoreLocation
-from ._models_py3 import AzureDataLakeStoreReadSettings
-from ._models_py3 import AzureDataLakeStoreSink
-from ._models_py3 import AzureDataLakeStoreSource
-from ._models_py3 import AzureDataLakeStoreWriteSettings
-from ._models_py3 import AzureDatabricksDeltaLakeDataset
-from ._models_py3 import AzureDatabricksDeltaLakeExportCommand
-from ._models_py3 import AzureDatabricksDeltaLakeImportCommand
-from ._models_py3 import AzureDatabricksDeltaLakeLinkedService
-from ._models_py3 import AzureDatabricksDeltaLakeSink
-from ._models_py3 import AzureDatabricksDeltaLakeSource
-from ._models_py3 import AzureDatabricksLinkedService
-from ._models_py3 import AzureFileStorageLinkedService
-from ._models_py3 import AzureFileStorageLocation
-from ._models_py3 import AzureFileStorageReadSettings
-from ._models_py3 import AzureFileStorageWriteSettings
-from ._models_py3 import AzureFunctionActivity
-from ._models_py3 import AzureFunctionLinkedService
-from ._models_py3 import AzureKeyVaultLinkedService
-from ._models_py3 import AzureKeyVaultSecretReference
-from ._models_py3 import AzureMLBatchExecutionActivity
-from ._models_py3 import AzureMLExecutePipelineActivity
-from ._models_py3 import AzureMLLinkedService
-from ._models_py3 import AzureMLServiceLinkedService
-from ._models_py3 import AzureMLUpdateResourceActivity
-from ._models_py3 import AzureMLWebServiceFile
-from ._models_py3 import AzureMariaDBLinkedService
-from ._models_py3 import AzureMariaDBSource
-from ._models_py3 import AzureMariaDBTableDataset
-from ._models_py3 import AzureMySqlLinkedService
-from ._models_py3 import AzureMySqlSink
-from ._models_py3 import AzureMySqlSource
-from ._models_py3 import AzureMySqlTableDataset
-from ._models_py3 import AzurePostgreSqlLinkedService
-from ._models_py3 import AzurePostgreSqlSink
-from ._models_py3 import AzurePostgreSqlSource
-from ._models_py3 import AzurePostgreSqlTableDataset
-from ._models_py3 import AzureQueueSink
-from ._models_py3 import AzureSearchIndexDataset
-from ._models_py3 import AzureSearchIndexSink
-from ._models_py3 import AzureSearchLinkedService
-from ._models_py3 import AzureSqlDWLinkedService
-from ._models_py3 import AzureSqlDWLinkedServiceTypeProperties
-from ._models_py3 import AzureSqlDWTableDataset
-from ._models_py3 import AzureSqlDatabaseLinkedService
-from ._models_py3 import AzureSqlDatabaseLinkedServiceTypeProperties
-from ._models_py3 import AzureSqlMILinkedService
-from ._models_py3 import AzureSqlMILinkedServiceTypeProperties
-from ._models_py3 import AzureSqlMITableDataset
-from ._models_py3 import AzureSqlSink
-from ._models_py3 import AzureSqlSource
-from ._models_py3 import AzureSqlTableDataset
-from ._models_py3 import AzureStorageLinkedService
-from ._models_py3 import AzureStorageLinkedServiceTypeProperties
-from ._models_py3 import AzureSynapseArtifactsLinkedService
-from ._models_py3 import AzureTableDataset
-from ._models_py3 import AzureTableSink
-from ._models_py3 import AzureTableSource
-from ._models_py3 import AzureTableStorageLinkedService
-from ._models_py3 import AzureTableStorageLinkedServiceTypeProperties
-from ._models_py3 import BigDataPoolParametrizationReference
-from ._models_py3 import BinaryDataset
-from ._models_py3 import BinaryReadSettings
-from ._models_py3 import BinarySink
-from ._models_py3 import BinarySource
-from ._models_py3 import BlobEventsTrigger
-from ._models_py3 import BlobSink
-from ._models_py3 import BlobSource
-from ._models_py3 import BlobTrigger
-from ._models_py3 import CMKIdentityDefinition
-from ._models_py3 import CassandraLinkedService
-from ._models_py3 import CassandraSource
-from ._models_py3 import CassandraTableDataset
-from ._models_py3 import ChainingTrigger
-from ._models_py3 import ChangeDataCaptureFolder
-from ._models_py3 import ChangeDataCaptureListResponse
-from ._models_py3 import ChangeDataCaptureResource
-from ._models_py3 import CloudError
-from ._models_py3 import CmdkeySetup
-from ._models_py3 import CommonDataServiceForAppsEntityDataset
-from ._models_py3 import CommonDataServiceForAppsLinkedService
-from ._models_py3 import CommonDataServiceForAppsSink
-from ._models_py3 import CommonDataServiceForAppsSource
-from ._models_py3 import ComponentSetup
-from ._models_py3 import CompressionReadSettings
-from ._models_py3 import ConcurLinkedService
-from ._models_py3 import ConcurObjectDataset
-from ._models_py3 import ConcurSource
-from ._models_py3 import ConnectionStateProperties
-from ._models_py3 import ContinuationSettingsReference
-from ._models_py3 import ControlActivity
-from ._models_py3 import CopyActivity
-from ._models_py3 import CopyActivityLogSettings
-from ._models_py3 import CopyComputeScaleProperties
-from ._models_py3 import CopySink
-from ._models_py3 import CopySource
-from ._models_py3 import CopyTranslator
-from ._models_py3 import CosmosDbLinkedService
-from ._models_py3 import CosmosDbMongoDbApiCollectionDataset
-from ._models_py3 import CosmosDbMongoDbApiLinkedService
-from ._models_py3 import CosmosDbMongoDbApiSink
-from ._models_py3 import CosmosDbMongoDbApiSource
-from ._models_py3 import CosmosDbSqlApiCollectionDataset
-from ._models_py3 import CosmosDbSqlApiSink
-from ._models_py3 import CosmosDbSqlApiSource
-from ._models_py3 import CouchbaseLinkedService
-from ._models_py3 import CouchbaseSource
-from ._models_py3 import CouchbaseTableDataset
-from ._models_py3 import CreateDataFlowDebugSessionRequest
-from ._models_py3 import CreateDataFlowDebugSessionResponse
-from ._models_py3 import CreateLinkedIntegrationRuntimeRequest
-from ._models_py3 import CreateRunResponse
-from ._models_py3 import Credential
-from ._models_py3 import CredentialListResponse
-from ._models_py3 import CredentialReference
-from ._models_py3 import CredentialResource
-from ._models_py3 import CustomActivity
-from ._models_py3 import CustomActivityReferenceObject
-from ._models_py3 import CustomDataSourceLinkedService
-from ._models_py3 import CustomDataset
-from ._models_py3 import CustomEventsTrigger
-from ._models_py3 import CustomSetupBase
-from ._models_py3 import DWCopyCommandDefaultValue
-from ._models_py3 import DWCopyCommandSettings
-from ._models_py3 import DataFlow
-from ._models_py3 import DataFlowDebugCommandPayload
-from ._models_py3 import DataFlowDebugCommandRequest
-from ._models_py3 import DataFlowDebugCommandResponse
-from ._models_py3 import DataFlowDebugPackage
-from ._models_py3 import DataFlowDebugPackageDebugSettings
-from ._models_py3 import DataFlowDebugResource
-from ._models_py3 import DataFlowDebugSessionInfo
-from ._models_py3 import DataFlowFolder
-from ._models_py3 import DataFlowListResponse
-from ._models_py3 import DataFlowReference
-from ._models_py3 import DataFlowResource
-from ._models_py3 import DataFlowSink
-from ._models_py3 import DataFlowSource
-from ._models_py3 import DataFlowSourceSetting
-from ._models_py3 import DataFlowStagingInfo
-from ._models_py3 import DataLakeAnalyticsUSQLActivity
-from ._models_py3 import DataMapperMapping
-from ._models_py3 import DatabricksNotebookActivity
-from ._models_py3 import DatabricksSparkJarActivity
-from ._models_py3 import DatabricksSparkPythonActivity
-from ._models_py3 import Dataset
-from ._models_py3 import DatasetCompression
-from ._models_py3 import DatasetDataElement
-from ._models_py3 import DatasetDebugResource
-from ._models_py3 import DatasetFolder
-from ._models_py3 import DatasetListResponse
-from ._models_py3 import DatasetLocation
-from ._models_py3 import DatasetReference
-from ._models_py3 import DatasetResource
-from ._models_py3 import DatasetSchemaDataElement
-from ._models_py3 import DatasetStorageFormat
-from ._models_py3 import DataworldLinkedService
-from ._models_py3 import Db2LinkedService
-from ._models_py3 import Db2Source
-from ._models_py3 import Db2TableDataset
-from ._models_py3 import DeleteActivity
-from ._models_py3 import DeleteDataFlowDebugSessionRequest
-from ._models_py3 import DelimitedTextDataset
-from ._models_py3 import DelimitedTextReadSettings
-from ._models_py3 import DelimitedTextSink
-from ._models_py3 import DelimitedTextSource
-from ._models_py3 import DelimitedTextWriteSettings
-from ._models_py3 import DependencyReference
-from ._models_py3 import DistcpSettings
-from ._models_py3 import DocumentDbCollectionDataset
-from ._models_py3 import DocumentDbCollectionSink
-from ._models_py3 import DocumentDbCollectionSource
-from ._models_py3 import DrillLinkedService
-from ._models_py3 import DrillSource
-from ._models_py3 import DrillTableDataset
-from ._models_py3 import DynamicsAXLinkedService
-from ._models_py3 import DynamicsAXResourceDataset
-from ._models_py3 import DynamicsAXSource
-from ._models_py3 import DynamicsCrmEntityDataset
-from ._models_py3 import DynamicsCrmLinkedService
-from ._models_py3 import DynamicsCrmSink
-from ._models_py3 import DynamicsCrmSource
-from ._models_py3 import DynamicsEntityDataset
-from ._models_py3 import DynamicsLinkedService
-from ._models_py3 import DynamicsSink
-from ._models_py3 import DynamicsSource
-from ._models_py3 import EloquaLinkedService
-from ._models_py3 import EloquaObjectDataset
-from ._models_py3 import EloquaSource
-from ._models_py3 import EncryptionConfiguration
-from ._models_py3 import EntityReference
-from ._models_py3 import EnvironmentVariableSetup
-from ._models_py3 import ExcelDataset
-from ._models_py3 import ExcelSource
-from ._models_py3 import ExecuteDataFlowActivity
-from ._models_py3 import ExecuteDataFlowActivityTypeProperties
-from ._models_py3 import ExecuteDataFlowActivityTypePropertiesCompute
-from ._models_py3 import ExecutePipelineActivity
-from ._models_py3 import ExecutePipelineActivityPolicy
-from ._models_py3 import ExecutePowerQueryActivityTypeProperties
-from ._models_py3 import ExecuteSSISPackageActivity
-from ._models_py3 import ExecuteWranglingDataflowActivity
-from ._models_py3 import ExecutionActivity
-from ._models_py3 import ExportSettings
-from ._models_py3 import ExposureControlBatchRequest
-from ._models_py3 import ExposureControlBatchResponse
-from ._models_py3 import ExposureControlRequest
-from ._models_py3 import ExposureControlResponse
-from ._models_py3 import Expression
-from ._models_py3 import ExpressionV2
-from ._models_py3 import Factory
-from ._models_py3 import FactoryGitHubConfiguration
-from ._models_py3 import FactoryIdentity
-from ._models_py3 import FactoryListResponse
-from ._models_py3 import FactoryRepoConfiguration
-from ._models_py3 import FactoryRepoUpdate
-from ._models_py3 import FactoryUpdateParameters
-from ._models_py3 import FactoryVSTSConfiguration
-from ._models_py3 import FailActivity
-from ._models_py3 import FileServerLinkedService
-from ._models_py3 import FileServerLocation
-from ._models_py3 import FileServerReadSettings
-from ._models_py3 import FileServerWriteSettings
-from ._models_py3 import FileShareDataset
-from ._models_py3 import FileSystemSink
-from ._models_py3 import FileSystemSource
-from ._models_py3 import FilterActivity
-from ._models_py3 import Flowlet
-from ._models_py3 import ForEachActivity
-from ._models_py3 import FormatReadSettings
-from ._models_py3 import FormatWriteSettings
-from ._models_py3 import FtpReadSettings
-from ._models_py3 import FtpServerLinkedService
-from ._models_py3 import FtpServerLocation
-from ._models_py3 import GetDataFactoryOperationStatusResponse
-from ._models_py3 import GetMetadataActivity
-from ._models_py3 import GetSsisObjectMetadataRequest
-from ._models_py3 import GitHubAccessTokenRequest
-from ._models_py3 import GitHubAccessTokenResponse
-from ._models_py3 import GitHubClientSecret
-from ._models_py3 import GlobalParameterListResponse
-from ._models_py3 import GlobalParameterResource
-from ._models_py3 import GlobalParameterSpecification
-from ._models_py3 import GoogleAdWordsLinkedService
-from ._models_py3 import GoogleAdWordsObjectDataset
-from ._models_py3 import GoogleAdWordsSource
-from ._models_py3 import GoogleBigQueryLinkedService
-from ._models_py3 import GoogleBigQueryObjectDataset
-from ._models_py3 import GoogleBigQuerySource
-from ._models_py3 import GoogleBigQueryV2LinkedService
-from ._models_py3 import GoogleBigQueryV2ObjectDataset
-from ._models_py3 import GoogleBigQueryV2Source
-from ._models_py3 import GoogleCloudStorageLinkedService
-from ._models_py3 import GoogleCloudStorageLocation
-from ._models_py3 import GoogleCloudStorageReadSettings
-from ._models_py3 import GoogleSheetsLinkedService
-from ._models_py3 import GreenplumLinkedService
-from ._models_py3 import GreenplumSource
-from ._models_py3 import GreenplumTableDataset
-from ._models_py3 import HBaseLinkedService
-from ._models_py3 import HBaseObjectDataset
-from ._models_py3 import HBaseSource
-from ._models_py3 import HDInsightHiveActivity
-from ._models_py3 import HDInsightLinkedService
-from ._models_py3 import HDInsightMapReduceActivity
-from ._models_py3 import HDInsightOnDemandLinkedService
-from ._models_py3 import HDInsightPigActivity
-from ._models_py3 import HDInsightSparkActivity
-from ._models_py3 import HDInsightStreamingActivity
-from ._models_py3 import HdfsLinkedService
-from ._models_py3 import HdfsLocation
-from ._models_py3 import HdfsReadSettings
-from ._models_py3 import HdfsSource
-from ._models_py3 import HiveLinkedService
-from ._models_py3 import HiveObjectDataset
-from ._models_py3 import HiveSource
-from ._models_py3 import HttpDataset
-from ._models_py3 import HttpLinkedService
-from ._models_py3 import HttpReadSettings
-from ._models_py3 import HttpServerLocation
-from ._models_py3 import HttpSource
-from ._models_py3 import HubspotLinkedService
-from ._models_py3 import HubspotObjectDataset
-from ._models_py3 import HubspotSource
-from ._models_py3 import IfConditionActivity
-from ._models_py3 import ImpalaLinkedService
-from ._models_py3 import ImpalaObjectDataset
-from ._models_py3 import ImpalaSource
-from ._models_py3 import ImportSettings
-from ._models_py3 import InformixLinkedService
-from ._models_py3 import InformixSink
-from ._models_py3 import InformixSource
-from ._models_py3 import InformixTableDataset
-from ._models_py3 import IntegrationRuntime
-from ._models_py3 import IntegrationRuntimeAuthKeys
-from ._models_py3 import IntegrationRuntimeComputeProperties
-from ._models_py3 import IntegrationRuntimeConnectionInfo
-from ._models_py3 import IntegrationRuntimeCustomSetupScriptProperties
-from ._models_py3 import IntegrationRuntimeCustomerVirtualNetwork
-from ._models_py3 import IntegrationRuntimeDataFlowProperties
-from ._models_py3 import IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem
-from ._models_py3 import IntegrationRuntimeDataProxyProperties
-from ._models_py3 import IntegrationRuntimeDebugResource
-from ._models_py3 import IntegrationRuntimeListResponse
-from ._models_py3 import IntegrationRuntimeMonitoringData
-from ._models_py3 import IntegrationRuntimeNodeIpAddress
-from ._models_py3 import IntegrationRuntimeNodeMonitoringData
-from ._models_py3 import IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint
-from ._models_py3 import IntegrationRuntimeOutboundNetworkDependenciesEndpoint
-from ._models_py3 import IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails
-from ._models_py3 import IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse
-from ._models_py3 import IntegrationRuntimeReference
-from ._models_py3 import IntegrationRuntimeRegenerateKeyParameters
-from ._models_py3 import IntegrationRuntimeResource
-from ._models_py3 import IntegrationRuntimeSsisCatalogInfo
-from ._models_py3 import IntegrationRuntimeSsisProperties
-from ._models_py3 import IntegrationRuntimeStatus
-from ._models_py3 import IntegrationRuntimeStatusListResponse
-from ._models_py3 import IntegrationRuntimeStatusResponse
-from ._models_py3 import IntegrationRuntimeVNetProperties
-from ._models_py3 import JiraLinkedService
-from ._models_py3 import JiraObjectDataset
-from ._models_py3 import JiraSource
-from ._models_py3 import JsonDataset
-from ._models_py3 import JsonFormat
-from ._models_py3 import JsonReadSettings
-from ._models_py3 import JsonSink
-from ._models_py3 import JsonSource
-from ._models_py3 import JsonWriteSettings
-from ._models_py3 import LakeHouseLinkedService
-from ._models_py3 import LakeHouseLocation
-from ._models_py3 import LakeHouseReadSettings
-from ._models_py3 import LakeHouseTableDataset
-from ._models_py3 import LakeHouseTableSink
-from ._models_py3 import LakeHouseTableSource
-from ._models_py3 import LakeHouseWriteSettings
-from ._models_py3 import LinkedIntegrationRuntime
-from ._models_py3 import LinkedIntegrationRuntimeKeyAuthorization
-from ._models_py3 import LinkedIntegrationRuntimeRbacAuthorization
-from ._models_py3 import LinkedIntegrationRuntimeRequest
-from ._models_py3 import LinkedIntegrationRuntimeType
-from ._models_py3 import LinkedService
-from ._models_py3 import LinkedServiceDebugResource
-from ._models_py3 import LinkedServiceListResponse
-from ._models_py3 import LinkedServiceReference
-from ._models_py3 import LinkedServiceResource
-from ._models_py3 import LogLocationSettings
-from ._models_py3 import LogSettings
-from ._models_py3 import LogStorageSettings
-from ._models_py3 import LookupActivity
-from ._models_py3 import MagentoLinkedService
-from ._models_py3 import MagentoObjectDataset
-from ._models_py3 import MagentoSource
-from ._models_py3 import ManagedIdentityCredential
-from ._models_py3 import ManagedIntegrationRuntime
-from ._models_py3 import ManagedIntegrationRuntimeError
-from ._models_py3 import ManagedIntegrationRuntimeNode
-from ._models_py3 import ManagedIntegrationRuntimeOperationResult
-from ._models_py3 import ManagedIntegrationRuntimeStatus
-from ._models_py3 import ManagedPrivateEndpoint
-from ._models_py3 import ManagedPrivateEndpointListResponse
-from ._models_py3 import ManagedPrivateEndpointResource
-from ._models_py3 import ManagedVirtualNetwork
-from ._models_py3 import ManagedVirtualNetworkListResponse
-from ._models_py3 import ManagedVirtualNetworkReference
-from ._models_py3 import ManagedVirtualNetworkResource
-from ._models_py3 import MapperAttributeMapping
-from ._models_py3 import MapperAttributeMappings
-from ._models_py3 import MapperAttributeReference
-from ._models_py3 import MapperConnection
-from ._models_py3 import MapperConnectionReference
-from ._models_py3 import MapperDslConnectorProperties
-from ._models_py3 import MapperPolicy
-from ._models_py3 import MapperPolicyRecurrence
-from ._models_py3 import MapperSourceConnectionsInfo
-from ._models_py3 import MapperTable
-from ._models_py3 import MapperTableSchema
-from ._models_py3 import MapperTargetConnectionsInfo
-from ._models_py3 import MappingDataFlow
-from ._models_py3 import MariaDBLinkedService
-from ._models_py3 import MariaDBSource
-from ._models_py3 import MariaDBTableDataset
-from ._models_py3 import MarketoLinkedService
-from ._models_py3 import MarketoObjectDataset
-from ._models_py3 import MarketoSource
-from ._models_py3 import MetadataItem
-from ._models_py3 import MicrosoftAccessLinkedService
-from ._models_py3 import MicrosoftAccessSink
-from ._models_py3 import MicrosoftAccessSource
-from ._models_py3 import MicrosoftAccessTableDataset
-from ._models_py3 import MongoDbAtlasCollectionDataset
-from ._models_py3 import MongoDbAtlasLinkedService
-from ._models_py3 import MongoDbAtlasSink
-from ._models_py3 import MongoDbAtlasSource
-from ._models_py3 import MongoDbCollectionDataset
-from ._models_py3 import MongoDbCursorMethodsProperties
-from ._models_py3 import MongoDbLinkedService
-from ._models_py3 import MongoDbSource
-from ._models_py3 import MongoDbV2CollectionDataset
-from ._models_py3 import MongoDbV2LinkedService
-from ._models_py3 import MongoDbV2Sink
-from ._models_py3 import MongoDbV2Source
-from ._models_py3 import MultiplePipelineTrigger
-from ._models_py3 import MySqlLinkedService
-from ._models_py3 import MySqlSource
-from ._models_py3 import MySqlTableDataset
-from ._models_py3 import NetezzaLinkedService
-from ._models_py3 import NetezzaPartitionSettings
-from ._models_py3 import NetezzaSource
-from ._models_py3 import NetezzaTableDataset
-from ._models_py3 import NotebookParameter
-from ._models_py3 import ODataLinkedService
-from ._models_py3 import ODataResourceDataset
-from ._models_py3 import ODataSource
-from ._models_py3 import OdbcLinkedService
-from ._models_py3 import OdbcSink
-from ._models_py3 import OdbcSource
-from ._models_py3 import OdbcTableDataset
-from ._models_py3 import Office365Dataset
-from ._models_py3 import Office365LinkedService
-from ._models_py3 import Office365Source
-from ._models_py3 import Operation
-from ._models_py3 import OperationDisplay
-from ._models_py3 import OperationListResponse
-from ._models_py3 import OperationLogSpecification
-from ._models_py3 import OperationMetricAvailability
-from ._models_py3 import OperationMetricDimension
-from ._models_py3 import OperationMetricSpecification
-from ._models_py3 import OperationServiceSpecification
-from ._models_py3 import OracleCloudStorageLinkedService
-from ._models_py3 import OracleCloudStorageLocation
-from ._models_py3 import OracleCloudStorageReadSettings
-from ._models_py3 import OracleLinkedService
-from ._models_py3 import OraclePartitionSettings
-from ._models_py3 import OracleServiceCloudLinkedService
-from ._models_py3 import OracleServiceCloudObjectDataset
-from ._models_py3 import OracleServiceCloudSource
-from ._models_py3 import OracleSink
-from ._models_py3 import OracleSource
-from ._models_py3 import OracleTableDataset
-from ._models_py3 import OrcDataset
-from ._models_py3 import OrcFormat
-from ._models_py3 import OrcSink
-from ._models_py3 import OrcSource
-from ._models_py3 import OrcWriteSettings
-from ._models_py3 import OutputColumn
-from ._models_py3 import PackageStore
-from ._models_py3 import ParameterSpecification
-from ._models_py3 import ParquetDataset
-from ._models_py3 import ParquetFormat
-from ._models_py3 import ParquetReadSettings
-from ._models_py3 import ParquetSink
-from ._models_py3 import ParquetSource
-from ._models_py3 import ParquetWriteSettings
-from ._models_py3 import PaypalLinkedService
-from ._models_py3 import PaypalObjectDataset
-from ._models_py3 import PaypalSource
-from ._models_py3 import PhoenixLinkedService
-from ._models_py3 import PhoenixObjectDataset
-from ._models_py3 import PhoenixSource
-from ._models_py3 import PipelineElapsedTimeMetricPolicy
-from ._models_py3 import PipelineExternalComputeScaleProperties
-from ._models_py3 import PipelineFolder
-from ._models_py3 import PipelineListResponse
-from ._models_py3 import PipelinePolicy
-from ._models_py3 import PipelineReference
-from ._models_py3 import PipelineResource
-from ._models_py3 import PipelineRun
-from ._models_py3 import PipelineRunInvokedBy
-from ._models_py3 import PipelineRunsQueryResponse
-from ._models_py3 import PolybaseSettings
-from ._models_py3 import PostgreSqlLinkedService
-from ._models_py3 import PostgreSqlSource
-from ._models_py3 import PostgreSqlTableDataset
-from ._models_py3 import PostgreSqlV2LinkedService
-from ._models_py3 import PostgreSqlV2Source
-from ._models_py3 import PostgreSqlV2TableDataset
-from ._models_py3 import PowerQuerySink
-from ._models_py3 import PowerQuerySinkMapping
-from ._models_py3 import PowerQuerySource
-from ._models_py3 import PrestoLinkedService
-from ._models_py3 import PrestoObjectDataset
-from ._models_py3 import PrestoSource
-from ._models_py3 import PrivateEndpoint
-from ._models_py3 import PrivateEndpointConnectionListResponse
-from ._models_py3 import PrivateEndpointConnectionResource
-from ._models_py3 import PrivateLinkConnectionApprovalRequest
-from ._models_py3 import PrivateLinkConnectionApprovalRequestResource
-from ._models_py3 import PrivateLinkConnectionState
-from ._models_py3 import PrivateLinkResource
-from ._models_py3 import PrivateLinkResourceProperties
-from ._models_py3 import PrivateLinkResourcesWrapper
-from ._models_py3 import PurviewConfiguration
-from ._models_py3 import QueryDataFlowDebugSessionsResponse
-from ._models_py3 import QuickBooksLinkedService
-from ._models_py3 import QuickBooksObjectDataset
-from ._models_py3 import QuickBooksSource
-from ._models_py3 import QuickbaseLinkedService
-from ._models_py3 import RecurrenceSchedule
-from ._models_py3 import RecurrenceScheduleOccurrence
-from ._models_py3 import RedirectIncompatibleRowSettings
-from ._models_py3 import RedshiftUnloadSettings
-from ._models_py3 import RelationalSource
-from ._models_py3 import RelationalTableDataset
-from ._models_py3 import RemotePrivateEndpointConnection
-from ._models_py3 import RerunTumblingWindowTrigger
-from ._models_py3 import Resource
-from ._models_py3 import ResponsysLinkedService
-from ._models_py3 import ResponsysObjectDataset
-from ._models_py3 import ResponsysSource
-from ._models_py3 import RestResourceDataset
-from ._models_py3 import RestServiceLinkedService
-from ._models_py3 import RestSink
-from ._models_py3 import RestSource
-from ._models_py3 import RetryPolicy
-from ._models_py3 import RunFilterParameters
-from ._models_py3 import RunQueryFilter
-from ._models_py3 import RunQueryOrderBy
-from ._models_py3 import SSISAccessCredential
-from ._models_py3 import SSISChildPackage
-from ._models_py3 import SSISExecutionCredential
-from ._models_py3 import SSISExecutionParameter
-from ._models_py3 import SSISLogLocation
-from ._models_py3 import SSISPackageLocation
-from ._models_py3 import SSISPropertyOverride
-from ._models_py3 import SalesforceLinkedService
-from ._models_py3 import SalesforceMarketingCloudLinkedService
-from ._models_py3 import SalesforceMarketingCloudObjectDataset
-from ._models_py3 import SalesforceMarketingCloudSource
-from ._models_py3 import SalesforceObjectDataset
-from ._models_py3 import SalesforceServiceCloudLinkedService
-from ._models_py3 import SalesforceServiceCloudObjectDataset
-from ._models_py3 import SalesforceServiceCloudSink
-from ._models_py3 import SalesforceServiceCloudSource
-from ._models_py3 import SalesforceServiceCloudV2LinkedService
-from ._models_py3 import SalesforceServiceCloudV2ObjectDataset
-from ._models_py3 import SalesforceServiceCloudV2Sink
-from ._models_py3 import SalesforceServiceCloudV2Source
-from ._models_py3 import SalesforceSink
-from ._models_py3 import SalesforceSource
-from ._models_py3 import SalesforceV2LinkedService
-from ._models_py3 import SalesforceV2ObjectDataset
-from ._models_py3 import SalesforceV2Sink
-from ._models_py3 import SalesforceV2Source
-from ._models_py3 import SapBWLinkedService
-from ._models_py3 import SapBwCubeDataset
-from ._models_py3 import SapBwSource
-from ._models_py3 import SapCloudForCustomerLinkedService
-from ._models_py3 import SapCloudForCustomerResourceDataset
-from ._models_py3 import SapCloudForCustomerSink
-from ._models_py3 import SapCloudForCustomerSource
-from ._models_py3 import SapEccLinkedService
-from ._models_py3 import SapEccResourceDataset
-from ._models_py3 import SapEccSource
-from ._models_py3 import SapHanaLinkedService
-from ._models_py3 import SapHanaPartitionSettings
-from ._models_py3 import SapHanaSource
-from ._models_py3 import SapHanaTableDataset
-from ._models_py3 import SapOdpLinkedService
-from ._models_py3 import SapOdpResourceDataset
-from ._models_py3 import SapOdpSource
-from ._models_py3 import SapOpenHubLinkedService
-from ._models_py3 import SapOpenHubSource
-from ._models_py3 import SapOpenHubTableDataset
-from ._models_py3 import SapTableLinkedService
-from ._models_py3 import SapTablePartitionSettings
-from ._models_py3 import SapTableResourceDataset
-from ._models_py3 import SapTableSource
-from ._models_py3 import ScheduleTrigger
-from ._models_py3 import ScheduleTriggerRecurrence
-from ._models_py3 import ScriptAction
-from ._models_py3 import ScriptActivity
-from ._models_py3 import ScriptActivityParameter
-from ._models_py3 import ScriptActivityScriptBlock
-from ._models_py3 import ScriptActivityTypePropertiesLogSettings
-from ._models_py3 import SecretBase
-from ._models_py3 import SecureInputOutputPolicy
-from ._models_py3 import SecureString
-from ._models_py3 import SelfDependencyTumblingWindowTriggerReference
-from ._models_py3 import SelfHostedIntegrationRuntime
-from ._models_py3 import SelfHostedIntegrationRuntimeNode
-from ._models_py3 import SelfHostedIntegrationRuntimeStatus
-from ._models_py3 import ServiceNowLinkedService
-from ._models_py3 import ServiceNowObjectDataset
-from ._models_py3 import ServiceNowSource
-from ._models_py3 import ServiceNowV2LinkedService
-from ._models_py3 import ServiceNowV2ObjectDataset
-from ._models_py3 import ServiceNowV2Source
-from ._models_py3 import ServicePrincipalCredential
-from ._models_py3 import SetVariableActivity
-from ._models_py3 import SftpLocation
-from ._models_py3 import SftpReadSettings
-from ._models_py3 import SftpServerLinkedService
-from ._models_py3 import SftpWriteSettings
-from ._models_py3 import SharePointOnlineListLinkedService
-from ._models_py3 import SharePointOnlineListResourceDataset
-from ._models_py3 import SharePointOnlineListSource
-from ._models_py3 import ShopifyLinkedService
-from ._models_py3 import ShopifyObjectDataset
-from ._models_py3 import ShopifySource
-from ._models_py3 import SkipErrorFile
-from ._models_py3 import SmartsheetLinkedService
-from ._models_py3 import SnowflakeDataset
-from ._models_py3 import SnowflakeExportCopyCommand
-from ._models_py3 import SnowflakeImportCopyCommand
-from ._models_py3 import SnowflakeLinkedService
-from ._models_py3 import SnowflakeSink
-from ._models_py3 import SnowflakeSource
-from ._models_py3 import SnowflakeV2Dataset
-from ._models_py3 import SnowflakeV2LinkedService
-from ._models_py3 import SnowflakeV2Sink
-from ._models_py3 import SnowflakeV2Source
-from ._models_py3 import SparkConfigurationParametrizationReference
-from ._models_py3 import SparkLinkedService
-from ._models_py3 import SparkObjectDataset
-from ._models_py3 import SparkSource
-from ._models_py3 import SqlAlwaysEncryptedProperties
-from ._models_py3 import SqlDWSink
-from ._models_py3 import SqlDWSource
-from ._models_py3 import SqlDWUpsertSettings
-from ._models_py3 import SqlMISink
-from ._models_py3 import SqlMISource
-from ._models_py3 import SqlPartitionSettings
-from ._models_py3 import SqlServerBaseLinkedServiceTypeProperties
-from ._models_py3 import SqlServerLinkedService
-from ._models_py3 import SqlServerLinkedServiceTypeProperties
-from ._models_py3 import SqlServerSink
-from ._models_py3 import SqlServerSource
-from ._models_py3 import SqlServerStoredProcedureActivity
-from ._models_py3 import SqlServerTableDataset
-from ._models_py3 import SqlSink
-from ._models_py3 import SqlSource
-from ._models_py3 import SqlUpsertSettings
-from ._models_py3 import SquareLinkedService
-from ._models_py3 import SquareObjectDataset
-from ._models_py3 import SquareSource
-from ._models_py3 import SsisEnvironment
-from ._models_py3 import SsisEnvironmentReference
-from ._models_py3 import SsisFolder
-from ._models_py3 import SsisObjectMetadata
-from ._models_py3 import SsisObjectMetadataListResponse
-from ._models_py3 import SsisObjectMetadataStatusResponse
-from ._models_py3 import SsisPackage
-from ._models_py3 import SsisParameter
-from ._models_py3 import SsisProject
-from ._models_py3 import SsisVariable
-from ._models_py3 import StagingSettings
-from ._models_py3 import StoreReadSettings
-from ._models_py3 import StoreWriteSettings
-from ._models_py3 import StoredProcedureParameter
-from ._models_py3 import SubResource
-from ._models_py3 import SubResourceDebugResource
-from ._models_py3 import SwitchActivity
-from ._models_py3 import SwitchCase
-from ._models_py3 import SybaseLinkedService
-from ._models_py3 import SybaseSource
-from ._models_py3 import SybaseTableDataset
-from ._models_py3 import SynapseNotebookActivity
-from ._models_py3 import SynapseNotebookReference
-from ._models_py3 import SynapseSparkJobDefinitionActivity
-from ._models_py3 import SynapseSparkJobReference
-from ._models_py3 import TabularSource
-from ._models_py3 import TabularTranslator
-from ._models_py3 import TarGZipReadSettings
-from ._models_py3 import TarReadSettings
-from ._models_py3 import TeamDeskLinkedService
-from ._models_py3 import TeradataLinkedService
-from ._models_py3 import TeradataPartitionSettings
-from ._models_py3 import TeradataSource
-from ._models_py3 import TeradataTableDataset
-from ._models_py3 import TextFormat
-from ._models_py3 import Transformation
-from ._models_py3 import Trigger
-from ._models_py3 import TriggerDependencyReference
-from ._models_py3 import TriggerFilterParameters
-from ._models_py3 import TriggerListResponse
-from ._models_py3 import TriggerPipelineReference
-from ._models_py3 import TriggerQueryResponse
-from ._models_py3 import TriggerReference
-from ._models_py3 import TriggerResource
-from ._models_py3 import TriggerRun
-from ._models_py3 import TriggerRunsQueryResponse
-from ._models_py3 import TriggerSubscriptionOperationStatus
-from ._models_py3 import TumblingWindowTrigger
-from ._models_py3 import TumblingWindowTriggerDependencyReference
-from ._models_py3 import TwilioLinkedService
-from ._models_py3 import TypeConversionSettings
-from ._models_py3 import UntilActivity
-from ._models_py3 import UpdateIntegrationRuntimeNodeRequest
-from ._models_py3 import UpdateIntegrationRuntimeRequest
-from ._models_py3 import UserAccessPolicy
-from ._models_py3 import UserProperty
-from ._models_py3 import ValidationActivity
-from ._models_py3 import VariableSpecification
-from ._models_py3 import VerticaLinkedService
-from ._models_py3 import VerticaSource
-from ._models_py3 import VerticaTableDataset
-from ._models_py3 import WaitActivity
-from ._models_py3 import WarehouseLinkedService
-from ._models_py3 import WarehouseSink
-from ._models_py3 import WarehouseSource
-from ._models_py3 import WarehouseTableDataset
-from ._models_py3 import WebActivity
-from ._models_py3 import WebActivityAuthentication
-from ._models_py3 import WebAnonymousAuthentication
-from ._models_py3 import WebBasicAuthentication
-from ._models_py3 import WebClientCertificateAuthentication
-from ._models_py3 import WebHookActivity
-from ._models_py3 import WebLinkedService
-from ._models_py3 import WebLinkedServiceTypeProperties
-from ._models_py3 import WebSource
-from ._models_py3 import WebTableDataset
-from ._models_py3 import WranglingDataFlow
-from ._models_py3 import XeroLinkedService
-from ._models_py3 import XeroObjectDataset
-from ._models_py3 import XeroSource
-from ._models_py3 import XmlDataset
-from ._models_py3 import XmlReadSettings
-from ._models_py3 import XmlSource
-from ._models_py3 import ZendeskLinkedService
-from ._models_py3 import ZipDeflateReadSettings
-from ._models_py3 import ZohoLinkedService
-from ._models_py3 import ZohoObjectDataset
-from ._models_py3 import ZohoSource
+from typing import TYPE_CHECKING
-from ._data_factory_management_client_enums import ActivityOnInactiveMarkAs
-from ._data_factory_management_client_enums import ActivityState
-from ._data_factory_management_client_enums import AmazonRdsForOraclePartitionOption
-from ._data_factory_management_client_enums import AmazonRdsForSqlAuthenticationType
-from ._data_factory_management_client_enums import AvroCompressionCodec
-from ._data_factory_management_client_enums import AzureFunctionActivityMethod
-from ._data_factory_management_client_enums import AzureSearchIndexWriteBehaviorType
-from ._data_factory_management_client_enums import AzureSqlDWAuthenticationType
-from ._data_factory_management_client_enums import AzureSqlDatabaseAuthenticationType
-from ._data_factory_management_client_enums import AzureSqlMIAuthenticationType
-from ._data_factory_management_client_enums import AzureStorageAuthenticationType
-from ._data_factory_management_client_enums import BigDataPoolReferenceType
-from ._data_factory_management_client_enums import BlobEventTypes
-from ._data_factory_management_client_enums import CassandraSourceReadConsistencyLevels
-from ._data_factory_management_client_enums import CompressionCodec
-from ._data_factory_management_client_enums import ConfigurationType
-from ._data_factory_management_client_enums import ConnectionType
-from ._data_factory_management_client_enums import CopyBehaviorType
-from ._data_factory_management_client_enums import CosmosDbConnectionMode
-from ._data_factory_management_client_enums import CredentialReferenceType
-from ._data_factory_management_client_enums import DataFlowComputeType
-from ._data_factory_management_client_enums import DataFlowDebugCommandType
-from ._data_factory_management_client_enums import DataFlowReferenceType
-from ._data_factory_management_client_enums import DatasetCompressionLevel
-from ._data_factory_management_client_enums import DatasetReferenceType
-from ._data_factory_management_client_enums import DayOfWeek
-from ._data_factory_management_client_enums import DaysOfWeek
-from ._data_factory_management_client_enums import Db2AuthenticationType
-from ._data_factory_management_client_enums import DependencyCondition
-from ._data_factory_management_client_enums import DynamicsAuthenticationType
-from ._data_factory_management_client_enums import DynamicsDeploymentType
-from ._data_factory_management_client_enums import DynamicsSinkWriteBehavior
-from ._data_factory_management_client_enums import EventSubscriptionStatus
-from ._data_factory_management_client_enums import ExpressionType
-from ._data_factory_management_client_enums import ExpressionV2Type
-from ._data_factory_management_client_enums import FactoryIdentityType
-from ._data_factory_management_client_enums import FrequencyType
-from ._data_factory_management_client_enums import FtpAuthenticationType
-from ._data_factory_management_client_enums import GlobalParameterType
-from ._data_factory_management_client_enums import GoogleAdWordsAuthenticationType
-from ._data_factory_management_client_enums import GoogleBigQueryAuthenticationType
-from ._data_factory_management_client_enums import GoogleBigQueryV2AuthenticationType
-from ._data_factory_management_client_enums import HBaseAuthenticationType
-from ._data_factory_management_client_enums import HDInsightActivityDebugInfoOption
-from ._data_factory_management_client_enums import HdiNodeTypes
-from ._data_factory_management_client_enums import HiveAuthenticationType
-from ._data_factory_management_client_enums import HiveServerType
-from ._data_factory_management_client_enums import HiveThriftTransportProtocol
-from ._data_factory_management_client_enums import HttpAuthenticationType
-from ._data_factory_management_client_enums import ImpalaAuthenticationType
-from ._data_factory_management_client_enums import IntegrationRuntimeAuthKeyName
-from ._data_factory_management_client_enums import IntegrationRuntimeAutoUpdate
-from ._data_factory_management_client_enums import IntegrationRuntimeEdition
-from ._data_factory_management_client_enums import IntegrationRuntimeEntityReferenceType
-from ._data_factory_management_client_enums import IntegrationRuntimeInternalChannelEncryptionMode
-from ._data_factory_management_client_enums import IntegrationRuntimeLicenseType
-from ._data_factory_management_client_enums import IntegrationRuntimeReferenceType
-from ._data_factory_management_client_enums import IntegrationRuntimeSsisCatalogPricingTier
-from ._data_factory_management_client_enums import IntegrationRuntimeState
-from ._data_factory_management_client_enums import IntegrationRuntimeType
-from ._data_factory_management_client_enums import IntegrationRuntimeUpdateResult
-from ._data_factory_management_client_enums import JsonFormatFilePattern
-from ._data_factory_management_client_enums import JsonWriteFilePattern
-from ._data_factory_management_client_enums import ManagedIntegrationRuntimeNodeStatus
-from ._data_factory_management_client_enums import ManagedVirtualNetworkReferenceType
-from ._data_factory_management_client_enums import MappingType
-from ._data_factory_management_client_enums import MongoDbAuthenticationType
-from ._data_factory_management_client_enums import NetezzaPartitionOption
-from ._data_factory_management_client_enums import NotebookParameterType
-from ._data_factory_management_client_enums import NotebookReferenceType
-from ._data_factory_management_client_enums import ODataAadServicePrincipalCredentialType
-from ._data_factory_management_client_enums import ODataAuthenticationType
-from ._data_factory_management_client_enums import OraclePartitionOption
-from ._data_factory_management_client_enums import OrcCompressionCodec
-from ._data_factory_management_client_enums import ParameterType
-from ._data_factory_management_client_enums import PhoenixAuthenticationType
-from ._data_factory_management_client_enums import PipelineReferenceType
-from ._data_factory_management_client_enums import PolybaseSettingsRejectType
-from ._data_factory_management_client_enums import PrestoAuthenticationType
-from ._data_factory_management_client_enums import PublicNetworkAccess
-from ._data_factory_management_client_enums import RecurrenceFrequency
-from ._data_factory_management_client_enums import RestServiceAuthenticationType
-from ._data_factory_management_client_enums import RunQueryFilterOperand
-from ._data_factory_management_client_enums import RunQueryFilterOperator
-from ._data_factory_management_client_enums import RunQueryOrder
-from ._data_factory_management_client_enums import RunQueryOrderByField
-from ._data_factory_management_client_enums import SalesforceSinkWriteBehavior
-from ._data_factory_management_client_enums import SalesforceSourceReadBehavior
-from ._data_factory_management_client_enums import SalesforceV2SinkWriteBehavior
-from ._data_factory_management_client_enums import SapCloudForCustomerSinkWriteBehavior
-from ._data_factory_management_client_enums import SapHanaAuthenticationType
-from ._data_factory_management_client_enums import SapHanaPartitionOption
-from ._data_factory_management_client_enums import SapTablePartitionOption
-from ._data_factory_management_client_enums import ScriptActivityLogDestination
-from ._data_factory_management_client_enums import ScriptActivityParameterDirection
-from ._data_factory_management_client_enums import ScriptActivityParameterType
-from ._data_factory_management_client_enums import ScriptType
-from ._data_factory_management_client_enums import SelfHostedIntegrationRuntimeNodeStatus
-from ._data_factory_management_client_enums import ServiceNowAuthenticationType
-from ._data_factory_management_client_enums import ServiceNowV2AuthenticationType
-from ._data_factory_management_client_enums import ServicePrincipalCredentialType
-from ._data_factory_management_client_enums import SftpAuthenticationType
-from ._data_factory_management_client_enums import SnowflakeAuthenticationType
-from ._data_factory_management_client_enums import SparkAuthenticationType
-from ._data_factory_management_client_enums import SparkConfigurationReferenceType
-from ._data_factory_management_client_enums import SparkJobReferenceType
-from ._data_factory_management_client_enums import SparkServerType
-from ._data_factory_management_client_enums import SparkThriftTransportProtocol
-from ._data_factory_management_client_enums import SqlAlwaysEncryptedAkvAuthType
-from ._data_factory_management_client_enums import SqlDWWriteBehaviorEnum
-from ._data_factory_management_client_enums import SqlPartitionOption
-from ._data_factory_management_client_enums import SqlServerAuthenticationType
-from ._data_factory_management_client_enums import SqlWriteBehaviorEnum
-from ._data_factory_management_client_enums import SsisLogLocationType
-from ._data_factory_management_client_enums import SsisObjectMetadataType
-from ._data_factory_management_client_enums import SsisPackageLocationType
-from ._data_factory_management_client_enums import StoredProcedureParameterType
-from ._data_factory_management_client_enums import SybaseAuthenticationType
-from ._data_factory_management_client_enums import TeamDeskAuthenticationType
-from ._data_factory_management_client_enums import TeradataAuthenticationType
-from ._data_factory_management_client_enums import TeradataPartitionOption
-from ._data_factory_management_client_enums import TriggerReferenceType
-from ._data_factory_management_client_enums import TriggerRunStatus
-from ._data_factory_management_client_enums import TriggerRuntimeState
-from ._data_factory_management_client_enums import TumblingWindowFrequency
-from ._data_factory_management_client_enums import Type
-from ._data_factory_management_client_enums import VariableType
-from ._data_factory_management_client_enums import WebActivityMethod
-from ._data_factory_management_client_enums import WebAuthenticationType
-from ._data_factory_management_client_enums import WebHookActivityMethod
-from ._data_factory_management_client_enums import ZendeskAuthenticationType
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+
+from ._models_py3 import ( # type: ignore
+ AccessPolicyResponse,
+ Activity,
+ ActivityDependency,
+ ActivityPolicy,
+ ActivityRun,
+ ActivityRunsQueryResponse,
+ AddDataFlowToDebugSessionResponse,
+ AdditionalColumns,
+ AmazonMWSLinkedService,
+ AmazonMWSObjectDataset,
+ AmazonMWSSource,
+ AmazonRdsForOracleLinkedService,
+ AmazonRdsForOraclePartitionSettings,
+ AmazonRdsForOracleSource,
+ AmazonRdsForOracleTableDataset,
+ AmazonRdsForSqlServerLinkedService,
+ AmazonRdsForSqlServerLinkedServiceTypeProperties,
+ AmazonRdsForSqlServerSource,
+ AmazonRdsForSqlServerTableDataset,
+ AmazonRedshiftLinkedService,
+ AmazonRedshiftSource,
+ AmazonRedshiftTableDataset,
+ AmazonS3CompatibleLinkedService,
+ AmazonS3CompatibleLocation,
+ AmazonS3CompatibleReadSettings,
+ AmazonS3Dataset,
+ AmazonS3LinkedService,
+ AmazonS3Location,
+ AmazonS3ReadSettings,
+ AppFiguresLinkedService,
+ AppendVariableActivity,
+ ArmIdWrapper,
+ AsanaLinkedService,
+ AvroDataset,
+ AvroFormat,
+ AvroSink,
+ AvroSource,
+ AvroWriteSettings,
+ AzPowerShellSetup,
+ AzureBatchLinkedService,
+ AzureBlobDataset,
+ AzureBlobFSDataset,
+ AzureBlobFSLinkedService,
+ AzureBlobFSLocation,
+ AzureBlobFSReadSettings,
+ AzureBlobFSSink,
+ AzureBlobFSSource,
+ AzureBlobFSWriteSettings,
+ AzureBlobStorageLinkedService,
+ AzureBlobStorageLocation,
+ AzureBlobStorageReadSettings,
+ AzureBlobStorageWriteSettings,
+ AzureDataExplorerCommandActivity,
+ AzureDataExplorerLinkedService,
+ AzureDataExplorerSink,
+ AzureDataExplorerSource,
+ AzureDataExplorerTableDataset,
+ AzureDataLakeAnalyticsLinkedService,
+ AzureDataLakeStoreDataset,
+ AzureDataLakeStoreLinkedService,
+ AzureDataLakeStoreLocation,
+ AzureDataLakeStoreReadSettings,
+ AzureDataLakeStoreSink,
+ AzureDataLakeStoreSource,
+ AzureDataLakeStoreWriteSettings,
+ AzureDatabricksDeltaLakeDataset,
+ AzureDatabricksDeltaLakeExportCommand,
+ AzureDatabricksDeltaLakeImportCommand,
+ AzureDatabricksDeltaLakeLinkedService,
+ AzureDatabricksDeltaLakeSink,
+ AzureDatabricksDeltaLakeSource,
+ AzureDatabricksLinkedService,
+ AzureFileStorageLinkedService,
+ AzureFileStorageLocation,
+ AzureFileStorageReadSettings,
+ AzureFileStorageWriteSettings,
+ AzureFunctionActivity,
+ AzureFunctionLinkedService,
+ AzureKeyVaultLinkedService,
+ AzureKeyVaultSecretReference,
+ AzureMLBatchExecutionActivity,
+ AzureMLExecutePipelineActivity,
+ AzureMLLinkedService,
+ AzureMLServiceLinkedService,
+ AzureMLUpdateResourceActivity,
+ AzureMLWebServiceFile,
+ AzureMariaDBLinkedService,
+ AzureMariaDBSource,
+ AzureMariaDBTableDataset,
+ AzureMySqlLinkedService,
+ AzureMySqlSink,
+ AzureMySqlSource,
+ AzureMySqlTableDataset,
+ AzurePostgreSqlLinkedService,
+ AzurePostgreSqlSink,
+ AzurePostgreSqlSource,
+ AzurePostgreSqlTableDataset,
+ AzureQueueSink,
+ AzureSearchIndexDataset,
+ AzureSearchIndexSink,
+ AzureSearchLinkedService,
+ AzureSqlDWLinkedService,
+ AzureSqlDWLinkedServiceTypeProperties,
+ AzureSqlDWTableDataset,
+ AzureSqlDatabaseLinkedService,
+ AzureSqlDatabaseLinkedServiceTypeProperties,
+ AzureSqlMILinkedService,
+ AzureSqlMILinkedServiceTypeProperties,
+ AzureSqlMITableDataset,
+ AzureSqlSink,
+ AzureSqlSource,
+ AzureSqlTableDataset,
+ AzureStorageLinkedService,
+ AzureStorageLinkedServiceTypeProperties,
+ AzureSynapseArtifactsLinkedService,
+ AzureTableDataset,
+ AzureTableSink,
+ AzureTableSource,
+ AzureTableStorageLinkedService,
+ AzureTableStorageLinkedServiceTypeProperties,
+ BigDataPoolParametrizationReference,
+ BinaryDataset,
+ BinaryReadSettings,
+ BinarySink,
+ BinarySource,
+ BlobEventsTrigger,
+ BlobSink,
+ BlobSource,
+ BlobTrigger,
+ CMKIdentityDefinition,
+ CassandraLinkedService,
+ CassandraSource,
+ CassandraTableDataset,
+ ChainingTrigger,
+ ChangeDataCaptureFolder,
+ ChangeDataCaptureListResponse,
+ ChangeDataCaptureResource,
+ CloudError,
+ CmdkeySetup,
+ CommonDataServiceForAppsEntityDataset,
+ CommonDataServiceForAppsLinkedService,
+ CommonDataServiceForAppsSink,
+ CommonDataServiceForAppsSource,
+ ComponentSetup,
+ CompressionReadSettings,
+ ConcurLinkedService,
+ ConcurObjectDataset,
+ ConcurSource,
+ ConnectionStateProperties,
+ ContinuationSettingsReference,
+ ControlActivity,
+ CopyActivity,
+ CopyActivityLogSettings,
+ CopyComputeScaleProperties,
+ CopySink,
+ CopySource,
+ CopyTranslator,
+ CosmosDbLinkedService,
+ CosmosDbMongoDbApiCollectionDataset,
+ CosmosDbMongoDbApiLinkedService,
+ CosmosDbMongoDbApiSink,
+ CosmosDbMongoDbApiSource,
+ CosmosDbSqlApiCollectionDataset,
+ CosmosDbSqlApiSink,
+ CosmosDbSqlApiSource,
+ CouchbaseLinkedService,
+ CouchbaseSource,
+ CouchbaseTableDataset,
+ CreateDataFlowDebugSessionRequest,
+ CreateDataFlowDebugSessionResponse,
+ CreateLinkedIntegrationRuntimeRequest,
+ CreateRunResponse,
+ Credential,
+ CredentialListResponse,
+ CredentialReference,
+ CredentialResource,
+ CustomActivity,
+ CustomActivityReferenceObject,
+ CustomDataSourceLinkedService,
+ CustomDataset,
+ CustomEventsTrigger,
+ CustomSetupBase,
+ DWCopyCommandDefaultValue,
+ DWCopyCommandSettings,
+ DataFlow,
+ DataFlowDebugCommandPayload,
+ DataFlowDebugCommandRequest,
+ DataFlowDebugCommandResponse,
+ DataFlowDebugPackage,
+ DataFlowDebugPackageDebugSettings,
+ DataFlowDebugResource,
+ DataFlowDebugSessionInfo,
+ DataFlowFolder,
+ DataFlowListResponse,
+ DataFlowReference,
+ DataFlowResource,
+ DataFlowSink,
+ DataFlowSource,
+ DataFlowSourceSetting,
+ DataFlowStagingInfo,
+ DataLakeAnalyticsUSQLActivity,
+ DataMapperMapping,
+ DatabricksNotebookActivity,
+ DatabricksSparkJarActivity,
+ DatabricksSparkPythonActivity,
+ Dataset,
+ DatasetCompression,
+ DatasetDataElement,
+ DatasetDebugResource,
+ DatasetFolder,
+ DatasetListResponse,
+ DatasetLocation,
+ DatasetReference,
+ DatasetResource,
+ DatasetSchemaDataElement,
+ DatasetStorageFormat,
+ DataworldLinkedService,
+ Db2LinkedService,
+ Db2Source,
+ Db2TableDataset,
+ DeleteActivity,
+ DeleteDataFlowDebugSessionRequest,
+ DelimitedTextDataset,
+ DelimitedTextReadSettings,
+ DelimitedTextSink,
+ DelimitedTextSource,
+ DelimitedTextWriteSettings,
+ DependencyReference,
+ DistcpSettings,
+ DocumentDbCollectionDataset,
+ DocumentDbCollectionSink,
+ DocumentDbCollectionSource,
+ DrillLinkedService,
+ DrillSource,
+ DrillTableDataset,
+ DynamicsAXLinkedService,
+ DynamicsAXResourceDataset,
+ DynamicsAXSource,
+ DynamicsCrmEntityDataset,
+ DynamicsCrmLinkedService,
+ DynamicsCrmSink,
+ DynamicsCrmSource,
+ DynamicsEntityDataset,
+ DynamicsLinkedService,
+ DynamicsSink,
+ DynamicsSource,
+ EloquaLinkedService,
+ EloquaObjectDataset,
+ EloquaSource,
+ EncryptionConfiguration,
+ EntityReference,
+ EnvironmentVariableSetup,
+ ExcelDataset,
+ ExcelSource,
+ ExecuteDataFlowActivity,
+ ExecuteDataFlowActivityTypeProperties,
+ ExecuteDataFlowActivityTypePropertiesCompute,
+ ExecutePipelineActivity,
+ ExecutePipelineActivityPolicy,
+ ExecutePowerQueryActivityTypeProperties,
+ ExecuteSSISPackageActivity,
+ ExecuteWranglingDataflowActivity,
+ ExecutionActivity,
+ ExportSettings,
+ ExposureControlBatchRequest,
+ ExposureControlBatchResponse,
+ ExposureControlRequest,
+ ExposureControlResponse,
+ Expression,
+ ExpressionV2,
+ Factory,
+ FactoryGitHubConfiguration,
+ FactoryIdentity,
+ FactoryListResponse,
+ FactoryRepoConfiguration,
+ FactoryRepoUpdate,
+ FactoryUpdateParameters,
+ FactoryVSTSConfiguration,
+ FailActivity,
+ FileServerLinkedService,
+ FileServerLocation,
+ FileServerReadSettings,
+ FileServerWriteSettings,
+ FileShareDataset,
+ FileSystemSink,
+ FileSystemSource,
+ FilterActivity,
+ Flowlet,
+ ForEachActivity,
+ FormatReadSettings,
+ FormatWriteSettings,
+ FtpReadSettings,
+ FtpServerLinkedService,
+ FtpServerLocation,
+ GetDataFactoryOperationStatusResponse,
+ GetMetadataActivity,
+ GetSsisObjectMetadataRequest,
+ GitHubAccessTokenRequest,
+ GitHubAccessTokenResponse,
+ GitHubClientSecret,
+ GlobalParameterListResponse,
+ GlobalParameterResource,
+ GlobalParameterSpecification,
+ GoogleAdWordsLinkedService,
+ GoogleAdWordsObjectDataset,
+ GoogleAdWordsSource,
+ GoogleBigQueryLinkedService,
+ GoogleBigQueryObjectDataset,
+ GoogleBigQuerySource,
+ GoogleBigQueryV2LinkedService,
+ GoogleBigQueryV2ObjectDataset,
+ GoogleBigQueryV2Source,
+ GoogleCloudStorageLinkedService,
+ GoogleCloudStorageLocation,
+ GoogleCloudStorageReadSettings,
+ GoogleSheetsLinkedService,
+ GreenplumLinkedService,
+ GreenplumSource,
+ GreenplumTableDataset,
+ HBaseLinkedService,
+ HBaseObjectDataset,
+ HBaseSource,
+ HDInsightHiveActivity,
+ HDInsightLinkedService,
+ HDInsightMapReduceActivity,
+ HDInsightOnDemandLinkedService,
+ HDInsightPigActivity,
+ HDInsightSparkActivity,
+ HDInsightStreamingActivity,
+ HdfsLinkedService,
+ HdfsLocation,
+ HdfsReadSettings,
+ HdfsSource,
+ HiveLinkedService,
+ HiveObjectDataset,
+ HiveSource,
+ HttpDataset,
+ HttpLinkedService,
+ HttpReadSettings,
+ HttpServerLocation,
+ HttpSource,
+ HubspotLinkedService,
+ HubspotObjectDataset,
+ HubspotSource,
+ IcebergDataset,
+ IcebergSink,
+ IcebergWriteSettings,
+ IfConditionActivity,
+ ImpalaLinkedService,
+ ImpalaObjectDataset,
+ ImpalaSource,
+ ImportSettings,
+ InformixLinkedService,
+ InformixSink,
+ InformixSource,
+ InformixTableDataset,
+ IntegrationRuntime,
+ IntegrationRuntimeAuthKeys,
+ IntegrationRuntimeComputeProperties,
+ IntegrationRuntimeConnectionInfo,
+ IntegrationRuntimeCustomSetupScriptProperties,
+ IntegrationRuntimeCustomerVirtualNetwork,
+ IntegrationRuntimeDataFlowProperties,
+ IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem,
+ IntegrationRuntimeDataProxyProperties,
+ IntegrationRuntimeDebugResource,
+ IntegrationRuntimeListResponse,
+ IntegrationRuntimeMonitoringData,
+ IntegrationRuntimeNodeIpAddress,
+ IntegrationRuntimeNodeMonitoringData,
+ IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint,
+ IntegrationRuntimeOutboundNetworkDependenciesEndpoint,
+ IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails,
+ IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse,
+ IntegrationRuntimeReference,
+ IntegrationRuntimeRegenerateKeyParameters,
+ IntegrationRuntimeResource,
+ IntegrationRuntimeSsisCatalogInfo,
+ IntegrationRuntimeSsisProperties,
+ IntegrationRuntimeStatus,
+ IntegrationRuntimeStatusListResponse,
+ IntegrationRuntimeStatusResponse,
+ IntegrationRuntimeVNetProperties,
+ JiraLinkedService,
+ JiraObjectDataset,
+ JiraSource,
+ JsonDataset,
+ JsonFormat,
+ JsonReadSettings,
+ JsonSink,
+ JsonSource,
+ JsonWriteSettings,
+ LakeHouseLinkedService,
+ LakeHouseLocation,
+ LakeHouseReadSettings,
+ LakeHouseTableDataset,
+ LakeHouseTableSink,
+ LakeHouseTableSource,
+ LakeHouseWriteSettings,
+ LinkedIntegrationRuntime,
+ LinkedIntegrationRuntimeKeyAuthorization,
+ LinkedIntegrationRuntimeRbacAuthorization,
+ LinkedIntegrationRuntimeRequest,
+ LinkedIntegrationRuntimeType,
+ LinkedService,
+ LinkedServiceDebugResource,
+ LinkedServiceListResponse,
+ LinkedServiceReference,
+ LinkedServiceResource,
+ LogLocationSettings,
+ LogSettings,
+ LogStorageSettings,
+ LookupActivity,
+ MagentoLinkedService,
+ MagentoObjectDataset,
+ MagentoSource,
+ ManagedIdentityCredential,
+ ManagedIntegrationRuntime,
+ ManagedIntegrationRuntimeError,
+ ManagedIntegrationRuntimeNode,
+ ManagedIntegrationRuntimeOperationResult,
+ ManagedIntegrationRuntimeStatus,
+ ManagedPrivateEndpoint,
+ ManagedPrivateEndpointListResponse,
+ ManagedPrivateEndpointResource,
+ ManagedVirtualNetwork,
+ ManagedVirtualNetworkListResponse,
+ ManagedVirtualNetworkReference,
+ ManagedVirtualNetworkResource,
+ MapperAttributeMapping,
+ MapperAttributeMappings,
+ MapperAttributeReference,
+ MapperConnection,
+ MapperConnectionReference,
+ MapperDslConnectorProperties,
+ MapperPolicy,
+ MapperPolicyRecurrence,
+ MapperSourceConnectionsInfo,
+ MapperTable,
+ MapperTableSchema,
+ MapperTargetConnectionsInfo,
+ MappingDataFlow,
+ MariaDBLinkedService,
+ MariaDBSource,
+ MariaDBTableDataset,
+ MarketoLinkedService,
+ MarketoObjectDataset,
+ MarketoSource,
+ MetadataItem,
+ MicrosoftAccessLinkedService,
+ MicrosoftAccessSink,
+ MicrosoftAccessSource,
+ MicrosoftAccessTableDataset,
+ MongoDbAtlasCollectionDataset,
+ MongoDbAtlasLinkedService,
+ MongoDbAtlasSink,
+ MongoDbAtlasSource,
+ MongoDbCollectionDataset,
+ MongoDbCursorMethodsProperties,
+ MongoDbLinkedService,
+ MongoDbSource,
+ MongoDbV2CollectionDataset,
+ MongoDbV2LinkedService,
+ MongoDbV2Sink,
+ MongoDbV2Source,
+ MultiplePipelineTrigger,
+ MySqlLinkedService,
+ MySqlSource,
+ MySqlTableDataset,
+ NetezzaLinkedService,
+ NetezzaPartitionSettings,
+ NetezzaSource,
+ NetezzaTableDataset,
+ NotebookParameter,
+ ODataLinkedService,
+ ODataResourceDataset,
+ ODataSource,
+ OdbcLinkedService,
+ OdbcSink,
+ OdbcSource,
+ OdbcTableDataset,
+ Office365Dataset,
+ Office365LinkedService,
+ Office365Source,
+ Operation,
+ OperationDisplay,
+ OperationListResponse,
+ OperationLogSpecification,
+ OperationMetricAvailability,
+ OperationMetricDimension,
+ OperationMetricSpecification,
+ OperationServiceSpecification,
+ OracleCloudStorageLinkedService,
+ OracleCloudStorageLocation,
+ OracleCloudStorageReadSettings,
+ OracleLinkedService,
+ OraclePartitionSettings,
+ OracleServiceCloudLinkedService,
+ OracleServiceCloudObjectDataset,
+ OracleServiceCloudSource,
+ OracleSink,
+ OracleSource,
+ OracleTableDataset,
+ OrcDataset,
+ OrcFormat,
+ OrcSink,
+ OrcSource,
+ OrcWriteSettings,
+ OutputColumn,
+ PackageStore,
+ ParameterSpecification,
+ ParquetDataset,
+ ParquetFormat,
+ ParquetReadSettings,
+ ParquetSink,
+ ParquetSource,
+ ParquetWriteSettings,
+ PaypalLinkedService,
+ PaypalObjectDataset,
+ PaypalSource,
+ PhoenixLinkedService,
+ PhoenixObjectDataset,
+ PhoenixSource,
+ PipelineElapsedTimeMetricPolicy,
+ PipelineExternalComputeScaleProperties,
+ PipelineFolder,
+ PipelineListResponse,
+ PipelinePolicy,
+ PipelineReference,
+ PipelineResource,
+ PipelineRun,
+ PipelineRunInvokedBy,
+ PipelineRunsQueryResponse,
+ PolybaseSettings,
+ PostgreSqlLinkedService,
+ PostgreSqlSource,
+ PostgreSqlTableDataset,
+ PostgreSqlV2LinkedService,
+ PostgreSqlV2Source,
+ PostgreSqlV2TableDataset,
+ PowerQuerySink,
+ PowerQuerySinkMapping,
+ PowerQuerySource,
+ PrestoLinkedService,
+ PrestoObjectDataset,
+ PrestoSource,
+ PrivateEndpoint,
+ PrivateEndpointConnectionListResponse,
+ PrivateEndpointConnectionResource,
+ PrivateLinkConnectionApprovalRequest,
+ PrivateLinkConnectionApprovalRequestResource,
+ PrivateLinkConnectionState,
+ PrivateLinkResource,
+ PrivateLinkResourceProperties,
+ PrivateLinkResourcesWrapper,
+ PurviewConfiguration,
+ QueryDataFlowDebugSessionsResponse,
+ QuickBooksLinkedService,
+ QuickBooksObjectDataset,
+ QuickBooksSource,
+ QuickbaseLinkedService,
+ RecurrenceSchedule,
+ RecurrenceScheduleOccurrence,
+ RedirectIncompatibleRowSettings,
+ RedshiftUnloadSettings,
+ RelationalSource,
+ RelationalTableDataset,
+ RemotePrivateEndpointConnection,
+ RerunTumblingWindowTrigger,
+ Resource,
+ ResponsysLinkedService,
+ ResponsysObjectDataset,
+ ResponsysSource,
+ RestResourceDataset,
+ RestServiceLinkedService,
+ RestSink,
+ RestSource,
+ RetryPolicy,
+ RunFilterParameters,
+ RunQueryFilter,
+ RunQueryOrderBy,
+ SSISAccessCredential,
+ SSISChildPackage,
+ SSISExecutionCredential,
+ SSISExecutionParameter,
+ SSISLogLocation,
+ SSISPackageLocation,
+ SSISPropertyOverride,
+ SalesforceLinkedService,
+ SalesforceMarketingCloudLinkedService,
+ SalesforceMarketingCloudObjectDataset,
+ SalesforceMarketingCloudSource,
+ SalesforceObjectDataset,
+ SalesforceServiceCloudLinkedService,
+ SalesforceServiceCloudObjectDataset,
+ SalesforceServiceCloudSink,
+ SalesforceServiceCloudSource,
+ SalesforceServiceCloudV2LinkedService,
+ SalesforceServiceCloudV2ObjectDataset,
+ SalesforceServiceCloudV2Sink,
+ SalesforceServiceCloudV2Source,
+ SalesforceSink,
+ SalesforceSource,
+ SalesforceV2LinkedService,
+ SalesforceV2ObjectDataset,
+ SalesforceV2Sink,
+ SalesforceV2Source,
+ SapBWLinkedService,
+ SapBwCubeDataset,
+ SapBwSource,
+ SapCloudForCustomerLinkedService,
+ SapCloudForCustomerResourceDataset,
+ SapCloudForCustomerSink,
+ SapCloudForCustomerSource,
+ SapEccLinkedService,
+ SapEccResourceDataset,
+ SapEccSource,
+ SapHanaLinkedService,
+ SapHanaPartitionSettings,
+ SapHanaSource,
+ SapHanaTableDataset,
+ SapOdpLinkedService,
+ SapOdpResourceDataset,
+ SapOdpSource,
+ SapOpenHubLinkedService,
+ SapOpenHubSource,
+ SapOpenHubTableDataset,
+ SapTableLinkedService,
+ SapTablePartitionSettings,
+ SapTableResourceDataset,
+ SapTableSource,
+ ScheduleTrigger,
+ ScheduleTriggerRecurrence,
+ ScriptAction,
+ ScriptActivity,
+ ScriptActivityParameter,
+ ScriptActivityScriptBlock,
+ ScriptActivityTypePropertiesLogSettings,
+ SecretBase,
+ SecureInputOutputPolicy,
+ SecureString,
+ SelfDependencyTumblingWindowTriggerReference,
+ SelfHostedIntegrationRuntime,
+ SelfHostedIntegrationRuntimeNode,
+ SelfHostedIntegrationRuntimeStatus,
+ ServiceNowLinkedService,
+ ServiceNowObjectDataset,
+ ServiceNowSource,
+ ServiceNowV2LinkedService,
+ ServiceNowV2ObjectDataset,
+ ServiceNowV2Source,
+ ServicePrincipalCredential,
+ SetVariableActivity,
+ SftpLocation,
+ SftpReadSettings,
+ SftpServerLinkedService,
+ SftpWriteSettings,
+ SharePointOnlineListLinkedService,
+ SharePointOnlineListResourceDataset,
+ SharePointOnlineListSource,
+ ShopifyLinkedService,
+ ShopifyObjectDataset,
+ ShopifySource,
+ SkipErrorFile,
+ SmartsheetLinkedService,
+ SnowflakeDataset,
+ SnowflakeExportCopyCommand,
+ SnowflakeImportCopyCommand,
+ SnowflakeLinkedService,
+ SnowflakeSink,
+ SnowflakeSource,
+ SnowflakeV2Dataset,
+ SnowflakeV2LinkedService,
+ SnowflakeV2Sink,
+ SnowflakeV2Source,
+ SparkConfigurationParametrizationReference,
+ SparkLinkedService,
+ SparkObjectDataset,
+ SparkSource,
+ SqlAlwaysEncryptedProperties,
+ SqlDWSink,
+ SqlDWSource,
+ SqlDWUpsertSettings,
+ SqlMISink,
+ SqlMISource,
+ SqlPartitionSettings,
+ SqlServerBaseLinkedServiceTypeProperties,
+ SqlServerLinkedService,
+ SqlServerLinkedServiceTypeProperties,
+ SqlServerSink,
+ SqlServerSource,
+ SqlServerStoredProcedureActivity,
+ SqlServerTableDataset,
+ SqlSink,
+ SqlSource,
+ SqlUpsertSettings,
+ SquareLinkedService,
+ SquareObjectDataset,
+ SquareSource,
+ SsisEnvironment,
+ SsisEnvironmentReference,
+ SsisFolder,
+ SsisObjectMetadata,
+ SsisObjectMetadataListResponse,
+ SsisObjectMetadataStatusResponse,
+ SsisPackage,
+ SsisParameter,
+ SsisProject,
+ SsisVariable,
+ StagingSettings,
+ StoreReadSettings,
+ StoreWriteSettings,
+ StoredProcedureParameter,
+ SubResource,
+ SubResourceDebugResource,
+ SwitchActivity,
+ SwitchCase,
+ SybaseLinkedService,
+ SybaseSource,
+ SybaseTableDataset,
+ SynapseNotebookActivity,
+ SynapseNotebookReference,
+ SynapseSparkJobDefinitionActivity,
+ SynapseSparkJobReference,
+ TabularSource,
+ TabularTranslator,
+ TarGZipReadSettings,
+ TarReadSettings,
+ TeamDeskLinkedService,
+ TeradataLinkedService,
+ TeradataPartitionSettings,
+ TeradataSource,
+ TeradataTableDataset,
+ TextFormat,
+ Transformation,
+ Trigger,
+ TriggerDependencyReference,
+ TriggerFilterParameters,
+ TriggerListResponse,
+ TriggerPipelineReference,
+ TriggerQueryResponse,
+ TriggerReference,
+ TriggerResource,
+ TriggerRun,
+ TriggerRunsQueryResponse,
+ TriggerSubscriptionOperationStatus,
+ TumblingWindowTrigger,
+ TumblingWindowTriggerDependencyReference,
+ TwilioLinkedService,
+ TypeConversionSettings,
+ UntilActivity,
+ UpdateIntegrationRuntimeNodeRequest,
+ UpdateIntegrationRuntimeRequest,
+ UserAccessPolicy,
+ UserProperty,
+ ValidationActivity,
+ VariableSpecification,
+ VerticaLinkedService,
+ VerticaSource,
+ VerticaTableDataset,
+ WaitActivity,
+ WarehouseLinkedService,
+ WarehouseSink,
+ WarehouseSource,
+ WarehouseTableDataset,
+ WebActivity,
+ WebActivityAuthentication,
+ WebAnonymousAuthentication,
+ WebBasicAuthentication,
+ WebClientCertificateAuthentication,
+ WebHookActivity,
+ WebLinkedService,
+ WebLinkedServiceTypeProperties,
+ WebSource,
+ WebTableDataset,
+ WranglingDataFlow,
+ XeroLinkedService,
+ XeroObjectDataset,
+ XeroSource,
+ XmlDataset,
+ XmlReadSettings,
+ XmlSource,
+ ZendeskLinkedService,
+ ZipDeflateReadSettings,
+ ZohoLinkedService,
+ ZohoObjectDataset,
+ ZohoSource,
+)
+
+from ._data_factory_management_client_enums import ( # type: ignore
+ ActivityOnInactiveMarkAs,
+ ActivityState,
+ AmazonRdsForOraclePartitionOption,
+ AmazonRdsForSqlAuthenticationType,
+ AvroCompressionCodec,
+ AzureFunctionActivityMethod,
+ AzureSearchIndexWriteBehaviorType,
+ AzureSqlDWAuthenticationType,
+ AzureSqlDatabaseAuthenticationType,
+ AzureSqlMIAuthenticationType,
+ AzureStorageAuthenticationType,
+ BigDataPoolReferenceType,
+ BlobEventTypes,
+ CassandraSourceReadConsistencyLevels,
+ CompressionCodec,
+ ConfigurationType,
+ ConnectionType,
+ CopyBehaviorType,
+ CosmosDbConnectionMode,
+ CredentialReferenceType,
+ DataFlowComputeType,
+ DataFlowDebugCommandType,
+ DataFlowReferenceType,
+ DatasetCompressionLevel,
+ DatasetReferenceType,
+ DayOfWeek,
+ DaysOfWeek,
+ Db2AuthenticationType,
+ DependencyCondition,
+ DynamicsAuthenticationType,
+ DynamicsDeploymentType,
+ DynamicsSinkWriteBehavior,
+ EventSubscriptionStatus,
+ ExpressionType,
+ ExpressionV2Type,
+ FactoryIdentityType,
+ FrequencyType,
+ FtpAuthenticationType,
+ GlobalParameterType,
+ GoogleAdWordsAuthenticationType,
+ GoogleBigQueryAuthenticationType,
+ GoogleBigQueryV2AuthenticationType,
+ HBaseAuthenticationType,
+ HDInsightActivityDebugInfoOption,
+ HdiNodeTypes,
+ HiveAuthenticationType,
+ HiveServerType,
+ HiveThriftTransportProtocol,
+ HttpAuthenticationType,
+ ImpalaAuthenticationType,
+ IntegrationRuntimeAuthKeyName,
+ IntegrationRuntimeAutoUpdate,
+ IntegrationRuntimeEdition,
+ IntegrationRuntimeEntityReferenceType,
+ IntegrationRuntimeInternalChannelEncryptionMode,
+ IntegrationRuntimeLicenseType,
+ IntegrationRuntimeReferenceType,
+ IntegrationRuntimeSsisCatalogPricingTier,
+ IntegrationRuntimeState,
+ IntegrationRuntimeType,
+ IntegrationRuntimeUpdateResult,
+ JsonFormatFilePattern,
+ JsonWriteFilePattern,
+ ManagedIntegrationRuntimeNodeStatus,
+ ManagedVirtualNetworkReferenceType,
+ MappingType,
+ MongoDbAuthenticationType,
+ NetezzaPartitionOption,
+ NotebookParameterType,
+ NotebookReferenceType,
+ ODataAadServicePrincipalCredentialType,
+ ODataAuthenticationType,
+ OraclePartitionOption,
+ OrcCompressionCodec,
+ ParameterType,
+ PhoenixAuthenticationType,
+ PipelineReferenceType,
+ PolybaseSettingsRejectType,
+ PrestoAuthenticationType,
+ PublicNetworkAccess,
+ RecurrenceFrequency,
+ RestServiceAuthenticationType,
+ RunQueryFilterOperand,
+ RunQueryFilterOperator,
+ RunQueryOrder,
+ RunQueryOrderByField,
+ SalesforceSinkWriteBehavior,
+ SalesforceSourceReadBehavior,
+ SalesforceV2SinkWriteBehavior,
+ SapCloudForCustomerSinkWriteBehavior,
+ SapHanaAuthenticationType,
+ SapHanaPartitionOption,
+ SapTablePartitionOption,
+ ScriptActivityLogDestination,
+ ScriptActivityParameterDirection,
+ ScriptActivityParameterType,
+ ScriptType,
+ SelfHostedIntegrationRuntimeNodeStatus,
+ ServiceNowAuthenticationType,
+ ServiceNowV2AuthenticationType,
+ ServicePrincipalCredentialType,
+ SftpAuthenticationType,
+ SnowflakeAuthenticationType,
+ SparkAuthenticationType,
+ SparkConfigurationReferenceType,
+ SparkJobReferenceType,
+ SparkServerType,
+ SparkThriftTransportProtocol,
+ SqlAlwaysEncryptedAkvAuthType,
+ SqlDWWriteBehaviorEnum,
+ SqlPartitionOption,
+ SqlServerAuthenticationType,
+ SqlWriteBehaviorEnum,
+ SsisLogLocationType,
+ SsisObjectMetadataType,
+ SsisPackageLocationType,
+ StoredProcedureParameterType,
+ SybaseAuthenticationType,
+ TeamDeskAuthenticationType,
+ TeradataAuthenticationType,
+ TeradataPartitionOption,
+ TriggerReferenceType,
+ TriggerRunStatus,
+ TriggerRuntimeState,
+ TumblingWindowFrequency,
+ Type,
+ VariableType,
+ WebActivityMethod,
+ WebAuthenticationType,
+ WebHookActivityMethod,
+ ZendeskAuthenticationType,
+)
from ._patch import __all__ as _patch_all
-from ._patch import * # pylint: disable=unused-wildcard-import
+from ._patch import *
from ._patch import patch_sdk as _patch_sdk
__all__ = [
@@ -1271,6 +1286,9 @@
"HubspotLinkedService",
"HubspotObjectDataset",
"HubspotSource",
+ "IcebergDataset",
+ "IcebergSink",
+ "IcebergWriteSettings",
"IfConditionActivity",
"ImpalaLinkedService",
"ImpalaObjectDataset",
@@ -1843,5 +1861,5 @@
"WebHookActivityMethod",
"ZendeskAuthenticationType",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py
index bc478c060a66..ac24170e4c2a 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py
@@ -1,3 +1,4 @@
+# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py
index 6415cfc305b6..c93dd92dc173 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py
@@ -1,5 +1,5 @@
-# coding=utf-8
# pylint: disable=too-many-lines
+# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
@@ -16,10 +16,9 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from .. import models as _models
JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object
@@ -287,7 +286,7 @@ def __init__(
self.secure_output = secure_output
-class ActivityRun(_serialization.Model): # pylint: disable=too-many-instance-attributes
+class ActivityRun(_serialization.Model):
"""Information about an activity run in a pipeline.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -699,7 +698,7 @@ def __init__(
self.annotations = annotations
-class AmazonMWSLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class AmazonMWSLinkedService(LinkedService):
"""Amazon Marketplace Web Service linked service.
All required parameters must be populated in order to send to server.
@@ -879,7 +878,7 @@ class Dataset(_serialization.Model):
DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, ExcelDataset,
FileShareDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset,
GoogleBigQueryV2ObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset,
- HttpDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset,
+ HttpDataset, HubspotObjectDataset, IcebergDataset, ImpalaObjectDataset, InformixTableDataset,
JiraObjectDataset, JsonDataset, LakeHouseTableDataset, MagentoObjectDataset,
MariaDBTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset,
MongoDbAtlasCollectionDataset, MongoDbCollectionDataset, MongoDbV2CollectionDataset,
@@ -988,6 +987,7 @@ class Dataset(_serialization.Model):
"HiveObject": "HiveObjectDataset",
"HttpFile": "HttpDataset",
"HubspotObject": "HubspotObjectDataset",
+ "Iceberg": "IcebergDataset",
"ImpalaObject": "ImpalaObjectDataset",
"InformixTable": "InformixTableDataset",
"JiraObject": "JiraObjectDataset",
@@ -1763,7 +1763,7 @@ def __init__(
self.partition_lower_bound = partition_lower_bound
-class AmazonRdsForOracleSource(CopySource): # pylint: disable=too-many-instance-attributes
+class AmazonRdsForOracleSource(CopySource):
"""A copy activity AmazonRdsForOracle source.
All required parameters must be populated in order to send to server.
@@ -1884,7 +1884,7 @@ def __init__(
self.additional_columns = additional_columns
-class AmazonRdsForOracleTableDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class AmazonRdsForOracleTableDataset(Dataset):
"""The AmazonRdsForOracle database dataset.
All required parameters must be populated in order to send to server.
@@ -1997,7 +1997,7 @@ def __init__(
self.table = table
-class AmazonRdsForSqlServerLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class AmazonRdsForSqlServerLinkedService(LinkedService):
"""Amazon RDS for SQL Server linked service.
All required parameters must be populated in order to send to server.
@@ -2342,7 +2342,7 @@ def __init__( # pylint: disable=too-many-locals
self.always_encrypted_settings = always_encrypted_settings
-class SqlServerBaseLinkedServiceTypeProperties(_serialization.Model): # pylint: disable=too-many-instance-attributes
+class SqlServerBaseLinkedServiceTypeProperties(_serialization.Model):
"""Sql Server family connector common linked service properties.
:ivar server: The name or network address of the instance of SQL Server to which to connect,
@@ -2577,7 +2577,7 @@ def __init__(
class AmazonRdsForSqlServerLinkedServiceTypeProperties(
SqlServerBaseLinkedServiceTypeProperties
-): # pylint: disable=too-many-instance-attributes,name-too-long
+): # pylint: disable=name-too-long
"""Amazon Rds for SQL Server linked service properties.
:ivar server: The name or network address of the instance of SQL Server to which to connect,
@@ -2865,7 +2865,7 @@ def __init__( # pylint: disable=too-many-locals
self.always_encrypted_settings = always_encrypted_settings
-class AmazonRdsForSqlServerSource(TabularSource): # pylint: disable=too-many-instance-attributes
+class AmazonRdsForSqlServerSource(TabularSource):
"""A copy activity Amazon RDS for SQL Server source.
All required parameters must be populated in order to send to server.
@@ -3020,7 +3020,7 @@ def __init__(
self.partition_settings = partition_settings
-class AmazonRdsForSqlServerTableDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class AmazonRdsForSqlServerTableDataset(Dataset):
"""The Amazon RDS for SQL Server dataset.
All required parameters must be populated in order to send to server.
@@ -3133,7 +3133,7 @@ def __init__(
self.table = table
-class AmazonRedshiftLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class AmazonRedshiftLinkedService(LinkedService):
"""Linked service for Amazon Redshift.
All required parameters must be populated in order to send to server.
@@ -3372,7 +3372,7 @@ def __init__(
self.redshift_unload_settings = redshift_unload_settings
-class AmazonRedshiftTableDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class AmazonRedshiftTableDataset(Dataset):
"""The Amazon Redshift table dataset.
All required parameters must be populated in order to send to server.
@@ -3494,7 +3494,7 @@ def __init__(
self.schema_type_properties_schema = schema_type_properties_schema
-class AmazonS3CompatibleLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class AmazonS3CompatibleLinkedService(LinkedService):
"""Linked service for Amazon S3 Compatible.
All required parameters must be populated in order to send to server.
@@ -3849,7 +3849,7 @@ def __init__(
self.disable_metrics_collection = disable_metrics_collection
-class AmazonS3CompatibleReadSettings(StoreReadSettings): # pylint: disable=too-many-instance-attributes
+class AmazonS3CompatibleReadSettings(StoreReadSettings):
"""Amazon S3 Compatible read settings.
All required parameters must be populated in order to send to server.
@@ -3998,7 +3998,7 @@ def __init__(
self.modified_datetime_end = modified_datetime_end
-class AmazonS3Dataset(Dataset): # pylint: disable=too-many-instance-attributes
+class AmazonS3Dataset(Dataset):
"""A single Amazon Simple Storage Service (S3) object or a set of S3 objects.
All required parameters must be populated in order to send to server.
@@ -4162,7 +4162,7 @@ def __init__(
self.compression = compression
-class AmazonS3LinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class AmazonS3LinkedService(LinkedService):
"""Linked service for Amazon S3.
All required parameters must be populated in order to send to server.
@@ -4362,7 +4362,7 @@ def __init__(
self.version = version
-class AmazonS3ReadSettings(StoreReadSettings): # pylint: disable=too-many-instance-attributes
+class AmazonS3ReadSettings(StoreReadSettings):
"""Amazon S3 read settings.
All required parameters must be populated in order to send to server.
@@ -4939,7 +4939,7 @@ def __init__(
self.encrypted_credential = encrypted_credential
-class AvroDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class AvroDataset(Dataset):
"""Avro dataset.
All required parameters must be populated in order to send to server.
@@ -5180,11 +5180,12 @@ class CopySink(_serialization.Model):
AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink,
AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink,
CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink,
- DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink,
- JsonSink, LakeHouseTableSink, MicrosoftAccessSink, MongoDbAtlasSink, MongoDbV2Sink, OdbcSink,
- OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink,
- SalesforceServiceCloudV2Sink, SalesforceSink, SalesforceV2Sink, SapCloudForCustomerSink,
- SnowflakeSink, SnowflakeV2Sink, SqlDWSink, SqlMISink, SqlServerSink, SqlSink, WarehouseSink
+ DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, IcebergSink,
+ InformixSink, JsonSink, LakeHouseTableSink, MicrosoftAccessSink, MongoDbAtlasSink,
+ MongoDbV2Sink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink,
+ SalesforceServiceCloudSink, SalesforceServiceCloudV2Sink, SalesforceSink, SalesforceV2Sink,
+ SapCloudForCustomerSink, SnowflakeSink, SnowflakeV2Sink, SqlDWSink, SqlMISink, SqlServerSink,
+ SqlSink, WarehouseSink
All required parameters must be populated in order to send to server.
@@ -5251,6 +5252,7 @@ class CopySink(_serialization.Model):
"DynamicsCrmSink": "DynamicsCrmSink",
"DynamicsSink": "DynamicsSink",
"FileSystemSink": "FileSystemSink",
+ "IcebergSink": "IcebergSink",
"InformixSink": "InformixSink",
"JsonSink": "JsonSink",
"LakeHouseTableSink": "LakeHouseTableSink",
@@ -5525,8 +5527,8 @@ class FormatWriteSettings(_serialization.Model):
"""Format write settings.
You probably want to use the sub-classes and not this class directly. Known sub-classes are:
- AvroWriteSettings, DelimitedTextWriteSettings, JsonWriteSettings, OrcWriteSettings,
- ParquetWriteSettings
+ AvroWriteSettings, DelimitedTextWriteSettings, IcebergWriteSettings, JsonWriteSettings,
+ OrcWriteSettings, ParquetWriteSettings
All required parameters must be populated in order to send to server.
@@ -5550,6 +5552,7 @@ class FormatWriteSettings(_serialization.Model):
"type": {
"AvroWriteSettings": "AvroWriteSettings",
"DelimitedTextWriteSettings": "DelimitedTextWriteSettings",
+ "IcebergWriteSettings": "IcebergWriteSettings",
"JsonWriteSettings": "JsonWriteSettings",
"OrcWriteSettings": "OrcWriteSettings",
"ParquetWriteSettings": "ParquetWriteSettings",
@@ -5703,7 +5706,7 @@ def __init__(self, *, version: str, **kwargs: Any) -> None:
self.version = version
-class AzureBatchLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class AzureBatchLinkedService(LinkedService):
"""Azure Batch linked service.
All required parameters must be populated in order to send to server.
@@ -5838,7 +5841,7 @@ def __init__(
self.credential = credential
-class AzureBlobDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class AzureBlobDataset(Dataset):
"""The Azure Blob storage.
All required parameters must be populated in order to send to server.
@@ -5992,7 +5995,7 @@ def __init__(
self.compression = compression
-class AzureBlobFSDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class AzureBlobFSDataset(Dataset):
"""The Azure Data Lake Storage Gen2 storage.
All required parameters must be populated in order to send to server.
@@ -6119,7 +6122,7 @@ def __init__(
self.compression = compression
-class AzureBlobFSLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class AzureBlobFSLinkedService(LinkedService):
"""Azure Data Lake Storage Gen2 linked service.
All required parameters must be populated in order to send to server.
@@ -6370,7 +6373,7 @@ def __init__(
self.file_system = file_system
-class AzureBlobFSReadSettings(StoreReadSettings): # pylint: disable=too-many-instance-attributes
+class AzureBlobFSReadSettings(StoreReadSettings):
"""Azure blobFS read settings.
All required parameters must be populated in order to send to server.
@@ -6893,7 +6896,7 @@ def __init__(
self.block_size_in_mb = block_size_in_mb
-class AzureBlobStorageLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class AzureBlobStorageLinkedService(LinkedService):
"""The azure blob storage linked service.
All required parameters must be populated in order to send to server.
@@ -7158,7 +7161,7 @@ def __init__(
self.container = container
-class AzureBlobStorageReadSettings(StoreReadSettings): # pylint: disable=too-many-instance-attributes
+class AzureBlobStorageReadSettings(StoreReadSettings):
"""Azure blob read settings.
All required parameters must be populated in order to send to server.
@@ -7389,7 +7392,7 @@ def __init__(
self.block_size_in_mb = block_size_in_mb
-class AzureDatabricksDeltaLakeDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class AzureDatabricksDeltaLakeDataset(Dataset):
"""Azure Databricks Delta Lake dataset.
All required parameters must be populated in order to send to server.
@@ -7692,7 +7695,7 @@ def __init__(
self.timestamp_format = timestamp_format
-class AzureDatabricksDeltaLakeLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class AzureDatabricksDeltaLakeLinkedService(LinkedService):
"""Azure Databricks Delta Lake linked service.
All required parameters must be populated in order to send to server.
@@ -8023,7 +8026,7 @@ def __init__(
self.export_settings = export_settings
-class AzureDatabricksLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class AzureDatabricksLinkedService(LinkedService):
"""Azure Databricks linked service.
All required parameters must be populated in order to send to server.
@@ -8430,7 +8433,7 @@ def __init__(
self.policy = policy
-class AzureDataExplorerCommandActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes
+class AzureDataExplorerCommandActivity(ExecutionActivity):
"""Azure Data Explorer command activity.
All required parameters must be populated in order to send to server.
@@ -8551,7 +8554,7 @@ def __init__(
self.command_timeout = command_timeout
-class AzureDataExplorerLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class AzureDataExplorerLinkedService(LinkedService):
"""Azure Data Explorer (Kusto) linked service.
All required parameters must be populated in order to send to server.
@@ -8681,7 +8684,7 @@ def __init__(
self.credential = credential
-class AzureDataExplorerSink(CopySink): # pylint: disable=too-many-instance-attributes
+class AzureDataExplorerSink(CopySink):
"""A copy activity Azure Data Explorer sink.
All required parameters must be populated in order to send to server.
@@ -9017,7 +9020,7 @@ def __init__(
self.table = table
-class AzureDataLakeAnalyticsLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class AzureDataLakeAnalyticsLinkedService(LinkedService):
"""Azure Data Lake Analytics linked service.
All required parameters must be populated in order to send to server.
@@ -9165,7 +9168,7 @@ def __init__(
self.encrypted_credential = encrypted_credential
-class AzureDataLakeStoreDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class AzureDataLakeStoreDataset(Dataset):
"""Azure Data Lake Store dataset.
All required parameters must be populated in order to send to server.
@@ -9294,7 +9297,7 @@ def __init__(
self.compression = compression
-class AzureDataLakeStoreLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class AzureDataLakeStoreLinkedService(LinkedService):
"""Azure Data Lake Store linked service.
All required parameters must be populated in order to send to server.
@@ -9513,7 +9516,7 @@ def __init__(
self.type: str = "AzureDataLakeStoreLocation"
-class AzureDataLakeStoreReadSettings(StoreReadSettings): # pylint: disable=too-many-instance-attributes
+class AzureDataLakeStoreReadSettings(StoreReadSettings):
"""Azure data lake store read settings.
All required parameters must be populated in order to send to server.
@@ -9952,7 +9955,7 @@ def __init__(
self.expiry_date_time = expiry_date_time
-class AzureFileStorageLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class AzureFileStorageLinkedService(LinkedService):
"""Azure File Storage linked service.
All required parameters must be populated in order to send to server.
@@ -10178,7 +10181,7 @@ def __init__(
self.type: str = "AzureFileStorageLocation"
-class AzureFileStorageReadSettings(StoreReadSettings): # pylint: disable=too-many-instance-attributes
+class AzureFileStorageReadSettings(StoreReadSettings):
"""Azure File Storage read settings.
All required parameters must be populated in order to send to server.
@@ -10400,7 +10403,7 @@ def __init__(
self.type: str = "AzureFileStorageWriteSettings"
-class AzureFunctionActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes
+class AzureFunctionActivity(ExecutionActivity):
"""Azure Function activity.
All required parameters must be populated in order to send to server.
@@ -10542,7 +10545,7 @@ def __init__(
self.body = body
-class AzureFunctionLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class AzureFunctionLinkedService(LinkedService):
"""Azure Function linked service.
All required parameters must be populated in order to send to server.
@@ -11146,7 +11149,7 @@ def __init__(
self.table_name = table_name
-class AzureMLBatchExecutionActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes
+class AzureMLBatchExecutionActivity(ExecutionActivity):
"""Azure ML Batch Execution activity.
All required parameters must be populated in order to send to server.
@@ -11286,7 +11289,7 @@ def __init__(
self.web_service_inputs = web_service_inputs
-class AzureMLExecutePipelineActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes
+class AzureMLExecutePipelineActivity(ExecutionActivity):
"""Azure ML Execute Pipeline activity.
All required parameters must be populated in order to send to server.
@@ -11473,7 +11476,7 @@ def __init__(
self.continue_on_step_failure = continue_on_step_failure
-class AzureMLLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class AzureMLLinkedService(LinkedService):
"""Azure ML Studio Web Service linked service.
All required parameters must be populated in order to send to server.
@@ -11621,7 +11624,7 @@ def __init__(
self.authentication = authentication
-class AzureMLServiceLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class AzureMLServiceLinkedService(LinkedService):
"""Azure ML Service linked service.
All required parameters must be populated in order to send to server.
@@ -11772,7 +11775,7 @@ def __init__(
self.encrypted_credential = encrypted_credential
-class AzureMLUpdateResourceActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes
+class AzureMLUpdateResourceActivity(ExecutionActivity):
"""Azure ML Update Resource management activity.
All required parameters must be populated in order to send to server.
@@ -12252,7 +12255,7 @@ def __init__(
self.query = query
-class AzureMySqlTableDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class AzureMySqlTableDataset(Dataset):
"""The Azure MySQL database dataset.
All required parameters must be populated in order to send to server.
@@ -12388,6 +12391,35 @@ class AzurePostgreSqlLinkedService(LinkedService):
:ivar connection_string: An ODBC connection string. Type: string, SecureString or
AzureKeyVaultSecretReference.
:vartype connection_string: JSON
+ :ivar server: Server name for connection. Type: string.
+ :vartype server: JSON
+ :ivar port: The port for the connection. Type: integer.
+ :vartype port: JSON
+ :ivar username: Username for authentication. Type: string.
+ :vartype username: JSON
+ :ivar database: Database name for connection. Type: string.
+ :vartype database: JSON
+ :ivar ssl_mode: SSL mode for connection. 0: disable, 1: allow, 2: prefer, 3: require,
+ 4: verify-ca, 5: verify-full. Type: integer.
+ :vartype ssl_mode: JSON
+ :ivar timeout: The time to wait (in seconds) while trying to establish a connection before
+ terminating the attempt and generating an error. Type: integer.
+ :vartype timeout: JSON
+ :ivar command_timeout: The time to wait (in seconds) while trying to execute a command before
+ terminating the attempt and generating an error. Set to zero for infinity. Type: integer.
+ :vartype command_timeout: JSON
+ :ivar trust_server_certificate: Whether to trust the server certificate without validating it.
+ Type: boolean.
+ :vartype trust_server_certificate: JSON
+ :ivar read_buffer_size: Determines the size of the internal buffer used when reading.
+ Increasing may improve performance if transferring large values from the database. Type:
+ integer.
+ :vartype read_buffer_size: JSON
+ :ivar timezone: Gets or sets the session timezone. Type: string.
+ :vartype timezone: JSON
+ :ivar encoding: Gets or sets the .NET encoding that will be used to encode/decode PostgreSQL
+ string data. Type: string.
+ :vartype encoding: JSON
:ivar password: The Azure key vault secret reference of password in connection string.
:vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
:ivar encrypted_credential: The encrypted credential used for authentication. Credentials are
@@ -12408,6 +12440,17 @@ class AzurePostgreSqlLinkedService(LinkedService):
"parameters": {"key": "parameters", "type": "{ParameterSpecification}"},
"annotations": {"key": "annotations", "type": "[object]"},
"connection_string": {"key": "typeProperties.connectionString", "type": "object"},
+ "server": {"key": "typeProperties.server", "type": "object"},
+ "port": {"key": "typeProperties.port", "type": "object"},
+ "username": {"key": "typeProperties.username", "type": "object"},
+ "database": {"key": "typeProperties.database", "type": "object"},
+ "ssl_mode": {"key": "typeProperties.sslMode", "type": "object"},
+ "timeout": {"key": "typeProperties.timeout", "type": "object"},
+ "command_timeout": {"key": "typeProperties.commandTimeout", "type": "object"},
+ "trust_server_certificate": {"key": "typeProperties.trustServerCertificate", "type": "object"},
+ "read_buffer_size": {"key": "typeProperties.readBufferSize", "type": "object"},
+ "timezone": {"key": "typeProperties.timezone", "type": "object"},
+ "encoding": {"key": "typeProperties.encoding", "type": "object"},
"password": {"key": "typeProperties.password", "type": "AzureKeyVaultSecretReference"},
"encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"},
}
@@ -12422,6 +12465,17 @@ def __init__(
parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None,
annotations: Optional[List[JSON]] = None,
connection_string: Optional[JSON] = None,
+ server: Optional[JSON] = None,
+ port: Optional[JSON] = None,
+ username: Optional[JSON] = None,
+ database: Optional[JSON] = None,
+ ssl_mode: Optional[JSON] = None,
+ timeout: Optional[JSON] = None,
+ command_timeout: Optional[JSON] = None,
+ trust_server_certificate: Optional[JSON] = None,
+ read_buffer_size: Optional[JSON] = None,
+ timezone: Optional[JSON] = None,
+ encoding: Optional[JSON] = None,
password: Optional["_models.AzureKeyVaultSecretReference"] = None,
encrypted_credential: Optional[str] = None,
**kwargs: Any
@@ -12443,6 +12497,36 @@ def __init__(
:keyword connection_string: An ODBC connection string. Type: string, SecureString or
AzureKeyVaultSecretReference.
:paramtype connection_string: JSON
+ :keyword server: Server name for connection. Type: string.
+ :paramtype server: JSON
+ :keyword port: The port for the connection. Type: integer.
+ :paramtype port: JSON
+ :keyword username: Username for authentication. Type: string.
+ :paramtype username: JSON
+ :keyword database: Database name for connection. Type: string.
+ :paramtype database: JSON
+ :keyword ssl_mode: SSL mode for connection. 0: disable, 1: allow, 2: prefer, 3: require,
+ 4: verify-ca, 5: verify-full. Type: integer.
+ :paramtype ssl_mode: JSON
+ :keyword timeout: The time to wait (in seconds) while trying to establish a connection before
+ terminating the attempt and generating an error. Type: integer.
+ :paramtype timeout: JSON
+ :keyword command_timeout: The time to wait (in seconds) while trying to execute a command
+ before terminating the attempt and generating an error. Set to zero for infinity. Type:
+ integer.
+ :paramtype command_timeout: JSON
+ :keyword trust_server_certificate: Whether to trust the server certificate without validating
+ it. Type: boolean.
+ :paramtype trust_server_certificate: JSON
+ :keyword read_buffer_size: Determines the size of the internal buffer used when reading.
+ Increasing may improve performance if transferring large values from the database. Type:
+ integer.
+ :paramtype read_buffer_size: JSON
+ :keyword timezone: Gets or sets the session timezone. Type: string.
+ :paramtype timezone: JSON
+ :keyword encoding: Gets or sets the .NET encoding that will be used to encode/decode PostgreSQL
+ string data. Type: string.
+ :paramtype encoding: JSON
:keyword password: The Azure key vault secret reference of password in connection string.
:paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
:keyword encrypted_credential: The encrypted credential used for authentication. Credentials
@@ -12460,6 +12544,17 @@ def __init__(
)
self.type: str = "AzurePostgreSql"
self.connection_string = connection_string
+ self.server = server
+ self.port = port
+ self.username = username
+ self.database = database
+ self.ssl_mode = ssl_mode
+ self.timeout = timeout
+ self.command_timeout = command_timeout
+ self.trust_server_certificate = trust_server_certificate
+ self.read_buffer_size = read_buffer_size
+ self.timezone = timezone
+ self.encoding = encoding
self.password = password
self.encrypted_credential = encrypted_credential
@@ -12668,7 +12763,7 @@ def __init__(
self.query = query
-class AzurePostgreSqlTableDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class AzurePostgreSqlTableDataset(Dataset):
"""Azure PostgreSQL dataset.
All required parameters must be populated in order to send to server.
@@ -13192,7 +13287,7 @@ def __init__(
self.encrypted_credential = encrypted_credential
-class AzureSqlDatabaseLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class AzureSqlDatabaseLinkedService(LinkedService):
"""Microsoft Azure SQL Database linked service.
All required parameters must be populated in order to send to server.
@@ -13610,7 +13705,7 @@ def __init__( # pylint: disable=too-many-locals
class AzureSqlDatabaseLinkedServiceTypeProperties(
SqlServerBaseLinkedServiceTypeProperties
-): # pylint: disable=too-many-instance-attributes,name-too-long
+): # pylint: disable=name-too-long
"""Azure SQL Database linked service properties.
:ivar server: The name or network address of the instance of SQL Server to which to connect,
@@ -13969,7 +14064,7 @@ def __init__( # pylint: disable=too-many-locals
self.credential = credential
-class AzureSqlDWLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class AzureSqlDWLinkedService(LinkedService):
"""Azure SQL Data Warehouse linked service.
All required parameters must be populated in order to send to server.
@@ -14374,9 +14469,7 @@ def __init__( # pylint: disable=too-many-locals
self.credential = credential
-class AzureSqlDWLinkedServiceTypeProperties(
- SqlServerBaseLinkedServiceTypeProperties
-): # pylint: disable=too-many-instance-attributes
+class AzureSqlDWLinkedServiceTypeProperties(SqlServerBaseLinkedServiceTypeProperties):
"""Azure SQL Data Warehouse linked service properties.
:ivar server: The name or network address of the instance of SQL Server to which to connect,
@@ -14727,7 +14820,7 @@ def __init__( # pylint: disable=too-many-locals
self.credential = credential
-class AzureSqlDWTableDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class AzureSqlDWTableDataset(Dataset):
"""The Azure SQL Data Warehouse dataset.
All required parameters must be populated in order to send to server.
@@ -14849,7 +14942,7 @@ def __init__(
self.table = table
-class AzureSqlMILinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class AzureSqlMILinkedService(LinkedService):
"""Azure SQL Managed Instance linked service.
All required parameters must be populated in order to send to server.
@@ -15265,9 +15358,7 @@ def __init__( # pylint: disable=too-many-locals
self.credential = credential
-class AzureSqlMILinkedServiceTypeProperties(
- SqlServerBaseLinkedServiceTypeProperties
-): # pylint: disable=too-many-instance-attributes
+class AzureSqlMILinkedServiceTypeProperties(SqlServerBaseLinkedServiceTypeProperties):
"""Azure SQL Managed Instance linked service properties.
:ivar server: The name or network address of the instance of SQL Server to which to connect,
@@ -15626,7 +15717,7 @@ def __init__( # pylint: disable=too-many-locals
self.credential = credential
-class AzureSqlMITableDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class AzureSqlMITableDataset(Dataset):
"""The Azure SQL Managed Instance dataset.
All required parameters must be populated in order to send to server.
@@ -15748,7 +15839,7 @@ def __init__(
self.table = table
-class AzureSqlSink(CopySink): # pylint: disable=too-many-instance-attributes
+class AzureSqlSink(CopySink):
"""A copy activity Azure SQL sink.
All required parameters must be populated in order to send to server.
@@ -15921,7 +16012,7 @@ def __init__(
self.upsert_settings = upsert_settings
-class AzureSqlSource(TabularSource): # pylint: disable=too-many-instance-attributes
+class AzureSqlSource(TabularSource):
"""A copy activity Azure SQL source.
All required parameters must be populated in order to send to server.
@@ -16078,7 +16169,7 @@ def __init__(
self.partition_settings = partition_settings
-class AzureSqlTableDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class AzureSqlTableDataset(Dataset):
"""The Azure SQL Server database dataset.
All required parameters must be populated in order to send to server.
@@ -16200,7 +16291,7 @@ def __init__(
self.table = table
-class AzureStorageLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class AzureStorageLinkedService(LinkedService):
"""The storage account linked service.
All required parameters must be populated in order to send to server.
@@ -16583,7 +16674,7 @@ def __init__(
self.table_name = table_name
-class AzureTableSink(CopySink): # pylint: disable=too-many-instance-attributes
+class AzureTableSink(CopySink):
"""A copy activity Azure Table sink.
All required parameters must be populated in order to send to server.
@@ -16823,7 +16914,7 @@ def __init__(
self.azure_table_source_ignore_table_not_found = azure_table_source_ignore_table_not_found
-class AzureTableStorageLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class AzureTableStorageLinkedService(LinkedService):
"""The azure table storage linked service.
All required parameters must be populated in order to send to server.
@@ -17070,7 +17161,7 @@ def __init__(
self.reference_name = reference_name
-class BinaryDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class BinaryDataset(Dataset):
"""Binary dataset.
All required parameters must be populated in order to send to server.
@@ -17604,7 +17695,7 @@ def __init__(
self.pipelines = pipelines
-class BlobEventsTrigger(MultiplePipelineTrigger): # pylint: disable=too-many-instance-attributes
+class BlobEventsTrigger(MultiplePipelineTrigger):
"""Trigger that runs every time a Blob event occurs.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -17718,7 +17809,7 @@ def __init__(
self.scope = scope
-class BlobSink(CopySink): # pylint: disable=too-many-instance-attributes
+class BlobSink(CopySink):
"""A copy activity Azure Blob sink.
All required parameters must be populated in order to send to server.
@@ -18050,7 +18141,7 @@ def __init__(
self.linked_service = linked_service
-class CassandraLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class CassandraLinkedService(LinkedService):
"""Linked service for Cassandra data source.
All required parameters must be populated in order to send to server.
@@ -18297,7 +18388,7 @@ def __init__(
self.consistency_level = consistency_level
-class CassandraTableDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class CassandraTableDataset(Dataset):
"""The Cassandra database dataset.
All required parameters must be populated in order to send to server.
@@ -18590,7 +18681,7 @@ def __init__(self, **kwargs: Any) -> None:
self.etag = None
-class ChangeDataCaptureResource(SubResource): # pylint: disable=too-many-instance-attributes
+class ChangeDataCaptureResource(SubResource):
"""Change data capture resource type.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -18928,7 +19019,7 @@ def __init__(
self.entity_name = entity_name
-class CommonDataServiceForAppsLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class CommonDataServiceForAppsLinkedService(LinkedService):
"""Common Data Service for Apps linked service.
All required parameters must be populated in order to send to server.
@@ -19147,7 +19238,7 @@ def __init__(
self.encrypted_credential = encrypted_credential
-class CommonDataServiceForAppsSink(CopySink): # pylint: disable=too-many-instance-attributes
+class CommonDataServiceForAppsSink(CopySink):
"""A copy activity Common Data Service for Apps sink.
All required parameters must be populated in order to send to server.
@@ -19443,7 +19534,7 @@ def __init__(self, *, additional_properties: Optional[Dict[str, JSON]] = None, *
self.type: Optional[str] = None
-class ConcurLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class ConcurLinkedService(LinkedService):
"""Concur Service linked service.
All required parameters must be populated in order to send to server.
@@ -19865,7 +19956,7 @@ def __init__(
self.customized_checkpoint_key = customized_checkpoint_key
-class CopyActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes
+class CopyActivity(ExecutionActivity):
"""Copy activity.
All required parameters must be populated in order to send to server.
@@ -20228,7 +20319,7 @@ def __init__(self, *, additional_properties: Optional[Dict[str, JSON]] = None, *
self.type: Optional[str] = None
-class CosmosDbLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class CosmosDbLinkedService(LinkedService):
"""Microsoft Azure Cosmos Database (CosmosDB) linked service.
All required parameters must be populated in order to send to server.
@@ -20731,7 +20822,7 @@ def __init__(
self.write_behavior = write_behavior
-class CosmosDbMongoDbApiSource(CopySource): # pylint: disable=too-many-instance-attributes
+class CosmosDbMongoDbApiSource(CopySource):
"""A copy activity source for a CosmosDB (MongoDB API) database.
All required parameters must be populated in order to send to server.
@@ -21062,7 +21153,7 @@ def __init__(
self.write_behavior = write_behavior
-class CosmosDbSqlApiSource(CopySource): # pylint: disable=too-many-instance-attributes
+class CosmosDbSqlApiSource(CopySource):
"""A copy activity Azure CosmosDB (SQL API) Collection source.
All required parameters must be populated in order to send to server.
@@ -21823,7 +21914,7 @@ def __init__(self, *, properties: "_models.Credential", **kwargs: Any) -> None:
self.properties = properties
-class CustomActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes
+class CustomActivity(ExecutionActivity):
"""Custom activity type.
All required parameters must be populated in order to send to server.
@@ -22304,7 +22395,7 @@ def __init__(
self.scope = scope
-class DatabricksNotebookActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes
+class DatabricksNotebookActivity(ExecutionActivity):
"""DatabricksNotebook activity.
All required parameters must be populated in order to send to server.
@@ -22435,7 +22526,7 @@ def __init__(
self.libraries = libraries
-class DatabricksSparkJarActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes
+class DatabricksSparkJarActivity(ExecutionActivity):
"""DatabricksSparkJar activity.
All required parameters must be populated in order to send to server.
@@ -22564,7 +22655,7 @@ def __init__(
self.libraries = libraries
-class DatabricksSparkPythonActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes
+class DatabricksSparkPythonActivity(ExecutionActivity):
"""DatabricksSparkPython activity.
All required parameters must be populated in order to send to server.
@@ -23563,7 +23654,7 @@ def __init__(
self.folder_path = folder_path
-class DataLakeAnalyticsUSQLActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes
+class DataLakeAnalyticsUSQLActivity(ExecutionActivity):
"""Data Lake Analytics U-SQL activity.
All required parameters must be populated in order to send to server.
@@ -24168,7 +24259,7 @@ def __init__(
self.encrypted_credential = encrypted_credential
-class Db2LinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class Db2LinkedService(LinkedService):
"""Linked service for DB2 data source.
All required parameters must be populated in order to send to server.
@@ -24427,7 +24518,7 @@ def __init__(
self.query = query
-class Db2TableDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class Db2TableDataset(Dataset):
"""The Db2 table dataset.
All required parameters must be populated in order to send to server.
@@ -24547,7 +24638,7 @@ def __init__(
self.table = table
-class DeleteActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes
+class DeleteActivity(ExecutionActivity):
"""Delete activity.
All required parameters must be populated in order to send to server.
@@ -24721,7 +24812,7 @@ def __init__(self, *, session_id: Optional[str] = None, **kwargs: Any) -> None:
self.session_id = session_id
-class DelimitedTextDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class DelimitedTextDataset(Dataset):
"""Delimited text dataset.
All required parameters must be populated in order to send to server.
@@ -25849,7 +25940,7 @@ def __init__(
self.query = query
-class DrillTableDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class DrillTableDataset(Dataset):
"""Drill server dataset.
All required parameters must be populated in order to send to server.
@@ -26042,7 +26133,7 @@ def __init__(
self.additional_options = additional_options
-class DynamicsAXLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class DynamicsAXLinkedService(LinkedService):
"""Dynamics AX linked service.
All required parameters must be populated in order to send to server.
@@ -26503,7 +26594,7 @@ def __init__(
self.entity_name = entity_name
-class DynamicsCrmLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class DynamicsCrmLinkedService(LinkedService):
"""Dynamics CRM linked service.
All required parameters must be populated in order to send to server.
@@ -26720,7 +26811,7 @@ def __init__(
self.encrypted_credential = encrypted_credential
-class DynamicsCrmSink(CopySink): # pylint: disable=too-many-instance-attributes
+class DynamicsCrmSink(CopySink):
"""A copy activity Dynamics CRM sink.
All required parameters must be populated in order to send to server.
@@ -27038,7 +27129,7 @@ def __init__(
self.entity_name = entity_name
-class DynamicsLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class DynamicsLinkedService(LinkedService):
"""Dynamics linked service.
All required parameters must be populated in order to send to server.
@@ -27254,7 +27345,7 @@ def __init__(
self.credential = credential
-class DynamicsSink(CopySink): # pylint: disable=too-many-instance-attributes
+class DynamicsSink(CopySink):
"""A copy activity Dynamics sink.
All required parameters must be populated in order to send to server.
@@ -27468,7 +27559,7 @@ def __init__(
self.additional_columns = additional_columns
-class EloquaLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class EloquaLinkedService(LinkedService):
"""Eloqua server linked service.
All required parameters must be populated in order to send to server.
@@ -27940,7 +28031,7 @@ def __init__(self, *, variable_name: str, variable_value: str, **kwargs: Any) ->
self.variable_value = variable_value
-class ExcelDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class ExcelDataset(Dataset):
"""Excel dataset.
All required parameters must be populated in order to send to server.
@@ -28186,7 +28277,7 @@ def __init__(
self.additional_columns = additional_columns
-class ExecuteDataFlowActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes
+class ExecuteDataFlowActivity(ExecutionActivity):
"""Execute data flow activity.
All required parameters must be populated in order to send to server.
@@ -28497,7 +28588,7 @@ def __init__(
self.core_count = core_count
-class ExecutePipelineActivity(ControlActivity): # pylint: disable=too-many-instance-attributes
+class ExecutePipelineActivity(ControlActivity):
"""Execute pipeline activity.
All required parameters must be populated in order to send to server.
@@ -28652,9 +28743,7 @@ def __init__(
self.secure_input = secure_input
-class ExecutePowerQueryActivityTypeProperties(
- ExecuteDataFlowActivityTypeProperties
-): # pylint: disable=too-many-instance-attributes
+class ExecutePowerQueryActivityTypeProperties(ExecuteDataFlowActivityTypeProperties):
"""Execute power query data flow activity properties.
All required parameters must be populated in order to send to server.
@@ -28769,7 +28858,7 @@ def __init__(
self.queries = queries
-class ExecuteSSISPackageActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes
+class ExecuteSSISPackageActivity(ExecutionActivity):
"""Execute SSIS package activity.
All required parameters must be populated in order to send to server.
@@ -28977,7 +29066,7 @@ def __init__(
self.log_location = log_location
-class ExecuteWranglingDataflowActivity(Activity): # pylint: disable=too-many-instance-attributes
+class ExecuteWranglingDataflowActivity(Activity):
"""Execute power query activity.
All required parameters must be populated in order to send to server.
@@ -29408,7 +29497,7 @@ def __init__(self, *, location: Optional[str] = None, tags: Optional[Dict[str, s
self.e_tag = None
-class Factory(Resource): # pylint: disable=too-many-instance-attributes
+class Factory(Resource):
"""Factory resource type.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -30079,7 +30168,7 @@ def __init__(
self.error_code = error_code
-class FileServerLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class FileServerLinkedService(LinkedService):
"""File system linked service.
All required parameters must be populated in order to send to server.
@@ -30242,7 +30331,7 @@ def __init__(
self.type: str = "FileServerLocation"
-class FileServerReadSettings(StoreReadSettings): # pylint: disable=too-many-instance-attributes
+class FileServerReadSettings(StoreReadSettings):
"""File server read settings.
All required parameters must be populated in order to send to server.
@@ -30464,7 +30553,7 @@ def __init__(
self.type: str = "FileServerWriteSettings"
-class FileShareDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class FileShareDataset(Dataset):
"""An on-premises file system dataset.
All required parameters must be populated in order to send to server.
@@ -30998,7 +31087,7 @@ def __init__(
self.script_lines = script_lines
-class ForEachActivity(ControlActivity): # pylint: disable=too-many-instance-attributes
+class ForEachActivity(ControlActivity):
"""This activity is used for iterating over a collection and execute given activities.
All required parameters must be populated in order to send to server.
@@ -31119,7 +31208,7 @@ def __init__(
self.activities = activities
-class FtpReadSettings(StoreReadSettings): # pylint: disable=too-many-instance-attributes
+class FtpReadSettings(StoreReadSettings):
"""Ftp read settings.
All required parameters must be populated in order to send to server.
@@ -31259,7 +31348,7 @@ def __init__(
self.disable_chunking = disable_chunking
-class FtpServerLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class FtpServerLinkedService(LinkedService):
"""A FTP server Linked Service.
All required parameters must be populated in order to send to server.
@@ -31493,7 +31582,7 @@ def __init__(
self.status = status
-class GetMetadataActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes
+class GetMetadataActivity(ExecutionActivity):
"""Activity to get metadata of dataset.
All required parameters must be populated in order to send to server.
@@ -31858,7 +31947,7 @@ def __init__(self, *, type: Union[str, "_models.GlobalParameterType"], value: JS
self.value = value
-class GoogleAdWordsLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class GoogleAdWordsLinkedService(LinkedService):
"""Google AdWords service linked service.
All required parameters must be populated in order to send to server.
@@ -32301,7 +32390,7 @@ def __init__(
self.query = query
-class GoogleBigQueryLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class GoogleBigQueryLinkedService(LinkedService):
"""Google BigQuery service linked service.
All required parameters must be populated in order to send to server.
@@ -32499,7 +32588,7 @@ def __init__(
self.encrypted_credential = encrypted_credential
-class GoogleBigQueryObjectDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class GoogleBigQueryObjectDataset(Dataset):
"""Google BigQuery service dataset.
All required parameters must be populated in order to send to server.
@@ -32723,7 +32812,7 @@ def __init__(
self.query = query
-class GoogleBigQueryV2LinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class GoogleBigQueryV2LinkedService(LinkedService):
"""Google BigQuery service linked service.
All required parameters must be populated in order to send to server.
@@ -32864,7 +32953,7 @@ def __init__(
self.encrypted_credential = encrypted_credential
-class GoogleBigQueryV2ObjectDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class GoogleBigQueryV2ObjectDataset(Dataset):
"""Google BigQuery service dataset.
All required parameters must be populated in order to send to server.
@@ -33079,7 +33168,7 @@ def __init__(
self.query = query
-class GoogleCloudStorageLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class GoogleCloudStorageLinkedService(LinkedService):
"""Linked service for Google Cloud Storage.
All required parameters must be populated in order to send to server.
@@ -33265,7 +33354,7 @@ def __init__(
self.version = version
-class GoogleCloudStorageReadSettings(StoreReadSettings): # pylint: disable=too-many-instance-attributes
+class GoogleCloudStorageReadSettings(StoreReadSettings):
"""Google Cloud Storage read settings.
All required parameters must be populated in order to send to server.
@@ -33706,7 +33795,7 @@ def __init__(
self.query = query
-class GreenplumTableDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class GreenplumTableDataset(Dataset):
"""Greenplum Database dataset.
All required parameters must be populated in order to send to server.
@@ -33827,7 +33916,7 @@ def __init__(
self.schema_type_properties_schema = schema_type_properties_schema
-class HBaseLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class HBaseLinkedService(LinkedService):
"""HBase server linked service.
All required parameters must be populated in order to send to server.
@@ -34203,7 +34292,7 @@ def __init__(
self.query = query
-class HdfsLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class HdfsLinkedService(LinkedService):
"""Hadoop Distributed File System (HDFS) linked service.
All required parameters must be populated in order to send to server.
@@ -34375,7 +34464,7 @@ def __init__(
self.type: str = "HdfsLocation"
-class HdfsReadSettings(StoreReadSettings): # pylint: disable=too-many-instance-attributes
+class HdfsReadSettings(StoreReadSettings):
"""HDFS read settings.
All required parameters must be populated in order to send to server.
@@ -34613,7 +34702,7 @@ def __init__(
self.distcp_settings = distcp_settings
-class HDInsightHiveActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes
+class HDInsightHiveActivity(ExecutionActivity):
"""HDInsight Hive activity type.
All required parameters must be populated in order to send to server.
@@ -34774,7 +34863,7 @@ def __init__(
self.query_timeout = query_timeout
-class HDInsightLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class HDInsightLinkedService(LinkedService):
"""HDInsight linked service.
All required parameters must be populated in order to send to server.
@@ -34920,7 +35009,7 @@ def __init__(
self.file_system = file_system
-class HDInsightMapReduceActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes
+class HDInsightMapReduceActivity(ExecutionActivity):
"""HDInsight MapReduce activity type.
All required parameters must be populated in order to send to server.
@@ -35082,7 +35171,7 @@ def __init__(
self.defines = defines
-class HDInsightOnDemandLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class HDInsightOnDemandLinkedService(LinkedService):
"""HDInsight ondemand linked service.
All required parameters must be populated in order to send to server.
@@ -35470,7 +35559,7 @@ def __init__( # pylint: disable=too-many-locals
self.credential = credential
-class HDInsightPigActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes
+class HDInsightPigActivity(ExecutionActivity):
"""HDInsight Pig activity type.
All required parameters must be populated in order to send to server.
@@ -35617,7 +35706,7 @@ def __init__(
self.defines = defines
-class HDInsightSparkActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes
+class HDInsightSparkActivity(ExecutionActivity):
"""HDInsight Spark activity.
All required parameters must be populated in order to send to server.
@@ -35786,7 +35875,7 @@ def __init__(
self.spark_config = spark_config
-class HDInsightStreamingActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes
+class HDInsightStreamingActivity(ExecutionActivity):
"""HDInsight streaming activity type.
All required parameters must be populated in order to send to server.
@@ -35984,7 +36073,7 @@ def __init__(
self.defines = defines
-class HiveLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class HiveLinkedService(LinkedService):
"""Hive Server linked service.
All required parameters must be populated in order to send to server.
@@ -36211,7 +36300,7 @@ def __init__( # pylint: disable=too-many-locals
self.encrypted_credential = encrypted_credential
-class HiveObjectDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class HiveObjectDataset(Dataset):
"""Hive Server dataset.
All required parameters must be populated in order to send to server.
@@ -36434,7 +36523,7 @@ def __init__(
self.query = query
-class HttpDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class HttpDataset(Dataset):
"""A file in an HTTP web server.
All required parameters must be populated in order to send to server.
@@ -36585,7 +36674,7 @@ def __init__(
self.compression = compression
-class HttpLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class HttpLinkedService(LinkedService):
"""Linked service for an HTTP source.
All required parameters must be populated in order to send to server.
@@ -37005,7 +37094,7 @@ def __init__(
self.http_request_timeout = http_request_timeout
-class HubspotLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class HubspotLinkedService(LinkedService):
"""Hubspot Service linked service.
All required parameters must be populated in order to send to server.
@@ -37354,7 +37443,249 @@ def __init__(
self.query = query
-class IfConditionActivity(ControlActivity): # pylint: disable=too-many-instance-attributes
class IcebergDataset(Dataset):
    """Iceberg dataset.

    All required parameters must be populated in order to send to server.

    :ivar additional_properties: Unmatched properties from the message are deserialized to this
     collection.
    :vartype additional_properties: dict[str, JSON]
    :ivar type: Type of dataset. Required.
    :vartype type: str
    :ivar description: Dataset description.
    :vartype description: str
    :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression
     with resultType array), itemType: DatasetDataElement.
    :vartype structure: JSON
    :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or
     Expression with resultType array), itemType: DatasetSchemaDataElement.
    :vartype schema: JSON
    :ivar linked_service_name: Linked service reference. Required.
    :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
    :ivar parameters: Parameters for dataset.
    :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
    :ivar annotations: List of tags that can be used for describing the Dataset.
    :vartype annotations: list[JSON]
    :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the
     root level.
    :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :ivar location: The location of the iceberg storage. Setting a file name is not allowed for
     iceberg format.
    :vartype location: ~azure.mgmt.datafactory.models.DatasetLocation
    """

    # Fields the service requires in the serialized payload.
    _validation = {
        "type": {"required": True},
        "linked_service_name": {"required": True},
    }

    # msrest wire-format map: "key" is the JSON path (an empty key collects
    # unmatched properties), "type" drives (de)serialization. The
    # "typeProperties.location" key nests `location` under the dataset's
    # typeProperties envelope on the wire.
    _attribute_map = {
        "additional_properties": {"key": "", "type": "{object}"},
        "type": {"key": "type", "type": "str"},
        "description": {"key": "description", "type": "str"},
        "structure": {"key": "structure", "type": "object"},
        "schema": {"key": "schema", "type": "object"},
        "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"},
        "parameters": {"key": "parameters", "type": "{ParameterSpecification}"},
        "annotations": {"key": "annotations", "type": "[object]"},
        "folder": {"key": "folder", "type": "DatasetFolder"},
        "location": {"key": "typeProperties.location", "type": "DatasetLocation"},
    }

    def __init__(
        self,
        *,
        linked_service_name: "_models.LinkedServiceReference",
        additional_properties: Optional[Dict[str, JSON]] = None,
        description: Optional[str] = None,
        structure: Optional[JSON] = None,
        schema: Optional[JSON] = None,
        parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None,
        annotations: Optional[List[JSON]] = None,
        folder: Optional["_models.DatasetFolder"] = None,
        location: Optional["_models.DatasetLocation"] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword additional_properties: Unmatched properties from the message are deserialized to this
         collection.
        :paramtype additional_properties: dict[str, JSON]
        :keyword description: Dataset description.
        :paramtype description: str
        :keyword structure: Columns that define the structure of the dataset. Type: array (or
         Expression with resultType array), itemType: DatasetDataElement.
        :paramtype structure: JSON
        :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or
         Expression with resultType array), itemType: DatasetSchemaDataElement.
        :paramtype schema: JSON
        :keyword linked_service_name: Linked service reference. Required.
        :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
        :keyword parameters: Parameters for dataset.
        :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
        :keyword annotations: List of tags that can be used for describing the Dataset.
        :paramtype annotations: list[JSON]
        :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at
         the root level.
        :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder
        :keyword location: The location of the iceberg storage. Setting a file name is not allowed for
         iceberg format.
        :paramtype location: ~azure.mgmt.datafactory.models.DatasetLocation
        """
        # Shared Dataset fields are handled by the base class; only the
        # Iceberg-specific typeProperties are stored here.
        super().__init__(
            additional_properties=additional_properties,
            description=description,
            structure=structure,
            schema=schema,
            linked_service_name=linked_service_name,
            parameters=parameters,
            annotations=annotations,
            folder=folder,
            **kwargs
        )
        # Polymorphic discriminator: selects this subtype during deserialization.
        self.type: str = "Iceberg"
        self.location = location
+
+
class IcebergSink(CopySink):
    """A copy activity Iceberg sink.

    All required parameters must be populated in order to send to server.

    :ivar additional_properties: Unmatched properties from the message are deserialized to this
     collection.
    :vartype additional_properties: dict[str, JSON]
    :ivar type: Copy sink type. Required.
    :vartype type: str
    :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType
     integer), minimum: 0.
    :vartype write_batch_size: JSON
    :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType
     string), pattern: ((\\d+).)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :vartype write_batch_timeout: JSON
    :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType
     integer).
    :vartype sink_retry_count: JSON
    :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string),
     pattern: ((\\d+).)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :vartype sink_retry_wait: JSON
    :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data
     store. Type: integer (or Expression with resultType integer).
    :vartype max_concurrent_connections: JSON
    :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is
     false. Type: boolean (or Expression with resultType boolean).
    :vartype disable_metrics_collection: JSON
    :ivar store_settings: Iceberg store settings.
    :vartype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings
    :ivar format_settings: Iceberg format settings.
    :vartype format_settings: ~azure.mgmt.datafactory.models.IcebergWriteSettings
    """

    # Only the discriminator is mandatory; all sink tuning knobs are optional.
    _validation = {
        "type": {"required": True},
    }

    # msrest wire-format map; the empty key collects unmatched JSON properties.
    _attribute_map = {
        "additional_properties": {"key": "", "type": "{object}"},
        "type": {"key": "type", "type": "str"},
        "write_batch_size": {"key": "writeBatchSize", "type": "object"},
        "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"},
        "sink_retry_count": {"key": "sinkRetryCount", "type": "object"},
        "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"},
        "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"},
        "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"},
        "store_settings": {"key": "storeSettings", "type": "StoreWriteSettings"},
        "format_settings": {"key": "formatSettings", "type": "IcebergWriteSettings"},
    }

    def __init__(
        self,
        *,
        additional_properties: Optional[Dict[str, JSON]] = None,
        write_batch_size: Optional[JSON] = None,
        write_batch_timeout: Optional[JSON] = None,
        sink_retry_count: Optional[JSON] = None,
        sink_retry_wait: Optional[JSON] = None,
        max_concurrent_connections: Optional[JSON] = None,
        disable_metrics_collection: Optional[JSON] = None,
        store_settings: Optional["_models.StoreWriteSettings"] = None,
        format_settings: Optional["_models.IcebergWriteSettings"] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword additional_properties: Unmatched properties from the message are deserialized to this
         collection.
        :paramtype additional_properties: dict[str, JSON]
        :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType
         integer), minimum: 0.
        :paramtype write_batch_size: JSON
        :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType
         string), pattern: ((\\d+).)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
        :paramtype write_batch_timeout: JSON
        :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType
         integer).
        :paramtype sink_retry_count: JSON
        :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string),
         pattern: ((\\d+).)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
        :paramtype sink_retry_wait: JSON
        :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data
         store. Type: integer (or Expression with resultType integer).
        :paramtype max_concurrent_connections: JSON
        :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is
         false. Type: boolean (or Expression with resultType boolean).
        :paramtype disable_metrics_collection: JSON
        :keyword store_settings: Iceberg store settings.
        :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings
        :keyword format_settings: Iceberg format settings.
        :paramtype format_settings: ~azure.mgmt.datafactory.models.IcebergWriteSettings
        """
        # Common sink knobs are handled by CopySink; only the Iceberg-specific
        # store/format settings are stored on this subtype.
        super().__init__(
            additional_properties=additional_properties,
            write_batch_size=write_batch_size,
            write_batch_timeout=write_batch_timeout,
            sink_retry_count=sink_retry_count,
            sink_retry_wait=sink_retry_wait,
            max_concurrent_connections=max_concurrent_connections,
            disable_metrics_collection=disable_metrics_collection,
            **kwargs
        )
        # Polymorphic discriminator: selects this subtype during deserialization.
        self.type: str = "IcebergSink"
        self.store_settings = store_settings
        self.format_settings = format_settings
+
+
class IcebergWriteSettings(FormatWriteSettings):
    """Format settings used when writing data in the Iceberg format.

    All required parameters must be populated in order to send to server.

    :ivar additional_properties: Unmatched properties from the message are deserialized to this
     collection.
    :vartype additional_properties: dict[str, JSON]
    :ivar type: The write setting type. Required.
    :vartype type: str
    """

    # Only the polymorphic discriminator is mandatory on the wire.
    _validation = {
        "type": {"required": True},
    }

    # msrest wire-format map; the empty key collects unmatched JSON properties.
    _attribute_map = {
        "additional_properties": {"key": "", "type": "{object}"},
        "type": {"key": "type", "type": "str"},
    }

    def __init__(
        self,
        *,
        additional_properties: Optional[Dict[str, JSON]] = None,
        **kwargs: Any,
    ) -> None:
        """
        :keyword additional_properties: Unmatched properties from the message are deserialized to
         this collection.
        :paramtype additional_properties: dict[str, JSON]
        """
        super().__init__(additional_properties=additional_properties, **kwargs)
        # Discriminator value that identifies this FormatWriteSettings subtype.
        self.type: str = "IcebergWriteSettings"
+
+
+class IfConditionActivity(ControlActivity):
"""This activity evaluates a boolean expression and executes either the activities under the
ifTrueActivities property or the ifFalseActivities property depending on the result of the
expression.
@@ -37474,7 +37805,7 @@ def __init__(
self.if_false_activities = if_false_activities
-class ImpalaLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class ImpalaLinkedService(LinkedService):
"""Impala server linked service.
All required parameters must be populated in order to send to server.
@@ -37648,7 +37979,7 @@ def __init__(
self.encrypted_credential = encrypted_credential
-class ImpalaObjectDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class ImpalaObjectDataset(Dataset):
"""Impala server dataset.
All required parameters must be populated in order to send to server.
@@ -37871,7 +38202,7 @@ def __init__(
self.query = query
-class InformixLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class InformixLinkedService(LinkedService):
"""Informix linked service.
All required parameters must be populated in order to send to server.
@@ -39531,7 +39862,7 @@ def __init__(
self.subnet_id = subnet_id
-class JiraLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class JiraLinkedService(LinkedService):
"""Jira Service linked service.
All required parameters must be populated in order to send to server.
@@ -39882,7 +40213,7 @@ def __init__(
self.query = query
-class JsonDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class JsonDataset(Dataset):
"""Json dataset.
All required parameters must be populated in order to send to server.
@@ -40406,7 +40737,7 @@ def __init__(
self.file_pattern = file_pattern
-class LakeHouseLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class LakeHouseLinkedService(LinkedService):
"""Microsoft Fabric LakeHouse linked service.
All required parameters must be populated in order to send to server.
@@ -40614,7 +40945,7 @@ def __init__(
self.type: str = "LakeHouseLocation"
-class LakeHouseReadSettings(StoreReadSettings): # pylint: disable=too-many-instance-attributes
+class LakeHouseReadSettings(StoreReadSettings):
"""Microsoft Fabric LakeHouse Files read settings.
All required parameters must be populated in order to send to server.
@@ -40754,7 +41085,7 @@ def __init__(
self.modified_datetime_end = modified_datetime_end
-class LakeHouseTableDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class LakeHouseTableDataset(Dataset):
"""Microsoft Fabric LakeHouse Table.
All required parameters must be populated in order to send to server.
@@ -40867,7 +41198,7 @@ def __init__(
self.table = table
-class LakeHouseTableSink(CopySink): # pylint: disable=too-many-instance-attributes
+class LakeHouseTableSink(CopySink):
"""A copy activity for Microsoft Fabric LakeHouse Table sink.
All required parameters must be populated in order to send to server.
@@ -41646,7 +41977,7 @@ def __init__(
self.enable_reliable_logging = enable_reliable_logging
-class LookupActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes
+class LookupActivity(ExecutionActivity):
"""Lookup activity.
All required parameters must be populated in order to send to server.
@@ -41773,7 +42104,7 @@ def __init__(
self.first_row_only = first_row_only
-class MagentoLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class MagentoLinkedService(LinkedService):
"""Magento server linked service.
All required parameters must be populated in order to send to server.
@@ -43325,7 +43656,7 @@ def __init__(
self.script_lines = script_lines
-class MariaDBLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class MariaDBLinkedService(LinkedService):
"""MariaDB server linked service.
All required parameters must be populated in order to send to server.
@@ -43347,7 +43678,7 @@ class MariaDBLinkedService(LinkedService): # pylint: disable=too-many-instance-
:vartype annotations: list[JSON]
:ivar driver_version: The version of the MariaDB driver. Type: string. V1 or empty for legacy
driver, V2 for new driver. V1 can support connection string and property bag, V2 can only
- support connection string.
+ support connection string. The legacy driver is scheduled for deprecation by October 2024.
:vartype driver_version: JSON
:ivar connection_string: An ODBC connection string. Type: string, SecureString or
AzureKeyVaultSecretReference.
@@ -43360,6 +43691,15 @@ class MariaDBLinkedService(LinkedService): # pylint: disable=too-many-instance-
:vartype username: JSON
:ivar database: Database name for connection. Type: string.
:vartype database: JSON
+ :ivar ssl_mode: This option specifies whether the driver uses TLS encryption and verification
+ when connecting to MariaDB. E.g., SSLMode=<0/1/2/3/4>. Options: DISABLED (0) / PREFERRED (1)
+ (Default) / REQUIRED (2) / VERIFY_CA (3) / VERIFY_IDENTITY (4), REQUIRED (2) is recommended to
+ only allow connections encrypted with SSL/TLS.
+ :vartype ssl_mode: JSON
+ :ivar use_system_trust_store: This option specifies whether to use a CA certificate from the
+ system trust store, or from a specified PEM file. E.g. UseSystemTrustStore=<0/1>; Options:
+ Enabled (1) / Disabled (0) (Default).
+ :vartype use_system_trust_store: JSON
:ivar password: The Azure key vault secret reference of password in connection string.
:vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
:ivar encrypted_credential: The encrypted credential used for authentication. Credentials are
@@ -43385,6 +43725,8 @@ class MariaDBLinkedService(LinkedService): # pylint: disable=too-many-instance-
"port": {"key": "typeProperties.port", "type": "object"},
"username": {"key": "typeProperties.username", "type": "object"},
"database": {"key": "typeProperties.database", "type": "object"},
+ "ssl_mode": {"key": "typeProperties.sslMode", "type": "object"},
+ "use_system_trust_store": {"key": "typeProperties.useSystemTrustStore", "type": "object"},
"password": {"key": "typeProperties.password", "type": "AzureKeyVaultSecretReference"},
"encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"},
}
@@ -43404,6 +43746,8 @@ def __init__(
port: Optional[JSON] = None,
username: Optional[JSON] = None,
database: Optional[JSON] = None,
+ ssl_mode: Optional[JSON] = None,
+ use_system_trust_store: Optional[JSON] = None,
password: Optional["_models.AzureKeyVaultSecretReference"] = None,
encrypted_credential: Optional[str] = None,
**kwargs: Any
@@ -43424,7 +43768,7 @@ def __init__(
:paramtype annotations: list[JSON]
:keyword driver_version: The version of the MariaDB driver. Type: string. V1 or empty for
legacy driver, V2 for new driver. V1 can support connection string and property bag, V2 can
- only support connection string.
+ only support connection string. The legacy driver is scheduled for deprecation by October 2024.
:paramtype driver_version: JSON
:keyword connection_string: An ODBC connection string. Type: string, SecureString or
AzureKeyVaultSecretReference.
@@ -43437,6 +43781,15 @@ def __init__(
:paramtype username: JSON
:keyword database: Database name for connection. Type: string.
:paramtype database: JSON
+ :keyword ssl_mode: This option specifies whether the driver uses TLS encryption and
+ verification when connecting to MariaDB. E.g., SSLMode=<0/1/2/3/4>. Options: DISABLED (0) /
+ PREFERRED (1) (Default) / REQUIRED (2) / VERIFY_CA (3) / VERIFY_IDENTITY (4), REQUIRED (2) is
+ recommended to only allow connections encrypted with SSL/TLS.
+ :paramtype ssl_mode: JSON
+ :keyword use_system_trust_store: This option specifies whether to use a CA certificate from the
+ system trust store, or from a specified PEM file. E.g. UseSystemTrustStore=<0/1>; Options:
+ Enabled (1) / Disabled (0) (Default).
+ :paramtype use_system_trust_store: JSON
:keyword password: The Azure key vault secret reference of password in connection string.
:paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
:keyword encrypted_credential: The encrypted credential used for authentication. Credentials
@@ -43459,6 +43812,8 @@ def __init__(
self.port = port
self.username = username
self.database = database
+ self.ssl_mode = ssl_mode
+ self.use_system_trust_store = use_system_trust_store
self.password = password
self.encrypted_credential = encrypted_credential
@@ -43667,7 +44022,7 @@ def __init__(
self.table_name = table_name
-class MarketoLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class MarketoLinkedService(LinkedService):
"""Marketo server linked service.
All required parameters must be populated in order to send to server.
@@ -44033,7 +44388,7 @@ def __init__(self, *, name: Optional[JSON] = None, value: Optional[JSON] = None,
self.value = value
-class MicrosoftAccessLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class MicrosoftAccessLinkedService(LinkedService):
"""Microsoft Access linked service.
All required parameters must be populated in order to send to server.
@@ -44775,7 +45130,7 @@ def __init__(
self.write_behavior = write_behavior
-class MongoDbAtlasSource(CopySource): # pylint: disable=too-many-instance-attributes
+class MongoDbAtlasSource(CopySource):
"""A copy activity source for a MongoDB Atlas database.
All required parameters must be populated in order to send to server.
@@ -45073,7 +45428,7 @@ def __init__(
self.limit = limit
-class MongoDbLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class MongoDbLinkedService(LinkedService):
"""Linked service for MongoDb data source.
All required parameters must be populated in order to send to server.
@@ -45637,7 +45992,7 @@ def __init__(
self.write_behavior = write_behavior
-class MongoDbV2Source(CopySource): # pylint: disable=too-many-instance-attributes
+class MongoDbV2Source(CopySource):
"""A copy activity source for a MongoDB database.
All required parameters must be populated in order to send to server.
@@ -45761,7 +46116,7 @@ def __init__(
self.additional_columns = additional_columns
-class MySqlLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class MySqlLinkedService(LinkedService):
"""Linked service for MySQL data source.
All required parameters must be populated in order to send to server.
@@ -45807,6 +46162,28 @@ class MySqlLinkedService(LinkedService): # pylint: disable=too-many-instance-at
:ivar encrypted_credential: The encrypted credential used for authentication. Credentials are
encrypted using the integration runtime credential manager. Type: string.
:vartype encrypted_credential: str
+ :ivar allow_zero_date_time: This allows the special “zero” date value 0000-00-00 to be
+ retrieved from the database. Type: boolean.
+ :vartype allow_zero_date_time: JSON
+ :ivar connection_timeout: The length of time (in seconds) to wait for a connection to the
+ server before terminating the attempt and generating an error. Type: integer.
+ :vartype connection_timeout: JSON
+ :ivar convert_zero_date_time: True to return DateTime.MinValue for date or datetime columns
+ that have disallowed values. Type: boolean.
+ :vartype convert_zero_date_time: JSON
+ :ivar guid_format: Determines which column type (if any) should be read as a GUID. Type:
+ string. None: No column types are automatically read as a Guid; Char36: All CHAR(36) columns
+ are read/written as a Guid using lowercase hex with hyphens, which matches UUID.
+ :vartype guid_format: JSON
+ :ivar ssl_cert: The path to the client’s SSL certificate file in PEM format. SslKey must also
+ be specified. Type: string.
+ :vartype ssl_cert: JSON
+ :ivar ssl_key: The path to the client’s SSL private key in PEM format. SslCert must also be
+ specified. Type: string.
+ :vartype ssl_key: JSON
+ :ivar treat_tiny_as_boolean: When set to true, TINYINT(1) values are returned as booleans.
+ Type: bool.
+ :vartype treat_tiny_as_boolean: JSON
"""
_validation = {
@@ -45831,9 +46208,16 @@ class MySqlLinkedService(LinkedService): # pylint: disable=too-many-instance-at
"use_system_trust_store": {"key": "typeProperties.useSystemTrustStore", "type": "object"},
"password": {"key": "typeProperties.password", "type": "AzureKeyVaultSecretReference"},
"encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"},
+ "allow_zero_date_time": {"key": "typeProperties.allowZeroDateTime", "type": "object"},
+ "connection_timeout": {"key": "typeProperties.connectionTimeout", "type": "object"},
+ "convert_zero_date_time": {"key": "typeProperties.convertZeroDateTime", "type": "object"},
+ "guid_format": {"key": "typeProperties.guidFormat", "type": "object"},
+ "ssl_cert": {"key": "typeProperties.sslCert", "type": "object"},
+ "ssl_key": {"key": "typeProperties.sslKey", "type": "object"},
+ "treat_tiny_as_boolean": {"key": "typeProperties.treatTinyAsBoolean", "type": "object"},
}
- def __init__(
+ def __init__( # pylint: disable=too-many-locals
self,
*,
additional_properties: Optional[Dict[str, JSON]] = None,
@@ -45852,6 +46236,13 @@ def __init__(
use_system_trust_store: Optional[JSON] = None,
password: Optional["_models.AzureKeyVaultSecretReference"] = None,
encrypted_credential: Optional[str] = None,
+ allow_zero_date_time: Optional[JSON] = None,
+ connection_timeout: Optional[JSON] = None,
+ convert_zero_date_time: Optional[JSON] = None,
+ guid_format: Optional[JSON] = None,
+ ssl_cert: Optional[JSON] = None,
+ ssl_key: Optional[JSON] = None,
+ treat_tiny_as_boolean: Optional[JSON] = None,
**kwargs: Any
) -> None:
"""
@@ -45894,6 +46285,28 @@ def __init__(
:keyword encrypted_credential: The encrypted credential used for authentication. Credentials
are encrypted using the integration runtime credential manager. Type: string.
:paramtype encrypted_credential: str
+ :keyword allow_zero_date_time: This allows the special “zero” date value 0000-00-00 to be
+ retrieved from the database. Type: boolean.
+ :paramtype allow_zero_date_time: JSON
+ :keyword connection_timeout: The length of time (in seconds) to wait for a connection to the
+ server before terminating the attempt and generating an error. Type: integer.
+ :paramtype connection_timeout: JSON
+ :keyword convert_zero_date_time: True to return DateTime.MinValue for date or datetime columns
+ that have disallowed values. Type: boolean.
+ :paramtype convert_zero_date_time: JSON
+ :keyword guid_format: Determines which column type (if any) should be read as a GUID. Type:
+ string. None: No column types are automatically read as a Guid; Char36: All CHAR(36) columns
+ are read/written as a Guid using lowercase hex with hyphens, which matches UUID.
+ :paramtype guid_format: JSON
+ :keyword ssl_cert: The path to the client’s SSL certificate file in PEM format. SslKey must
+ also be specified. Type: string.
+ :paramtype ssl_cert: JSON
+ :keyword ssl_key: The path to the client’s SSL private key in PEM format. SslCert must also be
+ specified. Type: string.
+ :paramtype ssl_key: JSON
+ :keyword treat_tiny_as_boolean: When set to true, TINYINT(1) values are returned as booleans.
+ Type: bool.
+ :paramtype treat_tiny_as_boolean: JSON
"""
super().__init__(
additional_properties=additional_properties,
@@ -45915,6 +46328,13 @@ def __init__(
self.use_system_trust_store = use_system_trust_store
self.password = password
self.encrypted_credential = encrypted_credential
+ self.allow_zero_date_time = allow_zero_date_time
+ self.connection_timeout = connection_timeout
+ self.convert_zero_date_time = convert_zero_date_time
+ self.guid_format = guid_format
+ self.ssl_cert = ssl_cert
+ self.ssl_key = ssl_key
+ self.treat_tiny_as_boolean = treat_tiny_as_boolean
class MySqlSource(TabularSource):
@@ -46267,7 +46687,7 @@ def __init__(
self.partition_lower_bound = partition_lower_bound
-class NetezzaSource(TabularSource): # pylint: disable=too-many-instance-attributes
+class NetezzaSource(TabularSource):
"""A copy activity Netezza source.
All required parameters must be populated in order to send to server.
@@ -46386,7 +46806,7 @@ def __init__(
self.partition_settings = partition_settings
-class NetezzaTableDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class NetezzaTableDataset(Dataset):
"""Netezza dataset.
All required parameters must be populated in order to send to server.
@@ -46542,7 +46962,7 @@ def __init__(
self.type = type
-class ODataLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class ODataLinkedService(LinkedService):
"""Open Data Protocol (OData) linked service.
All required parameters must be populated in order to send to server.
@@ -46967,7 +47387,7 @@ def __init__(
self.additional_columns = additional_columns
-class OdbcLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class OdbcLinkedService(LinkedService):
"""Open Database Connectivity (ODBC) linked service.
All required parameters must be populated in order to send to server.
@@ -47400,7 +47820,7 @@ def __init__(
self.table_name = table_name
-class Office365Dataset(Dataset): # pylint: disable=too-many-instance-attributes
+class Office365Dataset(Dataset):
"""The Office365 account.
All required parameters must be populated in order to send to server.
@@ -47514,7 +47934,7 @@ def __init__(
self.predicate = predicate
-class Office365LinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class Office365LinkedService(LinkedService):
"""Office365 linked service.
All required parameters must be populated in order to send to server.
@@ -47635,7 +48055,7 @@ def __init__(
self.encrypted_credential = encrypted_credential
-class Office365Source(CopySource): # pylint: disable=too-many-instance-attributes
+class Office365Source(CopySource):
"""A copy activity source for an Office 365 service.
All required parameters must be populated in order to send to server.
@@ -48114,7 +48534,7 @@ def __init__(
self.metric_specifications = metric_specifications
-class OracleCloudStorageLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class OracleCloudStorageLinkedService(LinkedService):
"""Linked service for Oracle Cloud Storage.
All required parameters must be populated in order to send to server.
@@ -48300,7 +48720,7 @@ def __init__(
self.version = version
-class OracleCloudStorageReadSettings(StoreReadSettings): # pylint: disable=too-many-instance-attributes
+class OracleCloudStorageReadSettings(StoreReadSettings):
"""Oracle Cloud Storage read settings.
All required parameters must be populated in order to send to server.
@@ -48605,7 +49025,7 @@ def __init__(
self.partition_lower_bound = partition_lower_bound
-class OracleServiceCloudLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class OracleServiceCloudLinkedService(LinkedService):
"""Oracle Service Cloud linked service.
All required parameters must be populated in order to send to server.
@@ -49051,7 +49471,7 @@ def __init__(
self.pre_copy_script = pre_copy_script
-class OracleSource(CopySource): # pylint: disable=too-many-instance-attributes
+class OracleSource(CopySource):
"""A copy activity Oracle source.
All required parameters must be populated in order to send to server.
@@ -49170,7 +49590,7 @@ def __init__(
self.additional_columns = additional_columns
-class OracleTableDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class OracleTableDataset(Dataset):
"""The on-premises Oracle database dataset.
All required parameters must be populated in order to send to server.
@@ -49292,7 +49712,7 @@ def __init__(
self.table = table
-class OrcDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class OrcDataset(Dataset):
"""ORC dataset.
All required parameters must be populated in order to send to server.
@@ -49796,7 +50216,7 @@ def __init__(
self.default_value = default_value
-class ParquetDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class ParquetDataset(Dataset):
"""Parquet dataset.
All required parameters must be populated in order to send to server.
@@ -50261,7 +50681,7 @@ def __init__(
self.file_name_prefix = file_name_prefix
-class PaypalLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class PaypalLinkedService(LinkedService):
"""Paypal Service linked service.
All required parameters must be populated in order to send to server.
@@ -50600,7 +51020,7 @@ def __init__(
self.query = query
-class PhoenixLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class PhoenixLinkedService(LinkedService):
"""Phoenix server linked service.
All required parameters must be populated in order to send to server.
@@ -50786,7 +51206,7 @@ def __init__(
self.encrypted_credential = encrypted_credential
-class PhoenixObjectDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class PhoenixObjectDataset(Dataset):
"""Phoenix server dataset.
All required parameters must be populated in order to send to server.
@@ -51212,7 +51632,7 @@ def __init__(
self.name = name
-class PipelineResource(SubResource): # pylint: disable=too-many-instance-attributes
+class PipelineResource(SubResource):
"""Pipeline resource type.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -51326,7 +51746,7 @@ def __init__(
self.policy = policy
-class PipelineRun(_serialization.Model): # pylint: disable=too-many-instance-attributes
+class PipelineRun(_serialization.Model):
"""Information about a pipeline run.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -51764,7 +52184,7 @@ def __init__(
self.query = query
-class PostgreSqlTableDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class PostgreSqlTableDataset(Dataset):
"""The PostgreSQL table dataset.
All required parameters must be populated in order to send to server.
@@ -51884,7 +52304,7 @@ def __init__(
self.schema_type_properties_schema = schema_type_properties_schema
-class PostgreSqlV2LinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class PostgreSqlV2LinkedService(LinkedService):
"""Linked service for PostgreSQLV2 data source.
All required parameters must be populated in order to send to server.
@@ -51912,6 +52332,8 @@ class PostgreSqlV2LinkedService(LinkedService): # pylint: disable=too-many-inst
:vartype username: JSON
:ivar database: Database name for connection. Type: string. Required.
:vartype database: JSON
+ :ivar authentication_type: The authentication type to use. Type: string. Required.
+ :vartype authentication_type: JSON
:ivar ssl_mode: SSL mode for connection. Type: integer. 0: disable, 1:allow, 2: prefer, 3:
require, 4: verify-ca, 5: verify-full. Type: integer. Required.
:vartype ssl_mode: JSON
@@ -51960,6 +52382,7 @@ class PostgreSqlV2LinkedService(LinkedService): # pylint: disable=too-many-inst
"server": {"required": True},
"username": {"required": True},
"database": {"required": True},
+ "authentication_type": {"required": True},
"ssl_mode": {"required": True},
}
@@ -51975,6 +52398,7 @@ class PostgreSqlV2LinkedService(LinkedService): # pylint: disable=too-many-inst
"port": {"key": "typeProperties.port", "type": "object"},
"username": {"key": "typeProperties.username", "type": "object"},
"database": {"key": "typeProperties.database", "type": "object"},
+ "authentication_type": {"key": "typeProperties.authenticationType", "type": "object"},
"ssl_mode": {"key": "typeProperties.sslMode", "type": "object"},
"schema": {"key": "typeProperties.schema", "type": "object"},
"pooling": {"key": "typeProperties.pooling", "type": "object"},
@@ -51998,6 +52422,7 @@ def __init__( # pylint: disable=too-many-locals
server: JSON,
username: JSON,
database: JSON,
+ authentication_type: JSON,
ssl_mode: JSON,
additional_properties: Optional[Dict[str, JSON]] = None,
version: Optional[str] = None,
@@ -52044,6 +52469,8 @@ def __init__( # pylint: disable=too-many-locals
:paramtype username: JSON
:keyword database: Database name for connection. Type: string. Required.
:paramtype database: JSON
+ :keyword authentication_type: The authentication type to use. Type: string. Required.
+ :paramtype authentication_type: JSON
:keyword ssl_mode: SSL mode for connection. Type: integer. 0: disable, 1:allow, 2: prefer, 3:
require, 4: verify-ca, 5: verify-full. Type: integer. Required.
:paramtype ssl_mode: JSON
@@ -52102,6 +52529,7 @@ def __init__( # pylint: disable=too-many-locals
self.port = port
self.username = username
self.database = database
+ self.authentication_type = authentication_type
self.ssl_mode = ssl_mode
self.schema = schema
self.pooling = pooling
@@ -52219,7 +52647,7 @@ def __init__(
self.query = query
-class PostgreSqlV2TableDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class PostgreSqlV2TableDataset(Dataset):
"""The PostgreSQLV2 table dataset.
All required parameters must be populated in order to send to server.
@@ -52519,7 +52947,7 @@ def __init__(
self.script = script
-class PrestoLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class PrestoLinkedService(LinkedService):
"""Presto server linked service.
All required parameters must be populated in order to send to server.
@@ -52716,7 +53144,7 @@ def __init__(
self.encrypted_credential = encrypted_credential
-class PrestoObjectDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class PrestoObjectDataset(Dataset):
"""Presto server dataset.
All required parameters must be populated in order to send to server.
@@ -53412,7 +53840,7 @@ def __init__(
self.encrypted_credential = encrypted_credential
-class QuickBooksLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class QuickBooksLinkedService(LinkedService):
"""QuickBooks server linked service.
All required parameters must be populated in order to send to server.
@@ -54301,7 +54729,7 @@ def __init__(
self.rerun_concurrency = rerun_concurrency
-class ResponsysLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class ResponsysLinkedService(LinkedService):
"""Responsys linked service.
All required parameters must be populated in order to send to server.
@@ -54646,7 +55074,7 @@ def __init__(
self.query = query
-class RestResourceDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class RestResourceDataset(Dataset):
"""A Rest service dataset.
All required parameters must be populated in order to send to server.
@@ -54782,7 +55210,7 @@ def __init__(
self.pagination_rules = pagination_rules
-class RestServiceLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class RestServiceLinkedService(LinkedService):
"""Rest Service linked service.
All required parameters must be populated in order to send to server.
@@ -55063,7 +55491,7 @@ def __init__( # pylint: disable=too-many-locals
self.service_principal_embedded_cert_password = service_principal_embedded_cert_password
-class RestSink(CopySink): # pylint: disable=too-many-instance-attributes
+class RestSink(CopySink):
"""A copy activity Rest service Sink.
All required parameters must be populated in order to send to server.
@@ -55205,7 +55633,7 @@ def __init__(
self.http_compression_type = http_compression_type
-class RestSource(CopySource): # pylint: disable=too-many-instance-attributes
+class RestSource(CopySource):
"""A copy activity Rest service source.
All required parameters must be populated in order to send to server.
@@ -55553,7 +55981,7 @@ def __init__(
self.order = order
-class SalesforceLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class SalesforceLinkedService(LinkedService):
"""Linked service for Salesforce.
All required parameters must be populated in order to send to server.
@@ -55681,7 +56109,7 @@ def __init__(
self.encrypted_credential = encrypted_credential
-class SalesforceMarketingCloudLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class SalesforceMarketingCloudLinkedService(LinkedService):
"""Salesforce Marketing Cloud linked service.
All required parameters must be populated in order to send to server.
@@ -56130,7 +56558,7 @@ def __init__(
self.object_api_name = object_api_name
-class SalesforceServiceCloudLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class SalesforceServiceCloudLinkedService(LinkedService):
"""Linked service for Salesforce Service Cloud.
All required parameters must be populated in order to send to server.
@@ -56371,7 +56799,7 @@ def __init__(
self.object_api_name = object_api_name
-class SalesforceServiceCloudSink(CopySink): # pylint: disable=too-many-instance-attributes
+class SalesforceServiceCloudSink(CopySink):
"""A copy activity Salesforce Service Cloud sink.
All required parameters must be populated in order to send to server.
@@ -56600,7 +57028,7 @@ def __init__(
self.additional_columns = additional_columns
-class SalesforceServiceCloudV2LinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class SalesforceServiceCloudV2LinkedService(LinkedService):
"""Linked service for Salesforce Service Cloud V2.
All required parameters must be populated in order to send to server.
@@ -56730,7 +57158,7 @@ def __init__(
self.encrypted_credential = encrypted_credential
-class SalesforceServiceCloudV2ObjectDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class SalesforceServiceCloudV2ObjectDataset(Dataset):
"""The Salesforce Service Cloud V2 object dataset.
All required parameters must be populated in order to send to server.
@@ -56843,7 +57271,7 @@ def __init__(
self.report_id = report_id
-class SalesforceServiceCloudV2Sink(CopySink): # pylint: disable=too-many-instance-attributes
+class SalesforceServiceCloudV2Sink(CopySink):
"""A copy activity Salesforce Service Cloud V2 sink.
All required parameters must be populated in order to send to server.
@@ -57091,7 +57519,7 @@ def __init__(
self.additional_columns = additional_columns
-class SalesforceSink(CopySink): # pylint: disable=too-many-instance-attributes
+class SalesforceSink(CopySink):
"""A copy activity Salesforce sink.
All required parameters must be populated in order to send to server.
@@ -57329,7 +57757,7 @@ def __init__(
self.read_behavior = read_behavior
-class SalesforceV2LinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class SalesforceV2LinkedService(LinkedService):
"""Linked service for Salesforce V2.
All required parameters must be populated in order to send to server.
@@ -57459,7 +57887,7 @@ def __init__(
self.encrypted_credential = encrypted_credential
-class SalesforceV2ObjectDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class SalesforceV2ObjectDataset(Dataset):
"""The Salesforce V2 object dataset.
All required parameters must be populated in order to send to server.
@@ -57572,7 +58000,7 @@ def __init__(
self.report_id = report_id
-class SalesforceV2Sink(CopySink): # pylint: disable=too-many-instance-attributes
+class SalesforceV2Sink(CopySink):
"""A copy activity Salesforce V2 sink.
All required parameters must be populated in order to send to server.
@@ -57701,7 +58129,7 @@ def __init__(
self.ignore_null_values = ignore_null_values
-class SalesforceV2Source(TabularSource): # pylint: disable=too-many-instance-attributes
+class SalesforceV2Source(TabularSource):
"""A copy activity Salesforce V2 source.
All required parameters must be populated in order to send to server.
@@ -57742,6 +58170,9 @@ class SalesforceV2Source(TabularSource): # pylint: disable=too-many-instance-at
:ivar include_deleted_objects: This property control whether query result contains Deleted
objects. Default is false. Type: boolean (or Expression with resultType boolean).
:vartype include_deleted_objects: JSON
    :ivar page_size: Page size for each http request; too large a pageSize will cause a timeout,
     default 300,000. Type: integer (or Expression with resultType integer).
+ :vartype page_size: JSON
"""
_validation = {
@@ -57760,6 +58191,7 @@ class SalesforceV2Source(TabularSource): # pylint: disable=too-many-instance-at
"soql_query": {"key": "SOQLQuery", "type": "object"},
"query": {"key": "query", "type": "object"},
"include_deleted_objects": {"key": "includeDeletedObjects", "type": "object"},
+ "page_size": {"key": "pageSize", "type": "object"},
}
def __init__(
@@ -57775,6 +58207,7 @@ def __init__(
soql_query: Optional[JSON] = None,
query: Optional[JSON] = None,
include_deleted_objects: Optional[JSON] = None,
+ page_size: Optional[JSON] = None,
**kwargs: Any
) -> None:
"""
@@ -57812,6 +58245,9 @@ def __init__(
:keyword include_deleted_objects: This property control whether query result contains Deleted
objects. Default is false. Type: boolean (or Expression with resultType boolean).
:paramtype include_deleted_objects: JSON
    :keyword page_size: Page size for each http request; too large a pageSize will cause a timeout,
     default 300,000. Type: integer (or Expression with resultType integer).
+ :paramtype page_size: JSON
"""
super().__init__(
additional_properties=additional_properties,
@@ -57827,6 +58263,7 @@ def __init__(
self.soql_query = soql_query
self.query = query
self.include_deleted_objects = include_deleted_objects
+ self.page_size = page_size
class SapBwCubeDataset(Dataset):
@@ -57924,7 +58361,7 @@ def __init__(
self.type: str = "SapBwCube"
-class SapBWLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class SapBWLinkedService(LinkedService):
"""SAP Business Warehouse Linked Service.
All required parameters must be populated in order to send to server.
@@ -58153,7 +58590,7 @@ def __init__(
self.query = query
-class SapCloudForCustomerLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class SapCloudForCustomerLinkedService(LinkedService):
"""Linked service for SAP Cloud for Customer.
All required parameters must be populated in order to send to server.
@@ -58603,7 +59040,7 @@ def __init__(
self.http_request_timeout = http_request_timeout
-class SapEccLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class SapEccLinkedService(LinkedService):
"""Linked service for SAP ERP Central Component(SAP ECC).
All required parameters must be populated in order to send to server.
@@ -58936,7 +59373,7 @@ def __init__(
self.http_request_timeout = http_request_timeout
-class SapHanaLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class SapHanaLinkedService(LinkedService):
"""SAP HANA Linked Service.
All required parameters must be populated in order to send to server.
@@ -59084,7 +59521,7 @@ def __init__(self, *, partition_column_name: Optional[JSON] = None, **kwargs: An
self.partition_column_name = partition_column_name
-class SapHanaSource(TabularSource): # pylint: disable=too-many-instance-attributes
+class SapHanaSource(TabularSource):
"""A copy activity source for SAP HANA source.
All required parameters must be populated in order to send to server.
@@ -59210,7 +59647,7 @@ def __init__(
self.partition_settings = partition_settings
-class SapHanaTableDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class SapHanaTableDataset(Dataset):
"""SAP HANA Table properties.
All required parameters must be populated in order to send to server.
@@ -59322,7 +59759,7 @@ def __init__(
self.table = table
-class SapOdpLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class SapOdpLinkedService(LinkedService):
"""SAP ODP Linked Service.
All required parameters must be populated in order to send to server.
@@ -59560,7 +59997,7 @@ def __init__( # pylint: disable=too-many-locals
self.encrypted_credential = encrypted_credential
-class SapOdpResourceDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class SapOdpResourceDataset(Dataset):
"""SAP ODP Resource properties.
All required parameters must be populated in order to send to server.
@@ -59675,7 +60112,7 @@ def __init__(
self.object_name = object_name
-class SapOdpSource(TabularSource): # pylint: disable=too-many-instance-attributes
+class SapOdpSource(TabularSource):
"""A copy activity source for SAP ODP source.
All required parameters must be populated in order to send to server.
@@ -59804,7 +60241,7 @@ def __init__(
self.projection = projection
-class SapOpenHubLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class SapOpenHubLinkedService(LinkedService):
"""SAP Business Warehouse Open Hub Destination Linked Service.
All required parameters must be populated in order to send to server.
@@ -59980,7 +60417,7 @@ def __init__(
self.encrypted_credential = encrypted_credential
-class SapOpenHubSource(TabularSource): # pylint: disable=too-many-instance-attributes
+class SapOpenHubSource(TabularSource):
"""A copy activity source for SAP Business Warehouse Open Hub Destination source.
All required parameters must be populated in order to send to server.
@@ -60113,7 +60550,7 @@ def __init__(
self.sap_data_column_delimiter = sap_data_column_delimiter
-class SapOpenHubTableDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class SapOpenHubTableDataset(Dataset):
"""Sap Business Warehouse Open Hub Destination Table properties.
All required parameters must be populated in order to send to server.
@@ -60238,7 +60675,7 @@ def __init__(
self.base_request_id = base_request_id
-class SapTableLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class SapTableLinkedService(LinkedService):
"""SAP Table Linked Service.
All required parameters must be populated in order to send to server.
@@ -60621,7 +61058,7 @@ def __init__(
self.table_name = table_name
-class SapTableSource(TabularSource): # pylint: disable=too-many-instance-attributes
+class SapTableSource(TabularSource):
"""A copy activity source for SAP Table source.
All required parameters must be populated in order to send to server.
@@ -60991,7 +61428,7 @@ def __init__(self, *, name: str, uri: str, roles: JSON, parameters: Optional[str
self.parameters = parameters
-class ScriptActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes
+class ScriptActivity(ExecutionActivity):
"""Script activity type.
All required parameters must be populated in order to send to server.
@@ -61442,7 +61879,7 @@ def __init__(
self.self_contained_interactive_authoring_enabled = self_contained_interactive_authoring_enabled
-class SelfHostedIntegrationRuntimeNode(_serialization.Model): # pylint: disable=too-many-instance-attributes
+class SelfHostedIntegrationRuntimeNode(_serialization.Model):
"""Properties of Self-hosted integration runtime node.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -61565,7 +62002,7 @@ def __init__(self, *, additional_properties: Optional[Dict[str, JSON]] = None, *
self.max_concurrent_jobs = None
-class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): # pylint: disable=too-many-instance-attributes
+class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus):
"""Self-hosted integration runtime status.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -61717,7 +62154,7 @@ def __init__(
self.self_contained_interactive_authoring_enabled = None
-class ServiceNowLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class ServiceNowLinkedService(LinkedService):
"""ServiceNow server linked service.
All required parameters must be populated in order to send to server.
@@ -62087,7 +62524,7 @@ def __init__(
self.query = query
-class ServiceNowV2LinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class ServiceNowV2LinkedService(LinkedService):
"""ServiceNowV2 server linked service.
All required parameters must be populated in order to send to server.
@@ -62363,6 +62800,9 @@ class ServiceNowV2Source(TabularSource):
:vartype additional_columns: JSON
:ivar expression: Expression to filter data from source.
:vartype expression: ~azure.mgmt.datafactory.models.ExpressionV2
+ :ivar page_size: Page size of the result. Type: integer (or Expression with resultType
+ integer).
+ :vartype page_size: JSON
"""
_validation = {
@@ -62379,6 +62819,7 @@ class ServiceNowV2Source(TabularSource):
"query_timeout": {"key": "queryTimeout", "type": "object"},
"additional_columns": {"key": "additionalColumns", "type": "object"},
"expression": {"key": "expression", "type": "ExpressionV2"},
+ "page_size": {"key": "pageSize", "type": "object"},
}
def __init__(
@@ -62392,6 +62833,7 @@ def __init__(
query_timeout: Optional[JSON] = None,
additional_columns: Optional[JSON] = None,
expression: Optional["_models.ExpressionV2"] = None,
+ page_size: Optional[JSON] = None,
**kwargs: Any
) -> None:
"""
@@ -62418,6 +62860,9 @@ def __init__(
:paramtype additional_columns: JSON
:keyword expression: Expression to filter data from source.
:paramtype expression: ~azure.mgmt.datafactory.models.ExpressionV2
+ :keyword page_size: Page size of the result. Type: integer (or Expression with resultType
+ integer).
+ :paramtype page_size: JSON
"""
super().__init__(
additional_properties=additional_properties,
@@ -62431,6 +62876,7 @@ def __init__(
)
self.type: str = "ServiceNowV2Source"
self.expression = expression
+ self.page_size = page_size
class ServicePrincipalCredential(Credential):
@@ -62504,7 +62950,7 @@ def __init__(
self.tenant = tenant
-class SetVariableActivity(ControlActivity): # pylint: disable=too-many-instance-attributes
+class SetVariableActivity(ControlActivity):
"""Set value for a Variable.
All required parameters must be populated in order to send to server.
@@ -62674,7 +63120,7 @@ def __init__(
self.type: str = "SftpLocation"
-class SftpReadSettings(StoreReadSettings): # pylint: disable=too-many-instance-attributes
+class SftpReadSettings(StoreReadSettings):
"""Sftp read settings.
All required parameters must be populated in order to send to server.
@@ -62823,7 +63269,7 @@ def __init__(
self.disable_chunking = disable_chunking
-class SftpServerLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class SftpServerLinkedService(LinkedService):
"""A linked service for an SSH File Transfer Protocol (SFTP) server.
All required parameters must be populated in order to send to server.
@@ -63096,7 +63542,7 @@ def __init__(
self.use_temp_file_rename = use_temp_file_rename
-class SharePointOnlineListLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class SharePointOnlineListLinkedService(LinkedService):
"""SharePoint Online List linked service.
All required parameters must be populated in order to send to server.
@@ -63459,7 +63905,7 @@ def __init__(
self.http_request_timeout = http_request_timeout
-class ShopifyLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class ShopifyLinkedService(LinkedService):
"""Shopify Service linked service.
All required parameters must be populated in order to send to server.
@@ -63915,7 +64361,7 @@ def __init__(
self.encrypted_credential = encrypted_credential
-class SnowflakeDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class SnowflakeDataset(Dataset):
"""The snowflake dataset.
All required parameters must be populated in order to send to server.
@@ -64469,7 +64915,7 @@ def __init__(
self.export_settings = export_settings
-class SnowflakeV2Dataset(Dataset): # pylint: disable=too-many-instance-attributes
+class SnowflakeV2Dataset(Dataset):
"""The snowflake dataset.
All required parameters must be populated in order to send to server.
@@ -64582,7 +65028,7 @@ def __init__(
self.table = table
-class SnowflakeV2LinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class SnowflakeV2LinkedService(LinkedService):
"""Snowflake linked service.
All required parameters must be populated in order to send to server.
@@ -64633,6 +65079,8 @@ class SnowflakeV2LinkedService(LinkedService): # pylint: disable=too-many-insta
:ivar private_key_passphrase: The Azure key vault secret reference of private key password for
KeyPair auth with encrypted private key.
:vartype private_key_passphrase: ~azure.mgmt.datafactory.models.SecretBase
+ :ivar host: The host name of the Snowflake account.
+ :vartype host: JSON
:ivar encrypted_credential: The encrypted credential used for authentication. Credentials are
encrypted using the integration runtime credential manager. Type: string.
:vartype encrypted_credential: str
@@ -64665,6 +65113,7 @@ class SnowflakeV2LinkedService(LinkedService): # pylint: disable=too-many-insta
"scope": {"key": "typeProperties.scope", "type": "object"},
"private_key": {"key": "typeProperties.privateKey", "type": "SecretBase"},
"private_key_passphrase": {"key": "typeProperties.privateKeyPassphrase", "type": "SecretBase"},
+ "host": {"key": "typeProperties.host", "type": "object"},
"encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"},
}
@@ -64689,6 +65138,7 @@ def __init__(
scope: Optional[JSON] = None,
private_key: Optional["_models.SecretBase"] = None,
private_key_passphrase: Optional["_models.SecretBase"] = None,
+ host: Optional[JSON] = None,
encrypted_credential: Optional[str] = None,
**kwargs: Any
) -> None:
@@ -64738,6 +65188,8 @@ def __init__(
:keyword private_key_passphrase: The Azure key vault secret reference of private key password
for KeyPair auth with encrypted private key.
:paramtype private_key_passphrase: ~azure.mgmt.datafactory.models.SecretBase
+ :keyword host: The host name of the Snowflake account.
+ :paramtype host: JSON
:keyword encrypted_credential: The encrypted credential used for authentication. Credentials
are encrypted using the integration runtime credential manager. Type: string.
:paramtype encrypted_credential: str
@@ -64764,6 +65216,7 @@ def __init__(
self.scope = scope
self.private_key = private_key
self.private_key_passphrase = private_key_passphrase
+ self.host = host
self.encrypted_credential = encrypted_credential
@@ -65003,7 +65456,7 @@ def __init__(
self.reference_name = reference_name
-class SparkLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class SparkLinkedService(LinkedService):
"""Spark Server linked service.
All required parameters must be populated in order to send to server.
@@ -65205,7 +65658,7 @@ def __init__(
self.encrypted_credential = encrypted_credential
-class SparkObjectDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class SparkObjectDataset(Dataset):
"""Spark Server dataset.
All required parameters must be populated in order to send to server.
@@ -65490,7 +65943,7 @@ def __init__(
self.credential = credential
-class SqlDWSink(CopySink): # pylint: disable=too-many-instance-attributes
+class SqlDWSink(CopySink):
"""A copy activity SQL Data Warehouse sink.
All required parameters must be populated in order to send to server.
@@ -65660,7 +66113,7 @@ def __init__(
self.upsert_settings = upsert_settings
-class SqlDWSource(TabularSource): # pylint: disable=too-many-instance-attributes
+class SqlDWSource(TabularSource):
"""A copy activity SQL Data Warehouse source.
All required parameters must be populated in order to send to server.
@@ -65845,7 +66298,7 @@ def __init__(
self.keys = keys
-class SqlMISink(CopySink): # pylint: disable=too-many-instance-attributes
+class SqlMISink(CopySink):
"""A copy activity Azure SQL Managed Instance sink.
All required parameters must be populated in order to send to server.
@@ -66018,7 +66471,7 @@ def __init__(
self.upsert_settings = upsert_settings
-class SqlMISource(TabularSource): # pylint: disable=too-many-instance-attributes
+class SqlMISource(TabularSource):
"""A copy activity Azure SQL Managed Instance source.
All required parameters must be populated in order to send to server.
@@ -66232,7 +66685,7 @@ def __init__(
self.partition_lower_bound = partition_lower_bound
-class SqlServerLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class SqlServerLinkedService(LinkedService):
"""SQL Server linked service.
All required parameters must be populated in order to send to server.
@@ -66583,9 +67036,7 @@ def __init__( # pylint: disable=too-many-locals
self.credential = credential
-class SqlServerLinkedServiceTypeProperties(
- SqlServerBaseLinkedServiceTypeProperties
-): # pylint: disable=too-many-instance-attributes
+class SqlServerLinkedServiceTypeProperties(SqlServerBaseLinkedServiceTypeProperties):
"""SQL Server linked service properties.
:ivar server: The name or network address of the instance of SQL Server to which to connect,
@@ -66879,7 +67330,7 @@ def __init__( # pylint: disable=too-many-locals
self.credential = credential
-class SqlServerSink(CopySink): # pylint: disable=too-many-instance-attributes
+class SqlServerSink(CopySink):
"""A copy activity SQL server sink.
All required parameters must be populated in order to send to server.
@@ -67052,7 +67503,7 @@ def __init__(
self.upsert_settings = upsert_settings
-class SqlServerSource(TabularSource): # pylint: disable=too-many-instance-attributes
+class SqlServerSource(TabularSource):
"""A copy activity SQL server source.
All required parameters must be populated in order to send to server.
@@ -67209,7 +67660,7 @@ def __init__(
self.partition_settings = partition_settings
-class SqlServerStoredProcedureActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes
+class SqlServerStoredProcedureActivity(ExecutionActivity):
"""SQL stored procedure activity type.
All required parameters must be populated in order to send to server.
@@ -67330,7 +67781,7 @@ def __init__(
self.stored_procedure_parameters = stored_procedure_parameters
-class SqlServerTableDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class SqlServerTableDataset(Dataset):
"""The on-premises SQL Server dataset.
All required parameters must be populated in order to send to server.
@@ -67452,7 +67903,7 @@ def __init__(
self.table = table
-class SqlSink(CopySink): # pylint: disable=too-many-instance-attributes
+class SqlSink(CopySink):
"""A copy activity SQL sink.
All required parameters must be populated in order to send to server.
@@ -67625,7 +68076,7 @@ def __init__(
self.upsert_settings = upsert_settings
-class SqlSource(TabularSource): # pylint: disable=too-many-instance-attributes
+class SqlSource(TabularSource):
"""A copy activity SQL source.
All required parameters must be populated in order to send to server.
@@ -67820,7 +68271,7 @@ def __init__(
self.keys = keys
-class SquareLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class SquareLinkedService(LinkedService):
"""Square Service linked service.
All required parameters must be populated in order to send to server.
@@ -68874,7 +69325,7 @@ def __init__(
self.child_packages = child_packages
-class SsisParameter(_serialization.Model): # pylint: disable=too-many-instance-attributes
+class SsisParameter(_serialization.Model):
"""Ssis parameter.
:ivar id: Parameter id.
@@ -69248,7 +69699,7 @@ def __init__(
self.type = type
-class SwitchActivity(ControlActivity): # pylint: disable=too-many-instance-attributes
+class SwitchActivity(ControlActivity):
"""This activity evaluates an expression and executes activities under the cases property that
correspond to the expression evaluation expected in the equals property.
@@ -69395,7 +69846,7 @@ def __init__(
self.activities = activities
-class SybaseLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class SybaseLinkedService(LinkedService):
"""Linked service for Sybase data source.
All required parameters must be populated in order to send to server.
@@ -69734,7 +70185,7 @@ def __init__(
self.table_name = table_name
-class SynapseNotebookActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes
+class SynapseNotebookActivity(ExecutionActivity):
"""Execute Synapse notebook activity.
All required parameters must be populated in order to send to server.
@@ -69968,7 +70419,7 @@ def __init__(
self.reference_name = reference_name
-class SynapseSparkJobDefinitionActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes
+class SynapseSparkJobDefinitionActivity(ExecutionActivity):
"""Execute spark job activity.
All required parameters must be populated in order to send to server.
@@ -70470,7 +70921,7 @@ def __init__(
self.preserve_compression_file_name_as_folder = preserve_compression_file_name_as_folder
-class TeamDeskLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class TeamDeskLinkedService(LinkedService):
"""Linked service for TeamDesk.
All required parameters must be populated in order to send to server.
@@ -70597,7 +71048,7 @@ def __init__(
self.encrypted_credential = encrypted_credential
-class TeradataLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class TeradataLinkedService(LinkedService):
"""Linked service for Teradata data source.
All required parameters must be populated in order to send to server.
@@ -70772,7 +71223,7 @@ def __init__(
self.partition_lower_bound = partition_lower_bound
-class TeradataSource(TabularSource): # pylint: disable=too-many-instance-attributes
+class TeradataSource(TabularSource):
"""A copy activity Teradata source.
All required parameters must be populated in order to send to server.
@@ -70889,7 +71340,7 @@ def __init__(
self.partition_settings = partition_settings
-class TeradataTableDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class TeradataTableDataset(Dataset):
"""The Teradata database dataset.
All required parameters must be populated in order to send to server.
@@ -71001,7 +71452,7 @@ def __init__(
self.table = table
-class TextFormat(DatasetStorageFormat): # pylint: disable=too-many-instance-attributes
+class TextFormat(DatasetStorageFormat):
"""The data stored in text format.
All required parameters must be populated in order to send to server.
@@ -71381,7 +71832,7 @@ def __init__(self, *, properties: "_models.Trigger", **kwargs: Any) -> None:
self.properties = properties
-class TriggerRun(_serialization.Model): # pylint: disable=too-many-instance-attributes
+class TriggerRun(_serialization.Model):
"""Trigger runs.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -71524,7 +71975,7 @@ def __init__(self, **kwargs: Any) -> None:
self.status = None
-class TumblingWindowTrigger(Trigger): # pylint: disable=too-many-instance-attributes
+class TumblingWindowTrigger(Trigger):
"""Trigger that schedules pipeline runs for all fixed time interval windows from a start time
without gaps and also supports backfill scenarios (when start time is in the past).
@@ -71891,7 +72342,7 @@ def __init__(
self.culture = culture
-class UntilActivity(ControlActivity): # pylint: disable=too-many-instance-attributes
+class UntilActivity(ControlActivity):
"""This activity executes inner activities until the specified boolean expression results to true
or timeout is reached, whichever is earlier.
@@ -72170,7 +72621,7 @@ def __init__(self, *, name: str, value: JSON, **kwargs: Any) -> None:
self.value = value
-class ValidationActivity(ControlActivity): # pylint: disable=too-many-instance-attributes
+class ValidationActivity(ControlActivity):
"""This activity verifies that an external resource exists.
All required parameters must be populated in order to send to server.
@@ -72342,7 +72793,7 @@ def __init__(
self.default_value = default_value
-class VerticaLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class VerticaLinkedService(LinkedService):
"""Vertica linked service.
All required parameters must be populated in order to send to server.
@@ -72571,7 +73022,7 @@ def __init__(
self.query = query
-class VerticaTableDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class VerticaTableDataset(Dataset):
"""Vertica dataset.
All required parameters must be populated in order to send to server.
@@ -72791,7 +73242,7 @@ def __init__(
self.wait_time_in_seconds = wait_time_in_seconds
-class WarehouseLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class WarehouseLinkedService(LinkedService):
"""Microsoft Fabric Warehouse linked service.
All required parameters must be populated in order to send to server.
@@ -72956,7 +73407,7 @@ def __init__(
self.service_principal_credential = service_principal_credential
-class WarehouseSink(CopySink): # pylint: disable=too-many-instance-attributes
+class WarehouseSink(CopySink):
"""A copy activity Microsoft Fabric Warehouse sink.
All required parameters must be populated in order to send to server.
@@ -73094,7 +73545,7 @@ def __init__(
self.write_behavior = write_behavior
-class WarehouseSource(TabularSource): # pylint: disable=too-many-instance-attributes
+class WarehouseSource(TabularSource):
"""A copy activity Microsoft Fabric Warehouse source.
All required parameters must be populated in order to send to server.
@@ -73247,7 +73698,7 @@ def __init__(
self.partition_settings = partition_settings
-class WarehouseTableDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class WarehouseTableDataset(Dataset):
"""Microsoft Fabric Warehouse dataset.
All required parameters must be populated in order to send to server.
@@ -73360,7 +73811,7 @@ def __init__(
self.table = table
-class WebActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes
+class WebActivity(ExecutionActivity):
"""Web activity.
All required parameters must be populated in order to send to server.
@@ -73814,7 +74265,7 @@ def __init__(self, *, url: JSON, pfx: "_models.SecretBase", password: "_models.S
self.password = password
-class WebHookActivity(ControlActivity): # pylint: disable=too-many-instance-attributes
+class WebHookActivity(ControlActivity):
"""WebHook activity.
All required parameters must be populated in order to send to server.
@@ -74144,7 +74595,7 @@ def __init__(
self.additional_columns = additional_columns
-class WebTableDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class WebTableDataset(Dataset):
"""The dataset points to a HTML table in the web page.
All required parameters must be populated in order to send to server.
@@ -74327,7 +74778,7 @@ def __init__(
self.document_locale = document_locale
-class XeroLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class XeroLinkedService(LinkedService):
"""Xero Service linked service.
All required parameters must be populated in order to send to server.
@@ -74678,7 +75129,7 @@ def __init__(
self.query = query
-class XmlDataset(Dataset): # pylint: disable=too-many-instance-attributes
+class XmlDataset(Dataset):
"""Xml dataset.
All required parameters must be populated in order to send to server.
@@ -74991,7 +75442,7 @@ def __init__(
self.additional_columns = additional_columns
-class ZendeskLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class ZendeskLinkedService(LinkedService):
"""Linked service for Zendesk.
All required parameters must be populated in order to send to server.
@@ -75162,7 +75613,7 @@ def __init__(
self.preserve_zip_file_name_as_folder = preserve_zip_file_name_as_folder
-class ZohoLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes
+class ZohoLinkedService(LinkedService):
"""Zoho server linked service.
All required parameters must be populated in order to send to server.
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py
index 668131aae0e5..f42af8783eb3 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py
@@ -5,33 +5,39 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._operations import Operations
-from ._factories_operations import FactoriesOperations
-from ._exposure_control_operations import ExposureControlOperations
-from ._integration_runtimes_operations import IntegrationRuntimesOperations
-from ._integration_runtime_object_metadata_operations import IntegrationRuntimeObjectMetadataOperations
-from ._integration_runtime_nodes_operations import IntegrationRuntimeNodesOperations
-from ._linked_services_operations import LinkedServicesOperations
-from ._datasets_operations import DatasetsOperations
-from ._pipelines_operations import PipelinesOperations
-from ._pipeline_runs_operations import PipelineRunsOperations
-from ._activity_runs_operations import ActivityRunsOperations
-from ._triggers_operations import TriggersOperations
-from ._trigger_runs_operations import TriggerRunsOperations
-from ._data_flows_operations import DataFlowsOperations
-from ._data_flow_debug_session_operations import DataFlowDebugSessionOperations
-from ._managed_virtual_networks_operations import ManagedVirtualNetworksOperations
-from ._managed_private_endpoints_operations import ManagedPrivateEndpointsOperations
-from ._credential_operations_operations import CredentialOperationsOperations
-from ._private_end_point_connections_operations import PrivateEndPointConnectionsOperations
-from ._private_endpoint_connection_operations import PrivateEndpointConnectionOperations
-from ._private_link_resources_operations import PrivateLinkResourcesOperations
-from ._global_parameters_operations import GlobalParametersOperations
-from ._change_data_capture_operations import ChangeDataCaptureOperations
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._operations import Operations # type: ignore
+from ._factories_operations import FactoriesOperations # type: ignore
+from ._exposure_control_operations import ExposureControlOperations # type: ignore
+from ._integration_runtimes_operations import IntegrationRuntimesOperations # type: ignore
+from ._integration_runtime_object_metadata_operations import IntegrationRuntimeObjectMetadataOperations # type: ignore
+from ._integration_runtime_nodes_operations import IntegrationRuntimeNodesOperations # type: ignore
+from ._linked_services_operations import LinkedServicesOperations # type: ignore
+from ._datasets_operations import DatasetsOperations # type: ignore
+from ._pipelines_operations import PipelinesOperations # type: ignore
+from ._pipeline_runs_operations import PipelineRunsOperations # type: ignore
+from ._activity_runs_operations import ActivityRunsOperations # type: ignore
+from ._triggers_operations import TriggersOperations # type: ignore
+from ._trigger_runs_operations import TriggerRunsOperations # type: ignore
+from ._data_flows_operations import DataFlowsOperations # type: ignore
+from ._data_flow_debug_session_operations import DataFlowDebugSessionOperations # type: ignore
+from ._managed_virtual_networks_operations import ManagedVirtualNetworksOperations # type: ignore
+from ._managed_private_endpoints_operations import ManagedPrivateEndpointsOperations # type: ignore
+from ._credential_operations_operations import CredentialOperationsOperations # type: ignore
+from ._private_end_point_connections_operations import PrivateEndPointConnectionsOperations # type: ignore
+from ._private_endpoint_connection_operations import PrivateEndpointConnectionOperations # type: ignore
+from ._private_link_resources_operations import PrivateLinkResourcesOperations # type: ignore
+from ._global_parameters_operations import GlobalParametersOperations # type: ignore
+from ._change_data_capture_operations import ChangeDataCaptureOperations # type: ignore
from ._patch import __all__ as _patch_all
-from ._patch import * # pylint: disable=unused-wildcard-import
+from ._patch import *
from ._patch import patch_sdk as _patch_sdk
__all__ = [
@@ -59,5 +65,5 @@
"GlobalParametersOperations",
"ChangeDataCaptureOperations",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_activity_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_activity_runs_operations.py
index 12464e49f484..933ce1086207 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_activity_runs_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_activity_runs_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload
+from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -30,7 +29,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -183,7 +182,7 @@ def query_by_pipeline_run(
:rtype: ~azure.mgmt.datafactory.models.ActivityRunsQueryResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_change_data_capture_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_change_data_capture_operations.py
index e7ffb77afaaa..aaaeebb08295 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_change_data_capture_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_change_data_capture_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, overload
+from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload
import urllib.parse
from azure.core.exceptions import (
@@ -32,7 +31,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -427,7 +426,7 @@ def list_by_factory(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ChangeDataCaptureListResponse] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -587,7 +586,7 @@ def create_or_update(
:rtype: ~azure.mgmt.datafactory.models.ChangeDataCaptureResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -668,7 +667,7 @@ def get(
:rtype: ~azure.mgmt.datafactory.models.ChangeDataCaptureResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -728,7 +727,7 @@ def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -783,7 +782,7 @@ def start( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -838,7 +837,7 @@ def stop( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -891,7 +890,7 @@ def status(self, resource_group_name: str, factory_name: str, change_data_captur
:rtype: str
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_credential_operations_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_credential_operations_operations.py
index adbba32f483b..39ee7bd61ed9 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_credential_operations_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_credential_operations_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, overload
+from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload
import urllib.parse
from azure.core.exceptions import (
@@ -32,7 +31,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -282,7 +281,7 @@ def list_by_factory(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.CredentialListResponse] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -438,7 +437,7 @@ def create_or_update(
:rtype: ~azure.mgmt.datafactory.models.CredentialResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -519,7 +518,7 @@ def get(
:rtype: ~azure.mgmt.datafactory.models.CredentialResource or None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -581,7 +580,7 @@ def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flow_debug_session_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flow_debug_session_operations.py
index 84a649ab4a6c..d3f8278c24ee 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flow_debug_session_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flow_debug_session_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload
+from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.exceptions import (
@@ -36,7 +35,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -282,7 +281,7 @@ def _create_initial(
request: Union[_models.CreateDataFlowDebugSessionRequest, IO[bytes]],
**kwargs: Any
) -> Iterator[bytes]:
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -492,7 +491,7 @@ def query_by_factory(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.QueryDataFlowDebugSessionsResponse] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -627,7 +626,7 @@ def add_data_flow(
:rtype: ~azure.mgmt.datafactory.models.AddDataFlowToDebugSessionResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -682,7 +681,7 @@ def add_data_flow(
return deserialized # type: ignore
@overload
- def delete( # pylint: disable=inconsistent-return-statements
+ def delete(
self,
resource_group_name: str,
factory_name: str,
@@ -708,7 +707,7 @@ def delete( # pylint: disable=inconsistent-return-statements
"""
@overload
- def delete( # pylint: disable=inconsistent-return-statements
+ def delete(
self,
resource_group_name: str,
factory_name: str,
@@ -754,7 +753,7 @@ def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -811,7 +810,7 @@ def _execute_command_initial(
request: Union[_models.DataFlowDebugCommandRequest, IO[bytes]],
**kwargs: Any
) -> Iterator[bytes]:
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flows_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flows_operations.py
index 062bf0723367..c9aa05541122 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flows_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flows_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, overload
+from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload
import urllib.parse
from azure.core.exceptions import (
@@ -32,7 +31,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -356,7 +355,7 @@ def create_or_update(
:rtype: ~azure.mgmt.datafactory.models.DataFlowResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -437,7 +436,7 @@ def get(
:rtype: ~azure.mgmt.datafactory.models.DataFlowResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -497,7 +496,7 @@ def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -556,7 +555,7 @@ def list_by_factory(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.DataFlowListResponse] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_datasets_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_datasets_operations.py
index 62880ce2cad6..3686a80ad860 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_datasets_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_datasets_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, overload
+from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload
import urllib.parse
from azure.core.exceptions import (
@@ -32,7 +31,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -282,7 +281,7 @@ def list_by_factory(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.DatasetListResponse] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -438,7 +437,7 @@ def create_or_update(
:rtype: ~azure.mgmt.datafactory.models.DatasetResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -519,7 +518,7 @@ def get(
:rtype: ~azure.mgmt.datafactory.models.DatasetResource or None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -581,7 +580,7 @@ def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_exposure_control_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_exposure_control_operations.py
index 94d33618346b..bb80c8edd349 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_exposure_control_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_exposure_control_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload
+from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -30,7 +29,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -239,7 +238,7 @@ def get_feature_value(
:rtype: ~azure.mgmt.datafactory.models.ExposureControlResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -366,7 +365,7 @@ def get_feature_value_by_factory(
:rtype: ~azure.mgmt.datafactory.models.ExposureControlResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -497,7 +496,7 @@ def query_feature_values_by_factory(
:rtype: ~azure.mgmt.datafactory.models.ExposureControlBatchResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_factories_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_factories_operations.py
index 8854b1aced51..6656f49415bd 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_factories_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_factories_operations.py
@@ -1,4 +1,4 @@
-# pylint: disable=too-many-lines,too-many-statements
+# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +8,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, overload
+from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload
import urllib.parse
from azure.core.exceptions import (
@@ -32,7 +32,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -419,7 +419,7 @@ def list(self, **kwargs: Any) -> Iterable["_models.Factory"]:
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.FactoryListResponse] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -535,7 +535,7 @@ def configure_factory_repo(
:rtype: ~azure.mgmt.datafactory.models.Factory
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -604,7 +604,7 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Ite
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.FactoryListResponse] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -750,7 +750,7 @@ def create_or_update(
:rtype: ~azure.mgmt.datafactory.models.Factory
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -879,7 +879,7 @@ def update(
:rtype: ~azure.mgmt.datafactory.models.Factory
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -951,7 +951,7 @@ def get(
:rtype: ~azure.mgmt.datafactory.models.Factory or None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1010,7 +1010,7 @@ def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1122,7 +1122,7 @@ def get_git_hub_access_token(
:rtype: ~azure.mgmt.datafactory.models.GitHubAccessTokenResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1249,7 +1249,7 @@ def get_data_plane_access(
:rtype: ~azure.mgmt.datafactory.models.AccessPolicyResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_global_parameters_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_global_parameters_operations.py
index e68dbb25f6f8..866ee719ebbb 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_global_parameters_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_global_parameters_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, overload
+from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload
import urllib.parse
from azure.core.exceptions import (
@@ -32,7 +31,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -267,7 +266,7 @@ def list_by_factory(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.GlobalParameterListResponse] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -345,7 +344,7 @@ def get(
:rtype: ~azure.mgmt.datafactory.models.GlobalParameterResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -470,7 +469,7 @@ def create_or_update(
:rtype: ~azure.mgmt.datafactory.models.GlobalParameterResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -541,7 +540,7 @@ def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_nodes_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_nodes_operations.py
index a925184863fb..ba3f67aa6420 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_nodes_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_nodes_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload
+from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -30,7 +29,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -302,7 +301,7 @@ def get(
:rtype: ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -364,7 +363,7 @@ def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -500,7 +499,7 @@ def update(
:rtype: ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -574,7 +573,7 @@ def get_ip_address(
:rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeNodeIpAddress
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_object_metadata_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_object_metadata_operations.py
index f3a6f5410db0..37ce22f8f639 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_object_metadata_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_object_metadata_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Iterator, Optional, Type, TypeVar, Union, cast, overload
+from typing import Any, Callable, Dict, IO, Iterator, Optional, TypeVar, Union, cast, overload
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -34,7 +33,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -163,7 +162,7 @@ def __init__(self, *args, **kwargs):
def _refresh_initial(
self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any
) -> Iterator[bytes]:
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -362,7 +361,7 @@ def get(
:rtype: ~azure.mgmt.datafactory.models.SsisObjectMetadataListResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtimes_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtimes_operations.py
index b3fec9c18ddc..b24a1909873a 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtimes_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtimes_operations.py
@@ -1,4 +1,4 @@
-# pylint: disable=too-many-lines,too-many-statements
+# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +8,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload
+from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.exceptions import (
@@ -36,7 +36,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -923,7 +923,7 @@ def list_by_factory(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.IntegrationRuntimeListResponse] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1083,7 +1083,7 @@ def create_or_update(
:rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1164,7 +1164,7 @@ def get(
:rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource or None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1296,7 +1296,7 @@ def update(
:rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1367,7 +1367,7 @@ def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1422,7 +1422,7 @@ def get_status(
:rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1483,7 +1483,7 @@ def list_outbound_network_dependencies_endpoints( # pylint: disable=name-too-lo
~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1545,7 +1545,7 @@ def get_connection_info(
:rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeConnectionInfo
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1675,7 +1675,7 @@ def regenerate_auth_key(
:rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeys
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1746,7 +1746,7 @@ def list_auth_keys(
:rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeys
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1792,7 +1792,7 @@ def list_auth_keys(
def _start_initial(
self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any
) -> Iterator[bytes]:
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1906,7 +1906,7 @@ def get_long_running_output(pipeline_response):
def _stop_initial(
self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any
) -> Iterator[bytes]:
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2030,7 +2030,7 @@ def sync_credentials( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2086,7 +2086,7 @@ def get_monitoring_data(
:rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeMonitoringData
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2145,7 +2145,7 @@ def upgrade( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2185,7 +2185,7 @@ def upgrade( # pylint: disable=inconsistent-return-statements
return cls(pipeline_response, None, {}) # type: ignore
@overload
- def remove_links( # pylint: disable=inconsistent-return-statements
+ def remove_links(
self,
resource_group_name: str,
factory_name: str,
@@ -2217,7 +2217,7 @@ def remove_links( # pylint: disable=inconsistent-return-statements
"""
@overload
- def remove_links( # pylint: disable=inconsistent-return-statements
+ def remove_links(
self,
resource_group_name: str,
factory_name: str,
@@ -2273,7 +2273,7 @@ def remove_links( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2410,7 +2410,7 @@ def create_linked_integration_runtime(
:rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_linked_services_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_linked_services_operations.py
index 63d7ce310751..9a312ae3b166 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_linked_services_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_linked_services_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, overload
+from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload
import urllib.parse
from azure.core.exceptions import (
@@ -32,7 +31,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -283,7 +282,7 @@ def list_by_factory(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.LinkedServiceListResponse] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -442,7 +441,7 @@ def create_or_update(
:rtype: ~azure.mgmt.datafactory.models.LinkedServiceResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -523,7 +522,7 @@ def get(
:rtype: ~azure.mgmt.datafactory.models.LinkedServiceResource or None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -585,7 +584,7 @@ def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_managed_private_endpoints_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_managed_private_endpoints_operations.py
index 20257def3588..4b625d5f76f2 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_managed_private_endpoints_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_managed_private_endpoints_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, overload
+from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload
import urllib.parse
from azure.core.exceptions import (
@@ -32,7 +31,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -325,7 +324,7 @@ def list_by_factory(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ManagedPrivateEndpointListResponse] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -495,7 +494,7 @@ def create_or_update(
:rtype: ~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -580,7 +579,7 @@ def get(
:rtype: ~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -648,7 +647,7 @@ def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_managed_virtual_networks_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_managed_virtual_networks_operations.py
index f13244070d73..e3c117750e84 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_managed_virtual_networks_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_managed_virtual_networks_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, overload
+from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload
import urllib.parse
from azure.core.exceptions import (
@@ -32,7 +31,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -236,7 +235,7 @@ def list_by_factory(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ManagedVirtualNetworkListResponse] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -396,7 +395,7 @@ def create_or_update(
:rtype: ~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -477,7 +476,7 @@ def get(
:rtype: ~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_operations.py
index b6be3c7fe249..179e968805e5 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -7,7 +6,7 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import sys
-from typing import Any, Callable, Dict, Iterable, Optional, Type, TypeVar
+from typing import Any, Callable, Dict, Iterable, Optional, TypeVar
import urllib.parse
from azure.core.exceptions import (
@@ -31,7 +30,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -91,7 +90,7 @@ def list(self, **kwargs: Any) -> Iterable["_models.Operation"]:
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.OperationListResponse] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipeline_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipeline_runs_operations.py
index 9fc1d2636bd2..44a2624da0d7 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipeline_runs_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipeline_runs_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload
+from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -30,7 +29,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -263,7 +262,7 @@ def query_by_factory(
:rtype: ~azure.mgmt.datafactory.models.PipelineRunsQueryResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -331,7 +330,7 @@ def get(self, resource_group_name: str, factory_name: str, run_id: str, **kwargs
:rtype: ~azure.mgmt.datafactory.models.PipelineRun
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -398,7 +397,7 @@ def cancel( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py
index 573caa3617a6..9c93bdfb40cf 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, overload
+from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload
import urllib.parse
from azure.core.exceptions import (
@@ -32,7 +31,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -353,7 +352,7 @@ def list_by_factory(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.PipelineListResponse] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -509,7 +508,7 @@ def create_or_update(
:rtype: ~azure.mgmt.datafactory.models.PipelineResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -590,7 +589,7 @@ def get(
:rtype: ~azure.mgmt.datafactory.models.PipelineResource or None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -652,7 +651,7 @@ def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -830,7 +829,7 @@ def create_run(
:rtype: ~azure.mgmt.datafactory.models.CreateRunResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_end_point_connections_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_end_point_connections_operations.py
index 3976f19d464b..d24f099d15af 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_end_point_connections_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_end_point_connections_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -7,7 +6,7 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import sys
-from typing import Any, Callable, Dict, Iterable, Optional, Type, TypeVar
+from typing import Any, Callable, Dict, Iterable, Optional, TypeVar
import urllib.parse
from azure.core.exceptions import (
@@ -31,7 +30,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -120,7 +119,7 @@ def list_by_factory(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.PrivateEndpointConnectionListResponse] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_endpoint_connection_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_endpoint_connection_operations.py
index c731ff51c36d..cb2d188fd163 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_endpoint_connection_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_endpoint_connection_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload
+from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -30,7 +29,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -308,7 +307,7 @@ def create_or_update(
:rtype: ~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -389,7 +388,7 @@ def get(
:rtype: ~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -449,7 +448,7 @@ def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_link_resources_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_link_resources_operations.py
index 02ddf24d5991..8f4168491f37 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_link_resources_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_link_resources_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -7,7 +6,7 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import sys
-from typing import Any, Callable, Dict, Optional, Type, TypeVar
+from typing import Any, Callable, Dict, Optional, TypeVar
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -29,7 +28,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -106,7 +105,7 @@ def get(self, resource_group_name: str, factory_name: str, **kwargs: Any) -> _mo
:rtype: ~azure.mgmt.datafactory.models.PrivateLinkResourcesWrapper
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_trigger_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_trigger_runs_operations.py
index 885ad4224f88..1113486b8d40 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_trigger_runs_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_trigger_runs_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload
+from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -30,7 +29,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -216,7 +215,7 @@ def rerun( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -274,7 +273,7 @@ def cancel( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -387,7 +386,7 @@ def query_by_factory(
:rtype: ~azure.mgmt.datafactory.models.TriggerRunsQueryResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py
index 90d8ee35aea9..e52702431067 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py
@@ -1,4 +1,4 @@
-# pylint: disable=too-many-lines,too-many-statements
+# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +8,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload
+from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.exceptions import (
@@ -36,7 +36,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -569,7 +569,7 @@ def list_by_factory(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.TriggerListResponse] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -704,7 +704,7 @@ def query_by_factory(
:rtype: ~azure.mgmt.datafactory.models.TriggerQueryResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -852,7 +852,7 @@ def create_or_update(
:rtype: ~azure.mgmt.datafactory.models.TriggerResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -933,7 +933,7 @@ def get(
:rtype: ~azure.mgmt.datafactory.models.TriggerResource or None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -995,7 +995,7 @@ def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1037,7 +1037,7 @@ def delete( # pylint: disable=inconsistent-return-statements
def _subscribe_to_events_initial(
self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any
) -> Iterator[bytes]:
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1164,7 +1164,7 @@ def get_event_subscription_status(
:rtype: ~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1210,7 +1210,7 @@ def get_event_subscription_status(
def _unsubscribe_from_events_initial(
self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any
) -> Iterator[bytes]:
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1324,7 +1324,7 @@ def get_long_running_output(pipeline_response):
def _start_initial(
self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any
) -> Iterator[bytes]:
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1432,7 +1432,7 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
def _stop_initial(
self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any
) -> Iterator[bytes]:
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/activity_runs_query_by_pipeline_run.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/activity_runs_query_by_pipeline_run.py
index f7210d13bc49..fdaa39cd818f 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/activity_runs_query_by_pipeline_run.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/activity_runs_query_by_pipeline_run.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/approve_reject_private_endpoint_connection.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/approve_reject_private_endpoint_connection.py
index 0c5e7351c792..76308791c231 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/approve_reject_private_endpoint_connection.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/approve_reject_private_endpoint_connection.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_create.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_create.py
index 5a95fbdddf49..349a0789c6c2 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_create.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_create.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_update.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_update.py
index 1e6c4fb73591..b867cedcea7a 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_update.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_update.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/credentials_create.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/credentials_create.py
index 549cfaf78c76..a70172475881 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/credentials_create.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/credentials_create.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_add_data_flow.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_add_data_flow.py
index 9666302164e4..3d23c14295d3 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_add_data_flow.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_add_data_flow.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_create.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_create.py
index 5a4b016fe114..4e49e6ac2bd3 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_create.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_create.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_delete.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_delete.py
index 5993ff988b43..9301b76c91cb 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_delete.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_delete.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_execute_command.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_execute_command.py
index de450293e5cd..45d1dd3421f1 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_execute_command.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_execute_command.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flows_create.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flows_create.py
index 7b1066d40a93..1bcf28e2eac9 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flows_create.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flows_create.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flows_update.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flows_update.py
index a01884eaf599..9be25278b286 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flows_update.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flows_update.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/datasets_create.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/datasets_create.py
index b0b8d14dc287..a7bd124bf938 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/datasets_create.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/datasets_create.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/datasets_update.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/datasets_update.py
index 0a1e9ff1b746..d4e74e1b17e5 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/datasets_update.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/datasets_update.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/exposure_control_get_feature_value.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/exposure_control_get_feature_value.py
index 2c82e63bca10..d7b2b226a4d8 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/exposure_control_get_feature_value.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/exposure_control_get_feature_value.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/exposure_control_get_feature_value_by_factory.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/exposure_control_get_feature_value_by_factory.py
index f3482176d35a..6965f1ec6b98 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/exposure_control_get_feature_value_by_factory.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/exposure_control_get_feature_value_by_factory.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/exposure_control_query_feature_values_by_factory.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/exposure_control_query_feature_values_by_factory.py
index 7b455f13c50d..fa3a9ed11b31 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/exposure_control_query_feature_values_by_factory.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/exposure_control_query_feature_values_by_factory.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_configure_factory_repo.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_configure_factory_repo.py
index 662e787a01d0..594252a37e67 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_configure_factory_repo.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_configure_factory_repo.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_create_or_update.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_create_or_update.py
index 5aac01800329..b4bc70cb173e 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_create_or_update.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_create_or_update.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_get_data_plane_access.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_get_data_plane_access.py
index 14650f48bf98..b720d931d55b 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_get_data_plane_access.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_get_data_plane_access.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_get_git_hub_access_token.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_get_git_hub_access_token.py
index ee93ad533db9..b009174f37b7 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_get_git_hub_access_token.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_get_git_hub_access_token.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_update.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_update.py
index 3aa6f283e190..bb6131a7a85d 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_update.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_update.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/global_parameters_create.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/global_parameters_create.py
index bb23777a1d7b..7491d9d3554f 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/global_parameters_create.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/global_parameters_create.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/global_parameters_update.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/global_parameters_update.py
index ebecf6c2f69f..d47950c8bef2 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/global_parameters_update.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/global_parameters_update.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtime_nodes_update.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtime_nodes_update.py
index 71e8b2dfa9e1..fb410f48bb70 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtime_nodes_update.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtime_nodes_update.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_create.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_create.py
index cc095f5daa0e..687cf840b7fc 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_create.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_create.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_create_linked_integration_runtime.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_create_linked_integration_runtime.py
index 636ae714e35c..9bd9ea5feb65 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_create_linked_integration_runtime.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_create_linked_integration_runtime.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_regenerate_auth_key.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_regenerate_auth_key.py
index 59dc78a72d7f..121efc4938d0 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_regenerate_auth_key.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_regenerate_auth_key.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_remove_links.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_remove_links.py
index 2b73df707706..25320eb81748 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_remove_links.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_remove_links.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_update.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_update.py
index 5a6e753895c7..969dfc802366 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_update.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_update.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/linked_services_create.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/linked_services_create.py
index 54721852357a..5acc0528ae9c 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/linked_services_create.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/linked_services_create.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/linked_services_update.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/linked_services_update.py
index f3e42ef14904..eb068c8a9e81 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/linked_services_update.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/linked_services_update.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/managed_private_endpoints_create.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/managed_private_endpoints_create.py
index 78819449f547..409d7d263b84 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/managed_private_endpoints_create.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/managed_private_endpoints_create.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/managed_virtual_networks_create.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/managed_virtual_networks_create.py
index 4a51cc6f510e..9a6a5030c0bb 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/managed_virtual_networks_create.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/managed_virtual_networks_create.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/pipeline_runs_query_by_factory.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/pipeline_runs_query_by_factory.py
index 301e27780897..e1e7d62f42e8 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/pipeline_runs_query_by_factory.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/pipeline_runs_query_by_factory.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/pipelines_create.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/pipelines_create.py
index efe6b9116844..758ae41234ed 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/pipelines_create.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/pipelines_create.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/pipelines_update.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/pipelines_update.py
index 39c504b227bb..b7c5398a385d 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/pipelines_update.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/pipelines_update.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/trigger_runs_query_by_factory.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/trigger_runs_query_by_factory.py
index 564a942c9aa7..96e750dc81b2 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/trigger_runs_query_by_factory.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/trigger_runs_query_by_factory.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_create.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_create.py
index df21f8434926..d80e3503c95c 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_create.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_create.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_query_by_factory.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_query_by_factory.py
index 9f9ae0dc87e1..8b8c9993de2c 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_query_by_factory.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_query_by_factory.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_update.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_update.py
index 33aeccb309a8..a1f03c5c2bf9 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_update.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_update.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/conftest.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/conftest.py
index 6d9707a4e6e2..b0d8cda5b18d 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/conftest.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/conftest.py
@@ -18,7 +18,7 @@
load_dotenv()
-# aovid record sensitive identity information in recordings
+# For security, please avoid recording sensitive identity information in recordings
@pytest.fixture(scope="session", autouse=True)
def add_sanitizers(test_proxy):
datafactorymanagement_subscription_id = os.environ.get(
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_activity_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_activity_runs_operations.py
index 30425facce34..9c1f8581c850 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_activity_runs_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_activity_runs_operations.py
@@ -20,7 +20,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_query_by_pipeline_run(self, resource_group):
+ def test_activity_runs_query_by_pipeline_run(self, resource_group):
response = self.client.activity_runs.query_by_pipeline_run(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_activity_runs_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_activity_runs_operations_async.py
index 26a8455f02de..f762becaeaec 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_activity_runs_operations_async.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_activity_runs_operations_async.py
@@ -21,7 +21,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_query_by_pipeline_run(self, resource_group):
+ async def test_activity_runs_query_by_pipeline_run(self, resource_group):
response = await self.client.activity_runs.query_by_pipeline_run(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_change_data_capture_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_change_data_capture_operations.py
index edee8c213e4d..30dacabcac6e 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_change_data_capture_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_change_data_capture_operations.py
@@ -20,7 +20,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_list_by_factory(self, resource_group):
+ def test_change_data_capture_list_by_factory(self, resource_group):
response = self.client.change_data_capture.list_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
@@ -32,7 +32,7 @@ def test_list_by_factory(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_create_or_update(self, resource_group):
+ def test_change_data_capture_create_or_update(self, resource_group):
response = self.client.change_data_capture.create_or_update(
resource_group_name=resource_group.name,
factory_name="str",
@@ -126,7 +126,7 @@ def test_create_or_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_get(self, resource_group):
+ def test_change_data_capture_get(self, resource_group):
response = self.client.change_data_capture.get(
resource_group_name=resource_group.name,
factory_name="str",
@@ -139,7 +139,7 @@ def test_get(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_delete(self, resource_group):
+ def test_change_data_capture_delete(self, resource_group):
response = self.client.change_data_capture.delete(
resource_group_name=resource_group.name,
factory_name="str",
@@ -152,7 +152,7 @@ def test_delete(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_start(self, resource_group):
+ def test_change_data_capture_start(self, resource_group):
response = self.client.change_data_capture.start(
resource_group_name=resource_group.name,
factory_name="str",
@@ -165,7 +165,7 @@ def test_start(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_stop(self, resource_group):
+ def test_change_data_capture_stop(self, resource_group):
response = self.client.change_data_capture.stop(
resource_group_name=resource_group.name,
factory_name="str",
@@ -178,7 +178,7 @@ def test_stop(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_status(self, resource_group):
+ def test_change_data_capture_status(self, resource_group):
response = self.client.change_data_capture.status(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_change_data_capture_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_change_data_capture_operations_async.py
index 325a9f0e1cf9..d5f6b8f05e04 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_change_data_capture_operations_async.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_change_data_capture_operations_async.py
@@ -21,7 +21,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_list_by_factory(self, resource_group):
+ async def test_change_data_capture_list_by_factory(self, resource_group):
response = self.client.change_data_capture.list_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
@@ -33,7 +33,7 @@ async def test_list_by_factory(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_create_or_update(self, resource_group):
+ async def test_change_data_capture_create_or_update(self, resource_group):
response = await self.client.change_data_capture.create_or_update(
resource_group_name=resource_group.name,
factory_name="str",
@@ -127,7 +127,7 @@ async def test_create_or_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_get(self, resource_group):
+ async def test_change_data_capture_get(self, resource_group):
response = await self.client.change_data_capture.get(
resource_group_name=resource_group.name,
factory_name="str",
@@ -140,7 +140,7 @@ async def test_get(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_delete(self, resource_group):
+ async def test_change_data_capture_delete(self, resource_group):
response = await self.client.change_data_capture.delete(
resource_group_name=resource_group.name,
factory_name="str",
@@ -153,7 +153,7 @@ async def test_delete(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_start(self, resource_group):
+ async def test_change_data_capture_start(self, resource_group):
response = await self.client.change_data_capture.start(
resource_group_name=resource_group.name,
factory_name="str",
@@ -166,7 +166,7 @@ async def test_start(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_stop(self, resource_group):
+ async def test_change_data_capture_stop(self, resource_group):
response = await self.client.change_data_capture.stop(
resource_group_name=resource_group.name,
factory_name="str",
@@ -179,7 +179,7 @@ async def test_stop(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_status(self, resource_group):
+ async def test_change_data_capture_status(self, resource_group):
response = await self.client.change_data_capture.status(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_credential_operations_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_credential_operations_operations.py
index aa12eb5bb295..62fcd56cb92d 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_credential_operations_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_credential_operations_operations.py
@@ -20,7 +20,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_list_by_factory(self, resource_group):
+ def test_credential_operations_list_by_factory(self, resource_group):
response = self.client.credential_operations.list_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
@@ -32,7 +32,7 @@ def test_list_by_factory(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_create_or_update(self, resource_group):
+ def test_credential_operations_create_or_update(self, resource_group):
response = self.client.credential_operations.create_or_update(
resource_group_name=resource_group.name,
factory_name="str",
@@ -46,7 +46,7 @@ def test_create_or_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_get(self, resource_group):
+ def test_credential_operations_get(self, resource_group):
response = self.client.credential_operations.get(
resource_group_name=resource_group.name,
factory_name="str",
@@ -59,7 +59,7 @@ def test_get(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_delete(self, resource_group):
+ def test_credential_operations_delete(self, resource_group):
response = self.client.credential_operations.delete(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_credential_operations_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_credential_operations_operations_async.py
index bc363029eb58..84bd379bef8d 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_credential_operations_operations_async.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_credential_operations_operations_async.py
@@ -21,7 +21,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_list_by_factory(self, resource_group):
+ async def test_credential_operations_list_by_factory(self, resource_group):
response = self.client.credential_operations.list_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
@@ -33,7 +33,7 @@ async def test_list_by_factory(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_create_or_update(self, resource_group):
+ async def test_credential_operations_create_or_update(self, resource_group):
response = await self.client.credential_operations.create_or_update(
resource_group_name=resource_group.name,
factory_name="str",
@@ -47,7 +47,7 @@ async def test_create_or_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_get(self, resource_group):
+ async def test_credential_operations_get(self, resource_group):
response = await self.client.credential_operations.get(
resource_group_name=resource_group.name,
factory_name="str",
@@ -60,7 +60,7 @@ async def test_get(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_delete(self, resource_group):
+ async def test_credential_operations_delete(self, resource_group):
response = await self.client.credential_operations.delete(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_data_flow_debug_session_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_data_flow_debug_session_operations.py
index 009a428155ed..0e92f93c060f 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_data_flow_debug_session_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_data_flow_debug_session_operations.py
@@ -20,7 +20,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_begin_create(self, resource_group):
+ def test_data_flow_debug_session_begin_create(self, resource_group):
response = self.client.data_flow_debug_session.begin_create(
resource_group_name=resource_group.name,
factory_name="str",
@@ -38,7 +38,7 @@ def test_begin_create(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_query_by_factory(self, resource_group):
+ def test_data_flow_debug_session_query_by_factory(self, resource_group):
response = self.client.data_flow_debug_session.query_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
@@ -50,7 +50,7 @@ def test_query_by_factory(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_add_data_flow(self, resource_group):
+ def test_data_flow_debug_session_add_data_flow(self, resource_group):
response = self.client.data_flow_debug_session.add_data_flow(
resource_group_name=resource_group.name,
factory_name="str",
@@ -78,7 +78,7 @@ def test_add_data_flow(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_delete(self, resource_group):
+ def test_data_flow_debug_session_delete(self, resource_group):
response = self.client.data_flow_debug_session.delete(
resource_group_name=resource_group.name,
factory_name="str",
@@ -91,7 +91,7 @@ def test_delete(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_begin_execute_command(self, resource_group):
+ def test_data_flow_debug_session_begin_execute_command(self, resource_group):
response = self.client.data_flow_debug_session.begin_execute_command(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_data_flow_debug_session_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_data_flow_debug_session_operations_async.py
index c8fc8f2c9780..09c1953e811c 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_data_flow_debug_session_operations_async.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_data_flow_debug_session_operations_async.py
@@ -21,7 +21,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_begin_create(self, resource_group):
+ async def test_data_flow_debug_session_begin_create(self, resource_group):
response = await (
await self.client.data_flow_debug_session.begin_create(
resource_group_name=resource_group.name,
@@ -41,7 +41,7 @@ async def test_begin_create(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_query_by_factory(self, resource_group):
+ async def test_data_flow_debug_session_query_by_factory(self, resource_group):
response = self.client.data_flow_debug_session.query_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
@@ -53,7 +53,7 @@ async def test_query_by_factory(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_add_data_flow(self, resource_group):
+ async def test_data_flow_debug_session_add_data_flow(self, resource_group):
response = await self.client.data_flow_debug_session.add_data_flow(
resource_group_name=resource_group.name,
factory_name="str",
@@ -81,7 +81,7 @@ async def test_add_data_flow(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_delete(self, resource_group):
+ async def test_data_flow_debug_session_delete(self, resource_group):
response = await self.client.data_flow_debug_session.delete(
resource_group_name=resource_group.name,
factory_name="str",
@@ -94,7 +94,7 @@ async def test_delete(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_begin_execute_command(self, resource_group):
+ async def test_data_flow_debug_session_begin_execute_command(self, resource_group):
response = await (
await self.client.data_flow_debug_session.begin_execute_command(
resource_group_name=resource_group.name,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_data_flows_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_data_flows_operations.py
index 3db90f62abd4..1a76b7824888 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_data_flows_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_data_flows_operations.py
@@ -20,7 +20,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_create_or_update(self, resource_group):
+ def test_data_flows_create_or_update(self, resource_group):
response = self.client.data_flows.create_or_update(
resource_group_name=resource_group.name,
factory_name="str",
@@ -34,7 +34,7 @@ def test_create_or_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_get(self, resource_group):
+ def test_data_flows_get(self, resource_group):
response = self.client.data_flows.get(
resource_group_name=resource_group.name,
factory_name="str",
@@ -47,7 +47,7 @@ def test_get(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_delete(self, resource_group):
+ def test_data_flows_delete(self, resource_group):
response = self.client.data_flows.delete(
resource_group_name=resource_group.name,
factory_name="str",
@@ -60,7 +60,7 @@ def test_delete(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_list_by_factory(self, resource_group):
+ def test_data_flows_list_by_factory(self, resource_group):
response = self.client.data_flows.list_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_data_flows_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_data_flows_operations_async.py
index 41513ff09d7b..3555c1d4a8bf 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_data_flows_operations_async.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_data_flows_operations_async.py
@@ -21,7 +21,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_create_or_update(self, resource_group):
+ async def test_data_flows_create_or_update(self, resource_group):
response = await self.client.data_flows.create_or_update(
resource_group_name=resource_group.name,
factory_name="str",
@@ -35,7 +35,7 @@ async def test_create_or_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_get(self, resource_group):
+ async def test_data_flows_get(self, resource_group):
response = await self.client.data_flows.get(
resource_group_name=resource_group.name,
factory_name="str",
@@ -48,7 +48,7 @@ async def test_get(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_delete(self, resource_group):
+ async def test_data_flows_delete(self, resource_group):
response = await self.client.data_flows.delete(
resource_group_name=resource_group.name,
factory_name="str",
@@ -61,7 +61,7 @@ async def test_delete(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_list_by_factory(self, resource_group):
+ async def test_data_flows_list_by_factory(self, resource_group):
response = self.client.data_flows.list_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_datasets_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_datasets_operations.py
index 14348641640b..2b26d08f5720 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_datasets_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_datasets_operations.py
@@ -20,7 +20,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_list_by_factory(self, resource_group):
+ def test_datasets_list_by_factory(self, resource_group):
response = self.client.datasets.list_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
@@ -32,7 +32,7 @@ def test_list_by_factory(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_create_or_update(self, resource_group):
+ def test_datasets_create_or_update(self, resource_group):
response = self.client.datasets.create_or_update(
resource_group_name=resource_group.name,
factory_name="str",
@@ -46,7 +46,7 @@ def test_create_or_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_get(self, resource_group):
+ def test_datasets_get(self, resource_group):
response = self.client.datasets.get(
resource_group_name=resource_group.name,
factory_name="str",
@@ -59,7 +59,7 @@ def test_get(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_delete(self, resource_group):
+ def test_datasets_delete(self, resource_group):
response = self.client.datasets.delete(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_datasets_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_datasets_operations_async.py
index 97836ed4ceb5..e50d77ef3a0d 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_datasets_operations_async.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_datasets_operations_async.py
@@ -21,7 +21,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_list_by_factory(self, resource_group):
+ async def test_datasets_list_by_factory(self, resource_group):
response = self.client.datasets.list_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
@@ -33,7 +33,7 @@ async def test_list_by_factory(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_create_or_update(self, resource_group):
+ async def test_datasets_create_or_update(self, resource_group):
response = await self.client.datasets.create_or_update(
resource_group_name=resource_group.name,
factory_name="str",
@@ -47,7 +47,7 @@ async def test_create_or_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_get(self, resource_group):
+ async def test_datasets_get(self, resource_group):
response = await self.client.datasets.get(
resource_group_name=resource_group.name,
factory_name="str",
@@ -60,7 +60,7 @@ async def test_get(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_delete(self, resource_group):
+ async def test_datasets_delete(self, resource_group):
response = await self.client.datasets.delete(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_exposure_control_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_exposure_control_operations.py
index 246081c8f53f..ad8b3652783e 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_exposure_control_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_exposure_control_operations.py
@@ -20,7 +20,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_get_feature_value(self, resource_group):
+ def test_exposure_control_get_feature_value(self, resource_group):
response = self.client.exposure_control.get_feature_value(
location_id="str",
exposure_control_request={"featureName": "str", "featureType": "str"},
@@ -32,7 +32,7 @@ def test_get_feature_value(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_get_feature_value_by_factory(self, resource_group):
+ def test_exposure_control_get_feature_value_by_factory(self, resource_group):
response = self.client.exposure_control.get_feature_value_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
@@ -45,7 +45,7 @@ def test_get_feature_value_by_factory(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_query_feature_values_by_factory(self, resource_group):
+ def test_exposure_control_query_feature_values_by_factory(self, resource_group):
response = self.client.exposure_control.query_feature_values_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_exposure_control_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_exposure_control_operations_async.py
index a0ceb77d0203..cee377a28ea8 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_exposure_control_operations_async.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_exposure_control_operations_async.py
@@ -21,7 +21,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_get_feature_value(self, resource_group):
+ async def test_exposure_control_get_feature_value(self, resource_group):
response = await self.client.exposure_control.get_feature_value(
location_id="str",
exposure_control_request={"featureName": "str", "featureType": "str"},
@@ -33,7 +33,7 @@ async def test_get_feature_value(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_get_feature_value_by_factory(self, resource_group):
+ async def test_exposure_control_get_feature_value_by_factory(self, resource_group):
response = await self.client.exposure_control.get_feature_value_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
@@ -46,7 +46,7 @@ async def test_get_feature_value_by_factory(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_query_feature_values_by_factory(self, resource_group):
+ async def test_exposure_control_query_feature_values_by_factory(self, resource_group):
response = await self.client.exposure_control.query_feature_values_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_factories_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_factories_operations.py
index 94dbeb5cf9fb..27215817e5e6 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_factories_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_factories_operations.py
@@ -20,7 +20,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_list(self, resource_group):
+ def test_factories_list(self, resource_group):
response = self.client.factories.list(
api_version="2018-06-01",
)
@@ -30,7 +30,7 @@ def test_list(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_configure_factory_repo(self, resource_group):
+ def test_factories_configure_factory_repo(self, resource_group):
response = self.client.factories.configure_factory_repo(
location_id="str",
factory_repo_update={"factoryResourceId": "str", "repoConfiguration": "factory_repo_configuration"},
@@ -42,7 +42,7 @@ def test_configure_factory_repo(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_list_by_resource_group(self, resource_group):
+ def test_factories_list_by_resource_group(self, resource_group):
response = self.client.factories.list_by_resource_group(
resource_group_name=resource_group.name,
api_version="2018-06-01",
@@ -53,7 +53,7 @@ def test_list_by_resource_group(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_create_or_update(self, resource_group):
+ def test_factories_create_or_update(self, resource_group):
response = self.client.factories.create_or_update(
resource_group_name=resource_group.name,
factory_name="str",
@@ -92,7 +92,7 @@ def test_create_or_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_update(self, resource_group):
+ def test_factories_update(self, resource_group):
response = self.client.factories.update(
resource_group_name=resource_group.name,
factory_name="str",
@@ -114,7 +114,7 @@ def test_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_get(self, resource_group):
+ def test_factories_get(self, resource_group):
response = self.client.factories.get(
resource_group_name=resource_group.name,
factory_name="str",
@@ -126,7 +126,7 @@ def test_get(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_delete(self, resource_group):
+ def test_factories_delete(self, resource_group):
response = self.client.factories.delete(
resource_group_name=resource_group.name,
factory_name="str",
@@ -138,7 +138,7 @@ def test_delete(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_get_git_hub_access_token(self, resource_group):
+ def test_factories_get_git_hub_access_token(self, resource_group):
response = self.client.factories.get_git_hub_access_token(
resource_group_name=resource_group.name,
factory_name="str",
@@ -156,7 +156,7 @@ def test_get_git_hub_access_token(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_get_data_plane_access(self, resource_group):
+ def test_factories_get_data_plane_access(self, resource_group):
response = self.client.factories.get_data_plane_access(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_factories_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_factories_operations_async.py
index fd08741d55f9..6a41ac09f6e9 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_factories_operations_async.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_factories_operations_async.py
@@ -21,7 +21,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_list(self, resource_group):
+ async def test_factories_list(self, resource_group):
response = self.client.factories.list(
api_version="2018-06-01",
)
@@ -31,7 +31,7 @@ async def test_list(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_configure_factory_repo(self, resource_group):
+ async def test_factories_configure_factory_repo(self, resource_group):
response = await self.client.factories.configure_factory_repo(
location_id="str",
factory_repo_update={"factoryResourceId": "str", "repoConfiguration": "factory_repo_configuration"},
@@ -43,7 +43,7 @@ async def test_configure_factory_repo(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_list_by_resource_group(self, resource_group):
+ async def test_factories_list_by_resource_group(self, resource_group):
response = self.client.factories.list_by_resource_group(
resource_group_name=resource_group.name,
api_version="2018-06-01",
@@ -54,7 +54,7 @@ async def test_list_by_resource_group(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_create_or_update(self, resource_group):
+ async def test_factories_create_or_update(self, resource_group):
response = await self.client.factories.create_or_update(
resource_group_name=resource_group.name,
factory_name="str",
@@ -93,7 +93,7 @@ async def test_create_or_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_update(self, resource_group):
+ async def test_factories_update(self, resource_group):
response = await self.client.factories.update(
resource_group_name=resource_group.name,
factory_name="str",
@@ -115,7 +115,7 @@ async def test_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_get(self, resource_group):
+ async def test_factories_get(self, resource_group):
response = await self.client.factories.get(
resource_group_name=resource_group.name,
factory_name="str",
@@ -127,7 +127,7 @@ async def test_get(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_delete(self, resource_group):
+ async def test_factories_delete(self, resource_group):
response = await self.client.factories.delete(
resource_group_name=resource_group.name,
factory_name="str",
@@ -139,7 +139,7 @@ async def test_delete(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_get_git_hub_access_token(self, resource_group):
+ async def test_factories_get_git_hub_access_token(self, resource_group):
response = await self.client.factories.get_git_hub_access_token(
resource_group_name=resource_group.name,
factory_name="str",
@@ -157,7 +157,7 @@ async def test_get_git_hub_access_token(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_get_data_plane_access(self, resource_group):
+ async def test_factories_get_data_plane_access(self, resource_group):
response = await self.client.factories.get_data_plane_access(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_global_parameters_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_global_parameters_operations.py
index 55324ca068c3..8ff8a47ccf97 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_global_parameters_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_global_parameters_operations.py
@@ -20,7 +20,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_list_by_factory(self, resource_group):
+ def test_global_parameters_list_by_factory(self, resource_group):
response = self.client.global_parameters.list_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
@@ -32,7 +32,7 @@ def test_list_by_factory(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_get(self, resource_group):
+ def test_global_parameters_get(self, resource_group):
response = self.client.global_parameters.get(
resource_group_name=resource_group.name,
factory_name="str",
@@ -45,7 +45,7 @@ def test_get(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_create_or_update(self, resource_group):
+ def test_global_parameters_create_or_update(self, resource_group):
response = self.client.global_parameters.create_or_update(
resource_group_name=resource_group.name,
factory_name="str",
@@ -65,7 +65,7 @@ def test_create_or_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_delete(self, resource_group):
+ def test_global_parameters_delete(self, resource_group):
response = self.client.global_parameters.delete(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_global_parameters_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_global_parameters_operations_async.py
index 704e5a26b0cd..1ac449971500 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_global_parameters_operations_async.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_global_parameters_operations_async.py
@@ -21,7 +21,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_list_by_factory(self, resource_group):
+ async def test_global_parameters_list_by_factory(self, resource_group):
response = self.client.global_parameters.list_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
@@ -33,7 +33,7 @@ async def test_list_by_factory(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_get(self, resource_group):
+ async def test_global_parameters_get(self, resource_group):
response = await self.client.global_parameters.get(
resource_group_name=resource_group.name,
factory_name="str",
@@ -46,7 +46,7 @@ async def test_get(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_create_or_update(self, resource_group):
+ async def test_global_parameters_create_or_update(self, resource_group):
response = await self.client.global_parameters.create_or_update(
resource_group_name=resource_group.name,
factory_name="str",
@@ -66,7 +66,7 @@ async def test_create_or_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_delete(self, resource_group):
+ async def test_global_parameters_delete(self, resource_group):
response = await self.client.global_parameters.delete(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtime_nodes_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtime_nodes_operations.py
index f8966ee56c40..dfb1e006f00d 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtime_nodes_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtime_nodes_operations.py
@@ -20,7 +20,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_get(self, resource_group):
+ def test_integration_runtime_nodes_get(self, resource_group):
response = self.client.integration_runtime_nodes.get(
resource_group_name=resource_group.name,
factory_name="str",
@@ -34,7 +34,7 @@ def test_get(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_delete(self, resource_group):
+ def test_integration_runtime_nodes_delete(self, resource_group):
response = self.client.integration_runtime_nodes.delete(
resource_group_name=resource_group.name,
factory_name="str",
@@ -48,7 +48,7 @@ def test_delete(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_update(self, resource_group):
+ def test_integration_runtime_nodes_update(self, resource_group):
response = self.client.integration_runtime_nodes.update(
resource_group_name=resource_group.name,
factory_name="str",
@@ -63,7 +63,7 @@ def test_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_get_ip_address(self, resource_group):
+ def test_integration_runtime_nodes_get_ip_address(self, resource_group):
response = self.client.integration_runtime_nodes.get_ip_address(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtime_nodes_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtime_nodes_operations_async.py
index 52b99df852d9..257a00d3228b 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtime_nodes_operations_async.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtime_nodes_operations_async.py
@@ -21,7 +21,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_get(self, resource_group):
+ async def test_integration_runtime_nodes_get(self, resource_group):
response = await self.client.integration_runtime_nodes.get(
resource_group_name=resource_group.name,
factory_name="str",
@@ -35,7 +35,7 @@ async def test_get(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_delete(self, resource_group):
+ async def test_integration_runtime_nodes_delete(self, resource_group):
response = await self.client.integration_runtime_nodes.delete(
resource_group_name=resource_group.name,
factory_name="str",
@@ -49,7 +49,7 @@ async def test_delete(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_update(self, resource_group):
+ async def test_integration_runtime_nodes_update(self, resource_group):
response = await self.client.integration_runtime_nodes.update(
resource_group_name=resource_group.name,
factory_name="str",
@@ -64,7 +64,7 @@ async def test_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_get_ip_address(self, resource_group):
+ async def test_integration_runtime_nodes_get_ip_address(self, resource_group):
response = await self.client.integration_runtime_nodes.get_ip_address(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtime_object_metadata_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtime_object_metadata_operations.py
index 8b3f0ea1ecf6..e7d0acc9f8d3 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtime_object_metadata_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtime_object_metadata_operations.py
@@ -20,7 +20,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_begin_refresh(self, resource_group):
+ def test_integration_runtime_object_metadata_begin_refresh(self, resource_group):
response = self.client.integration_runtime_object_metadata.begin_refresh(
resource_group_name=resource_group.name,
factory_name="str",
@@ -33,7 +33,7 @@ def test_begin_refresh(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_get(self, resource_group):
+ def test_integration_runtime_object_metadata_get(self, resource_group):
response = self.client.integration_runtime_object_metadata.get(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtime_object_metadata_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtime_object_metadata_operations_async.py
index 1d095b566c43..a75917eb1e5f 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtime_object_metadata_operations_async.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtime_object_metadata_operations_async.py
@@ -21,7 +21,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_begin_refresh(self, resource_group):
+ async def test_integration_runtime_object_metadata_begin_refresh(self, resource_group):
response = await (
await self.client.integration_runtime_object_metadata.begin_refresh(
resource_group_name=resource_group.name,
@@ -36,7 +36,7 @@ async def test_begin_refresh(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_get(self, resource_group):
+ async def test_integration_runtime_object_metadata_get(self, resource_group):
response = await self.client.integration_runtime_object_metadata.get(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtimes_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtimes_operations.py
index 7816caf1974b..427a4c39c1e2 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtimes_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtimes_operations.py
@@ -20,7 +20,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_list_by_factory(self, resource_group):
+ def test_integration_runtimes_list_by_factory(self, resource_group):
response = self.client.integration_runtimes.list_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
@@ -32,7 +32,7 @@ def test_list_by_factory(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_create_or_update(self, resource_group):
+ def test_integration_runtimes_create_or_update(self, resource_group):
response = self.client.integration_runtimes.create_or_update(
resource_group_name=resource_group.name,
factory_name="str",
@@ -52,7 +52,7 @@ def test_create_or_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_get(self, resource_group):
+ def test_integration_runtimes_get(self, resource_group):
response = self.client.integration_runtimes.get(
resource_group_name=resource_group.name,
factory_name="str",
@@ -65,7 +65,7 @@ def test_get(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_update(self, resource_group):
+ def test_integration_runtimes_update(self, resource_group):
response = self.client.integration_runtimes.update(
resource_group_name=resource_group.name,
factory_name="str",
@@ -79,7 +79,7 @@ def test_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_delete(self, resource_group):
+ def test_integration_runtimes_delete(self, resource_group):
response = self.client.integration_runtimes.delete(
resource_group_name=resource_group.name,
factory_name="str",
@@ -92,7 +92,7 @@ def test_delete(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_get_status(self, resource_group):
+ def test_integration_runtimes_get_status(self, resource_group):
response = self.client.integration_runtimes.get_status(
resource_group_name=resource_group.name,
factory_name="str",
@@ -105,7 +105,7 @@ def test_get_status(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_list_outbound_network_dependencies_endpoints(self, resource_group):
+ def test_integration_runtimes_list_outbound_network_dependencies_endpoints(self, resource_group):
response = self.client.integration_runtimes.list_outbound_network_dependencies_endpoints(
resource_group_name=resource_group.name,
factory_name="str",
@@ -118,7 +118,7 @@ def test_list_outbound_network_dependencies_endpoints(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_get_connection_info(self, resource_group):
+ def test_integration_runtimes_get_connection_info(self, resource_group):
response = self.client.integration_runtimes.get_connection_info(
resource_group_name=resource_group.name,
factory_name="str",
@@ -131,7 +131,7 @@ def test_get_connection_info(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_regenerate_auth_key(self, resource_group):
+ def test_integration_runtimes_regenerate_auth_key(self, resource_group):
response = self.client.integration_runtimes.regenerate_auth_key(
resource_group_name=resource_group.name,
factory_name="str",
@@ -145,7 +145,7 @@ def test_regenerate_auth_key(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_list_auth_keys(self, resource_group):
+ def test_integration_runtimes_list_auth_keys(self, resource_group):
response = self.client.integration_runtimes.list_auth_keys(
resource_group_name=resource_group.name,
factory_name="str",
@@ -158,7 +158,7 @@ def test_list_auth_keys(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_begin_start(self, resource_group):
+ def test_integration_runtimes_begin_start(self, resource_group):
response = self.client.integration_runtimes.begin_start(
resource_group_name=resource_group.name,
factory_name="str",
@@ -171,7 +171,7 @@ def test_begin_start(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_begin_stop(self, resource_group):
+ def test_integration_runtimes_begin_stop(self, resource_group):
response = self.client.integration_runtimes.begin_stop(
resource_group_name=resource_group.name,
factory_name="str",
@@ -184,7 +184,7 @@ def test_begin_stop(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_sync_credentials(self, resource_group):
+ def test_integration_runtimes_sync_credentials(self, resource_group):
response = self.client.integration_runtimes.sync_credentials(
resource_group_name=resource_group.name,
factory_name="str",
@@ -197,7 +197,7 @@ def test_sync_credentials(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_get_monitoring_data(self, resource_group):
+ def test_integration_runtimes_get_monitoring_data(self, resource_group):
response = self.client.integration_runtimes.get_monitoring_data(
resource_group_name=resource_group.name,
factory_name="str",
@@ -210,7 +210,7 @@ def test_get_monitoring_data(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_upgrade(self, resource_group):
+ def test_integration_runtimes_upgrade(self, resource_group):
response = self.client.integration_runtimes.upgrade(
resource_group_name=resource_group.name,
factory_name="str",
@@ -223,7 +223,7 @@ def test_upgrade(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_remove_links(self, resource_group):
+ def test_integration_runtimes_remove_links(self, resource_group):
response = self.client.integration_runtimes.remove_links(
resource_group_name=resource_group.name,
factory_name="str",
@@ -237,7 +237,7 @@ def test_remove_links(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_create_linked_integration_runtime(self, resource_group):
+ def test_integration_runtimes_create_linked_integration_runtime(self, resource_group):
response = self.client.integration_runtimes.create_linked_integration_runtime(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtimes_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtimes_operations_async.py
index 50675e534860..8731e351f692 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtimes_operations_async.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtimes_operations_async.py
@@ -21,7 +21,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_list_by_factory(self, resource_group):
+ async def test_integration_runtimes_list_by_factory(self, resource_group):
response = self.client.integration_runtimes.list_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
@@ -33,7 +33,7 @@ async def test_list_by_factory(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_create_or_update(self, resource_group):
+ async def test_integration_runtimes_create_or_update(self, resource_group):
response = await self.client.integration_runtimes.create_or_update(
resource_group_name=resource_group.name,
factory_name="str",
@@ -53,7 +53,7 @@ async def test_create_or_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_get(self, resource_group):
+ async def test_integration_runtimes_get(self, resource_group):
response = await self.client.integration_runtimes.get(
resource_group_name=resource_group.name,
factory_name="str",
@@ -66,7 +66,7 @@ async def test_get(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_update(self, resource_group):
+ async def test_integration_runtimes_update(self, resource_group):
response = await self.client.integration_runtimes.update(
resource_group_name=resource_group.name,
factory_name="str",
@@ -80,7 +80,7 @@ async def test_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_delete(self, resource_group):
+ async def test_integration_runtimes_delete(self, resource_group):
response = await self.client.integration_runtimes.delete(
resource_group_name=resource_group.name,
factory_name="str",
@@ -93,7 +93,7 @@ async def test_delete(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_get_status(self, resource_group):
+ async def test_integration_runtimes_get_status(self, resource_group):
response = await self.client.integration_runtimes.get_status(
resource_group_name=resource_group.name,
factory_name="str",
@@ -106,7 +106,7 @@ async def test_get_status(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_list_outbound_network_dependencies_endpoints(self, resource_group):
+ async def test_integration_runtimes_list_outbound_network_dependencies_endpoints(self, resource_group):
response = await self.client.integration_runtimes.list_outbound_network_dependencies_endpoints(
resource_group_name=resource_group.name,
factory_name="str",
@@ -119,7 +119,7 @@ async def test_list_outbound_network_dependencies_endpoints(self, resource_group
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_get_connection_info(self, resource_group):
+ async def test_integration_runtimes_get_connection_info(self, resource_group):
response = await self.client.integration_runtimes.get_connection_info(
resource_group_name=resource_group.name,
factory_name="str",
@@ -132,7 +132,7 @@ async def test_get_connection_info(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_regenerate_auth_key(self, resource_group):
+ async def test_integration_runtimes_regenerate_auth_key(self, resource_group):
response = await self.client.integration_runtimes.regenerate_auth_key(
resource_group_name=resource_group.name,
factory_name="str",
@@ -146,7 +146,7 @@ async def test_regenerate_auth_key(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_list_auth_keys(self, resource_group):
+ async def test_integration_runtimes_list_auth_keys(self, resource_group):
response = await self.client.integration_runtimes.list_auth_keys(
resource_group_name=resource_group.name,
factory_name="str",
@@ -159,7 +159,7 @@ async def test_list_auth_keys(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_begin_start(self, resource_group):
+ async def test_integration_runtimes_begin_start(self, resource_group):
response = await (
await self.client.integration_runtimes.begin_start(
resource_group_name=resource_group.name,
@@ -174,7 +174,7 @@ async def test_begin_start(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_begin_stop(self, resource_group):
+ async def test_integration_runtimes_begin_stop(self, resource_group):
response = await (
await self.client.integration_runtimes.begin_stop(
resource_group_name=resource_group.name,
@@ -189,7 +189,7 @@ async def test_begin_stop(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_sync_credentials(self, resource_group):
+ async def test_integration_runtimes_sync_credentials(self, resource_group):
response = await self.client.integration_runtimes.sync_credentials(
resource_group_name=resource_group.name,
factory_name="str",
@@ -202,7 +202,7 @@ async def test_sync_credentials(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_get_monitoring_data(self, resource_group):
+ async def test_integration_runtimes_get_monitoring_data(self, resource_group):
response = await self.client.integration_runtimes.get_monitoring_data(
resource_group_name=resource_group.name,
factory_name="str",
@@ -215,7 +215,7 @@ async def test_get_monitoring_data(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_upgrade(self, resource_group):
+ async def test_integration_runtimes_upgrade(self, resource_group):
response = await self.client.integration_runtimes.upgrade(
resource_group_name=resource_group.name,
factory_name="str",
@@ -228,7 +228,7 @@ async def test_upgrade(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_remove_links(self, resource_group):
+ async def test_integration_runtimes_remove_links(self, resource_group):
response = await self.client.integration_runtimes.remove_links(
resource_group_name=resource_group.name,
factory_name="str",
@@ -242,7 +242,7 @@ async def test_remove_links(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_create_linked_integration_runtime(self, resource_group):
+ async def test_integration_runtimes_create_linked_integration_runtime(self, resource_group):
response = await self.client.integration_runtimes.create_linked_integration_runtime(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_linked_services_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_linked_services_operations.py
index 860fca87ee4d..65078aca2062 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_linked_services_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_linked_services_operations.py
@@ -20,7 +20,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_list_by_factory(self, resource_group):
+ def test_linked_services_list_by_factory(self, resource_group):
response = self.client.linked_services.list_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
@@ -32,7 +32,7 @@ def test_list_by_factory(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_create_or_update(self, resource_group):
+ def test_linked_services_create_or_update(self, resource_group):
response = self.client.linked_services.create_or_update(
resource_group_name=resource_group.name,
factory_name="str",
@@ -46,7 +46,7 @@ def test_create_or_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_get(self, resource_group):
+ def test_linked_services_get(self, resource_group):
response = self.client.linked_services.get(
resource_group_name=resource_group.name,
factory_name="str",
@@ -59,7 +59,7 @@ def test_get(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_delete(self, resource_group):
+ def test_linked_services_delete(self, resource_group):
response = self.client.linked_services.delete(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_linked_services_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_linked_services_operations_async.py
index 4e7660259368..872f3b0af917 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_linked_services_operations_async.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_linked_services_operations_async.py
@@ -21,7 +21,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_list_by_factory(self, resource_group):
+ async def test_linked_services_list_by_factory(self, resource_group):
response = self.client.linked_services.list_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
@@ -33,7 +33,7 @@ async def test_list_by_factory(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_create_or_update(self, resource_group):
+ async def test_linked_services_create_or_update(self, resource_group):
response = await self.client.linked_services.create_or_update(
resource_group_name=resource_group.name,
factory_name="str",
@@ -47,7 +47,7 @@ async def test_create_or_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_get(self, resource_group):
+ async def test_linked_services_get(self, resource_group):
response = await self.client.linked_services.get(
resource_group_name=resource_group.name,
factory_name="str",
@@ -60,7 +60,7 @@ async def test_get(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_delete(self, resource_group):
+ async def test_linked_services_delete(self, resource_group):
response = await self.client.linked_services.delete(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_managed_private_endpoints_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_managed_private_endpoints_operations.py
index 14df0f72198b..71fcdcd450ed 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_managed_private_endpoints_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_managed_private_endpoints_operations.py
@@ -20,7 +20,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_list_by_factory(self, resource_group):
+ def test_managed_private_endpoints_list_by_factory(self, resource_group):
response = self.client.managed_private_endpoints.list_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
@@ -33,7 +33,7 @@ def test_list_by_factory(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_create_or_update(self, resource_group):
+ def test_managed_private_endpoints_create_or_update(self, resource_group):
response = self.client.managed_private_endpoints.create_or_update(
resource_group_name=resource_group.name,
factory_name="str",
@@ -61,7 +61,7 @@ def test_create_or_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_get(self, resource_group):
+ def test_managed_private_endpoints_get(self, resource_group):
response = self.client.managed_private_endpoints.get(
resource_group_name=resource_group.name,
factory_name="str",
@@ -75,7 +75,7 @@ def test_get(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_delete(self, resource_group):
+ def test_managed_private_endpoints_delete(self, resource_group):
response = self.client.managed_private_endpoints.delete(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_managed_private_endpoints_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_managed_private_endpoints_operations_async.py
index 5fbd619b8871..e4c7cdeaabf9 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_managed_private_endpoints_operations_async.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_managed_private_endpoints_operations_async.py
@@ -21,7 +21,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_list_by_factory(self, resource_group):
+ async def test_managed_private_endpoints_list_by_factory(self, resource_group):
response = self.client.managed_private_endpoints.list_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
@@ -34,7 +34,7 @@ async def test_list_by_factory(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_create_or_update(self, resource_group):
+ async def test_managed_private_endpoints_create_or_update(self, resource_group):
response = await self.client.managed_private_endpoints.create_or_update(
resource_group_name=resource_group.name,
factory_name="str",
@@ -62,7 +62,7 @@ async def test_create_or_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_get(self, resource_group):
+ async def test_managed_private_endpoints_get(self, resource_group):
response = await self.client.managed_private_endpoints.get(
resource_group_name=resource_group.name,
factory_name="str",
@@ -76,7 +76,7 @@ async def test_get(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_delete(self, resource_group):
+ async def test_managed_private_endpoints_delete(self, resource_group):
response = await self.client.managed_private_endpoints.delete(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_managed_virtual_networks_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_managed_virtual_networks_operations.py
index fb8000b8e985..c5b1e65e4da2 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_managed_virtual_networks_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_managed_virtual_networks_operations.py
@@ -20,7 +20,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_list_by_factory(self, resource_group):
+ def test_managed_virtual_networks_list_by_factory(self, resource_group):
response = self.client.managed_virtual_networks.list_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
@@ -32,7 +32,7 @@ def test_list_by_factory(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_create_or_update(self, resource_group):
+ def test_managed_virtual_networks_create_or_update(self, resource_group):
response = self.client.managed_virtual_networks.create_or_update(
resource_group_name=resource_group.name,
factory_name="str",
@@ -52,7 +52,7 @@ def test_create_or_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_get(self, resource_group):
+ def test_managed_virtual_networks_get(self, resource_group):
response = self.client.managed_virtual_networks.get(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_managed_virtual_networks_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_managed_virtual_networks_operations_async.py
index 73dfcaa3240b..68be41b1315f 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_managed_virtual_networks_operations_async.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_managed_virtual_networks_operations_async.py
@@ -21,7 +21,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_list_by_factory(self, resource_group):
+ async def test_managed_virtual_networks_list_by_factory(self, resource_group):
response = self.client.managed_virtual_networks.list_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
@@ -33,7 +33,7 @@ async def test_list_by_factory(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_create_or_update(self, resource_group):
+ async def test_managed_virtual_networks_create_or_update(self, resource_group):
response = await self.client.managed_virtual_networks.create_or_update(
resource_group_name=resource_group.name,
factory_name="str",
@@ -53,7 +53,7 @@ async def test_create_or_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_get(self, resource_group):
+ async def test_managed_virtual_networks_get(self, resource_group):
response = await self.client.managed_virtual_networks.get(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_operations.py
index cc44ace480d2..f17d65db1400 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_operations.py
@@ -20,7 +20,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_list(self, resource_group):
+ def test_operations_list(self, resource_group):
response = self.client.operations.list(
api_version="2018-06-01",
)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_operations_async.py
index 17590fc1dfca..6111f071f272 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_operations_async.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_operations_async.py
@@ -21,7 +21,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_list(self, resource_group):
+ async def test_operations_list(self, resource_group):
response = self.client.operations.list(
api_version="2018-06-01",
)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_pipeline_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_pipeline_runs_operations.py
index 5836d281d1f3..619281b5d14e 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_pipeline_runs_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_pipeline_runs_operations.py
@@ -20,7 +20,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_query_by_factory(self, resource_group):
+ def test_pipeline_runs_query_by_factory(self, resource_group):
response = self.client.pipeline_runs.query_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
@@ -39,7 +39,7 @@ def test_query_by_factory(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_get(self, resource_group):
+ def test_pipeline_runs_get(self, resource_group):
response = self.client.pipeline_runs.get(
resource_group_name=resource_group.name,
factory_name="str",
@@ -52,7 +52,7 @@ def test_get(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_cancel(self, resource_group):
+ def test_pipeline_runs_cancel(self, resource_group):
response = self.client.pipeline_runs.cancel(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_pipeline_runs_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_pipeline_runs_operations_async.py
index acd95502267f..0fb5484d1266 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_pipeline_runs_operations_async.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_pipeline_runs_operations_async.py
@@ -21,7 +21,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_query_by_factory(self, resource_group):
+ async def test_pipeline_runs_query_by_factory(self, resource_group):
response = await self.client.pipeline_runs.query_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
@@ -40,7 +40,7 @@ async def test_query_by_factory(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_get(self, resource_group):
+ async def test_pipeline_runs_get(self, resource_group):
response = await self.client.pipeline_runs.get(
resource_group_name=resource_group.name,
factory_name="str",
@@ -53,7 +53,7 @@ async def test_get(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_cancel(self, resource_group):
+ async def test_pipeline_runs_cancel(self, resource_group):
response = await self.client.pipeline_runs.cancel(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_pipelines_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_pipelines_operations.py
index 055cc41ae31e..48adaf4e55cc 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_pipelines_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_pipelines_operations.py
@@ -20,7 +20,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_list_by_factory(self, resource_group):
+ def test_pipelines_list_by_factory(self, resource_group):
response = self.client.pipelines.list_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
@@ -32,7 +32,7 @@ def test_list_by_factory(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_create_or_update(self, resource_group):
+ def test_pipelines_create_or_update(self, resource_group):
response = self.client.pipelines.create_or_update(
resource_group_name=resource_group.name,
factory_name="str",
@@ -60,7 +60,7 @@ def test_create_or_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_get(self, resource_group):
+ def test_pipelines_get(self, resource_group):
response = self.client.pipelines.get(
resource_group_name=resource_group.name,
factory_name="str",
@@ -73,7 +73,7 @@ def test_get(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_delete(self, resource_group):
+ def test_pipelines_delete(self, resource_group):
response = self.client.pipelines.delete(
resource_group_name=resource_group.name,
factory_name="str",
@@ -86,7 +86,7 @@ def test_delete(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_create_run(self, resource_group):
+ def test_pipelines_create_run(self, resource_group):
response = self.client.pipelines.create_run(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_pipelines_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_pipelines_operations_async.py
index 2be64cd4e6ab..db8b625c8d09 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_pipelines_operations_async.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_pipelines_operations_async.py
@@ -21,7 +21,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_list_by_factory(self, resource_group):
+ async def test_pipelines_list_by_factory(self, resource_group):
response = self.client.pipelines.list_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
@@ -33,7 +33,7 @@ async def test_list_by_factory(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_create_or_update(self, resource_group):
+ async def test_pipelines_create_or_update(self, resource_group):
response = await self.client.pipelines.create_or_update(
resource_group_name=resource_group.name,
factory_name="str",
@@ -61,7 +61,7 @@ async def test_create_or_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_get(self, resource_group):
+ async def test_pipelines_get(self, resource_group):
response = await self.client.pipelines.get(
resource_group_name=resource_group.name,
factory_name="str",
@@ -74,7 +74,7 @@ async def test_get(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_delete(self, resource_group):
+ async def test_pipelines_delete(self, resource_group):
response = await self.client.pipelines.delete(
resource_group_name=resource_group.name,
factory_name="str",
@@ -87,7 +87,7 @@ async def test_delete(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_create_run(self, resource_group):
+ async def test_pipelines_create_run(self, resource_group):
response = await self.client.pipelines.create_run(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_end_point_connections_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_end_point_connections_operations.py
index 9cfadc91770c..574a46cb1dc4 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_end_point_connections_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_end_point_connections_operations.py
@@ -20,7 +20,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_list_by_factory(self, resource_group):
+ def test_private_end_point_connections_list_by_factory(self, resource_group):
response = self.client.private_end_point_connections.list_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_end_point_connections_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_end_point_connections_operations_async.py
index c152ee8bc113..304939c9ad82 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_end_point_connections_operations_async.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_end_point_connections_operations_async.py
@@ -21,7 +21,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_list_by_factory(self, resource_group):
+ async def test_private_end_point_connections_list_by_factory(self, resource_group):
response = self.client.private_end_point_connections.list_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_endpoint_connection_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_endpoint_connection_operations.py
index 6ac9901e27e2..e8682089587d 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_endpoint_connection_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_endpoint_connection_operations.py
@@ -20,7 +20,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_create_or_update(self, resource_group):
+ def test_private_endpoint_connection_create_or_update(self, resource_group):
response = self.client.private_endpoint_connection.create_or_update(
resource_group_name=resource_group.name,
factory_name="str",
@@ -47,7 +47,7 @@ def test_create_or_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_get(self, resource_group):
+ def test_private_endpoint_connection_get(self, resource_group):
response = self.client.private_endpoint_connection.get(
resource_group_name=resource_group.name,
factory_name="str",
@@ -60,7 +60,7 @@ def test_get(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_delete(self, resource_group):
+ def test_private_endpoint_connection_delete(self, resource_group):
response = self.client.private_endpoint_connection.delete(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_endpoint_connection_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_endpoint_connection_operations_async.py
index f2cda1711bc2..89925dabee4d 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_endpoint_connection_operations_async.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_endpoint_connection_operations_async.py
@@ -21,7 +21,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_create_or_update(self, resource_group):
+ async def test_private_endpoint_connection_create_or_update(self, resource_group):
response = await self.client.private_endpoint_connection.create_or_update(
resource_group_name=resource_group.name,
factory_name="str",
@@ -48,7 +48,7 @@ async def test_create_or_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_get(self, resource_group):
+ async def test_private_endpoint_connection_get(self, resource_group):
response = await self.client.private_endpoint_connection.get(
resource_group_name=resource_group.name,
factory_name="str",
@@ -61,7 +61,7 @@ async def test_get(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_delete(self, resource_group):
+ async def test_private_endpoint_connection_delete(self, resource_group):
response = await self.client.private_endpoint_connection.delete(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_link_resources_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_link_resources_operations.py
index 30ec19b28be7..1a967950fd4e 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_link_resources_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_link_resources_operations.py
@@ -20,7 +20,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_get(self, resource_group):
+ def test_private_link_resources_get(self, resource_group):
response = self.client.private_link_resources.get(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_link_resources_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_link_resources_operations_async.py
index 1625dcd3f94a..0770e7c9b475 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_link_resources_operations_async.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_link_resources_operations_async.py
@@ -21,7 +21,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_get(self, resource_group):
+ async def test_private_link_resources_get(self, resource_group):
response = await self.client.private_link_resources.get(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_trigger_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_trigger_runs_operations.py
index 2f0c2a0fc27f..fc0a1e4dcdb3 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_trigger_runs_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_trigger_runs_operations.py
@@ -20,7 +20,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_rerun(self, resource_group):
+ def test_trigger_runs_rerun(self, resource_group):
response = self.client.trigger_runs.rerun(
resource_group_name=resource_group.name,
factory_name="str",
@@ -34,7 +34,7 @@ def test_rerun(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_cancel(self, resource_group):
+ def test_trigger_runs_cancel(self, resource_group):
response = self.client.trigger_runs.cancel(
resource_group_name=resource_group.name,
factory_name="str",
@@ -48,7 +48,7 @@ def test_cancel(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_query_by_factory(self, resource_group):
+ def test_trigger_runs_query_by_factory(self, resource_group):
response = self.client.trigger_runs.query_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_trigger_runs_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_trigger_runs_operations_async.py
index cc17d0de01c9..ff9d6271ebb3 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_trigger_runs_operations_async.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_trigger_runs_operations_async.py
@@ -21,7 +21,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_rerun(self, resource_group):
+ async def test_trigger_runs_rerun(self, resource_group):
response = await self.client.trigger_runs.rerun(
resource_group_name=resource_group.name,
factory_name="str",
@@ -35,7 +35,7 @@ async def test_rerun(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_cancel(self, resource_group):
+ async def test_trigger_runs_cancel(self, resource_group):
response = await self.client.trigger_runs.cancel(
resource_group_name=resource_group.name,
factory_name="str",
@@ -49,7 +49,7 @@ async def test_cancel(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_query_by_factory(self, resource_group):
+ async def test_trigger_runs_query_by_factory(self, resource_group):
response = await self.client.trigger_runs.query_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_triggers_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_triggers_operations.py
index 24be0b35dec4..bc2ec6b1e081 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_triggers_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_triggers_operations.py
@@ -20,7 +20,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_list_by_factory(self, resource_group):
+ def test_triggers_list_by_factory(self, resource_group):
response = self.client.triggers.list_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
@@ -32,7 +32,7 @@ def test_list_by_factory(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_query_by_factory(self, resource_group):
+ def test_triggers_query_by_factory(self, resource_group):
response = self.client.triggers.query_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
@@ -45,7 +45,7 @@ def test_query_by_factory(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_create_or_update(self, resource_group):
+ def test_triggers_create_or_update(self, resource_group):
response = self.client.triggers.create_or_update(
resource_group_name=resource_group.name,
factory_name="str",
@@ -59,7 +59,7 @@ def test_create_or_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_get(self, resource_group):
+ def test_triggers_get(self, resource_group):
response = self.client.triggers.get(
resource_group_name=resource_group.name,
factory_name="str",
@@ -72,7 +72,7 @@ def test_get(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_delete(self, resource_group):
+ def test_triggers_delete(self, resource_group):
response = self.client.triggers.delete(
resource_group_name=resource_group.name,
factory_name="str",
@@ -85,7 +85,7 @@ def test_delete(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_begin_subscribe_to_events(self, resource_group):
+ def test_triggers_begin_subscribe_to_events(self, resource_group):
response = self.client.triggers.begin_subscribe_to_events(
resource_group_name=resource_group.name,
factory_name="str",
@@ -98,7 +98,7 @@ def test_begin_subscribe_to_events(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_get_event_subscription_status(self, resource_group):
+ def test_triggers_get_event_subscription_status(self, resource_group):
response = self.client.triggers.get_event_subscription_status(
resource_group_name=resource_group.name,
factory_name="str",
@@ -111,7 +111,7 @@ def test_get_event_subscription_status(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_begin_unsubscribe_from_events(self, resource_group):
+ def test_triggers_begin_unsubscribe_from_events(self, resource_group):
response = self.client.triggers.begin_unsubscribe_from_events(
resource_group_name=resource_group.name,
factory_name="str",
@@ -124,7 +124,7 @@ def test_begin_unsubscribe_from_events(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_begin_start(self, resource_group):
+ def test_triggers_begin_start(self, resource_group):
response = self.client.triggers.begin_start(
resource_group_name=resource_group.name,
factory_name="str",
@@ -137,7 +137,7 @@ def test_begin_start(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_begin_stop(self, resource_group):
+ def test_triggers_begin_stop(self, resource_group):
response = self.client.triggers.begin_stop(
resource_group_name=resource_group.name,
factory_name="str",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_triggers_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_triggers_operations_async.py
index 53e316ff588e..3b23cc9060b9 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_triggers_operations_async.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_triggers_operations_async.py
@@ -21,7 +21,7 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_list_by_factory(self, resource_group):
+ async def test_triggers_list_by_factory(self, resource_group):
response = self.client.triggers.list_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
@@ -33,7 +33,7 @@ async def test_list_by_factory(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_query_by_factory(self, resource_group):
+ async def test_triggers_query_by_factory(self, resource_group):
response = await self.client.triggers.query_by_factory(
resource_group_name=resource_group.name,
factory_name="str",
@@ -46,7 +46,7 @@ async def test_query_by_factory(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_create_or_update(self, resource_group):
+ async def test_triggers_create_or_update(self, resource_group):
response = await self.client.triggers.create_or_update(
resource_group_name=resource_group.name,
factory_name="str",
@@ -60,7 +60,7 @@ async def test_create_or_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_get(self, resource_group):
+ async def test_triggers_get(self, resource_group):
response = await self.client.triggers.get(
resource_group_name=resource_group.name,
factory_name="str",
@@ -73,7 +73,7 @@ async def test_get(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_delete(self, resource_group):
+ async def test_triggers_delete(self, resource_group):
response = await self.client.triggers.delete(
resource_group_name=resource_group.name,
factory_name="str",
@@ -86,7 +86,7 @@ async def test_delete(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_begin_subscribe_to_events(self, resource_group):
+ async def test_triggers_begin_subscribe_to_events(self, resource_group):
response = await (
await self.client.triggers.begin_subscribe_to_events(
resource_group_name=resource_group.name,
@@ -101,7 +101,7 @@ async def test_begin_subscribe_to_events(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_get_event_subscription_status(self, resource_group):
+ async def test_triggers_get_event_subscription_status(self, resource_group):
response = await self.client.triggers.get_event_subscription_status(
resource_group_name=resource_group.name,
factory_name="str",
@@ -114,7 +114,7 @@ async def test_get_event_subscription_status(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_begin_unsubscribe_from_events(self, resource_group):
+ async def test_triggers_begin_unsubscribe_from_events(self, resource_group):
response = await (
await self.client.triggers.begin_unsubscribe_from_events(
resource_group_name=resource_group.name,
@@ -129,7 +129,7 @@ async def test_begin_unsubscribe_from_events(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_begin_start(self, resource_group):
+ async def test_triggers_begin_start(self, resource_group):
response = await (
await self.client.triggers.begin_start(
resource_group_name=resource_group.name,
@@ -144,7 +144,7 @@ async def test_begin_start(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_begin_stop(self, resource_group):
+ async def test_triggers_begin_stop(self, resource_group):
response = await (
await self.client.triggers.begin_stop(
resource_group_name=resource_group.name,
diff --git a/sdk/datafactory/azure-mgmt-datafactory/setup.py b/sdk/datafactory/azure-mgmt-datafactory/setup.py
index 907fd55998d6..570262824209 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/setup.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/setup.py
@@ -22,9 +22,11 @@
# Version extraction inspired from 'requests'
with open(
- os.path.join(package_folder_path, "version.py")
- if os.path.exists(os.path.join(package_folder_path, "version.py"))
- else os.path.join(package_folder_path, "_version.py"),
+ (
+ os.path.join(package_folder_path, "version.py")
+ if os.path.exists(os.path.join(package_folder_path, "version.py"))
+ else os.path.join(package_folder_path, "_version.py")
+ ),
"r",
) as fd:
version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(), re.MULTILINE).group(1)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/tests/test_data_factory_management_factories_operations_async_test.py b/sdk/datafactory/azure-mgmt-datafactory/tests/test_data_factory_management_factories_operations_async_test.py
index 6d0d067a1260..1a76296ca432 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/tests/test_data_factory_management_factories_operations_async_test.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/tests/test_data_factory_management_factories_operations_async_test.py
@@ -28,4 +28,3 @@ async def test_list_by_resource_group(self, resource_group):
)
result = [r async for r in response]
assert result == []
-
\ No newline at end of file
diff --git a/sdk/datafactory/azure-mgmt-datafactory/tests/test_data_factory_management_operations_async_test.py b/sdk/datafactory/azure-mgmt-datafactory/tests/test_data_factory_management_operations_async_test.py
index aa94e85853da..e9ad37295254 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/tests/test_data_factory_management_operations_async_test.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/tests/test_data_factory_management_operations_async_test.py
@@ -27,4 +27,3 @@ async def test_list(self, resource_group):
)
result = [r async for r in response]
assert result
-
diff --git a/sdk/datafactory/azure-mgmt-datafactory/tests/test_data_factory_management_operations_test.py b/sdk/datafactory/azure-mgmt-datafactory/tests/test_data_factory_management_operations_test.py
index e01cacf32602..99e1d618ad98 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/tests/test_data_factory_management_operations_test.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/tests/test_data_factory_management_operations_test.py
@@ -26,4 +26,3 @@ def test_list(self, resource_group):
)
result = [r for r in response]
assert result
-