diff --git a/sdk/storage/azure-storage-blob/CHANGELOG.md b/sdk/storage/azure-storage-blob/CHANGELOG.md
index 3e0ccd164e84..848f1490f71b 100644
--- a/sdk/storage/azure-storage-blob/CHANGELOG.md
+++ b/sdk/storage/azure-storage-blob/CHANGELOG.md
@@ -1,11 +1,11 @@
# Release History
-## 12.24.0b1 (2024-10-09)
+## 12.24.0b1 (2024-10-10)
### Features Added
- Added support for service version 2025-01-05.
- Added support for passing metadata to `upload_blob_from_url` via the new `metadata` keyword.
-- Added support for `set_immutability_policy`, `delete_immutability_policy` and `set_legal_hold` for Blob Snapshots and Versions.
+- Added support for `set_immutability_policy`, `delete_immutability_policy` and `set_legal_hold` for Blob snapshots and versions.
## 12.23.1 (2024-09-25)
diff --git a/sdk/storage/azure-storage-blob/assets.json b/sdk/storage/azure-storage-blob/assets.json
index 0df5cf19caa8..5f62daf58985 100644
--- a/sdk/storage/azure-storage-blob/assets.json
+++ b/sdk/storage/azure-storage-blob/assets.json
@@ -2,5 +2,5 @@
"AssetsRepo": "Azure/azure-sdk-assets",
"AssetsRepoPrefixPath": "python",
"TagPrefix": "python/storage/azure-storage-blob",
- "Tag": "python/storage/azure-storage-blob_7df5687d1f"
+ "Tag": "python/storage/azure-storage-blob_b11831f46e"
}
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py
index 8d2d122543e2..ce4f738e1a48 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py
@@ -1228,6 +1228,9 @@ def set_immutability_policy(
.. versionadded:: 12.10.0
This was introduced in API version '2020-10-02'.
+ :keyword str version_id:
+ The version id parameter is an opaque DateTime
+ value that, when present, specifies the version of the blob to set the immutability policy on.
:keyword int timeout:
Sets the server-side timeout for the operation in seconds. For more details see
https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
@@ -1238,9 +1241,11 @@ def set_immutability_policy(
:rtype: Dict[str, str]
"""
+ version_id = get_version_id(self.version_id, kwargs)
kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time
kwargs['immutability_policy_mode'] = immutability_policy.policy_mode
- return cast(Dict[str, str], self._client.blob.set_immutability_policy(cls=return_response_headers, **kwargs))
+ return cast(Dict[str, str], self._client.blob.set_immutability_policy(
+ cls=return_response_headers, version_id=version_id, **kwargs))
@distributed_trace
def delete_immutability_policy(self, **kwargs: Any) -> None:
@@ -1249,6 +1254,9 @@ def delete_immutability_policy(self, **kwargs: Any) -> None:
.. versionadded:: 12.10.0
This operation was introduced in API version '2020-10-02'.
+ :keyword str version_id:
+ The version id parameter is an opaque DateTime
+ value that, when present, specifies the version of the blob to delete the immutability policy from.
:keyword int timeout:
Sets the server-side timeout for the operation in seconds. For more details see
https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
@@ -1259,7 +1267,8 @@ def delete_immutability_policy(self, **kwargs: Any) -> None:
:rtype: Dict[str, str]
"""
- self._client.blob.delete_immutability_policy(**kwargs)
+ version_id = get_version_id(self.version_id, kwargs)
+ self._client.blob.delete_immutability_policy(version_id=version_id, **kwargs)
@distributed_trace
def set_legal_hold(self, legal_hold: bool, **kwargs: Any) -> Dict[str, Union[str, datetime, bool]]:
@@ -1270,6 +1279,9 @@ def set_legal_hold(self, legal_hold: bool, **kwargs: Any) -> Dict[str, Union[str
:param bool legal_hold:
Specified if a legal hold should be set on the blob.
+ :keyword str version_id:
+ The version id parameter is an opaque DateTime
+ value that, when present, specifies the version of the blob to set the legal hold on.
:keyword int timeout:
Sets the server-side timeout for the operation in seconds. For more details see
https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
@@ -1280,8 +1292,9 @@ def set_legal_hold(self, legal_hold: bool, **kwargs: Any) -> Dict[str, Union[str
:rtype: Dict[str, Union[str, datetime, bool]]
"""
- return cast(Dict[str, Union[str, datetime, bool]],
- self._client.blob.set_legal_hold(legal_hold, cls=return_response_headers, **kwargs))
+ version_id = get_version_id(self.version_id, kwargs)
+ return cast(Dict[str, Union[str, datetime, bool]], self._client.blob.set_legal_hold(
+ legal_hold, version_id=version_id, cls=return_response_headers, **kwargs))
@distributed_trace
def create_page_blob(
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_azure_blob_storage.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_azure_blob_storage.py
index cabfed8f0666..a429b713b744 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_azure_blob_storage.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_azure_blob_storage.py
@@ -48,7 +48,7 @@ class AzureBlobStorage: # pylint: disable=client-accepts-api-version-keyword
:param base_url: Service URL. Required. Default value is "".
:type base_url: str
:keyword version: Specifies the version of the operation to use for this request. Default value
- is "2024-08-04". Note that overriding this default value may result in unsupported behavior.
+ is "2025-01-05". Note that overriding this default value may result in unsupported behavior.
:paramtype version: str
"""
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_configuration.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_configuration.py
index 4de4871f14b6..80772c6e3aa2 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_configuration.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_configuration.py
@@ -23,12 +23,12 @@ class AzureBlobStorageConfiguration: # pylint: disable=too-many-instance-attrib
desired operation. Required.
:type url: str
:keyword version: Specifies the version of the operation to use for this request. Default value
- is "2024-08-04". Note that overriding this default value may result in unsupported behavior.
+ is "2025-01-05". Note that overriding this default value may result in unsupported behavior.
:paramtype version: str
"""
def __init__(self, url: str, **kwargs: Any) -> None:
- version: Literal["2024-08-04"] = kwargs.pop("version", "2024-08-04")
+ version: Literal["2025-01-05"] = kwargs.pop("version", "2025-01-05")
if url is None:
raise ValueError("Parameter 'url' must not be None.")
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_serialization.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_serialization.py
index 8139854b97bb..01a226bd7f14 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_serialization.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_serialization.py
@@ -1,3 +1,4 @@
+# pylint: disable=too-many-lines
# --------------------------------------------------------------------------
#
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -24,7 +25,6 @@
#
# --------------------------------------------------------------------------
-# pylint: skip-file
# pyright: reportUnnecessaryTypeIgnoreComment=false
from base64 import b64decode, b64encode
@@ -52,7 +52,6 @@
MutableMapping,
Type,
List,
- Mapping,
)
try:
@@ -91,6 +90,8 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type:
:param data: Input, could be bytes or stream (will be decoded with UTF8) or text
:type data: str or bytes or IO
:param str content_type: The content type.
+ :return: The deserialized data.
+ :rtype: object
"""
if hasattr(data, "read"):
# Assume a stream
@@ -112,7 +113,7 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type:
try:
return json.loads(data_as_str)
except ValueError as err:
- raise DeserializationError("JSON is invalid: {}".format(err), err)
+ raise DeserializationError("JSON is invalid: {}".format(err), err) from err
elif "xml" in (content_type or []):
try:
@@ -155,6 +156,11 @@ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]],
Use bytes and headers to NOT use any requests/aiohttp or whatever
specific implementation.
Headers will tested for "content-type"
+
+ :param bytes body_bytes: The body of the response.
+ :param dict headers: The headers of the response.
+ :returns: The deserialized data.
+ :rtype: object
"""
# Try to use content-type from headers if available
content_type = None
@@ -184,15 +190,30 @@ class UTC(datetime.tzinfo):
"""Time Zone info for handling UTC"""
def utcoffset(self, dt):
- """UTF offset for UTC is 0."""
+ """UTC offset for UTC is 0.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The offset
+ :rtype: datetime.timedelta
+ """
return datetime.timedelta(0)
def tzname(self, dt):
- """Timestamp representation."""
+ """Timestamp representation.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The timestamp representation
+ :rtype: str
+ """
return "Z"
def dst(self, dt):
- """No daylight saving for UTC."""
+ """No daylight saving for UTC.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The daylight saving time
+ :rtype: datetime.timedelta
+ """
return datetime.timedelta(hours=1)
@@ -235,24 +256,26 @@ def __getinitargs__(self):
_FLATTEN = re.compile(r"(? None:
self.additional_properties: Optional[Dict[str, Any]] = {}
- for k in kwargs:
+ for k in kwargs: # pylint: disable=consider-using-dict-items
if k not in self._attribute_map:
_LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__)
elif k in self._validation and self._validation[k].get("readonly", False):
@@ -300,13 +330,23 @@ def __init__(self, **kwargs: Any) -> None:
setattr(self, k, kwargs[k])
def __eq__(self, other: Any) -> bool:
- """Compare objects by comparing all attributes."""
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are equal
+ :rtype: bool
+ """
if isinstance(other, self.__class__):
return self.__dict__ == other.__dict__
return False
def __ne__(self, other: Any) -> bool:
- """Compare objects by comparing all attributes."""
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are not equal
+ :rtype: bool
+ """
return not self.__eq__(other)
def __str__(self) -> str:
@@ -326,7 +366,11 @@ def is_xml_model(cls) -> bool:
@classmethod
def _create_xml_node(cls):
- """Create XML node."""
+ """Create XML node.
+
+ :returns: The XML node
+ :rtype: xml.etree.ElementTree.Element
+ """
try:
xml_map = cls._xml_map # type: ignore
except AttributeError:
@@ -346,7 +390,9 @@ def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON:
:rtype: dict
"""
serializer = Serializer(self._infer_class_models())
- return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs) # type: ignore
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, keep_readonly=keep_readonly, **kwargs
+ )
def as_dict(
self,
@@ -380,12 +426,15 @@ def my_key_transformer(key, attr_desc, value):
If you want XML serialization, you can pass the kwargs is_xml=True.
+ :param bool keep_readonly: If you want to serialize the readonly attributes
:param function key_transformer: A key transformer function.
:returns: A dict JSON compatible object
:rtype: dict
"""
serializer = Serializer(self._infer_class_models())
- return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs) # type: ignore
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs
+ )
@classmethod
def _infer_class_models(cls):
@@ -395,7 +444,7 @@ def _infer_class_models(cls):
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
if cls.__name__ not in client_models:
raise ValueError("Not Autorest generated code")
- except Exception:
+ except Exception: # pylint: disable=broad-exception-caught
# Assume it's not Autorest generated (tests?). Add ourselves as dependencies.
client_models = {cls.__name__: cls}
return client_models
@@ -408,6 +457,7 @@ def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = N
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong
+ :rtype: ModelType
"""
deserializer = Deserializer(cls._infer_class_models())
return deserializer(cls.__name__, data, content_type=content_type) # type: ignore
@@ -426,9 +476,11 @@ def from_dict(
and last_rest_key_case_insensitive_extractor)
:param dict data: A dict using RestAPI structure
+ :param function key_extractors: A key extractor function.
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong
+ :rtype: ModelType
"""
deserializer = Deserializer(cls._infer_class_models())
deserializer.key_extractors = ( # type: ignore
@@ -448,7 +500,7 @@ def _flatten_subtype(cls, key, objects):
return {}
result = dict(cls._subtype_map[key])
for valuetype in cls._subtype_map[key].values():
- result.update(objects[valuetype]._flatten_subtype(key, objects))
+ result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access
return result
@classmethod
@@ -456,6 +508,11 @@ def _classify(cls, response, objects):
"""Check the class _subtype_map for any child classes.
We want to ignore any inherited _subtype_maps.
Remove the polymorphic key from the initial data.
+
+ :param dict response: The initial data
+ :param dict objects: The class objects
+ :returns: The class to be used
+ :rtype: class
"""
for subtype_key in cls.__dict__.get("_subtype_map", {}).keys():
subtype_value = None
@@ -501,11 +558,13 @@ def _decode_attribute_map_key(key):
inside the received data.
:param str key: A key string from the generated code
+ :returns: The decoded key
+ :rtype: str
"""
return key.replace("\\.", ".")
-class Serializer(object):
+class Serializer(object): # pylint: disable=too-many-public-methods
"""Request object model serializer."""
basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
@@ -560,13 +619,16 @@ def __init__(self, classes: Optional[Mapping[str, type]] = None):
self.key_transformer = full_restapi_key_transformer
self.client_side_validation = True
- def _serialize(self, target_obj, data_type=None, **kwargs):
+ def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
+ self, target_obj, data_type=None, **kwargs
+ ):
"""Serialize data into a string according to type.
- :param target_obj: The data to be serialized.
+ :param object target_obj: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str, dict
:raises: SerializationError if serialization fails.
+ :returns: The serialized data.
"""
key_transformer = kwargs.get("key_transformer", self.key_transformer)
keep_readonly = kwargs.get("keep_readonly", False)
@@ -592,12 +654,14 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
serialized = {}
if is_xml_model_serialization:
- serialized = target_obj._create_xml_node()
+ serialized = target_obj._create_xml_node() # pylint: disable=protected-access
try:
- attributes = target_obj._attribute_map
+ attributes = target_obj._attribute_map # pylint: disable=protected-access
for attr, attr_desc in attributes.items():
attr_name = attr
- if not keep_readonly and target_obj._validation.get(attr_name, {}).get("readonly", False):
+ if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access
+ attr_name, {}
+ ).get("readonly", False):
continue
if attr_name == "additional_properties" and attr_desc["key"] == "":
@@ -633,7 +697,8 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
if isinstance(new_attr, list):
serialized.extend(new_attr) # type: ignore
elif isinstance(new_attr, ET.Element):
- # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces.
+ # If the down XML has no XML/Name,
+ # we MUST replace the tag with the local tag. But keeping the namespaces.
if "name" not in getattr(orig_attr, "_xml_map", {}):
splitted_tag = new_attr.tag.split("}")
if len(splitted_tag) == 2: # Namespace
@@ -664,17 +729,17 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
except (AttributeError, KeyError, TypeError) as err:
msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj))
raise SerializationError(msg) from err
- else:
- return serialized
+ return serialized
def body(self, data, data_type, **kwargs):
"""Serialize data intended for a request body.
- :param data: The data to be serialized.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: dict
:raises: SerializationError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized request body
"""
# Just in case this is a dict
@@ -703,7 +768,7 @@ def body(self, data, data_type, **kwargs):
attribute_key_case_insensitive_extractor,
last_rest_key_case_insensitive_extractor,
]
- data = deserializer._deserialize(data_type, data)
+ data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access
except DeserializationError as err:
raise SerializationError("Unable to build a model: " + str(err)) from err
@@ -712,9 +777,11 @@ def body(self, data, data_type, **kwargs):
def url(self, name, data, data_type, **kwargs):
"""Serialize data intended for a URL path.
- :param data: The data to be serialized.
+ :param str name: The name of the URL path parameter.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str
+ :returns: The serialized URL path
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
"""
@@ -728,21 +795,20 @@ def url(self, name, data, data_type, **kwargs):
output = output.replace("{", quote("{")).replace("}", quote("}"))
else:
output = quote(str(output), safe="")
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return output
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return output
def query(self, name, data, data_type, **kwargs):
"""Serialize data intended for a URL query.
- :param data: The data to be serialized.
+ :param str name: The name of the query parameter.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
- :keyword bool skip_quote: Whether to skip quote the serialized result.
- Defaults to False.
:rtype: str, list
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized query parameter
"""
try:
# Treat the list aside, since we don't want to encode the div separator
@@ -759,19 +825,20 @@ def query(self, name, data, data_type, **kwargs):
output = str(output)
else:
output = quote(str(output), safe="")
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return str(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
def header(self, name, data, data_type, **kwargs):
"""Serialize data intended for a request header.
- :param data: The data to be serialized.
+ :param str name: The name of the header.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized header
"""
try:
if data_type in ["[str]"]:
@@ -780,21 +847,20 @@ def header(self, name, data, data_type, **kwargs):
output = self.serialize_data(data, data_type, **kwargs)
if data_type == "bool":
output = json.dumps(output)
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return str(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
def serialize_data(self, data, data_type, **kwargs):
"""Serialize generic data according to supplied data type.
- :param data: The data to be serialized.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
- :param bool required: Whether it's essential that the data not be
- empty or None
:raises: AttributeError if required data is None.
:raises: ValueError if data is None
:raises: SerializationError if serialization fails.
+ :returns: The serialized data.
+ :rtype: str, int, float, bool, dict, list
"""
if data is None:
raise ValueError("No value for given attribute")
@@ -805,7 +871,7 @@ def serialize_data(self, data, data_type, **kwargs):
if data_type in self.basic_types.values():
return self.serialize_basic(data, data_type, **kwargs)
- elif data_type in self.serialize_type:
+ if data_type in self.serialize_type:
return self.serialize_type[data_type](data, **kwargs)
# If dependencies is empty, try with current data class
@@ -821,11 +887,10 @@ def serialize_data(self, data, data_type, **kwargs):
except (ValueError, TypeError) as err:
msg = "Unable to serialize value: {!r} as type: {!r}."
raise SerializationError(msg.format(data, data_type)) from err
- else:
- return self._serialize(data, **kwargs)
+ return self._serialize(data, **kwargs)
@classmethod
- def _get_custom_serializers(cls, data_type, **kwargs):
+ def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements
custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type)
if custom_serializer:
return custom_serializer
@@ -841,23 +906,26 @@ def serialize_basic(cls, data, data_type, **kwargs):
- basic_types_serializers dict[str, callable] : If set, use the callable as serializer
- is_xml bool : If set, use xml_basic_types_serializers
- :param data: Object to be serialized.
+ :param obj data: Object to be serialized.
:param str data_type: Type of object in the iterable.
+ :rtype: str, int, float, bool
+ :return: serialized object
"""
custom_serializer = cls._get_custom_serializers(data_type, **kwargs)
if custom_serializer:
return custom_serializer(data)
if data_type == "str":
return cls.serialize_unicode(data)
- return eval(data_type)(data) # nosec
+ return eval(data_type)(data) # nosec # pylint: disable=eval-used
@classmethod
def serialize_unicode(cls, data):
"""Special handling for serializing unicode strings in Py2.
Encode to UTF-8 if unicode, otherwise handle as a str.
- :param data: Object to be serialized.
+ :param str data: Object to be serialized.
:rtype: str
+ :return: serialized object
"""
try: # If I received an enum, return its value
return data.value
@@ -871,8 +939,7 @@ def serialize_unicode(cls, data):
return data
except NameError:
return str(data)
- else:
- return str(data)
+ return str(data)
def serialize_iter(self, data, iter_type, div=None, **kwargs):
"""Serialize iterable.
@@ -882,15 +949,13 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs):
serialization_ctxt['type'] should be same as data_type.
- is_xml bool : If set, serialize as XML
- :param list attr: Object to be serialized.
+ :param list data: Object to be serialized.
:param str iter_type: Type of object in the iterable.
- :param bool required: Whether the objects in the iterable must
- not be None or empty.
:param str div: If set, this str will be used to combine the elements
in the iterable into a combined string. Default is 'None'.
- :keyword bool do_quote: Whether to quote the serialized result of each iterable element.
Defaults to False.
:rtype: list, str
+ :return: serialized iterable
"""
if isinstance(data, str):
raise SerializationError("Refuse str type as a valid iter type.")
@@ -945,9 +1010,8 @@ def serialize_dict(self, attr, dict_type, **kwargs):
:param dict attr: Object to be serialized.
:param str dict_type: Type of object in the dictionary.
- :param bool required: Whether the objects in the dictionary must
- not be None or empty.
:rtype: dict
+ :return: serialized dictionary
"""
serialization_ctxt = kwargs.get("serialization_ctxt", {})
serialized = {}
@@ -971,7 +1035,7 @@ def serialize_dict(self, attr, dict_type, **kwargs):
return serialized
- def serialize_object(self, attr, **kwargs):
+ def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
"""Serialize a generic object.
This will be handled as a dictionary. If object passed in is not
a basic type (str, int, float, dict, list) it will simply be
@@ -979,6 +1043,7 @@ def serialize_object(self, attr, **kwargs):
:param dict attr: Object to be serialized.
:rtype: dict or str
+ :return: serialized object
"""
if attr is None:
return None
@@ -1003,7 +1068,7 @@ def serialize_object(self, attr, **kwargs):
return self.serialize_decimal(attr)
# If it's a model or I know this dependency, serialize as a Model
- elif obj_type in self.dependencies.values() or isinstance(attr, Model):
+ if obj_type in self.dependencies.values() or isinstance(attr, Model):
return self._serialize(attr)
if obj_type == dict:
@@ -1034,56 +1099,61 @@ def serialize_enum(attr, enum_obj=None):
try:
enum_obj(result) # type: ignore
return result
- except ValueError:
+ except ValueError as exc:
for enum_value in enum_obj: # type: ignore
if enum_value.value.lower() == str(attr).lower():
return enum_value.value
error = "{!r} is not valid value for enum {!r}"
- raise SerializationError(error.format(attr, enum_obj))
+ raise SerializationError(error.format(attr, enum_obj)) from exc
@staticmethod
- def serialize_bytearray(attr, **kwargs):
+ def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize bytearray into base-64 string.
- :param attr: Object to be serialized.
+ :param str attr: Object to be serialized.
:rtype: str
+ :return: serialized base64
"""
return b64encode(attr).decode()
@staticmethod
- def serialize_base64(attr, **kwargs):
+ def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize str into base-64 string.
- :param attr: Object to be serialized.
+ :param str attr: Object to be serialized.
:rtype: str
+ :return: serialized base64
"""
encoded = b64encode(attr).decode("ascii")
return encoded.strip("=").replace("+", "-").replace("/", "_")
@staticmethod
- def serialize_decimal(attr, **kwargs):
+ def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Decimal object to float.
- :param attr: Object to be serialized.
+ :param decimal attr: Object to be serialized.
:rtype: float
+ :return: serialized decimal
"""
return float(attr)
@staticmethod
- def serialize_long(attr, **kwargs):
+ def serialize_long(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize long (Py2) or int (Py3).
- :param attr: Object to be serialized.
+ :param int attr: Object to be serialized.
:rtype: int/long
+ :return: serialized long
"""
return _long_type(attr)
@staticmethod
- def serialize_date(attr, **kwargs):
+ def serialize_date(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Date object into ISO-8601 formatted string.
:param Date attr: Object to be serialized.
:rtype: str
+ :return: serialized date
"""
if isinstance(attr, str):
attr = isodate.parse_date(attr)
@@ -1091,11 +1161,12 @@ def serialize_date(attr, **kwargs):
return t
@staticmethod
- def serialize_time(attr, **kwargs):
+ def serialize_time(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Time object into ISO-8601 formatted string.
:param datetime.time attr: Object to be serialized.
:rtype: str
+ :return: serialized time
"""
if isinstance(attr, str):
attr = isodate.parse_time(attr)
@@ -1105,30 +1176,32 @@ def serialize_time(attr, **kwargs):
return t
@staticmethod
- def serialize_duration(attr, **kwargs):
+ def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize TimeDelta object into ISO-8601 formatted string.
:param TimeDelta attr: Object to be serialized.
:rtype: str
+ :return: serialized duration
"""
if isinstance(attr, str):
attr = isodate.parse_duration(attr)
return isodate.duration_isoformat(attr)
@staticmethod
- def serialize_rfc(attr, **kwargs):
+ def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into RFC-1123 formatted string.
:param Datetime attr: Object to be serialized.
:rtype: str
:raises: TypeError if format invalid.
+ :return: serialized rfc
"""
try:
if not attr.tzinfo:
_LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
utc = attr.utctimetuple()
- except AttributeError:
- raise TypeError("RFC1123 object must be valid Datetime object.")
+ except AttributeError as exc:
+ raise TypeError("RFC1123 object must be valid Datetime object.") from exc
return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format(
Serializer.days[utc.tm_wday],
@@ -1141,12 +1214,13 @@ def serialize_rfc(attr, **kwargs):
)
@staticmethod
- def serialize_iso(attr, **kwargs):
+ def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into ISO-8601 formatted string.
:param Datetime attr: Object to be serialized.
:rtype: str
:raises: SerializationError if format invalid.
+ :return: serialized iso
"""
if isinstance(attr, str):
attr = isodate.parse_datetime(attr)
@@ -1172,13 +1246,14 @@ def serialize_iso(attr, **kwargs):
raise TypeError(msg) from err
@staticmethod
- def serialize_unix(attr, **kwargs):
+ def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into IntTime format.
This is represented as seconds.
:param Datetime attr: Object to be serialized.
:rtype: int
:raises: SerializationError if format invalid
+ :return: serialized unix
"""
if isinstance(attr, int):
return attr
@@ -1186,11 +1261,11 @@ def serialize_unix(attr, **kwargs):
if not attr.tzinfo:
_LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
return int(calendar.timegm(attr.utctimetuple()))
- except AttributeError:
- raise TypeError("Unix time object must be valid Datetime object.")
+ except AttributeError as exc:
+ raise TypeError("Unix time object must be valid Datetime object.") from exc
-def rest_key_extractor(attr, attr_desc, data):
+def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
key = attr_desc["key"]
working_data = data
@@ -1211,7 +1286,9 @@ def rest_key_extractor(attr, attr_desc, data):
return working_data.get(key)
-def rest_key_case_insensitive_extractor(attr, attr_desc, data):
+def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements
+ attr, attr_desc, data
+):
key = attr_desc["key"]
working_data = data
@@ -1232,17 +1309,29 @@ def rest_key_case_insensitive_extractor(attr, attr_desc, data):
return attribute_key_case_insensitive_extractor(key, None, working_data)
-def last_rest_key_extractor(attr, attr_desc, data):
- """Extract the attribute in "data" based on the last part of the JSON path key."""
+def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
+ """Extract the attribute in "data" based on the last part of the JSON path key.
+
+ :param str attr: The attribute to extract
+ :param dict attr_desc: The attribute description
+ :param dict data: The data to extract from
+ :rtype: object
+ :returns: The extracted attribute
+ """
key = attr_desc["key"]
dict_keys = _FLATTEN.split(key)
return attribute_key_extractor(dict_keys[-1], None, data)
-def last_rest_key_case_insensitive_extractor(attr, attr_desc, data):
+def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
"""Extract the attribute in "data" based on the last part of the JSON path key.
This is the case insensitive version of "last_rest_key_extractor"
+ :param str attr: The attribute to extract
+ :param dict attr_desc: The attribute description
+ :param dict data: The data to extract from
+ :rtype: object
+ :returns: The extracted attribute
"""
key = attr_desc["key"]
dict_keys = _FLATTEN.split(key)
@@ -1279,7 +1368,7 @@ def _extract_name_from_internal_type(internal_type):
return xml_name
-def xml_key_extractor(attr, attr_desc, data):
+def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements
if isinstance(data, dict):
return None
@@ -1331,22 +1420,21 @@ def xml_key_extractor(attr, attr_desc, data):
if is_iter_type:
if is_wrapped:
return None # is_wrapped no node, we want None
- else:
- return [] # not wrapped, assume empty list
+ return [] # not wrapped, assume empty list
return None # Assume it's not there, maybe an optional node.
# If is_iter_type and not wrapped, return all found children
if is_iter_type:
if not is_wrapped:
return children
- else: # Iter and wrapped, should have found one node only (the wrap one)
- if len(children) != 1:
- raise DeserializationError(
- "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format(
- xml_name
- )
+ # Iter and wrapped, should have found one node only (the wrap one)
+ if len(children) != 1:
+ raise DeserializationError(
+ "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( # pylint: disable=line-too-long
+ xml_name
)
- return list(children[0]) # Might be empty list and that's ok.
+ )
+ return list(children[0]) # Might be empty list and that's ok.
# Here it's not a itertype, we should have found one element only or empty
if len(children) > 1:
@@ -1363,7 +1451,7 @@ class Deserializer(object):
basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
- valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
+ valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
def __init__(self, classes: Optional[Mapping[str, type]] = None):
self.deserialize_type = {
@@ -1403,11 +1491,12 @@ def __call__(self, target_obj, response_data, content_type=None):
:param str content_type: Swagger "produces" if available.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
data = self._unpack_content(response_data, content_type)
return self._deserialize(target_obj, data)
- def _deserialize(self, target_obj, data):
+ def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements
"""Call the deserializer on a model.
Data needs to be already deserialized as JSON or XML ElementTree
@@ -1416,12 +1505,13 @@ def _deserialize(self, target_obj, data):
:param object data: Object to deserialize.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
# This is already a model, go recursive just in case
if hasattr(data, "_attribute_map"):
constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")]
try:
- for attr, mapconfig in data._attribute_map.items():
+ for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access
if attr in constants:
continue
value = getattr(data, attr)
@@ -1440,13 +1530,13 @@ def _deserialize(self, target_obj, data):
if isinstance(response, str):
return self.deserialize_data(data, response)
- elif isinstance(response, type) and issubclass(response, Enum):
+ if isinstance(response, type) and issubclass(response, Enum):
return self.deserialize_enum(data, response)
if data is None or data is CoreNull:
return data
try:
- attributes = response._attribute_map # type: ignore
+ attributes = response._attribute_map # type: ignore # pylint: disable=protected-access
d_attrs = {}
for attr, attr_desc in attributes.items():
# Check empty string. If it's not empty, someone has a real "additionalProperties"...
@@ -1476,9 +1566,8 @@ def _deserialize(self, target_obj, data):
except (AttributeError, TypeError, KeyError) as err:
msg = "Unable to deserialize to object: " + class_name # type: ignore
raise DeserializationError(msg) from err
- else:
- additional_properties = self._build_additional_properties(attributes, data)
- return self._instantiate_model(response, d_attrs, additional_properties)
+ additional_properties = self._build_additional_properties(attributes, data)
+ return self._instantiate_model(response, d_attrs, additional_properties)
def _build_additional_properties(self, attribute_map, data):
if not self.additional_properties_detection:
@@ -1505,6 +1594,8 @@ def _classify_target(self, target, data):
:param str target: The target object type to deserialize to.
:param str/dict data: The response data to deserialize.
+ :return: The classified target object and its class name.
+ :rtype: tuple
"""
if target is None:
return None, None
@@ -1516,7 +1607,7 @@ def _classify_target(self, target, data):
return target, target
try:
- target = target._classify(data, self.dependencies) # type: ignore
+ target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access
except AttributeError:
pass # Target is not a Model, no classify
return target, target.__class__.__name__ # type: ignore
@@ -1531,10 +1622,12 @@ def failsafe_deserialize(self, target_obj, data, content_type=None):
:param str target_obj: The target object type to deserialize to.
:param str/dict data: The response data to deserialize.
:param str content_type: Swagger "produces" if available.
+ :return: Deserialized object.
+ :rtype: object
"""
try:
return self(target_obj, data, content_type=content_type)
- except:
+ except: # pylint: disable=bare-except
_LOGGER.debug(
"Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
)
@@ -1552,10 +1645,12 @@ def _unpack_content(raw_data, content_type=None):
If raw_data is something else, bypass all logic and return it directly.
- :param raw_data: Data to be processed.
- :param content_type: How to parse if raw_data is a string/bytes.
+ :param obj raw_data: Data to be processed.
+ :param str content_type: How to parse if raw_data is a string/bytes.
:raises JSONDecodeError: If JSON is requested and parsing is impossible.
:raises UnicodeDecodeError: If bytes is not UTF8
+ :rtype: object
+ :return: Unpacked content.
"""
# Assume this is enough to detect a Pipeline Response without importing it
context = getattr(raw_data, "context", {})
@@ -1579,14 +1674,21 @@ def _unpack_content(raw_data, content_type=None):
def _instantiate_model(self, response, attrs, additional_properties=None):
"""Instantiate a response model passing in deserialized args.
- :param response: The response model class.
- :param d_attrs: The deserialized response attributes.
+ :param Response response: The response model class.
+ :param dict attrs: The deserialized response attributes.
+ :param dict additional_properties: Additional properties to be set.
+ :rtype: Response
+ :return: The instantiated response model.
"""
if callable(response):
subtype = getattr(response, "_subtype_map", {})
try:
- readonly = [k for k, v in response._validation.items() if v.get("readonly")]
- const = [k for k, v in response._validation.items() if v.get("constant")]
+ readonly = [
+ k for k, v in response._validation.items() if v.get("readonly") # pylint: disable=protected-access
+ ]
+ const = [
+ k for k, v in response._validation.items() if v.get("constant") # pylint: disable=protected-access
+ ]
kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const}
response_obj = response(**kwargs)
for attr in readonly:
@@ -1596,7 +1698,7 @@ def _instantiate_model(self, response, attrs, additional_properties=None):
return response_obj
except TypeError as err:
msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore
- raise DeserializationError(msg + str(err))
+ raise DeserializationError(msg + str(err)) from err
else:
try:
for attr, value in attrs.items():
@@ -1605,15 +1707,16 @@ def _instantiate_model(self, response, attrs, additional_properties=None):
except Exception as exp:
msg = "Unable to populate response model. "
msg += "Type: {}, Error: {}".format(type(response), exp)
- raise DeserializationError(msg)
+ raise DeserializationError(msg) from exp
- def deserialize_data(self, data, data_type):
+ def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements
"""Process data for deserialization according to data type.
:param str data: The response string to be deserialized.
:param str data_type: The type to deserialize to.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
if data is None:
return data
@@ -1627,7 +1730,11 @@ def deserialize_data(self, data, data_type):
if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())):
return data
- is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"]
+ is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment
+ "object",
+ "[]",
+ r"{}",
+ ]
if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text:
return None
data_val = self.deserialize_type[data_type](data)
@@ -1647,14 +1754,14 @@ def deserialize_data(self, data, data_type):
msg = "Unable to deserialize response data."
msg += " Data: {}, {}".format(data, data_type)
raise DeserializationError(msg) from err
- else:
- return self._deserialize(obj_type, data)
+ return self._deserialize(obj_type, data)
def deserialize_iter(self, attr, iter_type):
"""Deserialize an iterable.
:param list attr: Iterable to be deserialized.
:param str iter_type: The type of object in the iterable.
+ :return: Deserialized iterable.
:rtype: list
"""
if attr is None:
@@ -1671,6 +1778,7 @@ def deserialize_dict(self, attr, dict_type):
:param dict/list attr: Dictionary to be deserialized. Also accepts
a list of key, value pairs.
:param str dict_type: The object type of the items in the dictionary.
+ :return: Deserialized dictionary.
:rtype: dict
"""
if isinstance(attr, list):
@@ -1681,11 +1789,12 @@ def deserialize_dict(self, attr, dict_type):
attr = {el.tag: el.text for el in attr}
return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()}
- def deserialize_object(self, attr, **kwargs):
+ def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
"""Deserialize a generic object.
This will be handled as a dictionary.
:param dict attr: Dictionary to be deserialized.
+ :return: Deserialized object.
:rtype: dict
:raises: TypeError if non-builtin datatype encountered.
"""
@@ -1720,11 +1829,10 @@ def deserialize_object(self, attr, **kwargs):
pass
return deserialized
- else:
- error = "Cannot deserialize generic object with type: "
- raise TypeError(error + str(obj_type))
+ error = "Cannot deserialize generic object with type: "
+ raise TypeError(error + str(obj_type))
- def deserialize_basic(self, attr, data_type):
+ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements
"""Deserialize basic builtin data type from string.
Will attempt to convert to str, int, float and bool.
This function will also accept '1', '0', 'true' and 'false' as
@@ -1732,6 +1840,7 @@ def deserialize_basic(self, attr, data_type):
:param str attr: response string to be deserialized.
:param str data_type: deserialization data type.
+ :return: Deserialized basic type.
:rtype: str, int, float or bool
:raises: TypeError if string format is not valid.
"""
@@ -1743,24 +1852,23 @@ def deserialize_basic(self, attr, data_type):
if data_type == "str":
# None or '', node is empty string.
return ""
- else:
- # None or '', node with a strong type is None.
- # Don't try to model "empty bool" or "empty int"
- return None
+ # None or '', node with a strong type is None.
+ # Don't try to model "empty bool" or "empty int"
+ return None
if data_type == "bool":
if attr in [True, False, 1, 0]:
return bool(attr)
- elif isinstance(attr, str):
+ if isinstance(attr, str):
if attr.lower() in ["true", "1"]:
return True
- elif attr.lower() in ["false", "0"]:
+ if attr.lower() in ["false", "0"]:
return False
raise TypeError("Invalid boolean value: {}".format(attr))
if data_type == "str":
return self.deserialize_unicode(attr)
- return eval(data_type)(attr) # nosec
+ return eval(data_type)(attr) # nosec # pylint: disable=eval-used
@staticmethod
def deserialize_unicode(data):
@@ -1768,6 +1876,7 @@ def deserialize_unicode(data):
as a string.
:param str data: response string to be deserialized.
+ :return: Deserialized string.
:rtype: str or unicode
"""
# We might be here because we have an enum modeled as string,
@@ -1781,8 +1890,7 @@ def deserialize_unicode(data):
return data
except NameError:
return str(data)
- else:
- return str(data)
+ return str(data)
@staticmethod
def deserialize_enum(data, enum_obj):
@@ -1794,6 +1902,7 @@ def deserialize_enum(data, enum_obj):
:param str data: Response string to be deserialized. If this value is
None or invalid it will be returned as-is.
:param Enum enum_obj: Enum object to deserialize to.
+ :return: Deserialized enum object.
:rtype: Enum
"""
if isinstance(data, enum_obj) or data is None:
@@ -1804,9 +1913,9 @@ def deserialize_enum(data, enum_obj):
# Workaround. We might consider remove it in the future.
try:
return list(enum_obj.__members__.values())[data]
- except IndexError:
+ except IndexError as exc:
error = "{!r} is not a valid index for enum {!r}"
- raise DeserializationError(error.format(data, enum_obj))
+ raise DeserializationError(error.format(data, enum_obj)) from exc
try:
return enum_obj(str(data))
except ValueError:
@@ -1822,6 +1931,7 @@ def deserialize_bytearray(attr):
"""Deserialize string into bytearray.
:param str attr: response string to be deserialized.
+ :return: Deserialized bytearray
:rtype: bytearray
:raises: TypeError if string format invalid.
"""
@@ -1834,6 +1944,7 @@ def deserialize_base64(attr):
"""Deserialize base64 encoded string into string.
:param str attr: response string to be deserialized.
+ :return: Deserialized base64 string
:rtype: bytearray
:raises: TypeError if string format invalid.
"""
@@ -1849,8 +1960,9 @@ def deserialize_decimal(attr):
"""Deserialize string into Decimal object.
:param str attr: response string to be deserialized.
- :rtype: Decimal
+ :return: Deserialized decimal
:raises: DeserializationError if string format invalid.
+ :rtype: decimal
"""
if isinstance(attr, ET.Element):
attr = attr.text
@@ -1865,6 +1977,7 @@ def deserialize_long(attr):
"""Deserialize string into long (Py2) or int (Py3).
:param str attr: response string to be deserialized.
+ :return: Deserialized int
:rtype: long or int
:raises: ValueError if string format invalid.
"""
@@ -1877,6 +1990,7 @@ def deserialize_duration(attr):
"""Deserialize ISO-8601 formatted string into TimeDelta object.
:param str attr: response string to be deserialized.
+ :return: Deserialized duration
:rtype: TimeDelta
:raises: DeserializationError if string format invalid.
"""
@@ -1887,14 +2001,14 @@ def deserialize_duration(attr):
except (ValueError, OverflowError, AttributeError) as err:
msg = "Cannot deserialize duration object."
raise DeserializationError(msg) from err
- else:
- return duration
+ return duration
@staticmethod
def deserialize_date(attr):
"""Deserialize ISO-8601 formatted string into Date object.
:param str attr: response string to be deserialized.
+ :return: Deserialized date
:rtype: Date
:raises: DeserializationError if string format invalid.
"""
@@ -1910,6 +2024,7 @@ def deserialize_time(attr):
"""Deserialize ISO-8601 formatted string into time object.
:param str attr: response string to be deserialized.
+ :return: Deserialized time
:rtype: datetime.time
:raises: DeserializationError if string format invalid.
"""
@@ -1924,6 +2039,7 @@ def deserialize_rfc(attr):
"""Deserialize RFC-1123 formatted string into Datetime object.
:param str attr: response string to be deserialized.
+ :return: Deserialized RFC datetime
:rtype: Datetime
:raises: DeserializationError if string format invalid.
"""
@@ -1939,14 +2055,14 @@ def deserialize_rfc(attr):
except ValueError as err:
msg = "Cannot deserialize to rfc datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
@staticmethod
def deserialize_iso(attr):
"""Deserialize ISO-8601 formatted string into Datetime object.
:param str attr: response string to be deserialized.
+ :return: Deserialized ISO datetime
:rtype: Datetime
:raises: DeserializationError if string format invalid.
"""
@@ -1976,8 +2092,7 @@ def deserialize_iso(attr):
except (ValueError, OverflowError, AttributeError) as err:
msg = "Cannot deserialize datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
@staticmethod
def deserialize_unix(attr):
@@ -1985,6 +2100,7 @@ def deserialize_unix(attr):
This is represented as seconds.
:param int attr: Object to be serialized.
+ :return: Deserialized datetime
:rtype: Datetime
:raises: DeserializationError if format invalid
"""
@@ -1996,5 +2112,4 @@ def deserialize_unix(attr):
except ValueError as err:
msg = "Cannot deserialize to unix datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_azure_blob_storage.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_azure_blob_storage.py
index c76a291f3c6a..9a06e367a4d2 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_azure_blob_storage.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_azure_blob_storage.py
@@ -48,7 +48,7 @@ class AzureBlobStorage: # pylint: disable=client-accepts-api-version-keyword
:param base_url: Service URL. Required. Default value is "".
:type base_url: str
:keyword version: Specifies the version of the operation to use for this request. Default value
- is "2024-08-04". Note that overriding this default value may result in unsupported behavior.
+ is "2025-01-05". Note that overriding this default value may result in unsupported behavior.
:paramtype version: str
"""
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_configuration.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_configuration.py
index 5960c22f03f2..5128a4f98b2a 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_configuration.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_configuration.py
@@ -23,12 +23,12 @@ class AzureBlobStorageConfiguration: # pylint: disable=too-many-instance-attrib
desired operation. Required.
:type url: str
:keyword version: Specifies the version of the operation to use for this request. Default value
- is "2024-08-04". Note that overriding this default value may result in unsupported behavior.
+ is "2025-01-05". Note that overriding this default value may result in unsupported behavior.
:paramtype version: str
"""
def __init__(self, url: str, **kwargs: Any) -> None:
- version: Literal["2024-08-04"] = kwargs.pop("version", "2024-08-04")
+ version: Literal["2025-01-05"] = kwargs.pop("version", "2025-01-05")
if url is None:
raise ValueError("Parameter 'url' must not be None.")
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_append_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_append_blob_operations.py
index 70b5d865e6d2..e466bdaba168 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_append_blob_operations.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_append_blob_operations.py
@@ -76,6 +76,7 @@ async def create( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Create Append Blob operation creates a new append blob.
:param content_length: The length of the request. Required.
@@ -122,7 +123,7 @@ async def create( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -252,6 +253,8 @@ async def append_block( # pylint: disable=inconsistent-return-statements
transactional_content_md5: Optional[bytes] = None,
transactional_content_crc64: Optional[bytes] = None,
request_id_parameter: Optional[str] = None,
+ structured_body_type: Optional[str] = None,
+ structured_content_length: Optional[int] = None,
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
append_position_access_conditions: Optional[_models.AppendPositionAccessConditions] = None,
cpk_info: Optional[_models.CpkInfo] = None,
@@ -259,6 +262,7 @@ async def append_block( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Append Block operation commits a new block of data to the end of an existing append blob.
The Append Block operation is permitted only if the blob was created with x-ms-blob-type set to
AppendBlob. Append Block is supported only on version 2015-02-21 version or later.
@@ -282,6 +286,13 @@ async def append_block( # pylint: disable=inconsistent-return-statements
limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
value is None.
:type request_id_parameter: str
+ :param structured_body_type: Required if the request body is a structured message. Specifies
+ the message schema version and properties. Default value is None.
+ :type structured_body_type: str
+ :param structured_content_length: Required if the request body is a structured message.
+ Specifies the length of the blob/file content inside the message body. Will always be smaller
+ than Content-Length. Default value is None.
+ :type structured_content_length: int
:param lease_access_conditions: Parameter group. Default value is None.
:type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
:param append_position_access_conditions: Parameter group. Default value is None.
@@ -297,7 +308,7 @@ async def append_block( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -362,6 +373,8 @@ async def append_block( # pylint: disable=inconsistent-return-statements
if_none_match=_if_none_match,
if_tags=_if_tags,
request_id_parameter=request_id_parameter,
+ structured_body_type=structured_body_type,
+ structured_content_length=structured_content_length,
comp=comp,
content_type=content_type,
version=self._config.version,
@@ -411,6 +424,9 @@ async def append_block( # pylint: disable=inconsistent-return-statements
response_headers["x-ms-encryption-scope"] = self._deserialize(
"str", response.headers.get("x-ms-encryption-scope")
)
+ response_headers["x-ms-structured-body"] = self._deserialize(
+ "str", response.headers.get("x-ms-structured-body")
+ )
if cls:
return cls(pipeline_response, None, response_headers) # type: ignore
@@ -435,6 +451,7 @@ async def append_block_from_url( # pylint: disable=inconsistent-return-statemen
source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Append Block operation commits a new block of data to the end of an existing append blob
where the contents are read from a source url. The Append Block operation is permitted only if
the blob was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on
@@ -485,7 +502,7 @@ async def append_block_from_url( # pylint: disable=inconsistent-return-statemen
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -623,6 +640,7 @@ async def seal( # pylint: disable=inconsistent-return-statements
append_position_access_conditions: Optional[_models.AppendPositionAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Seal operation seals the Append Blob to make it read-only. Seal is supported only on
version 2019-12-12 version or later.
@@ -646,7 +664,7 @@ async def seal( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_blob_operations.py
index 9cf0b47f035d..94f1b103bd71 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_blob_operations.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_blob_operations.py
@@ -89,12 +89,14 @@ async def download(
range: Optional[str] = None,
range_get_content_md5: Optional[bool] = None,
range_get_content_crc64: Optional[bool] = None,
+ structured_body_type: Optional[str] = None,
request_id_parameter: Optional[str] = None,
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
cpk_info: Optional[_models.CpkInfo] = None,
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> AsyncIterator[bytes]:
+ # pylint: disable=line-too-long
"""The Download operation reads or downloads a blob from the system, including its metadata and
properties. You can also call Download to read a snapshot.
@@ -123,6 +125,9 @@ async def download(
service returns the CRC64 hash for the range, as long as the range is less than or equal to 4
MB in size. Default value is None.
:type range_get_content_crc64: bool
+ :param structured_body_type: Specifies the response content should be returned as a structured
+ message and specifies the message schema version and properties. Default value is None.
+ :type structured_body_type: str
:param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
value is None.
@@ -137,7 +142,7 @@ async def download(
:rtype: AsyncIterator[bytes]
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -181,6 +186,7 @@ async def download(
lease_id=_lease_id,
range_get_content_md5=range_get_content_md5,
range_get_content_crc64=range_get_content_crc64,
+ structured_body_type=structured_body_type,
encryption_key=_encryption_key,
encryption_key_sha256=_encryption_key_sha256,
encryption_algorithm=_encryption_algorithm,
@@ -292,6 +298,12 @@ async def download(
"str", response.headers.get("x-ms-immutability-policy-mode")
)
response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold"))
+ response_headers["x-ms-structured-body"] = self._deserialize(
+ "str", response.headers.get("x-ms-structured-body")
+ )
+ response_headers["x-ms-structured-content-length"] = self._deserialize(
+ "int", response.headers.get("x-ms-structured-content-length")
+ )
if response.status_code == 206:
response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
@@ -374,6 +386,12 @@ async def download(
"str", response.headers.get("x-ms-immutability-policy-mode")
)
response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold"))
+ response_headers["x-ms-structured-body"] = self._deserialize(
+ "str", response.headers.get("x-ms-structured-body")
+ )
+ response_headers["x-ms-structured-content-length"] = self._deserialize(
+ "int", response.headers.get("x-ms-structured-content-length")
+ )
deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
@@ -394,6 +412,7 @@ async def get_properties( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Get Properties operation returns all user-defined metadata, standard HTTP properties, and
system properties for the blob. It does not return the content of the blob.
@@ -426,7 +445,7 @@ async def get_properties( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -597,6 +616,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""If the storage account's soft delete feature is disabled then, when a blob is deleted, it is
permanently removed from the storage account. If the storage account's soft delete feature is
enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible
@@ -646,7 +666,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -721,6 +741,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
async def undelete( # pylint: disable=inconsistent-return-statements
self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Undelete a blob that was previously soft deleted.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -736,7 +757,7 @@ async def undelete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -793,6 +814,7 @@ async def set_expiry( # pylint: disable=inconsistent-return-statements
expires_on: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Sets the time a blob will expire and be deleted.
:param expiry_options: Required. Indicates mode of the expiry time. Known values are:
@@ -813,7 +835,7 @@ async def set_expiry( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -875,6 +897,7 @@ async def set_http_headers( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Set HTTP Headers operation sets system properties on the blob.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -896,7 +919,7 @@ async def set_http_headers( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -996,9 +1019,12 @@ async def set_immutability_policy( # pylint: disable=inconsistent-return-statem
request_id_parameter: Optional[str] = None,
immutability_policy_expiry: Optional[datetime.datetime] = None,
immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None,
+ snapshot: Optional[str] = None,
+ version_id: Optional[str] = None,
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Set Immutability Policy operation sets the immutability policy on the blob.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -1016,13 +1042,23 @@ async def set_immutability_policy( # pylint: disable=inconsistent-return-statem
:param immutability_policy_mode: Specifies the immutability policy mode to set on the blob.
Known values are: "Mutable", "Unlocked", and "Locked". Default value is None.
:type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode
+ :param snapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the blob snapshot to retrieve. For more information on working with blob snapshots,
+ see :code:`Creating a Snapshot of a Blob`.
+ Default value is None.
+ :type snapshot: str
+ :param version_id: The version id parameter is an opaque DateTime value that, when present,
+ specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer.
+ Default value is None.
+ :type version_id: str
:param modified_access_conditions: Parameter group. Default value is None.
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1047,6 +1083,8 @@ async def set_immutability_policy( # pylint: disable=inconsistent-return-statem
if_unmodified_since=_if_unmodified_since,
immutability_policy_expiry=immutability_policy_expiry,
immutability_policy_mode=immutability_policy_mode,
+ snapshot=snapshot,
+ version_id=version_id,
comp=comp,
version=self._config.version,
headers=_headers,
@@ -1085,8 +1123,14 @@ async def set_immutability_policy( # pylint: disable=inconsistent-return-statem
@distributed_trace_async
async def delete_immutability_policy( # pylint: disable=inconsistent-return-statements
- self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any
+ self,
+ timeout: Optional[int] = None,
+ request_id_parameter: Optional[str] = None,
+ snapshot: Optional[str] = None,
+ version_id: Optional[str] = None,
+ **kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Delete Immutability Policy operation deletes the immutability policy on the blob.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -1098,11 +1142,21 @@ async def delete_immutability_policy( # pylint: disable=inconsistent-return-sta
limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
value is None.
:type request_id_parameter: str
+ :param snapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the blob snapshot to retrieve. For more information on working with blob snapshots,
+ see :code:`Creating a Snapshot of a Blob`.
+ Default value is None.
+ :type snapshot: str
+ :param version_id: The version id parameter is an opaque DateTime value that, when present,
+ specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer.
+ Default value is None.
+ :type version_id: str
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1120,6 +1174,8 @@ async def delete_immutability_policy( # pylint: disable=inconsistent-return-sta
url=self._config.url,
timeout=timeout,
request_id_parameter=request_id_parameter,
+ snapshot=snapshot,
+ version_id=version_id,
comp=comp,
version=self._config.version,
headers=_headers,
@@ -1152,8 +1208,15 @@ async def delete_immutability_policy( # pylint: disable=inconsistent-return-sta
@distributed_trace_async
async def set_legal_hold( # pylint: disable=inconsistent-return-statements
- self, legal_hold: bool, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any
+ self,
+ legal_hold: bool,
+ timeout: Optional[int] = None,
+ request_id_parameter: Optional[str] = None,
+ snapshot: Optional[str] = None,
+ version_id: Optional[str] = None,
+ **kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Set Legal Hold operation sets a legal hold on the blob.
:param legal_hold: Specified if a legal hold should be set on the blob. Required.
@@ -1167,11 +1230,21 @@ async def set_legal_hold( # pylint: disable=inconsistent-return-statements
limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
value is None.
:type request_id_parameter: str
+ :param snapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the blob snapshot to retrieve. For more information on working with blob snapshots,
+ see :code:`Creating a Snapshot of a Blob`.
+ Default value is None.
+ :type snapshot: str
+ :param version_id: The version id parameter is an opaque DateTime value that, when present,
+ specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer.
+ Default value is None.
+ :type version_id: str
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1190,6 +1263,8 @@ async def set_legal_hold( # pylint: disable=inconsistent-return-statements
legal_hold=legal_hold,
timeout=timeout,
request_id_parameter=request_id_parameter,
+ snapshot=snapshot,
+ version_id=version_id,
comp=comp,
version=self._config.version,
headers=_headers,
@@ -1233,6 +1308,7 @@ async def set_metadata( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or
more name-value pairs.
@@ -1265,7 +1341,7 @@ async def set_metadata( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1371,6 +1447,7 @@ async def acquire_lease( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete
operations.
@@ -1397,7 +1474,7 @@ async def acquire_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1478,6 +1555,7 @@ async def release_lease( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete
operations.
@@ -1498,7 +1576,7 @@ async def release_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1577,6 +1655,7 @@ async def renew_lease( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete
operations.
@@ -1597,7 +1676,7 @@ async def renew_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1678,6 +1757,7 @@ async def change_lease( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete
operations.
@@ -1702,7 +1782,7 @@ async def change_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1783,6 +1863,7 @@ async def break_lease( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete
operations.
@@ -1809,7 +1890,7 @@ async def break_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1892,6 +1973,7 @@ async def create_snapshot( # pylint: disable=inconsistent-return-statements
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Create Snapshot operation creates a read-only snapshot of a blob.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -1923,7 +2005,7 @@ async def create_snapshot( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2033,6 +2115,7 @@ async def start_copy_from_url( # pylint: disable=inconsistent-return-statements
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Start Copy From URL operation copies a blob or an internet resource to a new blob.
:param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of
@@ -2089,7 +2172,7 @@ async def start_copy_from_url( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2207,6 +2290,7 @@ async def copy_from_url( # pylint: disable=inconsistent-return-statements
cpk_scope_info: Optional[_models.CpkScopeInfo] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Copy From URL operation copies a blob or an internet resource to a new blob. It will not
return a response until the copy is complete.
@@ -2270,7 +2354,7 @@ async def copy_from_url( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2389,6 +2473,7 @@ async def abort_copy_from_url( # pylint: disable=inconsistent-return-statements
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a
destination blob with zero length and full metadata.
@@ -2410,7 +2495,7 @@ async def abort_copy_from_url( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2481,6 +2566,7 @@ async def set_tier( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a
premium storage account and on a block blob in a blob storage account (locally redundant
storage only). A premium page blob's tier determines the allowed size, IOPS, and bandwidth of
@@ -2521,7 +2607,7 @@ async def set_tier( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2585,6 +2671,7 @@ async def set_tier( # pylint: disable=inconsistent-return-statements
async def get_account_info( # pylint: disable=inconsistent-return-statements
self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Returns the sku name and account kind.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -2600,7 +2687,7 @@ async def get_account_info( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2665,6 +2752,7 @@ async def query(
query_request: Optional[_models.QueryRequest] = None,
**kwargs: Any
) -> AsyncIterator[bytes]:
+ # pylint: disable=line-too-long
"""The Query operation enables users to select/project on blob data by providing simple query
expressions.
@@ -2695,7 +2783,7 @@ async def query(
:rtype: AsyncIterator[bytes]
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2914,6 +3002,7 @@ async def get_tags(
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> _models.BlobTags:
+ # pylint: disable=line-too-long
"""The Get Tags operation enables users to get the tags associated with a blob.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -2943,7 +3032,7 @@ async def get_tags(
:rtype: ~azure.storage.blob.models.BlobTags
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -3019,6 +3108,7 @@ async def set_tags( # pylint: disable=inconsistent-return-statements
tags: Optional[_models.BlobTags] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Set Tags operation enables users to set tags on a blob.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -3050,7 +3140,7 @@ async def set_tags( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_block_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_block_blob_operations.py
index d833c25c0eec..45419c03cd47 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_block_blob_operations.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_block_blob_operations.py
@@ -75,6 +75,8 @@ async def upload( # pylint: disable=inconsistent-return-statements
immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None,
legal_hold: Optional[bool] = None,
transactional_content_crc64: Optional[bytes] = None,
+ structured_body_type: Optional[str] = None,
+ structured_content_length: Optional[int] = None,
blob_http_headers: Optional[_models.BlobHTTPHeaders] = None,
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
cpk_info: Optional[_models.CpkInfo] = None,
@@ -82,6 +84,7 @@ async def upload( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Upload Block Blob operation updates the content of an existing block blob. Updating an
existing block blob overwrites any existing metadata on the blob. Partial updates are not
supported with Put Blob; the content of the existing blob is overwritten with the content of
@@ -130,6 +133,13 @@ async def upload( # pylint: disable=inconsistent-return-statements
:param transactional_content_crc64: Specify the transactional crc64 for the body, to be
validated by the service. Default value is None.
:type transactional_content_crc64: bytes
+ :param structured_body_type: Required if the request body is a structured message. Specifies
+ the message schema version and properties. Default value is None.
+ :type structured_body_type: str
+ :param structured_content_length: Required if the request body is a structured message.
+ Specifies the length of the blob/file content inside the message body. Will always be smaller
+ than Content-Length. Default value is None.
+ :type structured_content_length: int
:param blob_http_headers: Parameter group. Default value is None.
:type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders
:param lease_access_conditions: Parameter group. Default value is None.
@@ -144,7 +154,7 @@ async def upload( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -227,6 +237,8 @@ async def upload( # pylint: disable=inconsistent-return-statements
immutability_policy_mode=immutability_policy_mode,
legal_hold=legal_hold,
transactional_content_crc64=transactional_content_crc64,
+ structured_body_type=structured_body_type,
+ structured_content_length=structured_content_length,
blob_type=blob_type,
content_type=content_type,
version=self._config.version,
@@ -268,6 +280,9 @@ async def upload( # pylint: disable=inconsistent-return-statements
response_headers["x-ms-encryption-scope"] = self._deserialize(
"str", response.headers.get("x-ms-encryption-scope")
)
+ response_headers["x-ms-structured-body"] = self._deserialize(
+ "str", response.headers.get("x-ms-structured-body")
+ )
if cls:
return cls(pipeline_response, None, response_headers) # type: ignore
@@ -295,6 +310,7 @@ async def put_blob_from_url( # pylint: disable=inconsistent-return-statements
source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Put Blob from URL operation creates a new Block Blob where the contents of the blob are
read from a given URL. This API is supported beginning with the 2020-04-08 version. Partial
updates are not supported with Put Blob from URL; the content of an existing blob is
@@ -365,7 +381,7 @@ async def put_blob_from_url( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -516,11 +532,14 @@ async def stage_block( # pylint: disable=inconsistent-return-statements
transactional_content_crc64: Optional[bytes] = None,
timeout: Optional[int] = None,
request_id_parameter: Optional[str] = None,
+ structured_body_type: Optional[str] = None,
+ structured_content_length: Optional[int] = None,
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
cpk_info: Optional[_models.CpkInfo] = None,
cpk_scope_info: Optional[_models.CpkScopeInfo] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Stage Block operation creates a new block to be committed as part of a blob.
:param block_id: A valid Base64 string value that identifies the block. Prior to encoding, the
@@ -546,6 +565,13 @@ async def stage_block( # pylint: disable=inconsistent-return-statements
limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
value is None.
:type request_id_parameter: str
+ :param structured_body_type: Required if the request body is a structured message. Specifies
+ the message schema version and properties. Default value is None.
+ :type structured_body_type: str
+ :param structured_content_length: Required if the request body is a structured message.
+ Specifies the length of the blob/file content inside the message body. Will always be smaller
+ than Content-Length. Default value is None.
+ :type structured_content_length: int
:param lease_access_conditions: Parameter group. Default value is None.
:type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
:param cpk_info: Parameter group. Default value is None.
@@ -556,7 +582,7 @@ async def stage_block( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -599,6 +625,8 @@ async def stage_block( # pylint: disable=inconsistent-return-statements
encryption_algorithm=_encryption_algorithm,
encryption_scope=_encryption_scope,
request_id_parameter=request_id_parameter,
+ structured_body_type=structured_body_type,
+ structured_content_length=structured_content_length,
comp=comp,
content_type=content_type,
version=self._config.version,
@@ -640,6 +668,9 @@ async def stage_block( # pylint: disable=inconsistent-return-statements
response_headers["x-ms-encryption-scope"] = self._deserialize(
"str", response.headers.get("x-ms-encryption-scope")
)
+ response_headers["x-ms-structured-body"] = self._deserialize(
+ "str", response.headers.get("x-ms-structured-body")
+ )
if cls:
return cls(pipeline_response, None, response_headers) # type: ignore
@@ -662,6 +693,7 @@ async def stage_block_from_url( # pylint: disable=inconsistent-return-statement
source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Stage Block operation creates a new block to be committed as part of a blob where the
contents are read from a URL.
@@ -706,7 +738,7 @@ async def stage_block_from_url( # pylint: disable=inconsistent-return-statement
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -827,6 +859,7 @@ async def commit_block_list( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Commit Block List operation writes a blob by specifying the list of block IDs that make up
the blob. In order to be written as part of a blob, a block must have been successfully written
to the server in a prior Put Block operation. You can call Put Block List to update a blob by
@@ -889,7 +922,7 @@ async def commit_block_list( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1030,6 +1063,7 @@ async def get_block_list(
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> _models.BlockList:
+ # pylint: disable=line-too-long
"""The Get Block List operation retrieves the list of blocks that have been uploaded as part of a
block blob.
@@ -1060,7 +1094,7 @@ async def get_block_list(
:rtype: ~azure.storage.blob.models.BlockList
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_container_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_container_operations.py
index 48a1a14749c7..c2bc375a197f 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_container_operations.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_container_operations.py
@@ -83,6 +83,7 @@ async def create( # pylint: disable=inconsistent-return-statements
container_cpk_scope_info: Optional[_models.ContainerCpkScopeInfo] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""creates a new container under the specified account. If the container with the same name
already exists, the operation fails.
@@ -112,7 +113,7 @@ async def create( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -180,6 +181,7 @@ async def get_properties( # pylint: disable=inconsistent-return-statements
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""returns all user-defined metadata and system properties for the specified container. The data
returned does not include the container's list of blobs.
@@ -198,7 +200,7 @@ async def get_properties( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -282,6 +284,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""operation marks the specified container for deletion. The container and any blobs contained
within it are later deleted during garbage collection.
@@ -302,7 +305,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -372,6 +375,7 @@ async def set_metadata( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""operation sets one or more user-defined name-value pairs for the specified container.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -399,7 +403,7 @@ async def set_metadata( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -469,6 +473,7 @@ async def get_access_policy(
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> List[_models.SignedIdentifier]:
+ # pylint: disable=line-too-long
"""gets the permissions for the specified container. The permissions indicate whether container
data may be accessed publicly.
@@ -487,7 +492,7 @@ async def get_access_policy(
:rtype: list[~azure.storage.blob.models.SignedIdentifier]
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -562,6 +567,7 @@ async def set_access_policy( # pylint: disable=inconsistent-return-statements
container_acl: Optional[List[_models.SignedIdentifier]] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""sets the permissions for the specified container. The permissions indicate whether blobs in a
container may be accessed publicly.
@@ -587,7 +593,7 @@ async def set_access_policy( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -671,6 +677,7 @@ async def restore( # pylint: disable=inconsistent-return-statements
deleted_container_version: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Restores a previously-deleted container.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -692,7 +699,7 @@ async def restore( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -753,6 +760,7 @@ async def rename( # pylint: disable=inconsistent-return-statements
source_lease_id: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Renames an existing container.
:param source_container_name: Required. Specifies the name of the container to rename.
@@ -774,7 +782,7 @@ async def rename( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -835,6 +843,7 @@ async def submit_batch(
request_id_parameter: Optional[str] = None,
**kwargs: Any
) -> AsyncIterator[bytes]:
+ # pylint: disable=line-too-long
"""The Batch operation allows multiple API calls to be embedded into a single HTTP request.
:param content_length: The length of the request. Required.
@@ -854,7 +863,7 @@ async def submit_batch(
:rtype: AsyncIterator[bytes]
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -929,6 +938,7 @@ async def filter_blobs(
include: Optional[List[Union[str, _models.FilterBlobsIncludeItem]]] = None,
**kwargs: Any
) -> _models.FilterBlobSegment:
+ # pylint: disable=line-too-long
"""The Filter Blobs operation enables callers to list blobs in a container whose tags match a
given search expression. Filter blobs searches within the given container.
@@ -965,7 +975,7 @@ async def filter_blobs(
:rtype: ~azure.storage.blob.models.FilterBlobSegment
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1033,6 +1043,7 @@ async def acquire_lease( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""[Update] establishes and manages a lock on a container for delete operations. The lock duration
can be 15 to 60 seconds, or can be infinite.
@@ -1059,7 +1070,7 @@ async def acquire_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1133,6 +1144,7 @@ async def release_lease( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""[Update] establishes and manages a lock on a container for delete operations. The lock duration
can be 15 to 60 seconds, or can be infinite.
@@ -1153,7 +1165,7 @@ async def release_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1225,6 +1237,7 @@ async def renew_lease( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""[Update] establishes and manages a lock on a container for delete operations. The lock duration
can be 15 to 60 seconds, or can be infinite.
@@ -1245,7 +1258,7 @@ async def renew_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1318,6 +1331,7 @@ async def break_lease( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""[Update] establishes and manages a lock on a container for delete operations. The lock duration
can be 15 to 60 seconds, or can be infinite.
@@ -1344,7 +1358,7 @@ async def break_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1418,6 +1432,7 @@ async def change_lease( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""[Update] establishes and manages a lock on a container for delete operations. The lock duration
can be 15 to 60 seconds, or can be infinite.
@@ -1442,7 +1457,7 @@ async def change_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1518,6 +1533,7 @@ async def list_blob_flat_segment(
request_id_parameter: Optional[str] = None,
**kwargs: Any
) -> _models.ListBlobsFlatSegmentResponse:
+ # pylint: disable=line-too-long
"""[Update] The List Blobs operation returns a list of the blobs under the specified container.
:param prefix: Filters the results to return only containers whose name begins with the
@@ -1553,7 +1569,7 @@ async def list_blob_flat_segment(
:rtype: ~azure.storage.blob.models.ListBlobsFlatSegmentResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1624,6 +1640,7 @@ async def list_blob_hierarchy_segment(
request_id_parameter: Optional[str] = None,
**kwargs: Any
) -> _models.ListBlobsHierarchySegmentResponse:
+ # pylint: disable=line-too-long
"""[Update] The List Blobs operation returns a list of the blobs under the specified container.
:param delimiter: When the request includes this parameter, the operation returns a BlobPrefix
@@ -1664,7 +1681,7 @@ async def list_blob_hierarchy_segment(
:rtype: ~azure.storage.blob.models.ListBlobsHierarchySegmentResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1728,6 +1745,7 @@ async def list_blob_hierarchy_segment(
async def get_account_info( # pylint: disable=inconsistent-return-statements
self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Returns the sku name and account kind.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -1743,7 +1761,7 @@ async def get_account_info( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_page_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_page_blob_operations.py
index bf77639fe40e..38e66803e85b 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_page_blob_operations.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_page_blob_operations.py
@@ -84,6 +84,7 @@ async def create( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Create operation creates a new page blob.
:param content_length: The length of the request. Required.
@@ -140,7 +141,7 @@ async def create( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -274,6 +275,8 @@ async def upload_pages( # pylint: disable=inconsistent-return-statements
timeout: Optional[int] = None,
range: Optional[str] = None,
request_id_parameter: Optional[str] = None,
+ structured_body_type: Optional[str] = None,
+ structured_content_length: Optional[int] = None,
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
cpk_info: Optional[_models.CpkInfo] = None,
cpk_scope_info: Optional[_models.CpkScopeInfo] = None,
@@ -281,6 +284,7 @@ async def upload_pages( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Upload Pages operation writes a range of pages to a page blob.
:param content_length: The length of the request. Required.
@@ -304,6 +308,13 @@ async def upload_pages( # pylint: disable=inconsistent-return-statements
limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
value is None.
:type request_id_parameter: str
+ :param structured_body_type: Required if the request body is a structured message. Specifies
+ the message schema version and properties. Default value is None.
+ :type structured_body_type: str
+ :param structured_content_length: Required if the request body is a structured message.
+ Specifies the length of the blob/file content inside the message body. Will always be smaller
+ than Content-Length. Default value is None.
+ :type structured_content_length: int
:param lease_access_conditions: Parameter group. Default value is None.
:type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
:param cpk_info: Parameter group. Default value is None.
@@ -319,7 +330,7 @@ async def upload_pages( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -391,6 +402,8 @@ async def upload_pages( # pylint: disable=inconsistent-return-statements
if_none_match=_if_none_match,
if_tags=_if_tags,
request_id_parameter=request_id_parameter,
+ structured_body_type=structured_body_type,
+ structured_content_length=structured_content_length,
comp=comp,
page_write=page_write,
content_type=content_type,
@@ -438,6 +451,9 @@ async def upload_pages( # pylint: disable=inconsistent-return-statements
response_headers["x-ms-encryption-scope"] = self._deserialize(
"str", response.headers.get("x-ms-encryption-scope")
)
+ response_headers["x-ms-structured-body"] = self._deserialize(
+ "str", response.headers.get("x-ms-structured-body")
+ )
if cls:
return cls(pipeline_response, None, response_headers) # type: ignore
@@ -456,6 +472,7 @@ async def clear_pages( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Clear Pages operation clears a set of pages from a page blob.
:param content_length: The length of the request. Required.
@@ -486,7 +503,7 @@ async def clear_pages( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -614,6 +631,7 @@ async def upload_pages_from_url( # pylint: disable=inconsistent-return-statemen
source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Upload Pages operation writes a range of pages to a page blob where the contents are read
from a URL.
@@ -663,7 +681,7 @@ async def upload_pages_from_url( # pylint: disable=inconsistent-return-statemen
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -808,6 +826,7 @@ async def get_page_ranges(
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> _models.PageList:
+ # pylint: disable=line-too-long
"""The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot
of a page blob.
@@ -850,7 +869,7 @@ async def get_page_ranges(
:rtype: ~azure.storage.blob.models.PageList
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -947,6 +966,7 @@ async def get_page_ranges_diff(
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> _models.PageList:
+ # pylint: disable=line-too-long
"""The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that
were changed between target blob and previous snapshot.
@@ -1001,7 +1021,7 @@ async def get_page_ranges_diff(
:rtype: ~azure.storage.blob.models.PageList
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1097,6 +1117,7 @@ async def resize( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Resize the Blob.
:param blob_content_length: This header specifies the maximum size for the page blob, up to 1
@@ -1123,7 +1144,7 @@ async def resize( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1223,6 +1244,7 @@ async def update_sequence_number( # pylint: disable=inconsistent-return-stateme
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Update the sequence number of the blob.
:param sequence_number_action: Required if the x-ms-blob-sequence-number header is set for the
@@ -1251,7 +1273,7 @@ async def update_sequence_number( # pylint: disable=inconsistent-return-stateme
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1336,6 +1358,7 @@ async def copy_incremental( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Copy Incremental operation copies a snapshot of the source page blob to a destination page
blob. The snapshot is copied such that only the differential changes between the previously
copied snapshot are transferred to the destination. The copied snapshots are complete copies of
@@ -1362,7 +1385,7 @@ async def copy_incremental( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_service_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_service_operations.py
index f9f8ff0be86c..0f0e61805e05 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_service_operations.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_service_operations.py
@@ -71,6 +71,7 @@ async def set_properties( # pylint: disable=inconsistent-return-statements
request_id_parameter: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Sets properties for a storage account's Blob service endpoint, including properties for Storage
Analytics and CORS (Cross-Origin Resource Sharing) rules.
@@ -89,7 +90,7 @@ async def set_properties( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -147,6 +148,7 @@ async def set_properties( # pylint: disable=inconsistent-return-statements
async def get_properties(
self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any
) -> _models.StorageServiceProperties:
+ # pylint: disable=line-too-long
"""gets the properties of a storage account's Blob service, including properties for Storage
Analytics and CORS (Cross-Origin Resource Sharing) rules.
@@ -163,7 +165,7 @@ async def get_properties(
:rtype: ~azure.storage.blob.models.StorageServiceProperties
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -220,6 +222,7 @@ async def get_properties(
async def get_statistics(
self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any
) -> _models.StorageServiceStats:
+ # pylint: disable=line-too-long
"""Retrieves statistics related to replication for the Blob service. It is only available on the
secondary location endpoint when read-access geo-redundant replication is enabled for the
storage account.
@@ -237,7 +240,7 @@ async def get_statistics(
:rtype: ~azure.storage.blob.models.StorageServiceStats
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -302,6 +305,7 @@ async def list_containers_segment(
request_id_parameter: Optional[str] = None,
**kwargs: Any
) -> _models.ListContainersSegmentResponse:
+ # pylint: disable=line-too-long
"""The List Containers Segment operation returns a list of the containers under the specified
account.
@@ -338,7 +342,7 @@ async def list_containers_segment(
:rtype: ~azure.storage.blob.models.ListContainersSegmentResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -401,6 +405,7 @@ async def get_user_delegation_key(
request_id_parameter: Optional[str] = None,
**kwargs: Any
) -> _models.UserDelegationKey:
+ # pylint: disable=line-too-long
"""Retrieves a user delegation key for the Blob service. This is only a valid operation when using
bearer token authentication.
@@ -419,7 +424,7 @@ async def get_user_delegation_key(
:rtype: ~azure.storage.blob.models.UserDelegationKey
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -482,6 +487,7 @@ async def get_user_delegation_key(
async def get_account_info( # pylint: disable=inconsistent-return-statements
self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Returns the sku name and account kind.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -497,7 +503,7 @@ async def get_account_info( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -559,6 +565,7 @@ async def submit_batch(
request_id_parameter: Optional[str] = None,
**kwargs: Any
) -> AsyncIterator[bytes]:
+ # pylint: disable=line-too-long
"""The Batch operation allows multiple API calls to be embedded into a single HTTP request.
:param content_length: The length of the request. Required.
@@ -578,7 +585,7 @@ async def submit_batch(
:rtype: AsyncIterator[bytes]
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -651,6 +658,7 @@ async def filter_blobs(
include: Optional[List[Union[str, _models.FilterBlobsIncludeItem]]] = None,
**kwargs: Any
) -> _models.FilterBlobSegment:
+ # pylint: disable=line-too-long
"""The Filter Blobs operation enables callers to list blobs across all containers whose tags match
a given search expression. Filter blobs searches across all containers within a storage
account but can be scoped within the expression to a single container.
@@ -688,7 +696,7 @@ async def filter_blobs(
:rtype: ~azure.storage.blob.models.FilterBlobSegment
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_azure_blob_storage_enums.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_azure_blob_storage_enums.py
index 8fb7691ceb85..12ccbf7312f9 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_azure_blob_storage_enums.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_azure_blob_storage_enums.py
@@ -389,3 +389,4 @@ class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta):
AUTHORIZATION_PERMISSION_MISMATCH = "AuthorizationPermissionMismatch"
AUTHORIZATION_SERVICE_MISMATCH = "AuthorizationServiceMismatch"
AUTHORIZATION_RESOURCE_TYPE_MISMATCH = "AuthorizationResourceTypeMismatch"
+ BLOB_ACCESS_TIER_NOT_SUPPORTED_FOR_ACCOUNT_TYPE = "BlobAccessTierNotSupportedForAccountType"
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_append_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_append_blob_operations.py
index 91b8dd56c7f7..d07c5c181bf3 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_append_blob_operations.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_append_blob_operations.py
@@ -70,7 +70,7 @@ def build_create_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
blob_type: Literal["AppendBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "AppendBlob"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -171,6 +171,8 @@ def build_append_block_request(
if_none_match: Optional[str] = None,
if_tags: Optional[str] = None,
request_id_parameter: Optional[str] = None,
+ structured_body_type: Optional[str] = None,
+ structured_content_length: Optional[int] = None,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
@@ -178,7 +180,7 @@ def build_append_block_request(
comp: Literal["appendblock"] = kwargs.pop("comp", _params.pop("comp", "appendblock"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -233,6 +235,12 @@ def build_append_block_request(
_headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
if request_id_parameter is not None:
_headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str")
+ if structured_body_type is not None:
+ _headers["x-ms-structured-body"] = _SERIALIZER.header("structured_body_type", structured_body_type, "str")
+ if structured_content_length is not None:
+ _headers["x-ms-structured-content-length"] = _SERIALIZER.header(
+ "structured_content_length", structured_content_length, "int"
+ )
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
@@ -274,7 +282,7 @@ def build_append_block_from_url_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
comp: Literal["appendblock"] = kwargs.pop("comp", _params.pop("comp", "appendblock"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -372,7 +380,7 @@ def build_seal_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
comp: Literal["seal"] = kwargs.pop("comp", _params.pop("comp", "seal"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -446,6 +454,7 @@ def create( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Create Append Blob operation creates a new append blob.
:param content_length: The length of the request. Required.
@@ -492,7 +501,7 @@ def create( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -622,6 +631,8 @@ def append_block( # pylint: disable=inconsistent-return-statements
transactional_content_md5: Optional[bytes] = None,
transactional_content_crc64: Optional[bytes] = None,
request_id_parameter: Optional[str] = None,
+ structured_body_type: Optional[str] = None,
+ structured_content_length: Optional[int] = None,
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
append_position_access_conditions: Optional[_models.AppendPositionAccessConditions] = None,
cpk_info: Optional[_models.CpkInfo] = None,
@@ -629,6 +640,7 @@ def append_block( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Append Block operation commits a new block of data to the end of an existing append blob.
The Append Block operation is permitted only if the blob was created with x-ms-blob-type set to
AppendBlob. Append Block is supported only on version 2015-02-21 version or later.
@@ -652,6 +664,13 @@ def append_block( # pylint: disable=inconsistent-return-statements
limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
value is None.
:type request_id_parameter: str
+ :param structured_body_type: Required if the request body is a structured message. Specifies
+ the message schema version and properties. Default value is None.
+ :type structured_body_type: str
+ :param structured_content_length: Required if the request body is a structured message.
+ Specifies the length of the blob/file content inside the message body. Will always be smaller
+ than Content-Length. Default value is None.
+ :type structured_content_length: int
:param lease_access_conditions: Parameter group. Default value is None.
:type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
:param append_position_access_conditions: Parameter group. Default value is None.
@@ -667,7 +686,7 @@ def append_block( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -732,6 +751,8 @@ def append_block( # pylint: disable=inconsistent-return-statements
if_none_match=_if_none_match,
if_tags=_if_tags,
request_id_parameter=request_id_parameter,
+ structured_body_type=structured_body_type,
+ structured_content_length=structured_content_length,
comp=comp,
content_type=content_type,
version=self._config.version,
@@ -781,6 +802,9 @@ def append_block( # pylint: disable=inconsistent-return-statements
response_headers["x-ms-encryption-scope"] = self._deserialize(
"str", response.headers.get("x-ms-encryption-scope")
)
+ response_headers["x-ms-structured-body"] = self._deserialize(
+ "str", response.headers.get("x-ms-structured-body")
+ )
if cls:
return cls(pipeline_response, None, response_headers) # type: ignore
@@ -805,6 +829,7 @@ def append_block_from_url( # pylint: disable=inconsistent-return-statements
source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Append Block operation commits a new block of data to the end of an existing append blob
where the contents are read from a source url. The Append Block operation is permitted only if
the blob was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on
@@ -855,7 +880,7 @@ def append_block_from_url( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -993,6 +1018,7 @@ def seal( # pylint: disable=inconsistent-return-statements
append_position_access_conditions: Optional[_models.AppendPositionAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Seal operation seals the Append Blob to make it read-only. Seal is supported only on
version 2019-12-12 version or later.
@@ -1016,7 +1042,7 @@ def seal( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py
index 83033f6b9f4c..5da0b2b033a9 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py
@@ -49,6 +49,7 @@ def build_download_request(
lease_id: Optional[str] = None,
range_get_content_md5: Optional[bool] = None,
range_get_content_crc64: Optional[bool] = None,
+ structured_body_type: Optional[str] = None,
encryption_key: Optional[str] = None,
encryption_key_sha256: Optional[str] = None,
encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None,
@@ -63,7 +64,7 @@ def build_download_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -95,6 +96,8 @@ def build_download_request(
_headers["x-ms-range-get-content-crc64"] = _SERIALIZER.header(
"range_get_content_crc64", range_get_content_crc64, "bool"
)
+ if structured_body_type is not None:
+ _headers["x-ms-structured-body"] = _SERIALIZER.header("structured_body_type", structured_body_type, "str")
if encryption_key is not None:
_headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str")
if encryption_key_sha256 is not None:
@@ -142,7 +145,7 @@ def build_get_properties_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -210,7 +213,7 @@ def build_delete_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -261,7 +264,7 @@ def build_undelete_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -299,7 +302,7 @@ def build_set_expiry_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
comp: Literal["expiry"] = kwargs.pop("comp", _params.pop("comp", "expiry"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -350,7 +353,7 @@ def build_set_http_headers_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -413,13 +416,15 @@ def build_set_immutability_policy_request(
if_unmodified_since: Optional[datetime.datetime] = None,
immutability_policy_expiry: Optional[datetime.datetime] = None,
immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None,
+ snapshot: Optional[str] = None,
+ version_id: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
comp: Literal["immutabilityPolicies"] = kwargs.pop("comp", _params.pop("comp", "immutabilityPolicies"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -434,6 +439,10 @@ def build_set_immutability_policy_request(
_params["comp"] = _SERIALIZER.query("comp", comp, "str")
if timeout is not None:
_params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+ if snapshot is not None:
+ _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str")
+ if version_id is not None:
+ _params["versionid"] = _SERIALIZER.query("version_id", version_id, "str")
# Construct headers
_headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
@@ -455,13 +464,19 @@ def build_set_immutability_policy_request(
def build_delete_immutability_policy_request(
- url: str, *, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any
+ url: str,
+ *,
+ timeout: Optional[int] = None,
+ request_id_parameter: Optional[str] = None,
+ snapshot: Optional[str] = None,
+ version_id: Optional[str] = None,
+ **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
comp: Literal["immutabilityPolicies"] = kwargs.pop("comp", _params.pop("comp", "immutabilityPolicies"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -476,6 +491,10 @@ def build_delete_immutability_policy_request(
_params["comp"] = _SERIALIZER.query("comp", comp, "str")
if timeout is not None:
_params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+ if snapshot is not None:
+ _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str")
+ if version_id is not None:
+ _params["versionid"] = _SERIALIZER.query("version_id", version_id, "str")
# Construct headers
_headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
@@ -492,13 +511,15 @@ def build_set_legal_hold_request(
legal_hold: bool,
timeout: Optional[int] = None,
request_id_parameter: Optional[str] = None,
+ snapshot: Optional[str] = None,
+ version_id: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
comp: Literal["legalhold"] = kwargs.pop("comp", _params.pop("comp", "legalhold"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -513,6 +534,10 @@ def build_set_legal_hold_request(
_params["comp"] = _SERIALIZER.query("comp", comp, "str")
if timeout is not None:
_params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+ if snapshot is not None:
+ _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str")
+ if version_id is not None:
+ _params["versionid"] = _SERIALIZER.query("version_id", version_id, "str")
# Construct headers
_headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
@@ -546,7 +571,7 @@ def build_set_metadata_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -614,7 +639,7 @@ def build_acquire_lease_request(
comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -672,7 +697,7 @@ def build_release_lease_request(
comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -727,7 +752,7 @@ def build_renew_lease_request(
comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
action: Literal["renew"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "renew"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -783,7 +808,7 @@ def build_change_lease_request(
comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -839,7 +864,7 @@ def build_break_lease_request(
comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -899,7 +924,7 @@ def build_create_snapshot_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
comp: Literal["snapshot"] = kwargs.pop("comp", _params.pop("comp", "snapshot"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -978,7 +1003,7 @@ def build_start_copy_from_url_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -1081,7 +1106,7 @@ def build_copy_from_url_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
x_ms_requires_sync: Literal["true"] = kwargs.pop("x_ms_requires_sync", _headers.pop("x-ms-requires-sync", "true"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -1173,7 +1198,7 @@ def build_abort_copy_from_url_request(
copy_action_abort_constant: Literal["abort"] = kwargs.pop(
"copy_action_abort_constant", _headers.pop("x-ms-copy-action", "abort")
)
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -1219,7 +1244,7 @@ def build_set_tier_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
comp: Literal["tier"] = kwargs.pop("comp", _params.pop("comp", "tier"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -1263,7 +1288,7 @@ def build_get_account_info_request(
restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account"))
comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -1312,7 +1337,7 @@ def build_query_request(
comp: Literal["query"] = kwargs.pop("comp", _params.pop("comp", "query"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -1376,7 +1401,7 @@ def build_get_tags_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
comp: Literal["tags"] = kwargs.pop("comp", _params.pop("comp", "tags"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -1427,7 +1452,7 @@ def build_set_tags_request(
comp: Literal["tags"] = kwargs.pop("comp", _params.pop("comp", "tags"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -1496,12 +1521,14 @@ def download(
range: Optional[str] = None,
range_get_content_md5: Optional[bool] = None,
range_get_content_crc64: Optional[bool] = None,
+ structured_body_type: Optional[str] = None,
request_id_parameter: Optional[str] = None,
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
cpk_info: Optional[_models.CpkInfo] = None,
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> Iterator[bytes]:
+ # pylint: disable=line-too-long
"""The Download operation reads or downloads a blob from the system, including its metadata and
properties. You can also call Download to read a snapshot.
@@ -1530,6 +1557,9 @@ def download(
service returns the CRC64 hash for the range, as long as the range is less than or equal to 4
MB in size. Default value is None.
:type range_get_content_crc64: bool
+ :param structured_body_type: Specifies the response content should be returned as a structured
+ message and specifies the message schema version and properties. Default value is None.
+ :type structured_body_type: str
:param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
value is None.
@@ -1544,7 +1574,7 @@ def download(
:rtype: Iterator[bytes]
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1588,6 +1618,7 @@ def download(
lease_id=_lease_id,
range_get_content_md5=range_get_content_md5,
range_get_content_crc64=range_get_content_crc64,
+ structured_body_type=structured_body_type,
encryption_key=_encryption_key,
encryption_key_sha256=_encryption_key_sha256,
encryption_algorithm=_encryption_algorithm,
@@ -1699,6 +1730,12 @@ def download(
"str", response.headers.get("x-ms-immutability-policy-mode")
)
response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold"))
+ response_headers["x-ms-structured-body"] = self._deserialize(
+ "str", response.headers.get("x-ms-structured-body")
+ )
+ response_headers["x-ms-structured-content-length"] = self._deserialize(
+ "int", response.headers.get("x-ms-structured-content-length")
+ )
if response.status_code == 206:
response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
@@ -1781,6 +1818,12 @@ def download(
"str", response.headers.get("x-ms-immutability-policy-mode")
)
response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold"))
+ response_headers["x-ms-structured-body"] = self._deserialize(
+ "str", response.headers.get("x-ms-structured-body")
+ )
+ response_headers["x-ms-structured-content-length"] = self._deserialize(
+ "int", response.headers.get("x-ms-structured-content-length")
+ )
deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
@@ -1801,6 +1844,7 @@ def get_properties( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Get Properties operation returns all user-defined metadata, standard HTTP properties, and
system properties for the blob. It does not return the content of the blob.
@@ -1833,7 +1877,7 @@ def get_properties( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2004,6 +2048,7 @@ def delete( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""If the storage account's soft delete feature is disabled then, when a blob is deleted, it is
permanently removed from the storage account. If the storage account's soft delete feature is
enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible
@@ -2053,7 +2098,7 @@ def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2128,6 +2173,7 @@ def delete( # pylint: disable=inconsistent-return-statements
def undelete( # pylint: disable=inconsistent-return-statements
self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Undelete a blob that was previously soft deleted.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -2143,7 +2189,7 @@ def undelete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2200,6 +2246,7 @@ def set_expiry( # pylint: disable=inconsistent-return-statements
expires_on: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Sets the time a blob will expire and be deleted.
:param expiry_options: Required. Indicates mode of the expiry time. Known values are:
@@ -2220,7 +2267,7 @@ def set_expiry( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2282,6 +2329,7 @@ def set_http_headers( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Set HTTP Headers operation sets system properties on the blob.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -2303,7 +2351,7 @@ def set_http_headers( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2403,9 +2451,12 @@ def set_immutability_policy( # pylint: disable=inconsistent-return-statements
request_id_parameter: Optional[str] = None,
immutability_policy_expiry: Optional[datetime.datetime] = None,
immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None,
+ snapshot: Optional[str] = None,
+ version_id: Optional[str] = None,
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Set Immutability Policy operation sets the immutability policy on the blob.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -2423,13 +2474,23 @@ def set_immutability_policy( # pylint: disable=inconsistent-return-statements
:param immutability_policy_mode: Specifies the immutability policy mode to set on the blob.
Known values are: "Mutable", "Unlocked", and "Locked". Default value is None.
:type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode
+ :param snapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the blob snapshot to retrieve. For more information on working with blob snapshots,
+ see :code:`<a href="https://learn.microsoft.com/rest/api/storageservices/creating-a-snapshot-of-a-blob">Creating
+ a Snapshot of a Blob</a>`. Default value is None.
+ :type snapshot: str
+ :param version_id: The version id parameter is an opaque DateTime value that, when present,
+ specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer.
+ Default value is None.
+ :type version_id: str
:param modified_access_conditions: Parameter group. Default value is None.
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2454,6 +2515,8 @@ def set_immutability_policy( # pylint: disable=inconsistent-return-statements
if_unmodified_since=_if_unmodified_since,
immutability_policy_expiry=immutability_policy_expiry,
immutability_policy_mode=immutability_policy_mode,
+ snapshot=snapshot,
+ version_id=version_id,
comp=comp,
version=self._config.version,
headers=_headers,
@@ -2492,8 +2555,14 @@ def set_immutability_policy( # pylint: disable=inconsistent-return-statements
@distributed_trace
def delete_immutability_policy( # pylint: disable=inconsistent-return-statements
- self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any
+ self,
+ timeout: Optional[int] = None,
+ request_id_parameter: Optional[str] = None,
+ snapshot: Optional[str] = None,
+ version_id: Optional[str] = None,
+ **kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Delete Immutability Policy operation deletes the immutability policy on the blob.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -2505,11 +2574,21 @@ def delete_immutability_policy( # pylint: disable=inconsistent-return-statement
limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
value is None.
:type request_id_parameter: str
+ :param snapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the blob snapshot to operate on. For more information on working with blob snapshots,
+ see `Creating a Snapshot of a Blob
+ <https://learn.microsoft.com/rest/api/storageservices/creating-a-snapshot-of-a-blob>`_. Default value is None.
+ :type snapshot: str
+ :param version_id: The version id parameter is an opaque DateTime value that, when present,
+ specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer.
+ Default value is None.
+ :type version_id: str
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2527,6 +2606,8 @@ def delete_immutability_policy( # pylint: disable=inconsistent-return-statement
url=self._config.url,
timeout=timeout,
request_id_parameter=request_id_parameter,
+ snapshot=snapshot,
+ version_id=version_id,
comp=comp,
version=self._config.version,
headers=_headers,
@@ -2559,8 +2640,15 @@ def delete_immutability_policy( # pylint: disable=inconsistent-return-statement
@distributed_trace
def set_legal_hold( # pylint: disable=inconsistent-return-statements
- self, legal_hold: bool, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any
+ self,
+ legal_hold: bool,
+ timeout: Optional[int] = None,
+ request_id_parameter: Optional[str] = None,
+ snapshot: Optional[str] = None,
+ version_id: Optional[str] = None,
+ **kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Set Legal Hold operation sets a legal hold on the blob.
:param legal_hold: Specified if a legal hold should be set on the blob. Required.
@@ -2574,11 +2662,21 @@ def set_legal_hold( # pylint: disable=inconsistent-return-statements
limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
value is None.
:type request_id_parameter: str
+ :param snapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the blob snapshot to operate on. For more information on working with blob snapshots,
+ see `Creating a Snapshot of a Blob
+ <https://learn.microsoft.com/rest/api/storageservices/creating-a-snapshot-of-a-blob>`_. Default value is None.
+ :type snapshot: str
+ :param version_id: The version id parameter is an opaque DateTime value that, when present,
+ specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer.
+ Default value is None.
+ :type version_id: str
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2597,6 +2695,8 @@ def set_legal_hold( # pylint: disable=inconsistent-return-statements
legal_hold=legal_hold,
timeout=timeout,
request_id_parameter=request_id_parameter,
+ snapshot=snapshot,
+ version_id=version_id,
comp=comp,
version=self._config.version,
headers=_headers,
@@ -2640,6 +2740,7 @@ def set_metadata( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or
more name-value pairs.
@@ -2672,7 +2773,7 @@ def set_metadata( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2778,6 +2879,7 @@ def acquire_lease( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete
operations.
@@ -2804,7 +2906,7 @@ def acquire_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2885,6 +2987,7 @@ def release_lease( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete
operations.
@@ -2905,7 +3008,7 @@ def release_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2984,6 +3087,7 @@ def renew_lease( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete
operations.
@@ -3004,7 +3108,7 @@ def renew_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -3085,6 +3189,7 @@ def change_lease( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete
operations.
@@ -3109,7 +3214,7 @@ def change_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -3190,6 +3295,7 @@ def break_lease( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete
operations.
@@ -3216,7 +3322,7 @@ def break_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -3299,6 +3405,7 @@ def create_snapshot( # pylint: disable=inconsistent-return-statements
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Create Snapshot operation creates a read-only snapshot of a blob.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -3330,7 +3437,7 @@ def create_snapshot( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -3440,6 +3547,7 @@ def start_copy_from_url( # pylint: disable=inconsistent-return-statements
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Start Copy From URL operation copies a blob or an internet resource to a new blob.
:param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of
@@ -3496,7 +3604,7 @@ def start_copy_from_url( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -3614,6 +3722,7 @@ def copy_from_url( # pylint: disable=inconsistent-return-statements
cpk_scope_info: Optional[_models.CpkScopeInfo] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Copy From URL operation copies a blob or an internet resource to a new blob. It will not
return a response until the copy is complete.
@@ -3677,7 +3786,7 @@ def copy_from_url( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -3796,6 +3905,7 @@ def abort_copy_from_url( # pylint: disable=inconsistent-return-statements
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a
destination blob with zero length and full metadata.
@@ -3817,7 +3927,7 @@ def abort_copy_from_url( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -3888,6 +3998,7 @@ def set_tier( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a
premium storage account and on a block blob in a blob storage account (locally redundant
storage only). A premium page blob's tier determines the allowed size, IOPS, and bandwidth of
@@ -3928,7 +4039,7 @@ def set_tier( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -3992,6 +4103,7 @@ def set_tier( # pylint: disable=inconsistent-return-statements
def get_account_info( # pylint: disable=inconsistent-return-statements
self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Returns the sku name and account kind.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -4007,7 +4119,7 @@ def get_account_info( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -4072,6 +4184,7 @@ def query(
query_request: Optional[_models.QueryRequest] = None,
**kwargs: Any
) -> Iterator[bytes]:
+ # pylint: disable=line-too-long
"""The Query operation enables users to select/project on blob data by providing simple query
expressions.
@@ -4102,7 +4215,7 @@ def query(
:rtype: Iterator[bytes]
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -4321,6 +4434,7 @@ def get_tags(
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> _models.BlobTags:
+ # pylint: disable=line-too-long
"""The Get Tags operation enables users to get the tags associated with a blob.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -4350,7 +4464,7 @@ def get_tags(
:rtype: ~azure.storage.blob.models.BlobTags
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -4426,6 +4540,7 @@ def set_tags( # pylint: disable=inconsistent-return-statements
tags: Optional[_models.BlobTags] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Set Tags operation enables users to set tags on a blob.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -4457,7 +4572,7 @@ def set_tags( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_block_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_block_blob_operations.py
index ab161913ceb0..b0c20b665f1e 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_block_blob_operations.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_block_blob_operations.py
@@ -68,6 +68,8 @@ def build_upload_request(
immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None,
legal_hold: Optional[bool] = None,
transactional_content_crc64: Optional[bytes] = None,
+ structured_body_type: Optional[str] = None,
+ structured_content_length: Optional[int] = None,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
@@ -75,7 +77,7 @@ def build_upload_request(
blob_type: Literal["BlockBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "BlockBlob"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -160,6 +162,12 @@ def build_upload_request(
_headers["x-ms-content-crc64"] = _SERIALIZER.header(
"transactional_content_crc64", transactional_content_crc64, "bytearray"
)
+ if structured_body_type is not None:
+ _headers["x-ms-structured-body"] = _SERIALIZER.header("structured_body_type", structured_body_type, "str")
+ if structured_content_length is not None:
+ _headers["x-ms-structured-content-length"] = _SERIALIZER.header(
+ "structured_content_length", structured_content_length, "int"
+ )
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
@@ -209,7 +217,7 @@ def build_put_blob_from_url_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
blob_type: Literal["BlockBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "BlockBlob"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -327,6 +335,8 @@ def build_stage_block_request(
encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None,
encryption_scope: Optional[str] = None,
request_id_parameter: Optional[str] = None,
+ structured_body_type: Optional[str] = None,
+ structured_content_length: Optional[int] = None,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
@@ -334,7 +344,7 @@ def build_stage_block_request(
comp: Literal["block"] = kwargs.pop("comp", _params.pop("comp", "block"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -376,6 +386,12 @@ def build_stage_block_request(
_headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
if request_id_parameter is not None:
_headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str")
+ if structured_body_type is not None:
+ _headers["x-ms-structured-body"] = _SERIALIZER.header("structured_body_type", structured_body_type, "str")
+ if structured_content_length is not None:
+ _headers["x-ms-structured-content-length"] = _SERIALIZER.header(
+ "structured_content_length", structured_content_length, "int"
+ )
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
@@ -410,7 +426,7 @@ def build_stage_block_from_url_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
comp: Literal["block"] = kwargs.pop("comp", _params.pop("comp", "block"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -511,7 +527,7 @@ def build_commit_block_list_request(
comp: Literal["blocklist"] = kwargs.pop("comp", _params.pop("comp", "blocklist"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -617,7 +633,7 @@ def build_get_block_list_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
comp: Literal["blocklist"] = kwargs.pop("comp", _params.pop("comp", "blocklist"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -683,6 +699,8 @@ def upload( # pylint: disable=inconsistent-return-statements
immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None,
legal_hold: Optional[bool] = None,
transactional_content_crc64: Optional[bytes] = None,
+ structured_body_type: Optional[str] = None,
+ structured_content_length: Optional[int] = None,
blob_http_headers: Optional[_models.BlobHTTPHeaders] = None,
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
cpk_info: Optional[_models.CpkInfo] = None,
@@ -690,6 +708,7 @@ def upload( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Upload Block Blob operation updates the content of an existing block blob. Updating an
existing block blob overwrites any existing metadata on the blob. Partial updates are not
supported with Put Blob; the content of the existing blob is overwritten with the content of
@@ -738,6 +757,13 @@ def upload( # pylint: disable=inconsistent-return-statements
:param transactional_content_crc64: Specify the transactional crc64 for the body, to be
validated by the service. Default value is None.
:type transactional_content_crc64: bytes
+ :param structured_body_type: Required if the request body is a structured message. Specifies
+ the message schema version and properties. Default value is None.
+ :type structured_body_type: str
+ :param structured_content_length: Required if the request body is a structured message.
+ Specifies the length of the blob/file content inside the message body. Will always be smaller
+ than Content-Length. Default value is None.
+ :type structured_content_length: int
:param blob_http_headers: Parameter group. Default value is None.
:type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders
:param lease_access_conditions: Parameter group. Default value is None.
@@ -752,7 +778,7 @@ def upload( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -835,6 +861,8 @@ def upload( # pylint: disable=inconsistent-return-statements
immutability_policy_mode=immutability_policy_mode,
legal_hold=legal_hold,
transactional_content_crc64=transactional_content_crc64,
+ structured_body_type=structured_body_type,
+ structured_content_length=structured_content_length,
blob_type=blob_type,
content_type=content_type,
version=self._config.version,
@@ -876,6 +904,9 @@ def upload( # pylint: disable=inconsistent-return-statements
response_headers["x-ms-encryption-scope"] = self._deserialize(
"str", response.headers.get("x-ms-encryption-scope")
)
+ response_headers["x-ms-structured-body"] = self._deserialize(
+ "str", response.headers.get("x-ms-structured-body")
+ )
if cls:
return cls(pipeline_response, None, response_headers) # type: ignore
@@ -903,6 +934,7 @@ def put_blob_from_url( # pylint: disable=inconsistent-return-statements
source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Put Blob from URL operation creates a new Block Blob where the contents of the blob are
read from a given URL. This API is supported beginning with the 2020-04-08 version. Partial
updates are not supported with Put Blob from URL; the content of an existing blob is
@@ -973,7 +1005,7 @@ def put_blob_from_url( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1124,11 +1156,14 @@ def stage_block( # pylint: disable=inconsistent-return-statements
transactional_content_crc64: Optional[bytes] = None,
timeout: Optional[int] = None,
request_id_parameter: Optional[str] = None,
+ structured_body_type: Optional[str] = None,
+ structured_content_length: Optional[int] = None,
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
cpk_info: Optional[_models.CpkInfo] = None,
cpk_scope_info: Optional[_models.CpkScopeInfo] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Stage Block operation creates a new block to be committed as part of a blob.
:param block_id: A valid Base64 string value that identifies the block. Prior to encoding, the
@@ -1154,6 +1189,13 @@ def stage_block( # pylint: disable=inconsistent-return-statements
limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
value is None.
:type request_id_parameter: str
+ :param structured_body_type: Required if the request body is a structured message. Specifies
+ the message schema version and properties. Default value is None.
+ :type structured_body_type: str
+ :param structured_content_length: Required if the request body is a structured message.
+ Specifies the length of the blob/file content inside the message body. Will always be smaller
+ than Content-Length. Default value is None.
+ :type structured_content_length: int
:param lease_access_conditions: Parameter group. Default value is None.
:type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
:param cpk_info: Parameter group. Default value is None.
@@ -1164,7 +1206,7 @@ def stage_block( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1207,6 +1249,8 @@ def stage_block( # pylint: disable=inconsistent-return-statements
encryption_algorithm=_encryption_algorithm,
encryption_scope=_encryption_scope,
request_id_parameter=request_id_parameter,
+ structured_body_type=structured_body_type,
+ structured_content_length=structured_content_length,
comp=comp,
content_type=content_type,
version=self._config.version,
@@ -1248,6 +1292,9 @@ def stage_block( # pylint: disable=inconsistent-return-statements
response_headers["x-ms-encryption-scope"] = self._deserialize(
"str", response.headers.get("x-ms-encryption-scope")
)
+ response_headers["x-ms-structured-body"] = self._deserialize(
+ "str", response.headers.get("x-ms-structured-body")
+ )
if cls:
return cls(pipeline_response, None, response_headers) # type: ignore
@@ -1270,6 +1317,7 @@ def stage_block_from_url( # pylint: disable=inconsistent-return-statements
source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Stage Block operation creates a new block to be committed as part of a blob where the
contents are read from a URL.
@@ -1314,7 +1362,7 @@ def stage_block_from_url( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1435,6 +1483,7 @@ def commit_block_list( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Commit Block List operation writes a blob by specifying the list of block IDs that make up
the blob. In order to be written as part of a blob, a block must have been successfully written
to the server in a prior Put Block operation. You can call Put Block List to update a blob by
@@ -1497,7 +1546,7 @@ def commit_block_list( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1638,6 +1687,7 @@ def get_block_list(
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> _models.BlockList:
+ # pylint: disable=line-too-long
"""The Get Block List operation retrieves the list of blocks that have been uploaded as part of a
block blob.
@@ -1668,7 +1718,7 @@ def get_block_list(
:rtype: ~azure.storage.blob.models.BlockList
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_container_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_container_operations.py
index 719059977673..e68bf2cdb49a 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_container_operations.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_container_operations.py
@@ -54,7 +54,7 @@ def build_create_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -103,7 +103,7 @@ def build_get_properties_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -144,7 +144,7 @@ def build_delete_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -190,7 +190,7 @@ def build_set_metadata_request(
restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container"))
comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -235,7 +235,7 @@ def build_get_access_policy_request(
restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container"))
comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -281,7 +281,7 @@ def build_set_access_policy_request(
restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container"))
comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -331,7 +331,7 @@ def build_restore_request(
restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container"))
comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -379,7 +379,7 @@ def build_rename_request(
restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container"))
comp: Literal["rename"] = kwargs.pop("comp", _params.pop("comp", "rename"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -423,7 +423,7 @@ def build_submit_batch_request(
restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container"))
comp: Literal["batch"] = kwargs.pop("comp", _params.pop("comp", "batch"))
multipart_content_type: Optional[str] = kwargs.pop("multipart_content_type", _headers.pop("Content-Type", None))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -468,7 +468,7 @@ def build_filter_blobs_request(
restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container"))
comp: Literal["blobs"] = kwargs.pop("comp", _params.pop("comp", "blobs"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -519,7 +519,7 @@ def build_acquire_lease_request(
comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container"))
action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -570,7 +570,7 @@ def build_release_lease_request(
comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container"))
action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -618,7 +618,7 @@ def build_renew_lease_request(
comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container"))
action: Literal["renew"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "renew"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -666,7 +666,7 @@ def build_break_lease_request(
comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container"))
action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -716,7 +716,7 @@ def build_change_lease_request(
comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container"))
action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -765,7 +765,7 @@ def build_list_blob_flat_segment_request(
restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container"))
comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -816,7 +816,7 @@ def build_list_blob_hierarchy_segment_request( # pylint: disable=name-too-long
restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container"))
comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -859,7 +859,7 @@ def build_get_account_info_request(
restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account"))
comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -914,6 +914,7 @@ def create( # pylint: disable=inconsistent-return-statements
container_cpk_scope_info: Optional[_models.ContainerCpkScopeInfo] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""creates a new container under the specified account. If the container with the same name
already exists, the operation fails.
@@ -943,7 +944,7 @@ def create( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1011,6 +1012,7 @@ def get_properties( # pylint: disable=inconsistent-return-statements
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""returns all user-defined metadata and system properties for the specified container. The data
returned does not include the container's list of blobs.
@@ -1029,7 +1031,7 @@ def get_properties( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1113,6 +1115,7 @@ def delete( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""operation marks the specified container for deletion. The container and any blobs contained
within it are later deleted during garbage collection.
@@ -1133,7 +1136,7 @@ def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1203,6 +1206,7 @@ def set_metadata( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""operation sets one or more user-defined name-value pairs for the specified container.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -1230,7 +1234,7 @@ def set_metadata( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1300,6 +1304,7 @@ def get_access_policy(
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> List[_models.SignedIdentifier]:
+ # pylint: disable=line-too-long
"""gets the permissions for the specified container. The permissions indicate whether container
data may be accessed publicly.
@@ -1318,7 +1323,7 @@ def get_access_policy(
:rtype: list[~azure.storage.blob.models.SignedIdentifier]
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1393,6 +1398,7 @@ def set_access_policy( # pylint: disable=inconsistent-return-statements
container_acl: Optional[List[_models.SignedIdentifier]] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""sets the permissions for the specified container. The permissions indicate whether blobs in a
container may be accessed publicly.
@@ -1418,7 +1424,7 @@ def set_access_policy( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1502,6 +1508,7 @@ def restore( # pylint: disable=inconsistent-return-statements
deleted_container_version: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Restores a previously-deleted container.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -1523,7 +1530,7 @@ def restore( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1584,6 +1591,7 @@ def rename( # pylint: disable=inconsistent-return-statements
source_lease_id: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Renames an existing container.
:param source_container_name: Required. Specifies the name of the container to rename.
@@ -1605,7 +1613,7 @@ def rename( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1666,6 +1674,7 @@ def submit_batch(
request_id_parameter: Optional[str] = None,
**kwargs: Any
) -> Iterator[bytes]:
+ # pylint: disable=line-too-long
"""The Batch operation allows multiple API calls to be embedded into a single HTTP request.
:param content_length: The length of the request. Required.
@@ -1685,7 +1694,7 @@ def submit_batch(
:rtype: Iterator[bytes]
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1760,6 +1769,7 @@ def filter_blobs(
include: Optional[List[Union[str, _models.FilterBlobsIncludeItem]]] = None,
**kwargs: Any
) -> _models.FilterBlobSegment:
+ # pylint: disable=line-too-long
"""The Filter Blobs operation enables callers to list blobs in a container whose tags match a
given search expression. Filter blobs searches within the given container.
@@ -1796,7 +1806,7 @@ def filter_blobs(
:rtype: ~azure.storage.blob.models.FilterBlobSegment
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1864,6 +1874,7 @@ def acquire_lease( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""[Update] establishes and manages a lock on a container for delete operations. The lock duration
can be 15 to 60 seconds, or can be infinite.
@@ -1890,7 +1901,7 @@ def acquire_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1964,6 +1975,7 @@ def release_lease( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""[Update] establishes and manages a lock on a container for delete operations. The lock duration
can be 15 to 60 seconds, or can be infinite.
@@ -1984,7 +1996,7 @@ def release_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2056,6 +2068,7 @@ def renew_lease( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""[Update] establishes and manages a lock on a container for delete operations. The lock duration
can be 15 to 60 seconds, or can be infinite.
@@ -2076,7 +2089,7 @@ def renew_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2149,6 +2162,7 @@ def break_lease( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""[Update] establishes and manages a lock on a container for delete operations. The lock duration
can be 15 to 60 seconds, or can be infinite.
@@ -2175,7 +2189,7 @@ def break_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2249,6 +2263,7 @@ def change_lease( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""[Update] establishes and manages a lock on a container for delete operations. The lock duration
can be 15 to 60 seconds, or can be infinite.
@@ -2273,7 +2288,7 @@ def change_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2349,6 +2364,7 @@ def list_blob_flat_segment(
request_id_parameter: Optional[str] = None,
**kwargs: Any
) -> _models.ListBlobsFlatSegmentResponse:
+ # pylint: disable=line-too-long
"""[Update] The List Blobs operation returns a list of the blobs under the specified container.
:param prefix: Filters the results to return only containers whose name begins with the
@@ -2384,7 +2400,7 @@ def list_blob_flat_segment(
:rtype: ~azure.storage.blob.models.ListBlobsFlatSegmentResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2455,6 +2471,7 @@ def list_blob_hierarchy_segment(
request_id_parameter: Optional[str] = None,
**kwargs: Any
) -> _models.ListBlobsHierarchySegmentResponse:
+ # pylint: disable=line-too-long
"""[Update] The List Blobs operation returns a list of the blobs under the specified container.
:param delimiter: When the request includes this parameter, the operation returns a BlobPrefix
@@ -2495,7 +2512,7 @@ def list_blob_hierarchy_segment(
:rtype: ~azure.storage.blob.models.ListBlobsHierarchySegmentResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2559,6 +2576,7 @@ def list_blob_hierarchy_segment(
def get_account_info( # pylint: disable=inconsistent-return-statements
self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Returns the sku name and account kind.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -2574,7 +2592,7 @@ def get_account_info( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_page_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_page_blob_operations.py
index a280a9f3048d..96fd5d1c503f 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_page_blob_operations.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_page_blob_operations.py
@@ -73,7 +73,7 @@ def build_create_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
blob_type: Literal["PageBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "PageBlob"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -181,6 +181,8 @@ def build_upload_pages_request(
if_none_match: Optional[str] = None,
if_tags: Optional[str] = None,
request_id_parameter: Optional[str] = None,
+ structured_body_type: Optional[str] = None,
+ structured_content_length: Optional[int] = None,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
@@ -189,7 +191,7 @@ def build_upload_pages_request(
comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page"))
page_write: Literal["update"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "update"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -255,6 +257,12 @@ def build_upload_pages_request(
_headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
if request_id_parameter is not None:
_headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str")
+ if structured_body_type is not None:
+ _headers["x-ms-structured-body"] = _SERIALIZER.header("structured_body_type", structured_body_type, "str")
+ if structured_content_length is not None:
+ _headers["x-ms-structured-content-length"] = _SERIALIZER.header(
+ "structured_content_length", structured_content_length, "int"
+ )
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
@@ -289,7 +297,7 @@ def build_clear_pages_request(
comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page"))
page_write: Literal["clear"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "clear"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -388,7 +396,7 @@ def build_upload_pages_from_url_request(
comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page"))
page_write: Literal["update"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "update"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -495,7 +503,7 @@ def build_get_page_ranges_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
comp: Literal["pagelist"] = kwargs.pop("comp", _params.pop("comp", "pagelist"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -563,7 +571,7 @@ def build_get_page_ranges_diff_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
comp: Literal["pagelist"] = kwargs.pop("comp", _params.pop("comp", "pagelist"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -634,7 +642,7 @@ def build_resize_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -701,7 +709,7 @@ def build_update_sequence_number_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -760,7 +768,7 @@ def build_copy_incremental_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
comp: Literal["incrementalcopy"] = kwargs.pop("comp", _params.pop("comp", "incrementalcopy"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -836,6 +844,7 @@ def create( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Create operation creates a new page blob.
:param content_length: The length of the request. Required.
@@ -892,7 +901,7 @@ def create( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1026,6 +1035,8 @@ def upload_pages( # pylint: disable=inconsistent-return-statements
timeout: Optional[int] = None,
range: Optional[str] = None,
request_id_parameter: Optional[str] = None,
+ structured_body_type: Optional[str] = None,
+ structured_content_length: Optional[int] = None,
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
cpk_info: Optional[_models.CpkInfo] = None,
cpk_scope_info: Optional[_models.CpkScopeInfo] = None,
@@ -1033,6 +1044,7 @@ def upload_pages( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Upload Pages operation writes a range of pages to a page blob.
:param content_length: The length of the request. Required.
@@ -1056,6 +1068,13 @@ def upload_pages( # pylint: disable=inconsistent-return-statements
limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
value is None.
:type request_id_parameter: str
+ :param structured_body_type: Required if the request body is a structured message. Specifies
+ the message schema version and properties. Default value is None.
+ :type structured_body_type: str
+ :param structured_content_length: Required if the request body is a structured message.
+ Specifies the length of the blob/file content inside the message body. Will always be smaller
+ than Content-Length. Default value is None.
+ :type structured_content_length: int
:param lease_access_conditions: Parameter group. Default value is None.
:type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
:param cpk_info: Parameter group. Default value is None.
@@ -1071,7 +1090,7 @@ def upload_pages( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1143,6 +1162,8 @@ def upload_pages( # pylint: disable=inconsistent-return-statements
if_none_match=_if_none_match,
if_tags=_if_tags,
request_id_parameter=request_id_parameter,
+ structured_body_type=structured_body_type,
+ structured_content_length=structured_content_length,
comp=comp,
page_write=page_write,
content_type=content_type,
@@ -1190,6 +1211,9 @@ def upload_pages( # pylint: disable=inconsistent-return-statements
response_headers["x-ms-encryption-scope"] = self._deserialize(
"str", response.headers.get("x-ms-encryption-scope")
)
+ response_headers["x-ms-structured-body"] = self._deserialize(
+ "str", response.headers.get("x-ms-structured-body")
+ )
if cls:
return cls(pipeline_response, None, response_headers) # type: ignore
@@ -1208,6 +1232,7 @@ def clear_pages( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Clear Pages operation clears a set of pages from a page blob.
:param content_length: The length of the request. Required.
@@ -1238,7 +1263,7 @@ def clear_pages( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1366,6 +1391,7 @@ def upload_pages_from_url( # pylint: disable=inconsistent-return-statements
source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Upload Pages operation writes a range of pages to a page blob where the contents are read
from a URL.
@@ -1415,7 +1441,7 @@ def upload_pages_from_url( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1560,6 +1586,7 @@ def get_page_ranges(
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> _models.PageList:
+ # pylint: disable=line-too-long
"""The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot
of a page blob.
@@ -1602,7 +1629,7 @@ def get_page_ranges(
:rtype: ~azure.storage.blob.models.PageList
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1699,6 +1726,7 @@ def get_page_ranges_diff(
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> _models.PageList:
+ # pylint: disable=line-too-long
"""The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that
were changed between target blob and previous snapshot.
@@ -1753,7 +1781,7 @@ def get_page_ranges_diff(
:rtype: ~azure.storage.blob.models.PageList
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1849,6 +1877,7 @@ def resize( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Resize the Blob.
:param blob_content_length: This header specifies the maximum size for the page blob, up to 1
@@ -1875,7 +1904,7 @@ def resize( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1975,6 +2004,7 @@ def update_sequence_number( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Update the sequence number of the blob.
:param sequence_number_action: Required if the x-ms-blob-sequence-number header is set for the
@@ -2003,7 +2033,7 @@ def update_sequence_number( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2088,6 +2118,7 @@ def copy_incremental( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Copy Incremental operation copies a snapshot of the source page blob to a destination page
blob. The snapshot is copied such that only the differential changes between the previously
copied snapshot are transferred to the destination. The copied snapshots are complete copies of
@@ -2114,7 +2145,7 @@ def copy_incremental( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_service_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_service_operations.py
index 2e2a84dc524d..85a930712ca5 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_service_operations.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_service_operations.py
@@ -47,7 +47,7 @@ def build_set_properties_request(
restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service"))
comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -83,7 +83,7 @@ def build_get_properties_request(
restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service"))
comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -117,7 +117,7 @@ def build_get_statistics_request(
restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service"))
comp: Literal["stats"] = kwargs.pop("comp", _params.pop("comp", "stats"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -158,7 +158,7 @@ def build_list_containers_segment_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -200,7 +200,7 @@ def build_get_user_delegation_key_request(
restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service"))
comp: Literal["userdelegationkey"] = kwargs.pop("comp", _params.pop("comp", "userdelegationkey"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -236,7 +236,7 @@ def build_get_account_info_request(
restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account"))
comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -276,7 +276,7 @@ def build_submit_batch_request(
comp: Literal["batch"] = kwargs.pop("comp", _params.pop("comp", "batch"))
multipart_content_type: Optional[str] = kwargs.pop("multipart_content_type", _headers.pop("Content-Type", None))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -319,7 +319,7 @@ def build_filter_blobs_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
comp: Literal["blobs"] = kwargs.pop("comp", _params.pop("comp", "blobs"))
- version: Literal["2024-08-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-08-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -379,6 +379,7 @@ def set_properties( # pylint: disable=inconsistent-return-statements
request_id_parameter: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Sets properties for a storage account's Blob service endpoint, including properties for Storage
Analytics and CORS (Cross-Origin Resource Sharing) rules.
@@ -397,7 +398,7 @@ def set_properties( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -455,6 +456,7 @@ def set_properties( # pylint: disable=inconsistent-return-statements
def get_properties(
self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any
) -> _models.StorageServiceProperties:
+ # pylint: disable=line-too-long
"""gets the properties of a storage account's Blob service, including properties for Storage
Analytics and CORS (Cross-Origin Resource Sharing) rules.
@@ -471,7 +473,7 @@ def get_properties(
:rtype: ~azure.storage.blob.models.StorageServiceProperties
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -528,6 +530,7 @@ def get_properties(
def get_statistics(
self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any
) -> _models.StorageServiceStats:
+ # pylint: disable=line-too-long
"""Retrieves statistics related to replication for the Blob service. It is only available on the
secondary location endpoint when read-access geo-redundant replication is enabled for the
storage account.
@@ -545,7 +548,7 @@ def get_statistics(
:rtype: ~azure.storage.blob.models.StorageServiceStats
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -610,6 +613,7 @@ def list_containers_segment(
request_id_parameter: Optional[str] = None,
**kwargs: Any
) -> _models.ListContainersSegmentResponse:
+ # pylint: disable=line-too-long
"""The List Containers Segment operation returns a list of the containers under the specified
account.
@@ -646,7 +650,7 @@ def list_containers_segment(
:rtype: ~azure.storage.blob.models.ListContainersSegmentResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -709,6 +713,7 @@ def get_user_delegation_key(
request_id_parameter: Optional[str] = None,
**kwargs: Any
) -> _models.UserDelegationKey:
+ # pylint: disable=line-too-long
"""Retrieves a user delegation key for the Blob service. This is only a valid operation when using
bearer token authentication.
@@ -727,7 +732,7 @@ def get_user_delegation_key(
:rtype: ~azure.storage.blob.models.UserDelegationKey
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -790,6 +795,7 @@ def get_user_delegation_key(
def get_account_info( # pylint: disable=inconsistent-return-statements
self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Returns the sku name and account kind.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -805,7 +811,7 @@ def get_account_info( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -867,6 +873,7 @@ def submit_batch(
request_id_parameter: Optional[str] = None,
**kwargs: Any
) -> Iterator[bytes]:
+ # pylint: disable=line-too-long
"""The Batch operation allows multiple API calls to be embedded into a single HTTP request.
:param content_length: The length of the request. Required.
@@ -886,7 +893,7 @@ def submit_batch(
:rtype: Iterator[bytes]
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -959,6 +966,7 @@ def filter_blobs(
include: Optional[List[Union[str, _models.FilterBlobsIncludeItem]]] = None,
**kwargs: Any
) -> _models.FilterBlobSegment:
+ # pylint: disable=line-too-long
"""The Filter Blobs operation enables callers to list blobs across all containers whose tags match
a given search expression. Filter blobs searches across all containers within a storage
account but can be scoped within the expression to a single container.
@@ -996,7 +1004,7 @@ def filter_blobs(
:rtype: ~azure.storage.blob.models.FilterBlobSegment
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_serialize.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_serialize.py
index 67dc2f9a2aee..e9d5eb190959 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_serialize.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_serialize.py
@@ -57,6 +57,7 @@
'2024-05-04',
'2024-08-04',
'2024-11-04',
+ '2025-01-05',
]
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/base_client_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/base_client_async.py
index 8e81643f5cce..6186b29db107 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/base_client_async.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/base_client_async.py
@@ -127,16 +127,16 @@ def _create_pipeline(
hosts = self._hosts
policies = [
QueueMessagePolicy(),
- config.headers_policy,
config.proxy_policy,
config.user_agent_policy,
StorageContentValidation(),
- StorageRequestHook(**kwargs),
- self._credential_policy,
ContentDecodePolicy(response_encoding="utf-8"),
AsyncRedirectPolicy(**kwargs),
StorageHosts(hosts=hosts, **kwargs),
config.retry_policy,
+ config.headers_policy,
+ StorageRequestHook(**kwargs),
+ self._credential_policy,
config.logging_policy,
AsyncStorageResponseHook(**kwargs),
DistributedTracingPolicy(**kwargs),
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/models.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/models.py
index bc418a1fbf70..d78cd9113133 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/models.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/models.py
@@ -70,6 +70,7 @@ class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta):
# Blob values
APPEND_POSITION_CONDITION_NOT_MET = "AppendPositionConditionNotMet"
+ BLOB_ACCESS_TIER_NOT_SUPPORTED_FOR_ACCOUNT_TYPE = "BlobAccessTierNotSupportedForAccountType"
BLOB_ALREADY_EXISTS = "BlobAlreadyExists"
BLOB_NOT_FOUND = "BlobNotFound"
BLOB_OVERWRITTEN = "BlobOverwritten"
@@ -154,6 +155,8 @@ class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta):
DELETE_PENDING = "DeletePending"
DIRECTORY_NOT_EMPTY = "DirectoryNotEmpty"
FILE_LOCK_CONFLICT = "FileLockConflict"
+ FILE_SHARE_PROVISIONED_BANDWIDTH_DOWNGRADE_NOT_ALLOWED = "FileShareProvisionedBandwidthDowngradeNotAllowed"
+ FILE_SHARE_PROVISIONED_IOPS_DOWNGRADE_NOT_ALLOWED = "FileShareProvisionedIopsDowngradeNotAllowed"
INVALID_FILE_OR_DIRECTORY_PATH_NAME = "InvalidFileOrDirectoryPathName"
PARENT_NOT_FOUND = "ParentNotFound"
READ_ONLY_ATTRIBUTE = "ReadOnlyAttribute"
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py
index c28bab9b509e..5d522e318983 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py
@@ -1125,6 +1125,9 @@ async def set_immutability_policy(
.. versionadded:: 12.10.0
This was introduced in API version '2020-10-02'.
+ :keyword str version_id:
+ The version id parameter is an opaque DateTime
+ value that, when present, specifies the version of the blob on which to operate.
:keyword int timeout:
Sets the server-side timeout for the operation in seconds. For more details see
https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
@@ -1135,10 +1138,11 @@ async def set_immutability_policy(
:rtype: Dict[str, str]
"""
+ version_id = get_version_id(self.version_id, kwargs)
kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time
kwargs['immutability_policy_mode'] = immutability_policy.policy_mode
- return cast(Dict[str, str],
- await self._client.blob.set_immutability_policy(cls=return_response_headers, **kwargs))
+ return cast(Dict[str, str], await self._client.blob.set_immutability_policy(
+ cls=return_response_headers, version_id=version_id, **kwargs))
@distributed_trace_async
async def delete_immutability_policy(self, **kwargs: Any) -> None:
@@ -1147,6 +1151,9 @@ async def delete_immutability_policy(self, **kwargs: Any) -> None:
.. versionadded:: 12.10.0
This operation was introduced in API version '2020-10-02'.
+ :keyword str version_id:
+ The version id parameter is an opaque DateTime
+ value that, when present, specifies the version of the blob on which to operate.
:keyword int timeout:
Sets the server-side timeout for the operation in seconds. For more details see
https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
@@ -1157,7 +1164,8 @@ async def delete_immutability_policy(self, **kwargs: Any) -> None:
:rtype: Dict[str, str]
"""
- await self._client.blob.delete_immutability_policy(**kwargs)
+ version_id = get_version_id(self.version_id, kwargs)
+ await self._client.blob.delete_immutability_policy(version_id=version_id, **kwargs)
@distributed_trace_async
async def set_legal_hold(self, legal_hold: bool, **kwargs: Any) -> Dict[str, Union[str, datetime, bool]]:
@@ -1168,6 +1176,9 @@ async def set_legal_hold(self, legal_hold: bool, **kwargs: Any) -> Dict[str, Uni
:param bool legal_hold:
Specified if a legal hold should be set on the blob.
+ :keyword str version_id:
+ The version id parameter is an opaque DateTime
+ value that, when present, specifies the version of the blob on which to operate.
:keyword int timeout:
Sets the server-side timeout for the operation in seconds. For more details see
https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
@@ -1178,8 +1189,9 @@ async def set_legal_hold(self, legal_hold: bool, **kwargs: Any) -> Dict[str, Uni
:rtype: Dict[str, Union[str, datetime, bool]]
"""
- return cast(Dict[str, Union[str, datetime, bool]],
- await self._client.blob.set_legal_hold(legal_hold, cls=return_response_headers, **kwargs))
+ version_id = get_version_id(self.version_id, kwargs)
+ return cast(Dict[str, Union[str, datetime, bool]], await self._client.blob.set_legal_hold(
+ legal_hold, version_id=version_id, cls=return_response_headers, **kwargs))
@distributed_trace_async
async def create_page_blob(
diff --git a/sdk/storage/azure-storage-blob/swagger/README.md b/sdk/storage/azure-storage-blob/swagger/README.md
index 3d7e16f79a45..739f59cae350 100644
--- a/sdk/storage/azure-storage-blob/swagger/README.md
+++ b/sdk/storage/azure-storage-blob/swagger/README.md
@@ -16,7 +16,7 @@ autorest --v3 --python
### Settings
``` yaml
-input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/storage/data-plane/Microsoft.BlobStorage/stable/2024-08-04/blob.json
+input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/storage/data-plane/Microsoft.BlobStorage/stable/2025-01-05/blob.json
output-folder: ../azure/storage/blob/_generated
namespace: azure.storage.blob
no-namespace-folders: true
diff --git a/sdk/storage/azure-storage-blob/tests/test_common_blob.py b/sdk/storage/azure-storage-blob/tests/test_common_blob.py
index d5577a6f9c0a..b7e8936b3656 100644
--- a/sdk/storage/azure-storage-blob/tests/test_common_blob.py
+++ b/sdk/storage/azure-storage-blob/tests/test_common_blob.py
@@ -3231,6 +3231,109 @@ def test_list_blobs_with_immutability_policy(self, **kwargs):
return variables
+ @BlobPreparer()
+ @recorded_by_proxy
+ def test_snapshot_immutability_policy_and_legal_hold(self, **kwargs):
+ versioned_storage_account_name = kwargs.pop("versioned_storage_account_name")
+ versioned_storage_account_key = kwargs.pop("versioned_storage_account_key")
+ storage_resource_group_name = kwargs.pop("storage_resource_group_name")
+ variables = kwargs.pop("variables", {})
+
+ self._setup(versioned_storage_account_name, versioned_storage_account_key)
+ container_name = self.get_resource_name('container')
+ if self.is_live:
+ token_credential = self.get_credential(BlobServiceClient)
+ subscription_id = self.get_settings_value("SUBSCRIPTION_ID")
+ mgmt_client = StorageManagementClient(token_credential, subscription_id, '2021-04-01')
+ property = mgmt_client.models().BlobContainer(
+ immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True))
+ mgmt_client.blob_containers.create(storage_resource_group_name, versioned_storage_account_name, container_name, blob_container=property)
+
+ blob_name = self._get_blob_reference()
+ blob = self.bsc.get_blob_client(container_name, blob_name)
+ blob.upload_blob(self.byte_data, length=len(self.byte_data), overwrite=True)
+ snapshot_blob = self.bsc.get_blob_client(container_name, blob_name, snapshot=blob.create_snapshot())
+
+ try:
+ expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(seconds=5))
+ immutability_policy = ImmutabilityPolicy(
+ expiry_time=expiry_time,
+ policy_mode=BlobImmutabilityPolicyMode.Unlocked
+ )
+
+ snapshot_blob.set_immutability_policy(immutability_policy=immutability_policy)
+ props = snapshot_blob.get_blob_properties()
+ assert props['immutability_policy']['expiry_time'] is not None
+ assert props['immutability_policy']['policy_mode'] == "unlocked"
+
+ snapshot_blob.delete_immutability_policy()
+ props = snapshot_blob.get_blob_properties()
+ assert props['immutability_policy']['expiry_time'] is None
+ assert props['immutability_policy']['policy_mode'] is None
+
+ snapshot_blob.set_legal_hold(True)
+ props = snapshot_blob.get_blob_properties()
+ assert props['has_legal_hold']
+ finally:
+ snapshot_blob.set_legal_hold(False)
+ blob.delete_blob(delete_snapshots="include")
+
+ return variables
+
+ @BlobPreparer()
+ @recorded_by_proxy
+ def test_versioning_immutability_policy_and_legal_hold(self, **kwargs):
+ versioned_storage_account_name = kwargs.pop("versioned_storage_account_name")
+ versioned_storage_account_key = kwargs.pop("versioned_storage_account_key")
+ storage_resource_group_name = kwargs.pop("storage_resource_group_name")
+ variables = kwargs.pop("variables", {})
+
+ self._setup(versioned_storage_account_name, versioned_storage_account_key)
+ container_name = self.get_resource_name('container')
+ if self.is_live:
+ token_credential = self.get_credential(BlobServiceClient)
+ subscription_id = self.get_settings_value("SUBSCRIPTION_ID")
+ mgmt_client = StorageManagementClient(token_credential, subscription_id, '2021-04-01')
+ property = mgmt_client.models().BlobContainer(
+ immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True))
+ mgmt_client.blob_containers.create(storage_resource_group_name, versioned_storage_account_name,
+ container_name, blob_container=property)
+
+ blob_name = self._get_blob_reference()
+ root_blob = self.bsc.get_blob_client(container_name, blob_name)
+ old_version_dict = root_blob.upload_blob(b"abc", overwrite=True)
+ root_blob.upload_blob(b"abcdef", overwrite=True)
+
+ try:
+ expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(seconds=5))
+ immutability_policy = ImmutabilityPolicy(
+ expiry_time=expiry_time,
+ policy_mode=BlobImmutabilityPolicyMode.Unlocked
+ )
+ old_version_blob = self.bsc.get_blob_client(
+ container_name, blob_name,
+ version_id=old_version_dict['version_id']
+ )
+
+ old_version_blob.set_immutability_policy(immutability_policy=immutability_policy)
+ props = old_version_blob.get_blob_properties()
+ assert props['immutability_policy']['expiry_time'] is not None
+ assert props['immutability_policy']['policy_mode'] == "unlocked"
+
+ old_version_blob.delete_immutability_policy()
+ props = old_version_blob.get_blob_properties()
+ assert props['immutability_policy']['expiry_time'] is None
+ assert props['immutability_policy']['policy_mode'] is None
+
+ old_version_blob.set_legal_hold(True)
+ props = old_version_blob.get_blob_properties()
+ assert props['has_legal_hold']
+ finally:
+ old_version_blob.set_legal_hold(False)
+ root_blob.delete_blob(delete_snapshots="include")
+
+ return variables
+
@BlobPreparer()
@recorded_by_proxy
def test_validate_empty_blob(self, **kwargs):
diff --git a/sdk/storage/azure-storage-blob/tests/test_common_blob_async.py b/sdk/storage/azure-storage-blob/tests/test_common_blob_async.py
index 89c4f9635e51..864719ab7fcd 100644
--- a/sdk/storage/azure-storage-blob/tests/test_common_blob_async.py
+++ b/sdk/storage/azure-storage-blob/tests/test_common_blob_async.py
@@ -3154,6 +3154,111 @@ async def test_list_blobs_with_immutability_policy(self, **kwargs):
return variables
+ @BlobPreparer()
+ @recorded_by_proxy_async
+ async def test_snapshot_immutability_policy_and_legal_hold(self, **kwargs):
+ versioned_storage_account_name = kwargs.pop("versioned_storage_account_name")
+ versioned_storage_account_key = kwargs.pop("versioned_storage_account_key")
+ storage_resource_group_name = kwargs.pop("storage_resource_group_name")
+ variables = kwargs.pop("variables", {})
+
+ await self._setup(versioned_storage_account_name, versioned_storage_account_key)
+ container_name = self.get_resource_name('container')
+ if self.is_live:
+ token_credential = self.get_credential(BlobServiceClient, is_async=True)
+ subscription_id = self.get_settings_value("SUBSCRIPTION_ID")
+ mgmt_client = StorageManagementClient(token_credential, subscription_id, '2021-04-01')
+ property = mgmt_client.models().BlobContainer(
+ immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True))
+ await mgmt_client.blob_containers.create(storage_resource_group_name, versioned_storage_account_name,
+ container_name, blob_container=property)
+
+ blob_name = self._get_blob_reference()
+ blob = self.bsc.get_blob_client(container_name, blob_name)
+ await blob.upload_blob(self.byte_data, length=len(self.byte_data), overwrite=True)
+ snapshot = await blob.create_snapshot()
+ snapshot_blob = self.bsc.get_blob_client(container_name, blob_name, snapshot=snapshot)
+
+ try:
+ expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(seconds=5))
+ immutability_policy = ImmutabilityPolicy(
+ expiry_time=expiry_time,
+ policy_mode=BlobImmutabilityPolicyMode.Unlocked
+ )
+
+ await snapshot_blob.set_immutability_policy(immutability_policy=immutability_policy)
+ props = await snapshot_blob.get_blob_properties()
+ assert props['immutability_policy']['expiry_time'] is not None
+ assert props['immutability_policy']['policy_mode'] == "unlocked"
+
+ await snapshot_blob.delete_immutability_policy()
+ props = await snapshot_blob.get_blob_properties()
+ assert props['immutability_policy']['expiry_time'] is None
+ assert props['immutability_policy']['policy_mode'] is None
+
+ await snapshot_blob.set_legal_hold(True)
+ props = await snapshot_blob.get_blob_properties()
+ assert props['has_legal_hold']
+ finally:
+ await snapshot_blob.set_legal_hold(False)
+ await blob.delete_blob(delete_snapshots="include")
+
+ return variables
+
+ @BlobPreparer()
+ @recorded_by_proxy_async
+ async def test_versioning_immutability_policy_and_legal_hold(self, **kwargs):
+ versioned_storage_account_name = kwargs.pop("versioned_storage_account_name")
+ versioned_storage_account_key = kwargs.pop("versioned_storage_account_key")
+ storage_resource_group_name = kwargs.pop("storage_resource_group_name")
+ variables = kwargs.pop("variables", {})
+
+ await self._setup(versioned_storage_account_name, versioned_storage_account_key)
+ container_name = self.get_resource_name('container')
+ if self.is_live:
+ token_credential = self.get_credential(BlobServiceClient, is_async=True)
+ subscription_id = self.get_settings_value("SUBSCRIPTION_ID")
+ mgmt_client = StorageManagementClient(token_credential, subscription_id, '2021-04-01')
+ property = mgmt_client.models().BlobContainer(
+ immutable_storage_with_versioning=mgmt_client.models().ImmutableStorageWithVersioning(enabled=True))
+ await mgmt_client.blob_containers.create(storage_resource_group_name, versioned_storage_account_name,
+ container_name, blob_container=property)
+
+ blob_name = self.get_resource_name('blob')
+ root_blob = self.bsc.get_blob_client(container_name, blob_name)
+ old_version_dict = await root_blob.upload_blob(b"abc", overwrite=True)
+ await root_blob.upload_blob(b"abcdef", overwrite=True)
+
+ try:
+ expiry_time = self.get_datetime_variable(variables, 'expiry_time', datetime.utcnow() + timedelta(seconds=5))
+ immutability_policy = ImmutabilityPolicy(
+ expiry_time=expiry_time,
+ policy_mode=BlobImmutabilityPolicyMode.Unlocked
+ )
+ old_version_blob = self.bsc.get_blob_client(
+ container_name, blob_name,
+ version_id=old_version_dict['version_id']
+ )
+
+ await old_version_blob.set_immutability_policy(immutability_policy=immutability_policy)
+ props = await old_version_blob.get_blob_properties()
+ assert props['immutability_policy']['expiry_time'] is not None
+ assert props['immutability_policy']['policy_mode'] == "unlocked"
+
+ await old_version_blob.delete_immutability_policy()
+ props = await old_version_blob.get_blob_properties()
+ assert props['immutability_policy']['expiry_time'] is None
+ assert props['immutability_policy']['policy_mode'] is None
+
+ await old_version_blob.set_legal_hold(True)
+ props = await old_version_blob.get_blob_properties()
+ assert props['has_legal_hold']
+ finally:
+ await old_version_blob.set_legal_hold(False)
+ await root_blob.delete_blob(delete_snapshots="include")
+
+ return variables
+
@BlobPreparer()
@recorded_by_proxy_async
async def test_validate_empty_blob(self, **kwargs):
diff --git a/sdk/storage/azure-storage-file-datalake/CHANGELOG.md b/sdk/storage/azure-storage-file-datalake/CHANGELOG.md
index f6ffe549ab9c..b1cc802d6136 100644
--- a/sdk/storage/azure-storage-file-datalake/CHANGELOG.md
+++ b/sdk/storage/azure-storage-file-datalake/CHANGELOG.md
@@ -1,6 +1,6 @@
# Release History
-## 12.18.0b1 (2024-10-08)
+## 12.18.0b1 (2024-10-10)
### Features Added
- Added support for service version 2025-01-05.
diff --git a/sdk/storage/azure-storage-file-datalake/assets.json b/sdk/storage/azure-storage-file-datalake/assets.json
index 8e42b4351825..9f9856602d18 100644
--- a/sdk/storage/azure-storage-file-datalake/assets.json
+++ b/sdk/storage/azure-storage-file-datalake/assets.json
@@ -2,5 +2,5 @@
"AssetsRepo": "Azure/azure-sdk-assets",
"AssetsRepoPrefixPath": "python",
"TagPrefix": "python/storage/azure-storage-file-datalake",
- "Tag": "python/storage/azure-storage-file-datalake_4a2e7c5076"
+ "Tag": "python/storage/azure-storage-file-datalake_e5d1c29802"
}
diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_directory_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_directory_client.py
index 0bdc45e88576..0aa26b8a5167 100644
--- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_directory_client.py
+++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_directory_client.py
@@ -5,6 +5,7 @@
# --------------------------------------------------------------------------
# pylint: disable=docstring-keyword-should-match-keyword-only
+import functools
from typing import (
Any, Dict, Optional, Union,
TYPE_CHECKING
@@ -13,17 +14,21 @@
from typing_extensions import Self
+from azure.core.paging import ItemPaged
from azure.core.pipeline import Pipeline
from azure.core.tracing.decorator import distributed_trace
-from ._deserialize import deserialize_dir_properties
-from ._shared.base_client import TransportWrapper, parse_connection_str
+
from ._data_lake_file_client import DataLakeFileClient
+from ._deserialize import deserialize_dir_properties
+from ._list_paths_helper import PathPropertiesPaged
from ._models import DirectoryProperties, FileProperties
from ._path_client import PathClient
+from ._shared.base_client import TransportWrapper, parse_connection_str
if TYPE_CHECKING:
from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential, TokenCredential
from datetime import datetime
+ from ._models import PathProperties
class DataLakeDirectoryClient(PathClient):
@@ -651,6 +656,52 @@ def create_file(self, file, # type: Union[FileProperties, str]
file_client.create_file(**kwargs)
return file_client
+ @distributed_trace
+ def get_paths(
+ self, *,
+ recursive: bool = True,
+ max_results: Optional[int] = None,
+ upn: Optional[bool] = None,
+ timeout: Optional[int] = None,
+ **kwargs: Any
+ ) -> ItemPaged["PathProperties"]:
+ """Returns a generator to list the paths under specified file system and directory.
+ The generator will lazily follow the continuation tokens returned by the service.
+
+ :keyword bool recursive: Set True for recursive, False for iterative. The default value is True.
+ :keyword Optional[int] max_results: An optional value that specifies the maximum
+ number of items to return per page. If omitted or greater than 5,000, the
+ response will include up to 5,000 items per page.
+ :keyword Optional[bool] upn:
+ If True, the user identity values returned in the x-ms-owner, x-ms-group,
+ and x-ms-acl response headers will be transformed from Azure Active Directory Object IDs to User
+ Principal Names in the owner, group, and acl fields of
+ :class:`~azure.storage.filedatalake.PathProperties`. If False, the values will be returned
+ as Azure Active Directory Object IDs. The default value is None. Note that group and application
+        Object IDs are not translated because they do not have unique friendly names.
+ :keyword Optional[int] timeout:
+ Sets the server-side timeout for the operation in seconds. For more details see
+ https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+        This value is not tracked or validated on the client. To configure client-side network timeouts
+ see `here `_. The default value is None.
+ :returns: An iterable (auto-paging) response of PathProperties.
+ :rtype: ~azure.core.paging.ItemPaged[~azure.storage.filedatalake.PathProperties]
+ """
+        # NOTE(review): timeout is already bound via the keyword-only parameter above; re-popping it from kwargs always yielded None and discarded the caller's value.
+ hostname = self._hosts[self._location_mode]
+ url = f"{self.scheme}://{hostname}/{quote(self.file_system_name)}"
+ client = self._build_generated_client(url)
+ command = functools.partial(
+ client.file_system.list_paths,
+ path=self.path_name,
+ timeout=timeout,
+ **kwargs
+ )
+ return ItemPaged(
+ command, recursive, path=self.path_name, max_results=max_results,
+ upn=upn, page_iterator_class=PathPropertiesPaged, **kwargs)
+
def get_file_client(self, file # type: Union[FileProperties, str]
):
# type: (...) -> DataLakeFileClient
diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client.py
index 0c2e30214b13..7017527f32bc 100644
--- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client.py
+++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client.py
@@ -562,10 +562,10 @@ def get_paths(
The generator will lazily follow the continuation tokens returned by
the service.
- :param str path:
+ :param Optional[str] path:
Filters the results to return only paths under the specified path.
:param Optional[bool] recursive: Optional. Set True for recursive, False for iterative.
- :param int max_results: An optional value that specifies the maximum
+ :param Optional[int] max_results: An optional value that specifies the maximum
number of items to return per page. If omitted or greater than 5,000, the
response will include up to 5,000 items per page.
:keyword bool upn:
diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_azure_data_lake_storage_restapi.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_azure_data_lake_storage_restapi.py
index e1dec2bc80cd..ae1c9c2d97cf 100644
--- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_azure_data_lake_storage_restapi.py
+++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_azure_data_lake_storage_restapi.py
@@ -42,7 +42,7 @@ class AzureDataLakeStorageRESTAPI: # pylint: disable=client-accepts-api-version
is "filesystem". Note that overriding this default value may result in unsupported behavior.
:paramtype resource: str
:keyword version: Specifies the version of the operation to use for this request. Default value
- is "2023-05-03". Note that overriding this default value may result in unsupported behavior.
+ is "2025-01-05". Note that overriding this default value may result in unsupported behavior.
:paramtype version: str
"""
diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_configuration.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_configuration.py
index 5affd898facb..bc61b784da35 100644
--- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_configuration.py
+++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_configuration.py
@@ -30,13 +30,13 @@ class AzureDataLakeStorageRESTAPIConfiguration: # pylint: disable=too-many-inst
is "filesystem". Note that overriding this default value may result in unsupported behavior.
:paramtype resource: str
:keyword version: Specifies the version of the operation to use for this request. Default value
- is "2023-05-03". Note that overriding this default value may result in unsupported behavior.
+ is "2025-01-05". Note that overriding this default value may result in unsupported behavior.
:paramtype version: str
"""
def __init__(self, url: str, x_ms_lease_duration: Optional[int] = None, **kwargs: Any) -> None:
resource: Literal["filesystem"] = kwargs.pop("resource", "filesystem")
- version: Literal["2023-05-03"] = kwargs.pop("version", "2023-05-03")
+ version: Literal["2025-01-05"] = kwargs.pop("version", "2025-01-05")
if url is None:
raise ValueError("Parameter 'url' must not be None.")
diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_serialization.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_serialization.py
index 8139854b97bb..01a226bd7f14 100644
--- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_serialization.py
+++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_serialization.py
@@ -1,3 +1,4 @@
+# pylint: disable=too-many-lines
# --------------------------------------------------------------------------
#
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -24,7 +25,6 @@
#
# --------------------------------------------------------------------------
-# pylint: skip-file
# pyright: reportUnnecessaryTypeIgnoreComment=false
from base64 import b64decode, b64encode
@@ -52,7 +52,6 @@
MutableMapping,
Type,
List,
- Mapping,
)
try:
@@ -91,6 +90,8 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type:
:param data: Input, could be bytes or stream (will be decoded with UTF8) or text
:type data: str or bytes or IO
:param str content_type: The content type.
+ :return: The deserialized data.
+ :rtype: object
"""
if hasattr(data, "read"):
# Assume a stream
@@ -112,7 +113,7 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type:
try:
return json.loads(data_as_str)
except ValueError as err:
- raise DeserializationError("JSON is invalid: {}".format(err), err)
+ raise DeserializationError("JSON is invalid: {}".format(err), err) from err
elif "xml" in (content_type or []):
try:
@@ -155,6 +156,11 @@ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]],
Use bytes and headers to NOT use any requests/aiohttp or whatever
specific implementation.
Headers will tested for "content-type"
+
+ :param bytes body_bytes: The body of the response.
+ :param dict headers: The headers of the response.
+ :returns: The deserialized data.
+ :rtype: object
"""
# Try to use content-type from headers if available
content_type = None
@@ -184,15 +190,30 @@ class UTC(datetime.tzinfo):
"""Time Zone info for handling UTC"""
def utcoffset(self, dt):
- """UTF offset for UTC is 0."""
+        """UTC offset for UTC is 0.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The offset
+ :rtype: datetime.timedelta
+ """
return datetime.timedelta(0)
def tzname(self, dt):
- """Timestamp representation."""
+ """Timestamp representation.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The timestamp representation
+ :rtype: str
+ """
return "Z"
def dst(self, dt):
- """No daylight saving for UTC."""
+ """No daylight saving for UTC.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The daylight saving time
+ :rtype: datetime.timedelta
+ """
return datetime.timedelta(hours=1)
@@ -235,24 +256,26 @@ def __getinitargs__(self):
_FLATTEN = re.compile(r"(? None:
self.additional_properties: Optional[Dict[str, Any]] = {}
- for k in kwargs:
+ for k in kwargs: # pylint: disable=consider-using-dict-items
if k not in self._attribute_map:
_LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__)
elif k in self._validation and self._validation[k].get("readonly", False):
@@ -300,13 +330,23 @@ def __init__(self, **kwargs: Any) -> None:
setattr(self, k, kwargs[k])
def __eq__(self, other: Any) -> bool:
- """Compare objects by comparing all attributes."""
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are equal
+ :rtype: bool
+ """
if isinstance(other, self.__class__):
return self.__dict__ == other.__dict__
return False
def __ne__(self, other: Any) -> bool:
- """Compare objects by comparing all attributes."""
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are not equal
+ :rtype: bool
+ """
return not self.__eq__(other)
def __str__(self) -> str:
@@ -326,7 +366,11 @@ def is_xml_model(cls) -> bool:
@classmethod
def _create_xml_node(cls):
- """Create XML node."""
+ """Create XML node.
+
+ :returns: The XML node
+ :rtype: xml.etree.ElementTree.Element
+ """
try:
xml_map = cls._xml_map # type: ignore
except AttributeError:
@@ -346,7 +390,9 @@ def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON:
:rtype: dict
"""
serializer = Serializer(self._infer_class_models())
- return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs) # type: ignore
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, keep_readonly=keep_readonly, **kwargs
+ )
def as_dict(
self,
@@ -380,12 +426,15 @@ def my_key_transformer(key, attr_desc, value):
If you want XML serialization, you can pass the kwargs is_xml=True.
+ :param bool keep_readonly: If you want to serialize the readonly attributes
:param function key_transformer: A key transformer function.
:returns: A dict JSON compatible object
:rtype: dict
"""
serializer = Serializer(self._infer_class_models())
- return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs) # type: ignore
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs
+ )
@classmethod
def _infer_class_models(cls):
@@ -395,7 +444,7 @@ def _infer_class_models(cls):
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
if cls.__name__ not in client_models:
raise ValueError("Not Autorest generated code")
- except Exception:
+ except Exception: # pylint: disable=broad-exception-caught
# Assume it's not Autorest generated (tests?). Add ourselves as dependencies.
client_models = {cls.__name__: cls}
return client_models
@@ -408,6 +457,7 @@ def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = N
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong
+ :rtype: ModelType
"""
deserializer = Deserializer(cls._infer_class_models())
return deserializer(cls.__name__, data, content_type=content_type) # type: ignore
@@ -426,9 +476,11 @@ def from_dict(
and last_rest_key_case_insensitive_extractor)
:param dict data: A dict using RestAPI structure
+ :param function key_extractors: A key extractor function.
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong
+ :rtype: ModelType
"""
deserializer = Deserializer(cls._infer_class_models())
deserializer.key_extractors = ( # type: ignore
@@ -448,7 +500,7 @@ def _flatten_subtype(cls, key, objects):
return {}
result = dict(cls._subtype_map[key])
for valuetype in cls._subtype_map[key].values():
- result.update(objects[valuetype]._flatten_subtype(key, objects))
+ result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access
return result
@classmethod
@@ -456,6 +508,11 @@ def _classify(cls, response, objects):
"""Check the class _subtype_map for any child classes.
We want to ignore any inherited _subtype_maps.
Remove the polymorphic key from the initial data.
+
+ :param dict response: The initial data
+ :param dict objects: The class objects
+ :returns: The class to be used
+ :rtype: class
"""
for subtype_key in cls.__dict__.get("_subtype_map", {}).keys():
subtype_value = None
@@ -501,11 +558,13 @@ def _decode_attribute_map_key(key):
inside the received data.
:param str key: A key string from the generated code
+ :returns: The decoded key
+ :rtype: str
"""
return key.replace("\\.", ".")
-class Serializer(object):
+class Serializer(object): # pylint: disable=too-many-public-methods
"""Request object model serializer."""
basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
@@ -560,13 +619,16 @@ def __init__(self, classes: Optional[Mapping[str, type]] = None):
self.key_transformer = full_restapi_key_transformer
self.client_side_validation = True
- def _serialize(self, target_obj, data_type=None, **kwargs):
+ def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
+ self, target_obj, data_type=None, **kwargs
+ ):
"""Serialize data into a string according to type.
- :param target_obj: The data to be serialized.
+ :param object target_obj: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str, dict
:raises: SerializationError if serialization fails.
+ :returns: The serialized data.
"""
key_transformer = kwargs.get("key_transformer", self.key_transformer)
keep_readonly = kwargs.get("keep_readonly", False)
@@ -592,12 +654,14 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
serialized = {}
if is_xml_model_serialization:
- serialized = target_obj._create_xml_node()
+ serialized = target_obj._create_xml_node() # pylint: disable=protected-access
try:
- attributes = target_obj._attribute_map
+ attributes = target_obj._attribute_map # pylint: disable=protected-access
for attr, attr_desc in attributes.items():
attr_name = attr
- if not keep_readonly and target_obj._validation.get(attr_name, {}).get("readonly", False):
+ if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access
+ attr_name, {}
+ ).get("readonly", False):
continue
if attr_name == "additional_properties" and attr_desc["key"] == "":
@@ -633,7 +697,8 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
if isinstance(new_attr, list):
serialized.extend(new_attr) # type: ignore
elif isinstance(new_attr, ET.Element):
- # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces.
+ # If the down XML has no XML/Name,
+ # we MUST replace the tag with the local tag. But keeping the namespaces.
if "name" not in getattr(orig_attr, "_xml_map", {}):
splitted_tag = new_attr.tag.split("}")
if len(splitted_tag) == 2: # Namespace
@@ -664,17 +729,17 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
except (AttributeError, KeyError, TypeError) as err:
msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj))
raise SerializationError(msg) from err
- else:
- return serialized
+ return serialized
def body(self, data, data_type, **kwargs):
"""Serialize data intended for a request body.
- :param data: The data to be serialized.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: dict
:raises: SerializationError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized request body
"""
# Just in case this is a dict
@@ -703,7 +768,7 @@ def body(self, data, data_type, **kwargs):
attribute_key_case_insensitive_extractor,
last_rest_key_case_insensitive_extractor,
]
- data = deserializer._deserialize(data_type, data)
+ data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access
except DeserializationError as err:
raise SerializationError("Unable to build a model: " + str(err)) from err
@@ -712,9 +777,11 @@ def body(self, data, data_type, **kwargs):
def url(self, name, data, data_type, **kwargs):
"""Serialize data intended for a URL path.
- :param data: The data to be serialized.
+ :param str name: The name of the URL path parameter.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str
+ :returns: The serialized URL path
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
"""
@@ -728,21 +795,20 @@ def url(self, name, data, data_type, **kwargs):
output = output.replace("{", quote("{")).replace("}", quote("}"))
else:
output = quote(str(output), safe="")
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return output
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return output
def query(self, name, data, data_type, **kwargs):
"""Serialize data intended for a URL query.
- :param data: The data to be serialized.
+ :param str name: The name of the query parameter.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
- :keyword bool skip_quote: Whether to skip quote the serialized result.
- Defaults to False.
:rtype: str, list
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized query parameter
"""
try:
# Treat the list aside, since we don't want to encode the div separator
@@ -759,19 +825,20 @@ def query(self, name, data, data_type, **kwargs):
output = str(output)
else:
output = quote(str(output), safe="")
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return str(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
def header(self, name, data, data_type, **kwargs):
"""Serialize data intended for a request header.
- :param data: The data to be serialized.
+ :param str name: The name of the header.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized header
"""
try:
if data_type in ["[str]"]:
@@ -780,21 +847,20 @@ def header(self, name, data, data_type, **kwargs):
output = self.serialize_data(data, data_type, **kwargs)
if data_type == "bool":
output = json.dumps(output)
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return str(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
def serialize_data(self, data, data_type, **kwargs):
"""Serialize generic data according to supplied data type.
- :param data: The data to be serialized.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
- :param bool required: Whether it's essential that the data not be
- empty or None
:raises: AttributeError if required data is None.
:raises: ValueError if data is None
:raises: SerializationError if serialization fails.
+ :returns: The serialized data.
+ :rtype: str, int, float, bool, dict, list
"""
if data is None:
raise ValueError("No value for given attribute")
@@ -805,7 +871,7 @@ def serialize_data(self, data, data_type, **kwargs):
if data_type in self.basic_types.values():
return self.serialize_basic(data, data_type, **kwargs)
- elif data_type in self.serialize_type:
+ if data_type in self.serialize_type:
return self.serialize_type[data_type](data, **kwargs)
# If dependencies is empty, try with current data class
@@ -821,11 +887,10 @@ def serialize_data(self, data, data_type, **kwargs):
except (ValueError, TypeError) as err:
msg = "Unable to serialize value: {!r} as type: {!r}."
raise SerializationError(msg.format(data, data_type)) from err
- else:
- return self._serialize(data, **kwargs)
+ return self._serialize(data, **kwargs)
@classmethod
- def _get_custom_serializers(cls, data_type, **kwargs):
+ def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements
custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type)
if custom_serializer:
return custom_serializer
@@ -841,23 +906,26 @@ def serialize_basic(cls, data, data_type, **kwargs):
- basic_types_serializers dict[str, callable] : If set, use the callable as serializer
- is_xml bool : If set, use xml_basic_types_serializers
- :param data: Object to be serialized.
+ :param obj data: Object to be serialized.
:param str data_type: Type of object in the iterable.
+ :rtype: str, int, float, bool
+ :return: serialized object
"""
custom_serializer = cls._get_custom_serializers(data_type, **kwargs)
if custom_serializer:
return custom_serializer(data)
if data_type == "str":
return cls.serialize_unicode(data)
- return eval(data_type)(data) # nosec
+ return eval(data_type)(data) # nosec # pylint: disable=eval-used
@classmethod
def serialize_unicode(cls, data):
"""Special handling for serializing unicode strings in Py2.
Encode to UTF-8 if unicode, otherwise handle as a str.
- :param data: Object to be serialized.
+ :param str data: Object to be serialized.
:rtype: str
+ :return: serialized object
"""
try: # If I received an enum, return its value
return data.value
@@ -871,8 +939,7 @@ def serialize_unicode(cls, data):
return data
except NameError:
return str(data)
- else:
- return str(data)
+ return str(data)
def serialize_iter(self, data, iter_type, div=None, **kwargs):
"""Serialize iterable.
@@ -882,15 +949,13 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs):
serialization_ctxt['type'] should be same as data_type.
- is_xml bool : If set, serialize as XML
- :param list attr: Object to be serialized.
+ :param list data: Object to be serialized.
:param str iter_type: Type of object in the iterable.
- :param bool required: Whether the objects in the iterable must
- not be None or empty.
:param str div: If set, this str will be used to combine the elements
in the iterable into a combined string. Default is 'None'.
- :keyword bool do_quote: Whether to quote the serialized result of each iterable element.
Defaults to False.
:rtype: list, str
+ :return: serialized iterable
"""
if isinstance(data, str):
raise SerializationError("Refuse str type as a valid iter type.")
@@ -945,9 +1010,8 @@ def serialize_dict(self, attr, dict_type, **kwargs):
:param dict attr: Object to be serialized.
:param str dict_type: Type of object in the dictionary.
- :param bool required: Whether the objects in the dictionary must
- not be None or empty.
:rtype: dict
+ :return: serialized dictionary
"""
serialization_ctxt = kwargs.get("serialization_ctxt", {})
serialized = {}
@@ -971,7 +1035,7 @@ def serialize_dict(self, attr, dict_type, **kwargs):
return serialized
- def serialize_object(self, attr, **kwargs):
+ def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
"""Serialize a generic object.
This will be handled as a dictionary. If object passed in is not
a basic type (str, int, float, dict, list) it will simply be
@@ -979,6 +1043,7 @@ def serialize_object(self, attr, **kwargs):
:param dict attr: Object to be serialized.
:rtype: dict or str
+ :return: serialized object
"""
if attr is None:
return None
@@ -1003,7 +1068,7 @@ def serialize_object(self, attr, **kwargs):
return self.serialize_decimal(attr)
# If it's a model or I know this dependency, serialize as a Model
- elif obj_type in self.dependencies.values() or isinstance(attr, Model):
+ if obj_type in self.dependencies.values() or isinstance(attr, Model):
return self._serialize(attr)
if obj_type == dict:
@@ -1034,56 +1099,61 @@ def serialize_enum(attr, enum_obj=None):
try:
enum_obj(result) # type: ignore
return result
- except ValueError:
+ except ValueError as exc:
for enum_value in enum_obj: # type: ignore
if enum_value.value.lower() == str(attr).lower():
return enum_value.value
error = "{!r} is not valid value for enum {!r}"
- raise SerializationError(error.format(attr, enum_obj))
+ raise SerializationError(error.format(attr, enum_obj)) from exc
@staticmethod
- def serialize_bytearray(attr, **kwargs):
+ def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize bytearray into base-64 string.
- :param attr: Object to be serialized.
+ :param str attr: Object to be serialized.
:rtype: str
+ :return: serialized base64
"""
return b64encode(attr).decode()
@staticmethod
- def serialize_base64(attr, **kwargs):
+ def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize str into base-64 string.
- :param attr: Object to be serialized.
+ :param str attr: Object to be serialized.
:rtype: str
+ :return: serialized base64
"""
encoded = b64encode(attr).decode("ascii")
return encoded.strip("=").replace("+", "-").replace("/", "_")
@staticmethod
- def serialize_decimal(attr, **kwargs):
+ def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Decimal object to float.
- :param attr: Object to be serialized.
+ :param decimal attr: Object to be serialized.
:rtype: float
+ :return: serialized decimal
"""
return float(attr)
@staticmethod
- def serialize_long(attr, **kwargs):
+ def serialize_long(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize long (Py2) or int (Py3).
- :param attr: Object to be serialized.
+ :param int attr: Object to be serialized.
:rtype: int/long
+ :return: serialized long
"""
return _long_type(attr)
@staticmethod
- def serialize_date(attr, **kwargs):
+ def serialize_date(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Date object into ISO-8601 formatted string.
:param Date attr: Object to be serialized.
:rtype: str
+ :return: serialized date
"""
if isinstance(attr, str):
attr = isodate.parse_date(attr)
@@ -1091,11 +1161,12 @@ def serialize_date(attr, **kwargs):
return t
@staticmethod
- def serialize_time(attr, **kwargs):
+ def serialize_time(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Time object into ISO-8601 formatted string.
:param datetime.time attr: Object to be serialized.
:rtype: str
+ :return: serialized time
"""
if isinstance(attr, str):
attr = isodate.parse_time(attr)
@@ -1105,30 +1176,32 @@ def serialize_time(attr, **kwargs):
return t
@staticmethod
- def serialize_duration(attr, **kwargs):
+ def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize TimeDelta object into ISO-8601 formatted string.
:param TimeDelta attr: Object to be serialized.
:rtype: str
+ :return: serialized duration
"""
if isinstance(attr, str):
attr = isodate.parse_duration(attr)
return isodate.duration_isoformat(attr)
@staticmethod
- def serialize_rfc(attr, **kwargs):
+ def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into RFC-1123 formatted string.
:param Datetime attr: Object to be serialized.
:rtype: str
:raises: TypeError if format invalid.
+ :return: serialized rfc
"""
try:
if not attr.tzinfo:
_LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
utc = attr.utctimetuple()
- except AttributeError:
- raise TypeError("RFC1123 object must be valid Datetime object.")
+ except AttributeError as exc:
+ raise TypeError("RFC1123 object must be valid Datetime object.") from exc
return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format(
Serializer.days[utc.tm_wday],
@@ -1141,12 +1214,13 @@ def serialize_rfc(attr, **kwargs):
)
@staticmethod
- def serialize_iso(attr, **kwargs):
+ def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into ISO-8601 formatted string.
:param Datetime attr: Object to be serialized.
:rtype: str
:raises: SerializationError if format invalid.
+ :return: serialized iso
"""
if isinstance(attr, str):
attr = isodate.parse_datetime(attr)
@@ -1172,13 +1246,14 @@ def serialize_iso(attr, **kwargs):
raise TypeError(msg) from err
@staticmethod
- def serialize_unix(attr, **kwargs):
+ def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into IntTime format.
This is represented as seconds.
:param Datetime attr: Object to be serialized.
:rtype: int
:raises: SerializationError if format invalid
+ :return: serialized unix
"""
if isinstance(attr, int):
return attr
@@ -1186,11 +1261,11 @@ def serialize_unix(attr, **kwargs):
if not attr.tzinfo:
_LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
return int(calendar.timegm(attr.utctimetuple()))
- except AttributeError:
- raise TypeError("Unix time object must be valid Datetime object.")
+ except AttributeError as exc:
+ raise TypeError("Unix time object must be valid Datetime object.") from exc
-def rest_key_extractor(attr, attr_desc, data):
+def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
key = attr_desc["key"]
working_data = data
@@ -1211,7 +1286,9 @@ def rest_key_extractor(attr, attr_desc, data):
return working_data.get(key)
-def rest_key_case_insensitive_extractor(attr, attr_desc, data):
+def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements
+ attr, attr_desc, data
+):
key = attr_desc["key"]
working_data = data
@@ -1232,17 +1309,29 @@ def rest_key_case_insensitive_extractor(attr, attr_desc, data):
return attribute_key_case_insensitive_extractor(key, None, working_data)
-def last_rest_key_extractor(attr, attr_desc, data):
- """Extract the attribute in "data" based on the last part of the JSON path key."""
+def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
+ """Extract the attribute in "data" based on the last part of the JSON path key.
+
+ :param str attr: The attribute to extract
+ :param dict attr_desc: The attribute description
+ :param dict data: The data to extract from
+ :rtype: object
+ :returns: The extracted attribute
+ """
key = attr_desc["key"]
dict_keys = _FLATTEN.split(key)
return attribute_key_extractor(dict_keys[-1], None, data)
-def last_rest_key_case_insensitive_extractor(attr, attr_desc, data):
+def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
"""Extract the attribute in "data" based on the last part of the JSON path key.
This is the case insensitive version of "last_rest_key_extractor"
+ :param str attr: The attribute to extract
+ :param dict attr_desc: The attribute description
+ :param dict data: The data to extract from
+ :rtype: object
+ :returns: The extracted attribute
"""
key = attr_desc["key"]
dict_keys = _FLATTEN.split(key)
@@ -1279,7 +1368,7 @@ def _extract_name_from_internal_type(internal_type):
return xml_name
-def xml_key_extractor(attr, attr_desc, data):
+def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements
if isinstance(data, dict):
return None
@@ -1331,22 +1420,21 @@ def xml_key_extractor(attr, attr_desc, data):
if is_iter_type:
if is_wrapped:
return None # is_wrapped no node, we want None
- else:
- return [] # not wrapped, assume empty list
+ return [] # not wrapped, assume empty list
return None # Assume it's not there, maybe an optional node.
# If is_iter_type and not wrapped, return all found children
if is_iter_type:
if not is_wrapped:
return children
- else: # Iter and wrapped, should have found one node only (the wrap one)
- if len(children) != 1:
- raise DeserializationError(
- "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format(
- xml_name
- )
+ # Iter and wrapped, should have found one node only (the wrap one)
+ if len(children) != 1:
+ raise DeserializationError(
+ "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( # pylint: disable=line-too-long
+ xml_name
)
- return list(children[0]) # Might be empty list and that's ok.
+ )
+ return list(children[0]) # Might be empty list and that's ok.
# Here it's not a itertype, we should have found one element only or empty
if len(children) > 1:
@@ -1363,7 +1451,7 @@ class Deserializer(object):
basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
- valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
+ valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
def __init__(self, classes: Optional[Mapping[str, type]] = None):
self.deserialize_type = {
@@ -1403,11 +1491,12 @@ def __call__(self, target_obj, response_data, content_type=None):
:param str content_type: Swagger "produces" if available.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
data = self._unpack_content(response_data, content_type)
return self._deserialize(target_obj, data)
- def _deserialize(self, target_obj, data):
+ def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements
"""Call the deserializer on a model.
Data needs to be already deserialized as JSON or XML ElementTree
@@ -1416,12 +1505,13 @@ def _deserialize(self, target_obj, data):
:param object data: Object to deserialize.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
# This is already a model, go recursive just in case
if hasattr(data, "_attribute_map"):
constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")]
try:
- for attr, mapconfig in data._attribute_map.items():
+ for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access
if attr in constants:
continue
value = getattr(data, attr)
@@ -1440,13 +1530,13 @@ def _deserialize(self, target_obj, data):
if isinstance(response, str):
return self.deserialize_data(data, response)
- elif isinstance(response, type) and issubclass(response, Enum):
+ if isinstance(response, type) and issubclass(response, Enum):
return self.deserialize_enum(data, response)
if data is None or data is CoreNull:
return data
try:
- attributes = response._attribute_map # type: ignore
+ attributes = response._attribute_map # type: ignore # pylint: disable=protected-access
d_attrs = {}
for attr, attr_desc in attributes.items():
# Check empty string. If it's not empty, someone has a real "additionalProperties"...
@@ -1476,9 +1566,8 @@ def _deserialize(self, target_obj, data):
except (AttributeError, TypeError, KeyError) as err:
msg = "Unable to deserialize to object: " + class_name # type: ignore
raise DeserializationError(msg) from err
- else:
- additional_properties = self._build_additional_properties(attributes, data)
- return self._instantiate_model(response, d_attrs, additional_properties)
+ additional_properties = self._build_additional_properties(attributes, data)
+ return self._instantiate_model(response, d_attrs, additional_properties)
def _build_additional_properties(self, attribute_map, data):
if not self.additional_properties_detection:
@@ -1505,6 +1594,8 @@ def _classify_target(self, target, data):
:param str target: The target object type to deserialize to.
:param str/dict data: The response data to deserialize.
+ :return: The classified target object and its class name.
+ :rtype: tuple
"""
if target is None:
return None, None
@@ -1516,7 +1607,7 @@ def _classify_target(self, target, data):
return target, target
try:
- target = target._classify(data, self.dependencies) # type: ignore
+ target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access
except AttributeError:
pass # Target is not a Model, no classify
return target, target.__class__.__name__ # type: ignore
@@ -1531,10 +1622,12 @@ def failsafe_deserialize(self, target_obj, data, content_type=None):
:param str target_obj: The target object type to deserialize to.
:param str/dict data: The response data to deserialize.
:param str content_type: Swagger "produces" if available.
+ :return: Deserialized object.
+ :rtype: object
"""
try:
return self(target_obj, data, content_type=content_type)
- except:
+ except: # pylint: disable=bare-except
_LOGGER.debug(
"Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
)
@@ -1552,10 +1645,12 @@ def _unpack_content(raw_data, content_type=None):
If raw_data is something else, bypass all logic and return it directly.
- :param raw_data: Data to be processed.
- :param content_type: How to parse if raw_data is a string/bytes.
+ :param object raw_data: Data to be processed.
+ :param str content_type: How to parse if raw_data is a string/bytes.
:raises JSONDecodeError: If JSON is requested and parsing is impossible.
:raises UnicodeDecodeError: If bytes is not UTF8
+ :rtype: object
+ :return: Unpacked content.
"""
# Assume this is enough to detect a Pipeline Response without importing it
context = getattr(raw_data, "context", {})
@@ -1579,14 +1674,21 @@ def _unpack_content(raw_data, content_type=None):
def _instantiate_model(self, response, attrs, additional_properties=None):
"""Instantiate a response model passing in deserialized args.
- :param response: The response model class.
- :param d_attrs: The deserialized response attributes.
+ :param Response response: The response model class.
+ :param dict attrs: The deserialized response attributes.
+ :param dict additional_properties: Additional properties to be set.
+ :rtype: Response
+ :return: The instantiated response model.
"""
if callable(response):
subtype = getattr(response, "_subtype_map", {})
try:
- readonly = [k for k, v in response._validation.items() if v.get("readonly")]
- const = [k for k, v in response._validation.items() if v.get("constant")]
+ readonly = [
+ k for k, v in response._validation.items() if v.get("readonly") # pylint: disable=protected-access
+ ]
+ const = [
+ k for k, v in response._validation.items() if v.get("constant") # pylint: disable=protected-access
+ ]
kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const}
response_obj = response(**kwargs)
for attr in readonly:
@@ -1596,7 +1698,7 @@ def _instantiate_model(self, response, attrs, additional_properties=None):
return response_obj
except TypeError as err:
msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore
- raise DeserializationError(msg + str(err))
+ raise DeserializationError(msg + str(err)) from err
else:
try:
for attr, value in attrs.items():
@@ -1605,15 +1707,16 @@ def _instantiate_model(self, response, attrs, additional_properties=None):
except Exception as exp:
msg = "Unable to populate response model. "
msg += "Type: {}, Error: {}".format(type(response), exp)
- raise DeserializationError(msg)
+ raise DeserializationError(msg) from exp
- def deserialize_data(self, data, data_type):
+ def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements
"""Process data for deserialization according to data type.
:param str data: The response string to be deserialized.
:param str data_type: The type to deserialize to.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
if data is None:
return data
@@ -1627,7 +1730,11 @@ def deserialize_data(self, data, data_type):
if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())):
return data
- is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"]
+ is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment
+ "object",
+ "[]",
+ r"{}",
+ ]
if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text:
return None
data_val = self.deserialize_type[data_type](data)
@@ -1647,14 +1754,14 @@ def deserialize_data(self, data, data_type):
msg = "Unable to deserialize response data."
msg += " Data: {}, {}".format(data, data_type)
raise DeserializationError(msg) from err
- else:
- return self._deserialize(obj_type, data)
+ return self._deserialize(obj_type, data)
def deserialize_iter(self, attr, iter_type):
"""Deserialize an iterable.
:param list attr: Iterable to be deserialized.
:param str iter_type: The type of object in the iterable.
+ :return: Deserialized iterable.
:rtype: list
"""
if attr is None:
@@ -1671,6 +1778,7 @@ def deserialize_dict(self, attr, dict_type):
:param dict/list attr: Dictionary to be deserialized. Also accepts
a list of key, value pairs.
:param str dict_type: The object type of the items in the dictionary.
+ :return: Deserialized dictionary.
:rtype: dict
"""
if isinstance(attr, list):
@@ -1681,11 +1789,12 @@ def deserialize_dict(self, attr, dict_type):
attr = {el.tag: el.text for el in attr}
return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()}
- def deserialize_object(self, attr, **kwargs):
+ def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
"""Deserialize a generic object.
This will be handled as a dictionary.
:param dict attr: Dictionary to be deserialized.
+ :return: Deserialized object.
:rtype: dict
:raises: TypeError if non-builtin datatype encountered.
"""
@@ -1720,11 +1829,10 @@ def deserialize_object(self, attr, **kwargs):
pass
return deserialized
- else:
- error = "Cannot deserialize generic object with type: "
- raise TypeError(error + str(obj_type))
+ error = "Cannot deserialize generic object with type: "
+ raise TypeError(error + str(obj_type))
- def deserialize_basic(self, attr, data_type):
+ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements
"""Deserialize basic builtin data type from string.
Will attempt to convert to str, int, float and bool.
This function will also accept '1', '0', 'true' and 'false' as
@@ -1732,6 +1840,7 @@ def deserialize_basic(self, attr, data_type):
:param str attr: response string to be deserialized.
:param str data_type: deserialization data type.
+ :return: Deserialized basic type.
:rtype: str, int, float or bool
:raises: TypeError if string format is not valid.
"""
@@ -1743,24 +1852,23 @@ def deserialize_basic(self, attr, data_type):
if data_type == "str":
# None or '', node is empty string.
return ""
- else:
- # None or '', node with a strong type is None.
- # Don't try to model "empty bool" or "empty int"
- return None
+ # None or '', node with a strong type is None.
+ # Don't try to model "empty bool" or "empty int"
+ return None
if data_type == "bool":
if attr in [True, False, 1, 0]:
return bool(attr)
- elif isinstance(attr, str):
+ if isinstance(attr, str):
if attr.lower() in ["true", "1"]:
return True
- elif attr.lower() in ["false", "0"]:
+ if attr.lower() in ["false", "0"]:
return False
raise TypeError("Invalid boolean value: {}".format(attr))
if data_type == "str":
return self.deserialize_unicode(attr)
- return eval(data_type)(attr) # nosec
+ return eval(data_type)(attr) # nosec # pylint: disable=eval-used
@staticmethod
def deserialize_unicode(data):
@@ -1768,6 +1876,7 @@ def deserialize_unicode(data):
as a string.
:param str data: response string to be deserialized.
+ :return: Deserialized string.
:rtype: str or unicode
"""
# We might be here because we have an enum modeled as string,
@@ -1781,8 +1890,7 @@ def deserialize_unicode(data):
return data
except NameError:
return str(data)
- else:
- return str(data)
+ return str(data)
@staticmethod
def deserialize_enum(data, enum_obj):
@@ -1794,6 +1902,7 @@ def deserialize_enum(data, enum_obj):
:param str data: Response string to be deserialized. If this value is
None or invalid it will be returned as-is.
:param Enum enum_obj: Enum object to deserialize to.
+ :return: Deserialized enum object.
:rtype: Enum
"""
if isinstance(data, enum_obj) or data is None:
@@ -1804,9 +1913,9 @@ def deserialize_enum(data, enum_obj):
# Workaround. We might consider remove it in the future.
try:
return list(enum_obj.__members__.values())[data]
- except IndexError:
+ except IndexError as exc:
error = "{!r} is not a valid index for enum {!r}"
- raise DeserializationError(error.format(data, enum_obj))
+ raise DeserializationError(error.format(data, enum_obj)) from exc
try:
return enum_obj(str(data))
except ValueError:
@@ -1822,6 +1931,7 @@ def deserialize_bytearray(attr):
"""Deserialize string into bytearray.
:param str attr: response string to be deserialized.
+ :return: Deserialized bytearray
:rtype: bytearray
:raises: TypeError if string format invalid.
"""
@@ -1834,6 +1944,7 @@ def deserialize_base64(attr):
"""Deserialize base64 encoded string into string.
:param str attr: response string to be deserialized.
+ :return: Deserialized base64 string
:rtype: bytearray
:raises: TypeError if string format invalid.
"""
@@ -1849,8 +1960,9 @@ def deserialize_decimal(attr):
"""Deserialize string into Decimal object.
:param str attr: response string to be deserialized.
- :rtype: Decimal
+ :return: Deserialized decimal
:raises: DeserializationError if string format invalid.
+ :rtype: Decimal
"""
if isinstance(attr, ET.Element):
attr = attr.text
@@ -1865,6 +1977,7 @@ def deserialize_long(attr):
"""Deserialize string into long (Py2) or int (Py3).
:param str attr: response string to be deserialized.
+ :return: Deserialized int
:rtype: long or int
:raises: ValueError if string format invalid.
"""
@@ -1877,6 +1990,7 @@ def deserialize_duration(attr):
"""Deserialize ISO-8601 formatted string into TimeDelta object.
:param str attr: response string to be deserialized.
+ :return: Deserialized duration
:rtype: TimeDelta
:raises: DeserializationError if string format invalid.
"""
@@ -1887,14 +2001,14 @@ def deserialize_duration(attr):
except (ValueError, OverflowError, AttributeError) as err:
msg = "Cannot deserialize duration object."
raise DeserializationError(msg) from err
- else:
- return duration
+ return duration
@staticmethod
def deserialize_date(attr):
"""Deserialize ISO-8601 formatted string into Date object.
:param str attr: response string to be deserialized.
+ :return: Deserialized date
:rtype: Date
:raises: DeserializationError if string format invalid.
"""
@@ -1910,6 +2024,7 @@ def deserialize_time(attr):
"""Deserialize ISO-8601 formatted string into time object.
:param str attr: response string to be deserialized.
+ :return: Deserialized time
:rtype: datetime.time
:raises: DeserializationError if string format invalid.
"""
@@ -1924,6 +2039,7 @@ def deserialize_rfc(attr):
"""Deserialize RFC-1123 formatted string into Datetime object.
:param str attr: response string to be deserialized.
+ :return: Deserialized RFC datetime
:rtype: Datetime
:raises: DeserializationError if string format invalid.
"""
@@ -1939,14 +2055,14 @@ def deserialize_rfc(attr):
except ValueError as err:
msg = "Cannot deserialize to rfc datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
@staticmethod
def deserialize_iso(attr):
"""Deserialize ISO-8601 formatted string into Datetime object.
:param str attr: response string to be deserialized.
+ :return: Deserialized ISO datetime
:rtype: Datetime
:raises: DeserializationError if string format invalid.
"""
@@ -1976,8 +2092,7 @@ def deserialize_iso(attr):
except (ValueError, OverflowError, AttributeError) as err:
msg = "Cannot deserialize datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
@staticmethod
def deserialize_unix(attr):
@@ -1985,6 +2100,7 @@ def deserialize_unix(attr):
This is represented as seconds.
:param int attr: Object to be serialized.
+ :return: Deserialized datetime
:rtype: Datetime
:raises: DeserializationError if format invalid
"""
@@ -1996,5 +2112,4 @@ def deserialize_unix(attr):
except ValueError as err:
msg = "Cannot deserialize to unix datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_azure_data_lake_storage_restapi.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_azure_data_lake_storage_restapi.py
index a16a5be74366..ecfcec9b6dc3 100644
--- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_azure_data_lake_storage_restapi.py
+++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_azure_data_lake_storage_restapi.py
@@ -42,7 +42,7 @@ class AzureDataLakeStorageRESTAPI: # pylint: disable=client-accepts-api-version
is "filesystem". Note that overriding this default value may result in unsupported behavior.
:paramtype resource: str
:keyword version: Specifies the version of the operation to use for this request. Default value
- is "2023-05-03". Note that overriding this default value may result in unsupported behavior.
+ is "2025-01-05". Note that overriding this default value may result in unsupported behavior.
:paramtype version: str
"""
diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_configuration.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_configuration.py
index d99a81396ee6..040a9fc9f74f 100644
--- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_configuration.py
+++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_configuration.py
@@ -30,13 +30,13 @@ class AzureDataLakeStorageRESTAPIConfiguration: # pylint: disable=too-many-inst
is "filesystem". Note that overriding this default value may result in unsupported behavior.
:paramtype resource: str
:keyword version: Specifies the version of the operation to use for this request. Default value
- is "2023-05-03". Note that overriding this default value may result in unsupported behavior.
+ is "2025-01-05". Note that overriding this default value may result in unsupported behavior.
:paramtype version: str
"""
def __init__(self, url: str, x_ms_lease_duration: Optional[int] = None, **kwargs: Any) -> None:
resource: Literal["filesystem"] = kwargs.pop("resource", "filesystem")
- version: Literal["2023-05-03"] = kwargs.pop("version", "2023-05-03")
+ version: Literal["2025-01-05"] = kwargs.pop("version", "2025-01-05")
if url is None:
raise ValueError("Parameter 'url' must not be None.")
diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_file_system_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_file_system_operations.py
index 43db73b3c9b5..6a1cb558e6ca 100644
--- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_file_system_operations.py
+++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_file_system_operations.py
@@ -67,6 +67,7 @@ async def create( # pylint: disable=inconsistent-return-statements
properties: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Create FileSystem.
Create a FileSystem rooted at the specified location. If the FileSystem already exists, the
@@ -94,7 +95,7 @@ async def create( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -153,6 +154,7 @@ async def set_properties( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Set FileSystem Properties.
Set properties for the FileSystem. This operation supports conditional HTTP requests. For
@@ -183,7 +185,7 @@ async def set_properties( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -242,6 +244,7 @@ async def set_properties( # pylint: disable=inconsistent-return-statements
async def get_properties( # pylint: disable=inconsistent-return-statements
self, request_id_parameter: Optional[str] = None, timeout: Optional[int] = None, **kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Get FileSystem Properties.
All system and user-defined filesystem properties are specified in the response headers.
@@ -259,7 +262,7 @@ async def get_properties( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -317,6 +320,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Delete FileSystem.
Marks the FileSystem for deletion. When a FileSystem is deleted, a FileSystem with the same
@@ -344,7 +348,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -408,6 +412,7 @@ async def list_paths(
upn: Optional[bool] = None,
**kwargs: Any
) -> _models.PathList:
+ # pylint: disable=line-too-long
"""List Paths.
List FileSystem paths and their properties.
@@ -447,7 +452,7 @@ async def list_paths(
:rtype: ~azure.storage.filedatalake.models.PathList
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -516,6 +521,7 @@ async def list_blob_hierarchy_segment(
request_id_parameter: Optional[str] = None,
**kwargs: Any
) -> _models.ListBlobsHierarchySegmentResponse:
+ # pylint: disable=line-too-long
"""The List Blobs operation returns a list of the blobs under the specified container.
:param prefix: Filters results to filesystems within the specified prefix. Default value is
@@ -556,7 +562,7 @@ async def list_blob_hierarchy_segment(
:rtype: ~azure.storage.filedatalake.models.ListBlobsHierarchySegmentResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_path_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_path_operations.py
index ad9a7a7294ea..e4e8eab1e3ac 100644
--- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_path_operations.py
+++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_path_operations.py
@@ -95,6 +95,7 @@ async def create( # pylint: disable=inconsistent-return-statements
cpk_info: Optional[_models.CpkInfo] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Create File | Create Directory | Rename File | Rename Directory.
Create or rename a file or directory. By default, the destination is overwritten and if the
@@ -198,7 +199,7 @@ async def create( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -344,11 +345,14 @@ async def update(
group: Optional[str] = None,
permissions: Optional[str] = None,
acl: Optional[str] = None,
+ structured_body_type: Optional[str] = None,
+ structured_content_length: Optional[int] = None,
path_http_headers: Optional[_models.PathHTTPHeaders] = None,
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> Optional[_models.SetAccessControlRecursiveResponse]:
+ # pylint: disable=line-too-long
"""Append Data | Flush Data | Set Properties | Set Access Control.
Uploads data to be appended to a file, flushes (writes) previously uploaded data to a file,
@@ -454,6 +458,13 @@ async def update(
scope, a type, a user or group identifier, and permissions in the format
"[scope:][type]:[id]:[permissions]". Default value is None.
:type acl: str
+ :param structured_body_type: Required if the request body is a structured message. Specifies
+ the message schema version and properties. Default value is None.
+ :type structured_body_type: str
+ :param structured_content_length: Required if the request body is a structured message.
+ Specifies the length of the blob/file content inside the message body. Will always be smaller
+ than Content-Length. Default value is None.
+ :type structured_content_length: int
:param path_http_headers: Parameter group. Default value is None.
:type path_http_headers: ~azure.storage.filedatalake.models.PathHTTPHeaders
:param lease_access_conditions: Parameter group. Default value is None.
@@ -464,7 +475,7 @@ async def update(
:rtype: ~azure.storage.filedatalake.models.SetAccessControlRecursiveResponse or None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -534,6 +545,8 @@ async def update(
if_none_match=_if_none_match,
if_modified_since=_if_modified_since,
if_unmodified_since=_if_unmodified_since,
+ structured_body_type=structured_body_type,
+ structured_content_length=structured_content_length,
content_type=content_type,
version=self._config.version,
content=_content,
@@ -583,6 +596,9 @@ async def update(
response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["x-ms-structured-body"] = self._deserialize(
+ "str", response.headers.get("x-ms-structured-body")
+ )
if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore
@@ -601,6 +617,7 @@ async def lease( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Lease Path.
Create and manage a lease to restrict write and delete access to the path. This operation
@@ -644,7 +661,7 @@ async def lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -740,6 +757,7 @@ async def read(
cpk_info: Optional[_models.CpkInfo] = None,
**kwargs: Any
) -> AsyncIterator[bytes]:
+ # pylint: disable=line-too-long
"""Read File.
Read the contents of a file. For read operations, range requests are supported. This operation
@@ -776,7 +794,7 @@ async def read(
:rtype: AsyncIterator[bytes]
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -932,6 +950,7 @@ async def get_properties( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Get Properties | Get Status | Get Access Control List.
Get Properties returns all system and user defined properties for a path. Get Status returns
@@ -970,7 +989,7 @@ async def get_properties( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1065,6 +1084,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Delete File | Delete Directory.
Delete the file or directory. This operation supports conditional HTTP requests. For more
@@ -1102,7 +1122,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1188,6 +1208,7 @@ async def set_access_control( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Set the owner, group, permissions, or access control list for a path.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -1222,7 +1243,7 @@ async def set_access_control( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1306,6 +1327,7 @@ async def set_access_control_recursive(
request_id_parameter: Optional[str] = None,
**kwargs: Any
) -> _models.SetAccessControlRecursiveResponse:
+ # pylint: disable=line-too-long
"""Set the access control list for a path and sub-paths.
:param mode: Mode "set" sets POSIX access control rights on files and directories, "modify"
@@ -1347,7 +1369,7 @@ async def set_access_control_recursive(
:rtype: ~azure.storage.filedatalake.models.SetAccessControlRecursiveResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1425,6 +1447,7 @@ async def flush_data( # pylint: disable=inconsistent-return-statements
cpk_info: Optional[_models.CpkInfo] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Set the owner, group, permissions, or access control list for a path.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -1491,7 +1514,7 @@ async def flush_data( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1616,11 +1639,14 @@ async def append_data( # pylint: disable=inconsistent-return-statements
proposed_lease_id: Optional[str] = None,
request_id_parameter: Optional[str] = None,
flush: Optional[bool] = None,
+ structured_body_type: Optional[str] = None,
+ structured_content_length: Optional[int] = None,
path_http_headers: Optional[_models.PathHTTPHeaders] = None,
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
cpk_info: Optional[_models.CpkInfo] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Append data to the file.
:param body: Initial data. Required.
@@ -1665,6 +1691,13 @@ async def append_data( # pylint: disable=inconsistent-return-statements
:type request_id_parameter: str
:param flush: If file should be flushed after the append. Default value is None.
:type flush: bool
+ :param structured_body_type: Required if the request body is a structured message. Specifies
+ the message schema version and properties. Default value is None.
+ :type structured_body_type: str
+ :param structured_content_length: Required if the request body is a structured message.
+ Specifies the length of the blob/file content inside the message body. Will always be smaller
+ than Content-Length. Default value is None.
+ :type structured_content_length: int
:param path_http_headers: Parameter group. Default value is None.
:type path_http_headers: ~azure.storage.filedatalake.models.PathHTTPHeaders
:param lease_access_conditions: Parameter group. Default value is None.
@@ -1675,7 +1708,7 @@ async def append_data( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1721,6 +1754,8 @@ async def append_data( # pylint: disable=inconsistent-return-statements
encryption_key_sha256=_encryption_key_sha256,
encryption_algorithm=_encryption_algorithm, # type: ignore
flush=flush,
+ structured_body_type=structured_body_type,
+ structured_content_length=structured_content_length,
action=action,
content_type=content_type,
version=self._config.version,
@@ -1761,6 +1796,9 @@ async def append_data( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("x-ms-encryption-key-sha256")
)
response_headers["x-ms-lease-renewed"] = self._deserialize("bool", response.headers.get("x-ms-lease-renewed"))
+ response_headers["x-ms-structured-body"] = self._deserialize(
+ "str", response.headers.get("x-ms-structured-body")
+ )
if cls:
return cls(pipeline_response, None, response_headers) # type: ignore
@@ -1774,6 +1812,7 @@ async def set_expiry( # pylint: disable=inconsistent-return-statements
expires_on: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Sets the time a blob will expire and be deleted.
:param expiry_options: Required. Indicates mode of the expiry time. Known values are:
@@ -1794,7 +1833,7 @@ async def set_expiry( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1854,6 +1893,7 @@ async def undelete( # pylint: disable=inconsistent-return-statements
request_id_parameter: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Undelete a path that was previously soft deleted.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -1872,7 +1912,7 @@ async def undelete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_service_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_service_operations.py
index a6da031a1483..9c5ab6c594b6 100644
--- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_service_operations.py
+++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_service_operations.py
@@ -63,6 +63,7 @@ def list_file_systems(
timeout: Optional[int] = None,
**kwargs: Any
) -> AsyncIterable["_models.FileSystem"]:
+ # pylint: disable=line-too-long
"""List FileSystems.
List filesystems and their properties in given account.
@@ -99,7 +100,7 @@ def list_file_systems(
resource: Literal["account"] = kwargs.pop("resource", _params.pop("resource", "account"))
cls: ClsType[_models.FileSystemList] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_file_system_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_file_system_operations.py
index 35dbe9f3738b..feefae3f0e3c 100644
--- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_file_system_operations.py
+++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_file_system_operations.py
@@ -49,7 +49,7 @@ def build_create_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
resource: Literal["filesystem"] = kwargs.pop("resource", _params.pop("resource", "filesystem"))
- version: Literal["2023-05-03"] = kwargs.pop("version", _headers.pop("x-ms-version", "2023-05-03"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -90,7 +90,7 @@ def build_set_properties_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
resource: Literal["filesystem"] = kwargs.pop("resource", _params.pop("resource", "filesystem"))
- version: Literal["2023-05-03"] = kwargs.pop("version", _headers.pop("x-ms-version", "2023-05-03"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -128,7 +128,7 @@ def build_get_properties_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
resource: Literal["filesystem"] = kwargs.pop("resource", _params.pop("resource", "filesystem"))
- version: Literal["2023-05-03"] = kwargs.pop("version", _headers.pop("x-ms-version", "2023-05-03"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -166,7 +166,7 @@ def build_delete_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
resource: Literal["filesystem"] = kwargs.pop("resource", _params.pop("resource", "filesystem"))
- version: Literal["2023-05-03"] = kwargs.pop("version", _headers.pop("x-ms-version", "2023-05-03"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -211,7 +211,7 @@ def build_list_paths_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
resource: Literal["filesystem"] = kwargs.pop("resource", _params.pop("resource", "filesystem"))
- version: Literal["2023-05-03"] = kwargs.pop("version", _headers.pop("x-ms-version", "2023-05-03"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -263,7 +263,7 @@ def build_list_blob_hierarchy_segment_request( # pylint: disable=name-too-long
restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container"))
comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list"))
- version: Literal["2023-05-03"] = kwargs.pop("version", _headers.pop("x-ms-version", "2023-05-03"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -328,6 +328,7 @@ def create( # pylint: disable=inconsistent-return-statements
properties: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Create FileSystem.
Create a FileSystem rooted at the specified location. If the FileSystem already exists, the
@@ -355,7 +356,7 @@ def create( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -414,6 +415,7 @@ def set_properties( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Set FileSystem Properties.
Set properties for the FileSystem. This operation supports conditional HTTP requests. For
@@ -444,7 +446,7 @@ def set_properties( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -503,6 +505,7 @@ def set_properties( # pylint: disable=inconsistent-return-statements
def get_properties( # pylint: disable=inconsistent-return-statements
self, request_id_parameter: Optional[str] = None, timeout: Optional[int] = None, **kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Get FileSystem Properties.
All system and user-defined filesystem properties are specified in the response headers.
@@ -520,7 +523,7 @@ def get_properties( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -578,6 +581,7 @@ def delete( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Delete FileSystem.
Marks the FileSystem for deletion. When a FileSystem is deleted, a FileSystem with the same
@@ -605,7 +609,7 @@ def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -669,6 +673,7 @@ def list_paths(
upn: Optional[bool] = None,
**kwargs: Any
) -> _models.PathList:
+ # pylint: disable=line-too-long
"""List Paths.
List FileSystem paths and their properties.
@@ -708,7 +713,7 @@ def list_paths(
:rtype: ~azure.storage.filedatalake.models.PathList
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -777,6 +782,7 @@ def list_blob_hierarchy_segment(
request_id_parameter: Optional[str] = None,
**kwargs: Any
) -> _models.ListBlobsHierarchySegmentResponse:
+ # pylint: disable=line-too-long
"""The List Blobs operation returns a list of the blobs under the specified container.
:param prefix: Filters results to filesystems within the specified prefix. Default value is
@@ -817,7 +823,7 @@ def list_blob_hierarchy_segment(
:rtype: ~azure.storage.filedatalake.models.ListBlobsHierarchySegmentResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_path_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_path_operations.py
index eb2d08acfa37..2381014c6ee7 100644
--- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_path_operations.py
+++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_path_operations.py
@@ -82,7 +82,7 @@ def build_create_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- version: Literal["2023-05-03"] = kwargs.pop("version", _headers.pop("x-ms-version", "2023-05-03"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -209,13 +209,15 @@ def build_update_request(
if_none_match: Optional[str] = None,
if_modified_since: Optional[datetime.datetime] = None,
if_unmodified_since: Optional[datetime.datetime] = None,
+ structured_body_type: Optional[str] = None,
+ structured_content_length: Optional[int] = None,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- version: Literal["2023-05-03"] = kwargs.pop("version", _headers.pop("x-ms-version", "2023-05-03"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -282,6 +284,12 @@ def build_update_request(
_headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123")
if if_unmodified_since is not None:
_headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123")
+ if structured_body_type is not None:
+ _headers["x-ms-structured-body"] = _SERIALIZER.header("structured_body_type", structured_body_type, "str")
+ if structured_content_length is not None:
+ _headers["x-ms-structured-content-length"] = _SERIALIZER.header(
+ "structured_content_length", structured_content_length, "int"
+ )
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
@@ -308,7 +316,7 @@ def build_lease_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- version: Literal["2023-05-03"] = kwargs.pop("version", _headers.pop("x-ms-version", "2023-05-03"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -371,7 +379,7 @@ def build_read_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- version: Literal["2023-05-03"] = kwargs.pop("version", _headers.pop("x-ms-version", "2023-05-03"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -436,7 +444,7 @@ def build_get_properties_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- version: Literal["2023-05-03"] = kwargs.pop("version", _headers.pop("x-ms-version", "2023-05-03"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -492,7 +500,7 @@ def build_delete_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- version: Literal["2023-05-03"] = kwargs.pop("version", _headers.pop("x-ms-version", "2023-05-03"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -552,7 +560,7 @@ def build_set_access_control_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
action: Literal["setAccessControl"] = kwargs.pop("action", _params.pop("action", "setAccessControl"))
- version: Literal["2023-05-03"] = kwargs.pop("version", _headers.pop("x-ms-version", "2023-05-03"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -613,7 +621,7 @@ def build_set_access_control_recursive_request( # pylint: disable=name-too-long
action: Literal["setAccessControlRecursive"] = kwargs.pop(
"action", _params.pop("action", "setAccessControlRecursive")
)
- version: Literal["2023-05-03"] = kwargs.pop("version", _headers.pop("x-ms-version", "2023-05-03"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -679,7 +687,7 @@ def build_flush_data_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
action: Literal["flush"] = kwargs.pop("action", _params.pop("action", "flush"))
- version: Literal["2023-05-03"] = kwargs.pop("version", _headers.pop("x-ms-version", "2023-05-03"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -766,6 +774,8 @@ def build_append_data_request(
encryption_key_sha256: Optional[str] = None,
encryption_algorithm: Literal["AES256"] = "AES256",
flush: Optional[bool] = None,
+ structured_body_type: Optional[str] = None,
+ structured_content_length: Optional[int] = None,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
@@ -773,7 +783,7 @@ def build_append_data_request(
action: Literal["append"] = kwargs.pop("action", _params.pop("action", "append"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- version: Literal["2023-05-03"] = kwargs.pop("version", _headers.pop("x-ms-version", "2023-05-03"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -823,6 +833,12 @@ def build_append_data_request(
)
if encryption_algorithm is not None:
_headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str")
+ if structured_body_type is not None:
+ _headers["x-ms-structured-body"] = _SERIALIZER.header("structured_body_type", structured_body_type, "str")
+ if structured_content_length is not None:
+ _headers["x-ms-structured-content-length"] = _SERIALIZER.header(
+ "structured_content_length", structured_content_length, "int"
+ )
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
@@ -843,7 +859,7 @@ def build_set_expiry_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
comp: Literal["expiry"] = kwargs.pop("comp", _params.pop("comp", "expiry"))
- version: Literal["2023-05-03"] = kwargs.pop("version", _headers.pop("x-ms-version", "2023-05-03"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -883,7 +899,7 @@ def build_undelete_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete"))
- version: Literal["2023-05-03"] = kwargs.pop("version", _headers.pop("x-ms-version", "2023-05-03"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -957,6 +973,7 @@ def create( # pylint: disable=inconsistent-return-statements
cpk_info: Optional[_models.CpkInfo] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Create File | Create Directory | Rename File | Rename Directory.
Create or rename a file or directory. By default, the destination is overwritten and if the
@@ -1060,7 +1077,7 @@ def create( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1206,11 +1223,14 @@ def update(
group: Optional[str] = None,
permissions: Optional[str] = None,
acl: Optional[str] = None,
+ structured_body_type: Optional[str] = None,
+ structured_content_length: Optional[int] = None,
path_http_headers: Optional[_models.PathHTTPHeaders] = None,
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> Optional[_models.SetAccessControlRecursiveResponse]:
+ # pylint: disable=line-too-long
"""Append Data | Flush Data | Set Properties | Set Access Control.
Uploads data to be appended to a file, flushes (writes) previously uploaded data to a file,
@@ -1316,6 +1336,13 @@ def update(
scope, a type, a user or group identifier, and permissions in the format
"[scope:][type]:[id]:[permissions]". Default value is None.
:type acl: str
+ :param structured_body_type: Required if the request body is a structured message. Specifies
+ the message schema version and properties. Default value is None.
+ :type structured_body_type: str
+ :param structured_content_length: Required if the request body is a structured message.
+ Specifies the length of the blob/file content inside the message body. Will always be smaller
+ than Content-Length. Default value is None.
+ :type structured_content_length: int
:param path_http_headers: Parameter group. Default value is None.
:type path_http_headers: ~azure.storage.filedatalake.models.PathHTTPHeaders
:param lease_access_conditions: Parameter group. Default value is None.
@@ -1326,7 +1353,7 @@ def update(
:rtype: ~azure.storage.filedatalake.models.SetAccessControlRecursiveResponse or None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1396,6 +1423,8 @@ def update(
if_none_match=_if_none_match,
if_modified_since=_if_modified_since,
if_unmodified_since=_if_unmodified_since,
+ structured_body_type=structured_body_type,
+ structured_content_length=structured_content_length,
content_type=content_type,
version=self._config.version,
content=_content,
@@ -1445,6 +1474,9 @@ def update(
response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["x-ms-structured-body"] = self._deserialize(
+ "str", response.headers.get("x-ms-structured-body")
+ )
if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore
@@ -1463,6 +1495,7 @@ def lease( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Lease Path.
Create and manage a lease to restrict write and delete access to the path. This operation
@@ -1506,7 +1539,7 @@ def lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1602,6 +1635,7 @@ def read(
cpk_info: Optional[_models.CpkInfo] = None,
**kwargs: Any
) -> Iterator[bytes]:
+ # pylint: disable=line-too-long
"""Read File.
Read the contents of a file. For read operations, range requests are supported. This operation
@@ -1638,7 +1672,7 @@ def read(
:rtype: Iterator[bytes]
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1794,6 +1828,7 @@ def get_properties( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Get Properties | Get Status | Get Access Control List.
Get Properties returns all system and user defined properties for a path. Get Status returns
@@ -1832,7 +1867,7 @@ def get_properties( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1927,6 +1962,7 @@ def delete( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Delete File | Delete Directory.
Delete the file or directory. This operation supports conditional HTTP requests. For more
@@ -1964,7 +2000,7 @@ def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2050,6 +2086,7 @@ def set_access_control( # pylint: disable=inconsistent-return-statements
modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Set the owner, group, permissions, or access control list for a path.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -2084,7 +2121,7 @@ def set_access_control( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2168,6 +2205,7 @@ def set_access_control_recursive(
request_id_parameter: Optional[str] = None,
**kwargs: Any
) -> _models.SetAccessControlRecursiveResponse:
+ # pylint: disable=line-too-long
"""Set the access control list for a path and sub-paths.
:param mode: Mode "set" sets POSIX access control rights on files and directories, "modify"
@@ -2209,7 +2247,7 @@ def set_access_control_recursive(
:rtype: ~azure.storage.filedatalake.models.SetAccessControlRecursiveResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2287,6 +2325,7 @@ def flush_data( # pylint: disable=inconsistent-return-statements
cpk_info: Optional[_models.CpkInfo] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Set the owner, group, permissions, or access control list for a path.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -2353,7 +2392,7 @@ def flush_data( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2478,11 +2517,14 @@ def append_data( # pylint: disable=inconsistent-return-statements
proposed_lease_id: Optional[str] = None,
request_id_parameter: Optional[str] = None,
flush: Optional[bool] = None,
+ structured_body_type: Optional[str] = None,
+ structured_content_length: Optional[int] = None,
path_http_headers: Optional[_models.PathHTTPHeaders] = None,
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
cpk_info: Optional[_models.CpkInfo] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Append data to the file.
:param body: Initial data. Required.
@@ -2527,6 +2569,13 @@ def append_data( # pylint: disable=inconsistent-return-statements
:type request_id_parameter: str
:param flush: If file should be flushed after the append. Default value is None.
:type flush: bool
+ :param structured_body_type: Required if the request body is a structured message. Specifies
+ the message schema version and properties. Default value is None.
+ :type structured_body_type: str
+ :param structured_content_length: Required if the request body is a structured message.
+ Specifies the length of the blob/file content inside the message body. Will always be smaller
+ than Content-Length. Default value is None.
+ :type structured_content_length: int
:param path_http_headers: Parameter group. Default value is None.
:type path_http_headers: ~azure.storage.filedatalake.models.PathHTTPHeaders
:param lease_access_conditions: Parameter group. Default value is None.
@@ -2537,7 +2586,7 @@ def append_data( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2583,6 +2632,8 @@ def append_data( # pylint: disable=inconsistent-return-statements
encryption_key_sha256=_encryption_key_sha256,
encryption_algorithm=_encryption_algorithm, # type: ignore
flush=flush,
+ structured_body_type=structured_body_type,
+ structured_content_length=structured_content_length,
action=action,
content_type=content_type,
version=self._config.version,
@@ -2623,6 +2674,9 @@ def append_data( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("x-ms-encryption-key-sha256")
)
response_headers["x-ms-lease-renewed"] = self._deserialize("bool", response.headers.get("x-ms-lease-renewed"))
+ response_headers["x-ms-structured-body"] = self._deserialize(
+ "str", response.headers.get("x-ms-structured-body")
+ )
if cls:
return cls(pipeline_response, None, response_headers) # type: ignore
@@ -2636,6 +2690,7 @@ def set_expiry( # pylint: disable=inconsistent-return-statements
expires_on: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Sets the time a blob will expire and be deleted.
:param expiry_options: Required. Indicates mode of the expiry time. Known values are:
@@ -2656,7 +2711,7 @@ def set_expiry( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2716,6 +2771,7 @@ def undelete( # pylint: disable=inconsistent-return-statements
request_id_parameter: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Undelete a path that was previously soft deleted.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -2734,7 +2790,7 @@ def undelete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_service_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_service_operations.py
index e9bb654ff33a..f8b66f4d11c0 100644
--- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_service_operations.py
+++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_service_operations.py
@@ -51,7 +51,7 @@ def build_list_file_systems_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
resource: Literal["account"] = kwargs.pop("resource", _params.pop("resource", "account"))
- version: Literal["2023-05-03"] = kwargs.pop("version", _headers.pop("x-ms-version", "2023-05-03"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -111,6 +111,7 @@ def list_file_systems(
timeout: Optional[int] = None,
**kwargs: Any
) -> Iterable["_models.FileSystem"]:
+ # pylint: disable=line-too-long
"""List FileSystems.
List filesystems and their properties in given account.
@@ -147,7 +148,7 @@ def list_file_systems(
resource: Literal["account"] = kwargs.pop("resource", _params.pop("resource", "account"))
cls: ClsType[_models.FileSystemList] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py
index 776a36618b4d..5130ef44cbc7 100644
--- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py
+++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py
@@ -121,19 +121,20 @@ def __init__(
_hosts=datalake_hosts, **kwargs)
# ADLS doesn't support secondary endpoint, make sure it's empty
self._hosts[LocationMode.SECONDARY] = ""
- api_version = get_api_version(kwargs)
-
- self._client = AzureDataLakeStorageRESTAPI(self.url, base_url=self.url, file_system=file_system_name,
- path=path_name, pipeline=self._pipeline)
- self._client._config.version = api_version
-
- self._datalake_client_for_blob_operation = AzureDataLakeStorageRESTAPI(
- self._blob_client.url,
- base_url=self._blob_client.url,
- file_system=file_system_name,
- path=path_name,
- pipeline=self._pipeline)
- self._datalake_client_for_blob_operation._config.version = api_version
+ self._api_version = get_api_version(kwargs)
+ self._client = self._build_generated_client(self.url)
+ self._datalake_client_for_blob_operation = self._build_generated_client(self._blob_client.url)
+
+ def _build_generated_client(self, url: str) -> AzureDataLakeStorageRESTAPI:
+ client = AzureDataLakeStorageRESTAPI(
+ url,
+ base_url=url,
+ file_system=self.file_system_name,
+ path=self.path_name,
+ pipeline=self._pipeline
+ )
+ client._config.version = self._api_version # pylint: disable=protected-access
+ return client
def __exit__(self, *args):
self._blob_client.close()
diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_serialize.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_serialize.py
index f0b157684c55..631250e237db 100644
--- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_serialize.py
+++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_serialize.py
@@ -36,6 +36,7 @@
'2024-05-04',
'2024-08-04',
'2024-11-04',
+ '2025-01-05',
] # This list must be in chronological order!
diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/base_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/base_client_async.py
index 8e81643f5cce..6186b29db107 100644
--- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/base_client_async.py
+++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/base_client_async.py
@@ -127,16 +127,16 @@ def _create_pipeline(
hosts = self._hosts
policies = [
QueueMessagePolicy(),
- config.headers_policy,
config.proxy_policy,
config.user_agent_policy,
StorageContentValidation(),
- StorageRequestHook(**kwargs),
- self._credential_policy,
ContentDecodePolicy(response_encoding="utf-8"),
AsyncRedirectPolicy(**kwargs),
StorageHosts(hosts=hosts, **kwargs),
config.retry_policy,
+ config.headers_policy,
+ StorageRequestHook(**kwargs),
+ self._credential_policy,
config.logging_policy,
AsyncStorageResponseHook(**kwargs),
DistributedTracingPolicy(**kwargs),
diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/models.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/models.py
index d4015f2b54b7..183d6f64a8be 100644
--- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/models.py
+++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/models.py
@@ -70,6 +70,7 @@ class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta):
# Blob values
APPEND_POSITION_CONDITION_NOT_MET = "AppendPositionConditionNotMet"
+ BLOB_ACCESS_TIER_NOT_SUPPORTED_FOR_ACCOUNT_TYPE = "BlobAccessTierNotSupportedForAccountType"
BLOB_ALREADY_EXISTS = "BlobAlreadyExists"
BLOB_NOT_FOUND = "BlobNotFound"
BLOB_OVERWRITTEN = "BlobOverwritten"
@@ -154,6 +155,8 @@ class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta):
DELETE_PENDING = "DeletePending"
DIRECTORY_NOT_EMPTY = "DirectoryNotEmpty"
FILE_LOCK_CONFLICT = "FileLockConflict"
+ FILE_SHARE_PROVISIONED_BANDWIDTH_DOWNGRADE_NOT_ALLOWED = "FileShareProvisionedBandwidthDowngradeNotAllowed"
+ FILE_SHARE_PROVISIONED_IOPS_DOWNGRADE_NOT_ALLOWED = "FileShareProvisionedIopsDowngradeNotAllowed"
INVALID_FILE_OR_DIRECTORY_PATH_NAME = "InvalidFileOrDirectoryPathName"
PARENT_NOT_FOUND = "ParentNotFound"
READ_ONLY_ATTRIBUTE = "ReadOnlyAttribute"
diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py
index ccf072da9c66..578f896eb977 100644
--- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py
+++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py
@@ -5,27 +5,34 @@
# --------------------------------------------------------------------------
# pylint: disable=invalid-overridden-method, docstring-keyword-should-match-keyword-only
+import functools
from typing import (
Any, Dict, Optional, Union,
- TYPE_CHECKING)
+ TYPE_CHECKING
+)
try:
from urllib.parse import quote, unquote
except ImportError:
- from urllib2 import quote, unquote # type: ignore
+ from urllib2 import quote, unquote # type: ignore
+
+from azure.core.async_paging import AsyncItemPaged
from azure.core.pipeline import AsyncPipeline
+from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
-from ._data_lake_file_client_async import DataLakeFileClient
from .._data_lake_directory_client import DataLakeDirectoryClient as DataLakeDirectoryClientBase
-from .._models import DirectoryProperties, FileProperties
from .._deserialize import deserialize_dir_properties
-from ._path_client_async import PathClient
+from .._models import DirectoryProperties, FileProperties
from .._shared.base_client_async import AsyncTransportWrapper
+from ._data_lake_file_client_async import DataLakeFileClient
+from ._list_paths_helper import PathPropertiesPaged
+from ._path_client_async import PathClient
if TYPE_CHECKING:
from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential
from azure.core.credentials_async import AsyncTokenCredential
from datetime import datetime
+ from .._models import PathProperties
class DataLakeDirectoryClient(PathClient, DataLakeDirectoryClientBase):
@@ -611,6 +618,52 @@ async def create_file(self, file, # type: Union[FileProperties, str]
await file_client.create_file(**kwargs)
return file_client
+ @distributed_trace
+ def get_paths(
+ self, *,
+ recursive: bool = True,
+ max_results: Optional[int] = None,
+ upn: Optional[bool] = None,
+ timeout: Optional[int] = None,
+ **kwargs: Any
+ ) -> AsyncItemPaged["PathProperties"]:
+ """Returns an async generator to list the paths under specified file system and directory.
+ The generator will lazily follow the continuation tokens returned by the service.
+
+ :keyword bool recursive: Set True for recursive, False for iterative. The default value is True.
+ :keyword Optional[int] max_results: An optional value that specifies the maximum
+ number of items to return per page. If omitted or greater than 5,000, the
+ response will include up to 5,000 items per page.
+ :keyword Optional[bool] upn:
+ If True, the user identity values returned in the x-ms-owner, x-ms-group,
+ and x-ms-acl response headers will be transformed from Azure Active Directory Object IDs to User
+ Principal Names in the owner, group, and acl fields of
+ :class:`~azure.storage.filedatalake.PathProperties`. If False, the values will be returned
+ as Azure Active Directory Object IDs. The default value is None. Note that group and application
+ Object IDs are not translated because they do not have unique friendly names.
+ :keyword Optional[int] timeout:
+ Sets the server-side timeout for the operation in seconds. For more details see
+ https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+ This value is not tracked or validated on the client. To configure client-side network timeouts
+ see `here `_. The default value is None.
+ :returns: An iterable (auto-paging) response of PathProperties.
+ :rtype: ~azure.core.paging.AsyncItemPaged[~azure.storage.filedatalake.PathProperties]
+ """
+ timeout = kwargs.pop('timeout', timeout)
+ hostname = self._hosts[self._location_mode]
+ url = f"{self.scheme}://{hostname}/{quote(self.file_system_name)}"
+ client = self._build_generated_client(url)
+ command = functools.partial(
+ client.file_system.list_paths,
+ path=self.path_name,
+ timeout=timeout,
+ **kwargs
+ )
+ return AsyncItemPaged(
+ command, recursive, path=self.path_name, max_results=max_results,
+ upn=upn, page_iterator_class=PathPropertiesPaged, **kwargs)
+
def get_file_client(self, file # type: Union[FileProperties, str]
):
# type: (...) -> DataLakeFileClient
diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_file_system_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_file_system_client_async.py
index ab389fd8a8bd..9c3122a17ae5 100644
--- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_file_system_client_async.py
+++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_file_system_client_async.py
@@ -524,7 +524,7 @@ def get_paths(
see `here `_.
:returns: An iterable (auto-paging) response of PathProperties.
- :rtype: ~azure.core.paging.ItemPaged[~azure.storage.filedatalake.PathProperties]
+ :rtype: ~azure.core.paging.AsyncItemPaged[~azure.storage.filedatalake.PathProperties]
.. admonition:: Example:
diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.py
index 7323564a7986..774f687dde9e 100644
--- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.py
+++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.py
@@ -8,7 +8,8 @@
from datetime import datetime
from typing import (
Any, Dict, Optional, Union,
- TYPE_CHECKING)
+ TYPE_CHECKING
+)
from azure.core.exceptions import AzureError, HttpResponseError
from azure.core.tracing.decorator_async import distributed_trace_async
@@ -82,20 +83,22 @@ def __init__(
credential=credential,
_hosts=self._blob_client._hosts,
**kwargs)
-
- self._client = AzureDataLakeStorageRESTAPI(self.url, base_url=self.url, file_system=self.file_system_name,
- path=self.path_name, pipeline=self._pipeline)
- self._datalake_client_for_blob_operation = AzureDataLakeStorageRESTAPI(self._blob_client.url,
- base_url=self._blob_client.url,
- file_system=self.file_system_name,
- path=self.path_name,
- pipeline=self._pipeline)
- api_version = get_api_version(kwargs)
- self._client._config.version = api_version
- self._datalake_client_for_blob_operation._config.version = api_version
-
+ self._api_version = get_api_version(kwargs)
+ self._client = self._build_generated_client(self.url)
+ self._datalake_client_for_blob_operation = self._build_generated_client(self._blob_client.url)
self._loop = kwargs.get('loop', None)
+ def _build_generated_client(self, url: str) -> AzureDataLakeStorageRESTAPI:
+ client = AzureDataLakeStorageRESTAPI(
+ url,
+ base_url=url,
+ file_system=self.file_system_name,
+ path=self.path_name,
+ pipeline=self._pipeline
+ )
+ client._config.version = self._api_version # pylint: disable=protected-access
+ return client
+
async def __aexit__(self, *args):
await self._blob_client.close()
await self._datalake_client_for_blob_operation.close()
diff --git a/sdk/storage/azure-storage-file-datalake/swagger/README.md b/sdk/storage/azure-storage-file-datalake/swagger/README.md
index edb3d2ef0466..1d5a13243f99 100644
--- a/sdk/storage/azure-storage-file-datalake/swagger/README.md
+++ b/sdk/storage/azure-storage-file-datalake/swagger/README.md
@@ -16,7 +16,7 @@ autorest --v3 --python
### Settings
``` yaml
-input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/storage/data-plane/Azure.Storage.Files.DataLake/stable/2023-05-03/DataLakeStorage.json
+input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/storage/data-plane/Azure.Storage.Files.DataLake/stable/2025-01-05/DataLakeStorage.json
output-folder: ../azure/storage/filedatalake/_generated
namespace: azure.storage.filedatalake
no-namespace-folders: true
diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_directory.py b/sdk/storage/azure-storage-file-datalake/tests/test_directory.py
index 3f83cdb20aac..7aa73e84888f 100644
--- a/sdk/storage/azure-storage-file-datalake/tests/test_directory.py
+++ b/sdk/storage/azure-storage-file-datalake/tests/test_directory.py
@@ -1607,6 +1607,30 @@ def test_bad_audience_dir_client(self, **kwargs):
directory_client.exists()
directory_client.create_sub_directory('testsubdir')
+ @DataLakePreparer()
+ @recorded_by_proxy
+ def test_directory_get_paths(self, **kwargs):
+ datalake_storage_account_name = kwargs.pop("datalake_storage_account_name")
+ datalake_storage_account_key = kwargs.pop("datalake_storage_account_key")
+
+ # Arrange
+ self._setUp(datalake_storage_account_name, datalake_storage_account_key)
+ directory_name = self._get_directory_reference()
+ directory_client1 = self.dsc.get_directory_client(self.file_system_name, directory_name + '1')
+ directory_client1.get_file_client('file0').create_file()
+ directory_client1.get_file_client('file1').create_file()
+ directory_client2 = self.dsc.get_directory_client(self.file_system_name, directory_name + '2')
+ directory_client2.get_file_client('file2').create_file()
+
+ # Act
+ path_response = list(directory_client1.get_paths())
+
+ # Assert
+ assert len(path_response) == 2
+ assert path_response[0]['name'] == directory_name + '1' + '/' + 'file0'
+ assert path_response[1]['name'] == directory_name + '1' + '/' + 'file1'
+
+
# ------------------------------------------------------------------------------
if __name__ == '__main__':
unittest.main()
diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_directory_async.py b/sdk/storage/azure-storage-file-datalake/tests/test_directory_async.py
index e4580cc0b3c2..d1ef61bb6493 100644
--- a/sdk/storage/azure-storage-file-datalake/tests/test_directory_async.py
+++ b/sdk/storage/azure-storage-file-datalake/tests/test_directory_async.py
@@ -1557,6 +1557,32 @@ async def test_bad_audience_dir_client(self, **kwargs):
await directory_client.exists()
await directory_client.create_sub_directory('testsubdir')
+ @DataLakePreparer()
+ @recorded_by_proxy_async
+ async def test_directory_get_paths(self, **kwargs):
+ datalake_storage_account_name = kwargs.pop("datalake_storage_account_name")
+ datalake_storage_account_key = kwargs.pop("datalake_storage_account_key")
+
+ # Arrange
+ await self._setUp(datalake_storage_account_name, datalake_storage_account_key)
+ directory_name = self._get_directory_reference()
+ directory_client1 = self.dsc.get_directory_client(self.file_system_name, directory_name + '1')
+ await directory_client1.get_file_client('file0').create_file()
+ await directory_client1.get_file_client('file1').create_file()
+ directory_client2 = self.dsc.get_directory_client(self.file_system_name, directory_name + '2')
+ await directory_client2.get_file_client('file2').create_file()
+
+ # Act
+ path_response = []
+ async for path in directory_client1.get_paths():
+ path_response.append(path)
+
+ # Assert
+ assert len(path_response) == 2
+ assert path_response[0]['name'] == directory_name + '1' + '/' + 'file0'
+ assert path_response[1]['name'] == directory_name + '1' + '/' + 'file1'
+
+
# ------------------------------------------------------------------------------
if __name__ == '__main__':
unittest.main()
diff --git a/sdk/storage/azure-storage-file-share/CHANGELOG.md b/sdk/storage/azure-storage-file-share/CHANGELOG.md
index 0f25c390129c..16c21517a7ce 100644
--- a/sdk/storage/azure-storage-file-share/CHANGELOG.md
+++ b/sdk/storage/azure-storage-file-share/CHANGELOG.md
@@ -1,6 +1,6 @@
# Release History
-## 12.20.0b1 (2024-10-09)
+## 12.20.0b1 (2024-10-10)
### Features Added
- Added support for service version 2025-01-05.
diff --git a/sdk/storage/azure-storage-file-share/assets.json b/sdk/storage/azure-storage-file-share/assets.json
index f3eba605543d..48da40397b8a 100644
--- a/sdk/storage/azure-storage-file-share/assets.json
+++ b/sdk/storage/azure-storage-file-share/assets.json
@@ -2,5 +2,5 @@
"AssetsRepo": "Azure/azure-sdk-assets",
"AssetsRepoPrefixPath": "python",
"TagPrefix": "python/storage/azure-storage-file-share",
- "Tag": "python/storage/azure-storage-file-share_b797b17048"
+ "Tag": "python/storage/azure-storage-file-share_faf91d3111"
}
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.py
index 8cb9aa465403..fd1127fa1381 100644
--- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.py
+++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.py
@@ -644,6 +644,9 @@ def start_copy_from_url(self, source_url: str, **kwargs: Any) -> Dict[str, Any]:
This parameter was introduced in API version '2019-07-07'.
+ :keyword file_permission_format:
+ Specifies the format in which the permission is returned. If not specified, SDDL will be the default.
+ :paramtype file_permission_format: Literal['sddl', 'binary']
:keyword file_attributes:
This value can be set to "source" to copy file attributes from the source file to the target file,
or to clear all attributes, it can be set to "None". Otherwise it can be set to a list of attributes
@@ -787,10 +790,11 @@ def abort_copy(self, copy_id: Union[str, FileProperties], **kwargs: Any) -> None
@distributed_trace
def download_file(
- self, offset: Optional[int] = None,
- length: Optional[int] = None,
- **kwargs: Any
- ) -> StorageStreamDownloader:
+ self, offset=None, # type: Optional[int]
+ length=None, # type: Optional[int]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> StorageStreamDownloader
"""Downloads a file to the StorageStreamDownloader. The readall() method must
be used to read all the content or readinto() must be used to download the file into
a stream. Using chunks() returns an iterator which allows the user to iterate over the content in chunks.
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_azure_file_storage.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_azure_file_storage.py
index 66a4dbd98ceb..78d7d26dbf5a 100644
--- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_azure_file_storage.py
+++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_azure_file_storage.py
@@ -45,7 +45,7 @@ class AzureFileStorage: # pylint: disable=client-accepts-api-version-keyword
URI. Default value is None.
:type allow_source_trailing_dot: bool
:keyword version: Specifies the version of the operation to use for this request. Default value
- is "2024-11-04". Note that overriding this default value may result in unsupported behavior.
+ is "2025-01-05". Note that overriding this default value may result in unsupported behavior.
:paramtype version: str
:keyword file_range_write_from_url: Only update is supported: - Update: Writes the bytes
downloaded from the source url into the specified range. Default value is "update". Note that
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_configuration.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_configuration.py
index 7ebbda623a2e..68e02be5fe4f 100644
--- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_configuration.py
+++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_configuration.py
@@ -33,7 +33,7 @@ class AzureFileStorageConfiguration: # pylint: disable=too-many-instance-attrib
URI. Default value is None.
:type allow_source_trailing_dot: bool
:keyword version: Specifies the version of the operation to use for this request. Default value
- is "2024-11-04". Note that overriding this default value may result in unsupported behavior.
+ is "2025-01-05". Note that overriding this default value may result in unsupported behavior.
:paramtype version: str
:keyword file_range_write_from_url: Only update is supported: - Update: Writes the bytes
downloaded from the source url into the specified range. Default value is "update". Note that
@@ -49,7 +49,7 @@ def __init__(
allow_source_trailing_dot: Optional[bool] = None,
**kwargs: Any
) -> None:
- version: Literal["2024-11-04"] = kwargs.pop("version", "2024-11-04")
+ version: Literal["2025-01-05"] = kwargs.pop("version", "2025-01-05")
file_range_write_from_url: Literal["update"] = kwargs.pop("file_range_write_from_url", "update")
if url is None:
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_serialization.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_serialization.py
index 8139854b97bb..01a226bd7f14 100644
--- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_serialization.py
+++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_serialization.py
@@ -1,3 +1,4 @@
+# pylint: disable=too-many-lines
# --------------------------------------------------------------------------
#
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -24,7 +25,6 @@
#
# --------------------------------------------------------------------------
-# pylint: skip-file
# pyright: reportUnnecessaryTypeIgnoreComment=false
from base64 import b64decode, b64encode
@@ -52,7 +52,6 @@
MutableMapping,
Type,
List,
- Mapping,
)
try:
@@ -91,6 +90,8 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type:
:param data: Input, could be bytes or stream (will be decoded with UTF8) or text
:type data: str or bytes or IO
:param str content_type: The content type.
+ :return: The deserialized data.
+ :rtype: object
"""
if hasattr(data, "read"):
# Assume a stream
@@ -112,7 +113,7 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type:
try:
return json.loads(data_as_str)
except ValueError as err:
- raise DeserializationError("JSON is invalid: {}".format(err), err)
+ raise DeserializationError("JSON is invalid: {}".format(err), err) from err
elif "xml" in (content_type or []):
try:
@@ -155,6 +156,11 @@ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]],
Use bytes and headers to NOT use any requests/aiohttp or whatever
specific implementation.
Headers will tested for "content-type"
+
+ :param bytes body_bytes: The body of the response.
+ :param dict headers: The headers of the response.
+ :returns: The deserialized data.
+ :rtype: object
"""
# Try to use content-type from headers if available
content_type = None
@@ -184,15 +190,30 @@ class UTC(datetime.tzinfo):
"""Time Zone info for handling UTC"""
def utcoffset(self, dt):
- """UTF offset for UTC is 0."""
+ """UTF offset for UTC is 0.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The offset
+ :rtype: datetime.timedelta
+ """
return datetime.timedelta(0)
def tzname(self, dt):
- """Timestamp representation."""
+ """Timestamp representation.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The timestamp representation
+ :rtype: str
+ """
return "Z"
def dst(self, dt):
- """No daylight saving for UTC."""
+ """No daylight saving for UTC.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The daylight saving time
+ :rtype: datetime.timedelta
+ """
return datetime.timedelta(hours=1)
@@ -235,24 +256,26 @@ def __getinitargs__(self):
_FLATTEN = re.compile(r"(? None:
self.additional_properties: Optional[Dict[str, Any]] = {}
- for k in kwargs:
+ for k in kwargs: # pylint: disable=consider-using-dict-items
if k not in self._attribute_map:
_LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__)
elif k in self._validation and self._validation[k].get("readonly", False):
@@ -300,13 +330,23 @@ def __init__(self, **kwargs: Any) -> None:
setattr(self, k, kwargs[k])
def __eq__(self, other: Any) -> bool:
- """Compare objects by comparing all attributes."""
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are equal
+ :rtype: bool
+ """
if isinstance(other, self.__class__):
return self.__dict__ == other.__dict__
return False
def __ne__(self, other: Any) -> bool:
- """Compare objects by comparing all attributes."""
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are not equal
+ :rtype: bool
+ """
return not self.__eq__(other)
def __str__(self) -> str:
@@ -326,7 +366,11 @@ def is_xml_model(cls) -> bool:
@classmethod
def _create_xml_node(cls):
- """Create XML node."""
+ """Create XML node.
+
+ :returns: The XML node
+ :rtype: xml.etree.ElementTree.Element
+ """
try:
xml_map = cls._xml_map # type: ignore
except AttributeError:
@@ -346,7 +390,9 @@ def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON:
:rtype: dict
"""
serializer = Serializer(self._infer_class_models())
- return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs) # type: ignore
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, keep_readonly=keep_readonly, **kwargs
+ )
def as_dict(
self,
@@ -380,12 +426,15 @@ def my_key_transformer(key, attr_desc, value):
If you want XML serialization, you can pass the kwargs is_xml=True.
+ :param bool keep_readonly: If you want to serialize the readonly attributes
:param function key_transformer: A key transformer function.
:returns: A dict JSON compatible object
:rtype: dict
"""
serializer = Serializer(self._infer_class_models())
- return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs) # type: ignore
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs
+ )
@classmethod
def _infer_class_models(cls):
@@ -395,7 +444,7 @@ def _infer_class_models(cls):
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
if cls.__name__ not in client_models:
raise ValueError("Not Autorest generated code")
- except Exception:
+ except Exception: # pylint: disable=broad-exception-caught
# Assume it's not Autorest generated (tests?). Add ourselves as dependencies.
client_models = {cls.__name__: cls}
return client_models
@@ -408,6 +457,7 @@ def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = N
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong
+ :rtype: ModelType
"""
deserializer = Deserializer(cls._infer_class_models())
return deserializer(cls.__name__, data, content_type=content_type) # type: ignore
@@ -426,9 +476,11 @@ def from_dict(
and last_rest_key_case_insensitive_extractor)
:param dict data: A dict using RestAPI structure
+ :param function key_extractors: A key extractor function.
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong
+ :rtype: ModelType
"""
deserializer = Deserializer(cls._infer_class_models())
deserializer.key_extractors = ( # type: ignore
@@ -448,7 +500,7 @@ def _flatten_subtype(cls, key, objects):
return {}
result = dict(cls._subtype_map[key])
for valuetype in cls._subtype_map[key].values():
- result.update(objects[valuetype]._flatten_subtype(key, objects))
+ result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access
return result
@classmethod
@@ -456,6 +508,11 @@ def _classify(cls, response, objects):
"""Check the class _subtype_map for any child classes.
We want to ignore any inherited _subtype_maps.
Remove the polymorphic key from the initial data.
+
+ :param dict response: The initial data
+ :param dict objects: The class objects
+ :returns: The class to be used
+ :rtype: class
"""
for subtype_key in cls.__dict__.get("_subtype_map", {}).keys():
subtype_value = None
@@ -501,11 +558,13 @@ def _decode_attribute_map_key(key):
inside the received data.
:param str key: A key string from the generated code
+ :returns: The decoded key
+ :rtype: str
"""
return key.replace("\\.", ".")
-class Serializer(object):
+class Serializer(object): # pylint: disable=too-many-public-methods
"""Request object model serializer."""
basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
@@ -560,13 +619,16 @@ def __init__(self, classes: Optional[Mapping[str, type]] = None):
self.key_transformer = full_restapi_key_transformer
self.client_side_validation = True
- def _serialize(self, target_obj, data_type=None, **kwargs):
+ def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
+ self, target_obj, data_type=None, **kwargs
+ ):
"""Serialize data into a string according to type.
- :param target_obj: The data to be serialized.
+ :param object target_obj: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str, dict
:raises: SerializationError if serialization fails.
+ :returns: The serialized data.
"""
key_transformer = kwargs.get("key_transformer", self.key_transformer)
keep_readonly = kwargs.get("keep_readonly", False)
@@ -592,12 +654,14 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
serialized = {}
if is_xml_model_serialization:
- serialized = target_obj._create_xml_node()
+ serialized = target_obj._create_xml_node() # pylint: disable=protected-access
try:
- attributes = target_obj._attribute_map
+ attributes = target_obj._attribute_map # pylint: disable=protected-access
for attr, attr_desc in attributes.items():
attr_name = attr
- if not keep_readonly and target_obj._validation.get(attr_name, {}).get("readonly", False):
+ if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access
+ attr_name, {}
+ ).get("readonly", False):
continue
if attr_name == "additional_properties" and attr_desc["key"] == "":
@@ -633,7 +697,8 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
if isinstance(new_attr, list):
serialized.extend(new_attr) # type: ignore
elif isinstance(new_attr, ET.Element):
- # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces.
+ # If the down XML has no XML/Name,
+ # we MUST replace the tag with the local tag. But keeping the namespaces.
if "name" not in getattr(orig_attr, "_xml_map", {}):
splitted_tag = new_attr.tag.split("}")
if len(splitted_tag) == 2: # Namespace
@@ -664,17 +729,17 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
except (AttributeError, KeyError, TypeError) as err:
msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj))
raise SerializationError(msg) from err
- else:
- return serialized
+ return serialized
def body(self, data, data_type, **kwargs):
"""Serialize data intended for a request body.
- :param data: The data to be serialized.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: dict
:raises: SerializationError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized request body
"""
# Just in case this is a dict
@@ -703,7 +768,7 @@ def body(self, data, data_type, **kwargs):
attribute_key_case_insensitive_extractor,
last_rest_key_case_insensitive_extractor,
]
- data = deserializer._deserialize(data_type, data)
+ data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access
except DeserializationError as err:
raise SerializationError("Unable to build a model: " + str(err)) from err
@@ -712,9 +777,11 @@ def body(self, data, data_type, **kwargs):
def url(self, name, data, data_type, **kwargs):
"""Serialize data intended for a URL path.
- :param data: The data to be serialized.
+ :param str name: The name of the URL path parameter.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str
+ :returns: The serialized URL path
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
"""
@@ -728,21 +795,20 @@ def url(self, name, data, data_type, **kwargs):
output = output.replace("{", quote("{")).replace("}", quote("}"))
else:
output = quote(str(output), safe="")
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return output
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return output
def query(self, name, data, data_type, **kwargs):
"""Serialize data intended for a URL query.
- :param data: The data to be serialized.
+ :param str name: The name of the query parameter.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
- :keyword bool skip_quote: Whether to skip quote the serialized result.
- Defaults to False.
:rtype: str, list
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized query parameter
"""
try:
# Treat the list aside, since we don't want to encode the div separator
@@ -759,19 +825,20 @@ def query(self, name, data, data_type, **kwargs):
output = str(output)
else:
output = quote(str(output), safe="")
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return str(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
def header(self, name, data, data_type, **kwargs):
"""Serialize data intended for a request header.
- :param data: The data to be serialized.
+ :param str name: The name of the header.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized header
"""
try:
if data_type in ["[str]"]:
@@ -780,21 +847,20 @@ def header(self, name, data, data_type, **kwargs):
output = self.serialize_data(data, data_type, **kwargs)
if data_type == "bool":
output = json.dumps(output)
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return str(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
def serialize_data(self, data, data_type, **kwargs):
"""Serialize generic data according to supplied data type.
- :param data: The data to be serialized.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
- :param bool required: Whether it's essential that the data not be
- empty or None
:raises: AttributeError if required data is None.
:raises: ValueError if data is None
:raises: SerializationError if serialization fails.
+ :returns: The serialized data.
+ :rtype: str, int, float, bool, dict, list
"""
if data is None:
raise ValueError("No value for given attribute")
@@ -805,7 +871,7 @@ def serialize_data(self, data, data_type, **kwargs):
if data_type in self.basic_types.values():
return self.serialize_basic(data, data_type, **kwargs)
- elif data_type in self.serialize_type:
+ if data_type in self.serialize_type:
return self.serialize_type[data_type](data, **kwargs)
# If dependencies is empty, try with current data class
@@ -821,11 +887,10 @@ def serialize_data(self, data, data_type, **kwargs):
except (ValueError, TypeError) as err:
msg = "Unable to serialize value: {!r} as type: {!r}."
raise SerializationError(msg.format(data, data_type)) from err
- else:
- return self._serialize(data, **kwargs)
+ return self._serialize(data, **kwargs)
@classmethod
- def _get_custom_serializers(cls, data_type, **kwargs):
+ def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements
custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type)
if custom_serializer:
return custom_serializer
@@ -841,23 +906,26 @@ def serialize_basic(cls, data, data_type, **kwargs):
- basic_types_serializers dict[str, callable] : If set, use the callable as serializer
- is_xml bool : If set, use xml_basic_types_serializers
- :param data: Object to be serialized.
+        :param object data: Object to be serialized.
:param str data_type: Type of object in the iterable.
+ :rtype: str, int, float, bool
+ :return: serialized object
"""
custom_serializer = cls._get_custom_serializers(data_type, **kwargs)
if custom_serializer:
return custom_serializer(data)
if data_type == "str":
return cls.serialize_unicode(data)
- return eval(data_type)(data) # nosec
+ return eval(data_type)(data) # nosec # pylint: disable=eval-used
@classmethod
def serialize_unicode(cls, data):
"""Special handling for serializing unicode strings in Py2.
Encode to UTF-8 if unicode, otherwise handle as a str.
- :param data: Object to be serialized.
+ :param str data: Object to be serialized.
:rtype: str
+ :return: serialized object
"""
try: # If I received an enum, return its value
return data.value
@@ -871,8 +939,7 @@ def serialize_unicode(cls, data):
return data
except NameError:
return str(data)
- else:
- return str(data)
+ return str(data)
def serialize_iter(self, data, iter_type, div=None, **kwargs):
"""Serialize iterable.
@@ -882,15 +949,13 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs):
serialization_ctxt['type'] should be same as data_type.
- is_xml bool : If set, serialize as XML
- :param list attr: Object to be serialized.
+ :param list data: Object to be serialized.
:param str iter_type: Type of object in the iterable.
- :param bool required: Whether the objects in the iterable must
- not be None or empty.
:param str div: If set, this str will be used to combine the elements
in the iterable into a combined string. Default is 'None'.
- :keyword bool do_quote: Whether to quote the serialized result of each iterable element.
Defaults to False.
:rtype: list, str
+ :return: serialized iterable
"""
if isinstance(data, str):
raise SerializationError("Refuse str type as a valid iter type.")
@@ -945,9 +1010,8 @@ def serialize_dict(self, attr, dict_type, **kwargs):
:param dict attr: Object to be serialized.
:param str dict_type: Type of object in the dictionary.
- :param bool required: Whether the objects in the dictionary must
- not be None or empty.
:rtype: dict
+ :return: serialized dictionary
"""
serialization_ctxt = kwargs.get("serialization_ctxt", {})
serialized = {}
@@ -971,7 +1035,7 @@ def serialize_dict(self, attr, dict_type, **kwargs):
return serialized
- def serialize_object(self, attr, **kwargs):
+ def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
"""Serialize a generic object.
This will be handled as a dictionary. If object passed in is not
a basic type (str, int, float, dict, list) it will simply be
@@ -979,6 +1043,7 @@ def serialize_object(self, attr, **kwargs):
:param dict attr: Object to be serialized.
:rtype: dict or str
+ :return: serialized object
"""
if attr is None:
return None
@@ -1003,7 +1068,7 @@ def serialize_object(self, attr, **kwargs):
return self.serialize_decimal(attr)
# If it's a model or I know this dependency, serialize as a Model
- elif obj_type in self.dependencies.values() or isinstance(attr, Model):
+ if obj_type in self.dependencies.values() or isinstance(attr, Model):
return self._serialize(attr)
if obj_type == dict:
@@ -1034,56 +1099,61 @@ def serialize_enum(attr, enum_obj=None):
try:
enum_obj(result) # type: ignore
return result
- except ValueError:
+ except ValueError as exc:
for enum_value in enum_obj: # type: ignore
if enum_value.value.lower() == str(attr).lower():
return enum_value.value
error = "{!r} is not valid value for enum {!r}"
- raise SerializationError(error.format(attr, enum_obj))
+ raise SerializationError(error.format(attr, enum_obj)) from exc
@staticmethod
- def serialize_bytearray(attr, **kwargs):
+ def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize bytearray into base-64 string.
- :param attr: Object to be serialized.
+ :param str attr: Object to be serialized.
:rtype: str
+ :return: serialized base64
"""
return b64encode(attr).decode()
@staticmethod
- def serialize_base64(attr, **kwargs):
+ def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize str into base-64 string.
- :param attr: Object to be serialized.
+ :param str attr: Object to be serialized.
:rtype: str
+ :return: serialized base64
"""
encoded = b64encode(attr).decode("ascii")
return encoded.strip("=").replace("+", "-").replace("/", "_")
@staticmethod
- def serialize_decimal(attr, **kwargs):
+ def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Decimal object to float.
- :param attr: Object to be serialized.
+        :param decimal.Decimal attr: Object to be serialized.
:rtype: float
+ :return: serialized decimal
"""
return float(attr)
@staticmethod
- def serialize_long(attr, **kwargs):
+ def serialize_long(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize long (Py2) or int (Py3).
- :param attr: Object to be serialized.
+ :param int attr: Object to be serialized.
:rtype: int/long
+ :return: serialized long
"""
return _long_type(attr)
@staticmethod
- def serialize_date(attr, **kwargs):
+ def serialize_date(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Date object into ISO-8601 formatted string.
:param Date attr: Object to be serialized.
:rtype: str
+ :return: serialized date
"""
if isinstance(attr, str):
attr = isodate.parse_date(attr)
@@ -1091,11 +1161,12 @@ def serialize_date(attr, **kwargs):
return t
@staticmethod
- def serialize_time(attr, **kwargs):
+ def serialize_time(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Time object into ISO-8601 formatted string.
:param datetime.time attr: Object to be serialized.
:rtype: str
+ :return: serialized time
"""
if isinstance(attr, str):
attr = isodate.parse_time(attr)
@@ -1105,30 +1176,32 @@ def serialize_time(attr, **kwargs):
return t
@staticmethod
- def serialize_duration(attr, **kwargs):
+ def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize TimeDelta object into ISO-8601 formatted string.
:param TimeDelta attr: Object to be serialized.
:rtype: str
+ :return: serialized duration
"""
if isinstance(attr, str):
attr = isodate.parse_duration(attr)
return isodate.duration_isoformat(attr)
@staticmethod
- def serialize_rfc(attr, **kwargs):
+ def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into RFC-1123 formatted string.
:param Datetime attr: Object to be serialized.
:rtype: str
:raises: TypeError if format invalid.
+ :return: serialized rfc
"""
try:
if not attr.tzinfo:
_LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
utc = attr.utctimetuple()
- except AttributeError:
- raise TypeError("RFC1123 object must be valid Datetime object.")
+ except AttributeError as exc:
+ raise TypeError("RFC1123 object must be valid Datetime object.") from exc
return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format(
Serializer.days[utc.tm_wday],
@@ -1141,12 +1214,13 @@ def serialize_rfc(attr, **kwargs):
)
@staticmethod
- def serialize_iso(attr, **kwargs):
+ def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into ISO-8601 formatted string.
:param Datetime attr: Object to be serialized.
:rtype: str
:raises: SerializationError if format invalid.
+ :return: serialized iso
"""
if isinstance(attr, str):
attr = isodate.parse_datetime(attr)
@@ -1172,13 +1246,14 @@ def serialize_iso(attr, **kwargs):
raise TypeError(msg) from err
@staticmethod
- def serialize_unix(attr, **kwargs):
+ def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into IntTime format.
This is represented as seconds.
:param Datetime attr: Object to be serialized.
:rtype: int
:raises: SerializationError if format invalid
+        :return: serialized Unix time
"""
if isinstance(attr, int):
return attr
@@ -1186,11 +1261,11 @@ def serialize_unix(attr, **kwargs):
if not attr.tzinfo:
_LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
return int(calendar.timegm(attr.utctimetuple()))
- except AttributeError:
- raise TypeError("Unix time object must be valid Datetime object.")
+ except AttributeError as exc:
+ raise TypeError("Unix time object must be valid Datetime object.") from exc
-def rest_key_extractor(attr, attr_desc, data):
+def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
key = attr_desc["key"]
working_data = data
@@ -1211,7 +1286,9 @@ def rest_key_extractor(attr, attr_desc, data):
return working_data.get(key)
-def rest_key_case_insensitive_extractor(attr, attr_desc, data):
+def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements
+ attr, attr_desc, data
+):
key = attr_desc["key"]
working_data = data
@@ -1232,17 +1309,29 @@ def rest_key_case_insensitive_extractor(attr, attr_desc, data):
return attribute_key_case_insensitive_extractor(key, None, working_data)
-def last_rest_key_extractor(attr, attr_desc, data):
- """Extract the attribute in "data" based on the last part of the JSON path key."""
+def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
+ """Extract the attribute in "data" based on the last part of the JSON path key.
+
+ :param str attr: The attribute to extract
+ :param dict attr_desc: The attribute description
+ :param dict data: The data to extract from
+ :rtype: object
+ :returns: The extracted attribute
+ """
key = attr_desc["key"]
dict_keys = _FLATTEN.split(key)
return attribute_key_extractor(dict_keys[-1], None, data)
-def last_rest_key_case_insensitive_extractor(attr, attr_desc, data):
+def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
"""Extract the attribute in "data" based on the last part of the JSON path key.
This is the case insensitive version of "last_rest_key_extractor"
+ :param str attr: The attribute to extract
+ :param dict attr_desc: The attribute description
+ :param dict data: The data to extract from
+ :rtype: object
+ :returns: The extracted attribute
"""
key = attr_desc["key"]
dict_keys = _FLATTEN.split(key)
@@ -1279,7 +1368,7 @@ def _extract_name_from_internal_type(internal_type):
return xml_name
-def xml_key_extractor(attr, attr_desc, data):
+def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements
if isinstance(data, dict):
return None
@@ -1331,22 +1420,21 @@ def xml_key_extractor(attr, attr_desc, data):
if is_iter_type:
if is_wrapped:
return None # is_wrapped no node, we want None
- else:
- return [] # not wrapped, assume empty list
+ return [] # not wrapped, assume empty list
return None # Assume it's not there, maybe an optional node.
# If is_iter_type and not wrapped, return all found children
if is_iter_type:
if not is_wrapped:
return children
- else: # Iter and wrapped, should have found one node only (the wrap one)
- if len(children) != 1:
- raise DeserializationError(
- "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format(
- xml_name
- )
+ # Iter and wrapped, should have found one node only (the wrap one)
+ if len(children) != 1:
+ raise DeserializationError(
+ "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( # pylint: disable=line-too-long
+ xml_name
)
- return list(children[0]) # Might be empty list and that's ok.
+ )
+ return list(children[0]) # Might be empty list and that's ok.
# Here it's not a itertype, we should have found one element only or empty
if len(children) > 1:
@@ -1363,7 +1451,7 @@ class Deserializer(object):
basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
- valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
+ valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
def __init__(self, classes: Optional[Mapping[str, type]] = None):
self.deserialize_type = {
@@ -1403,11 +1491,12 @@ def __call__(self, target_obj, response_data, content_type=None):
:param str content_type: Swagger "produces" if available.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
data = self._unpack_content(response_data, content_type)
return self._deserialize(target_obj, data)
- def _deserialize(self, target_obj, data):
+ def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements
"""Call the deserializer on a model.
Data needs to be already deserialized as JSON or XML ElementTree
@@ -1416,12 +1505,13 @@ def _deserialize(self, target_obj, data):
:param object data: Object to deserialize.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
# This is already a model, go recursive just in case
if hasattr(data, "_attribute_map"):
constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")]
try:
- for attr, mapconfig in data._attribute_map.items():
+ for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access
if attr in constants:
continue
value = getattr(data, attr)
@@ -1440,13 +1530,13 @@ def _deserialize(self, target_obj, data):
if isinstance(response, str):
return self.deserialize_data(data, response)
- elif isinstance(response, type) and issubclass(response, Enum):
+ if isinstance(response, type) and issubclass(response, Enum):
return self.deserialize_enum(data, response)
if data is None or data is CoreNull:
return data
try:
- attributes = response._attribute_map # type: ignore
+ attributes = response._attribute_map # type: ignore # pylint: disable=protected-access
d_attrs = {}
for attr, attr_desc in attributes.items():
# Check empty string. If it's not empty, someone has a real "additionalProperties"...
@@ -1476,9 +1566,8 @@ def _deserialize(self, target_obj, data):
except (AttributeError, TypeError, KeyError) as err:
msg = "Unable to deserialize to object: " + class_name # type: ignore
raise DeserializationError(msg) from err
- else:
- additional_properties = self._build_additional_properties(attributes, data)
- return self._instantiate_model(response, d_attrs, additional_properties)
+ additional_properties = self._build_additional_properties(attributes, data)
+ return self._instantiate_model(response, d_attrs, additional_properties)
def _build_additional_properties(self, attribute_map, data):
if not self.additional_properties_detection:
@@ -1505,6 +1594,8 @@ def _classify_target(self, target, data):
:param str target: The target object type to deserialize to.
:param str/dict data: The response data to deserialize.
+ :return: The classified target object and its class name.
+ :rtype: tuple
"""
if target is None:
return None, None
@@ -1516,7 +1607,7 @@ def _classify_target(self, target, data):
return target, target
try:
- target = target._classify(data, self.dependencies) # type: ignore
+ target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access
except AttributeError:
pass # Target is not a Model, no classify
return target, target.__class__.__name__ # type: ignore
@@ -1531,10 +1622,12 @@ def failsafe_deserialize(self, target_obj, data, content_type=None):
:param str target_obj: The target object type to deserialize to.
:param str/dict data: The response data to deserialize.
:param str content_type: Swagger "produces" if available.
+ :return: Deserialized object.
+ :rtype: object
"""
try:
return self(target_obj, data, content_type=content_type)
- except:
+ except: # pylint: disable=bare-except
_LOGGER.debug(
"Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
)
@@ -1552,10 +1645,12 @@ def _unpack_content(raw_data, content_type=None):
If raw_data is something else, bypass all logic and return it directly.
- :param raw_data: Data to be processed.
- :param content_type: How to parse if raw_data is a string/bytes.
+        :param object raw_data: Data to be processed.
+ :param str content_type: How to parse if raw_data is a string/bytes.
:raises JSONDecodeError: If JSON is requested and parsing is impossible.
:raises UnicodeDecodeError: If bytes is not UTF8
+ :rtype: object
+ :return: Unpacked content.
"""
# Assume this is enough to detect a Pipeline Response without importing it
context = getattr(raw_data, "context", {})
@@ -1579,14 +1674,21 @@ def _unpack_content(raw_data, content_type=None):
def _instantiate_model(self, response, attrs, additional_properties=None):
"""Instantiate a response model passing in deserialized args.
- :param response: The response model class.
- :param d_attrs: The deserialized response attributes.
+ :param Response response: The response model class.
+ :param dict attrs: The deserialized response attributes.
+ :param dict additional_properties: Additional properties to be set.
+ :rtype: Response
+ :return: The instantiated response model.
"""
if callable(response):
subtype = getattr(response, "_subtype_map", {})
try:
- readonly = [k for k, v in response._validation.items() if v.get("readonly")]
- const = [k for k, v in response._validation.items() if v.get("constant")]
+ readonly = [
+ k for k, v in response._validation.items() if v.get("readonly") # pylint: disable=protected-access
+ ]
+ const = [
+ k for k, v in response._validation.items() if v.get("constant") # pylint: disable=protected-access
+ ]
kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const}
response_obj = response(**kwargs)
for attr in readonly:
@@ -1596,7 +1698,7 @@ def _instantiate_model(self, response, attrs, additional_properties=None):
return response_obj
except TypeError as err:
msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore
- raise DeserializationError(msg + str(err))
+ raise DeserializationError(msg + str(err)) from err
else:
try:
for attr, value in attrs.items():
@@ -1605,15 +1707,16 @@ def _instantiate_model(self, response, attrs, additional_properties=None):
except Exception as exp:
msg = "Unable to populate response model. "
msg += "Type: {}, Error: {}".format(type(response), exp)
- raise DeserializationError(msg)
+ raise DeserializationError(msg) from exp
- def deserialize_data(self, data, data_type):
+ def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements
"""Process data for deserialization according to data type.
:param str data: The response string to be deserialized.
:param str data_type: The type to deserialize to.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
if data is None:
return data
@@ -1627,7 +1730,11 @@ def deserialize_data(self, data, data_type):
if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())):
return data
- is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"]
+ is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment
+ "object",
+ "[]",
+ r"{}",
+ ]
if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text:
return None
data_val = self.deserialize_type[data_type](data)
@@ -1647,14 +1754,14 @@ def deserialize_data(self, data, data_type):
msg = "Unable to deserialize response data."
msg += " Data: {}, {}".format(data, data_type)
raise DeserializationError(msg) from err
- else:
- return self._deserialize(obj_type, data)
+ return self._deserialize(obj_type, data)
def deserialize_iter(self, attr, iter_type):
"""Deserialize an iterable.
:param list attr: Iterable to be deserialized.
:param str iter_type: The type of object in the iterable.
+ :return: Deserialized iterable.
:rtype: list
"""
if attr is None:
@@ -1671,6 +1778,7 @@ def deserialize_dict(self, attr, dict_type):
:param dict/list attr: Dictionary to be deserialized. Also accepts
a list of key, value pairs.
:param str dict_type: The object type of the items in the dictionary.
+ :return: Deserialized dictionary.
:rtype: dict
"""
if isinstance(attr, list):
@@ -1681,11 +1789,12 @@ def deserialize_dict(self, attr, dict_type):
attr = {el.tag: el.text for el in attr}
return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()}
- def deserialize_object(self, attr, **kwargs):
+ def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
"""Deserialize a generic object.
This will be handled as a dictionary.
:param dict attr: Dictionary to be deserialized.
+ :return: Deserialized object.
:rtype: dict
:raises: TypeError if non-builtin datatype encountered.
"""
@@ -1720,11 +1829,10 @@ def deserialize_object(self, attr, **kwargs):
pass
return deserialized
- else:
- error = "Cannot deserialize generic object with type: "
- raise TypeError(error + str(obj_type))
+ error = "Cannot deserialize generic object with type: "
+ raise TypeError(error + str(obj_type))
- def deserialize_basic(self, attr, data_type):
+ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements
"""Deserialize basic builtin data type from string.
Will attempt to convert to str, int, float and bool.
This function will also accept '1', '0', 'true' and 'false' as
@@ -1732,6 +1840,7 @@ def deserialize_basic(self, attr, data_type):
:param str attr: response string to be deserialized.
:param str data_type: deserialization data type.
+ :return: Deserialized basic type.
:rtype: str, int, float or bool
:raises: TypeError if string format is not valid.
"""
@@ -1743,24 +1852,23 @@ def deserialize_basic(self, attr, data_type):
if data_type == "str":
# None or '', node is empty string.
return ""
- else:
- # None or '', node with a strong type is None.
- # Don't try to model "empty bool" or "empty int"
- return None
+ # None or '', node with a strong type is None.
+ # Don't try to model "empty bool" or "empty int"
+ return None
if data_type == "bool":
if attr in [True, False, 1, 0]:
return bool(attr)
- elif isinstance(attr, str):
+ if isinstance(attr, str):
if attr.lower() in ["true", "1"]:
return True
- elif attr.lower() in ["false", "0"]:
+ if attr.lower() in ["false", "0"]:
return False
raise TypeError("Invalid boolean value: {}".format(attr))
if data_type == "str":
return self.deserialize_unicode(attr)
- return eval(data_type)(attr) # nosec
+ return eval(data_type)(attr) # nosec # pylint: disable=eval-used
@staticmethod
def deserialize_unicode(data):
@@ -1768,6 +1876,7 @@ def deserialize_unicode(data):
as a string.
:param str data: response string to be deserialized.
+ :return: Deserialized string.
:rtype: str or unicode
"""
# We might be here because we have an enum modeled as string,
@@ -1781,8 +1890,7 @@ def deserialize_unicode(data):
return data
except NameError:
return str(data)
- else:
- return str(data)
+ return str(data)
@staticmethod
def deserialize_enum(data, enum_obj):
@@ -1794,6 +1902,7 @@ def deserialize_enum(data, enum_obj):
:param str data: Response string to be deserialized. If this value is
None or invalid it will be returned as-is.
:param Enum enum_obj: Enum object to deserialize to.
+ :return: Deserialized enum object.
:rtype: Enum
"""
if isinstance(data, enum_obj) or data is None:
@@ -1804,9 +1913,9 @@ def deserialize_enum(data, enum_obj):
# Workaround. We might consider remove it in the future.
try:
return list(enum_obj.__members__.values())[data]
- except IndexError:
+ except IndexError as exc:
error = "{!r} is not a valid index for enum {!r}"
- raise DeserializationError(error.format(data, enum_obj))
+ raise DeserializationError(error.format(data, enum_obj)) from exc
try:
return enum_obj(str(data))
except ValueError:
@@ -1822,6 +1931,7 @@ def deserialize_bytearray(attr):
"""Deserialize string into bytearray.
:param str attr: response string to be deserialized.
+ :return: Deserialized bytearray
:rtype: bytearray
:raises: TypeError if string format invalid.
"""
@@ -1834,6 +1944,7 @@ def deserialize_base64(attr):
"""Deserialize base64 encoded string into string.
:param str attr: response string to be deserialized.
+ :return: Deserialized base64 string
:rtype: bytearray
:raises: TypeError if string format invalid.
"""
@@ -1849,8 +1960,9 @@ def deserialize_decimal(attr):
"""Deserialize string into Decimal object.
:param str attr: response string to be deserialized.
- :rtype: Decimal
+ :return: Deserialized decimal
:raises: DeserializationError if string format invalid.
+        :rtype: Decimal
"""
if isinstance(attr, ET.Element):
attr = attr.text
@@ -1865,6 +1977,7 @@ def deserialize_long(attr):
"""Deserialize string into long (Py2) or int (Py3).
:param str attr: response string to be deserialized.
+ :return: Deserialized int
:rtype: long or int
:raises: ValueError if string format invalid.
"""
@@ -1877,6 +1990,7 @@ def deserialize_duration(attr):
"""Deserialize ISO-8601 formatted string into TimeDelta object.
:param str attr: response string to be deserialized.
+ :return: Deserialized duration
:rtype: TimeDelta
:raises: DeserializationError if string format invalid.
"""
@@ -1887,14 +2001,14 @@ def deserialize_duration(attr):
except (ValueError, OverflowError, AttributeError) as err:
msg = "Cannot deserialize duration object."
raise DeserializationError(msg) from err
- else:
- return duration
+ return duration
@staticmethod
def deserialize_date(attr):
"""Deserialize ISO-8601 formatted string into Date object.
:param str attr: response string to be deserialized.
+ :return: Deserialized date
:rtype: Date
:raises: DeserializationError if string format invalid.
"""
@@ -1910,6 +2024,7 @@ def deserialize_time(attr):
"""Deserialize ISO-8601 formatted string into time object.
:param str attr: response string to be deserialized.
+ :return: Deserialized time
:rtype: datetime.time
:raises: DeserializationError if string format invalid.
"""
@@ -1924,6 +2039,7 @@ def deserialize_rfc(attr):
"""Deserialize RFC-1123 formatted string into Datetime object.
:param str attr: response string to be deserialized.
+ :return: Deserialized RFC datetime
:rtype: Datetime
:raises: DeserializationError if string format invalid.
"""
@@ -1939,14 +2055,14 @@ def deserialize_rfc(attr):
except ValueError as err:
msg = "Cannot deserialize to rfc datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
@staticmethod
def deserialize_iso(attr):
"""Deserialize ISO-8601 formatted string into Datetime object.
:param str attr: response string to be deserialized.
+ :return: Deserialized ISO datetime
:rtype: Datetime
:raises: DeserializationError if string format invalid.
"""
@@ -1976,8 +2092,7 @@ def deserialize_iso(attr):
except (ValueError, OverflowError, AttributeError) as err:
msg = "Cannot deserialize datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
@staticmethod
def deserialize_unix(attr):
@@ -1985,6 +2100,7 @@ def deserialize_unix(attr):
This is represented as seconds.
:param int attr: Object to be serialized.
+ :return: Deserialized datetime
:rtype: Datetime
:raises: DeserializationError if format invalid
"""
@@ -1996,5 +2112,4 @@ def deserialize_unix(attr):
except ValueError as err:
msg = "Cannot deserialize to unix datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/_azure_file_storage.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/_azure_file_storage.py
index 94784b9052bc..6770b4dd2183 100644
--- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/_azure_file_storage.py
+++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/_azure_file_storage.py
@@ -45,7 +45,7 @@ class AzureFileStorage: # pylint: disable=client-accepts-api-version-keyword
URI. Default value is None.
:type allow_source_trailing_dot: bool
:keyword version: Specifies the version of the operation to use for this request. Default value
- is "2024-11-04". Note that overriding this default value may result in unsupported behavior.
+ is "2025-01-05". Note that overriding this default value may result in unsupported behavior.
:paramtype version: str
:keyword file_range_write_from_url: Only update is supported: - Update: Writes the bytes
downloaded from the source url into the specified range. Default value is "update". Note that
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/_configuration.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/_configuration.py
index 694f5fa8a193..a9ff0d970826 100644
--- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/_configuration.py
+++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/_configuration.py
@@ -33,7 +33,7 @@ class AzureFileStorageConfiguration: # pylint: disable=too-many-instance-attrib
URI. Default value is None.
:type allow_source_trailing_dot: bool
:keyword version: Specifies the version of the operation to use for this request. Default value
- is "2024-11-04". Note that overriding this default value may result in unsupported behavior.
+ is "2025-01-05". Note that overriding this default value may result in unsupported behavior.
:paramtype version: str
:keyword file_range_write_from_url: Only update is supported: - Update: Writes the bytes
downloaded from the source url into the specified range. Default value is "update". Note that
@@ -49,7 +49,7 @@ def __init__(
allow_source_trailing_dot: Optional[bool] = None,
**kwargs: Any
) -> None:
- version: Literal["2024-11-04"] = kwargs.pop("version", "2024-11-04")
+ version: Literal["2025-01-05"] = kwargs.pop("version", "2025-01-05")
file_range_write_from_url: Literal["update"] = kwargs.pop("file_range_write_from_url", "update")
if url is None:
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_directory_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_directory_operations.py
index 80d6896d2f11..225de2a155b0 100644
--- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_directory_operations.py
+++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_directory_operations.py
@@ -76,6 +76,7 @@ async def create( # pylint: disable=inconsistent-return-statements
file_change_time: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Creates a new directory under the specified share or parent directory.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -120,7 +121,7 @@ async def create( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -200,6 +201,7 @@ async def create( # pylint: disable=inconsistent-return-statements
async def get_properties( # pylint: disable=inconsistent-return-statements
self, sharesnapshot: Optional[str] = None, timeout: Optional[int] = None, **kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Returns all system properties for the specified directory, and can also be used to check the
existence of a directory. The data returned does not include the files in the directory or any
subdirectories.
@@ -216,7 +218,7 @@ async def get_properties( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -290,6 +292,7 @@ async def get_properties( # pylint: disable=inconsistent-return-statements
async def delete( # pylint: disable=inconsistent-return-statements
self, timeout: Optional[int] = None, **kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Removes the specified empty directory. Note that the directory must be empty before it can be
deleted.
@@ -302,7 +305,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -361,6 +364,7 @@ async def set_properties( # pylint: disable=inconsistent-return-statements
file_change_time: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Sets properties on the directory.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -402,7 +406,7 @@ async def set_properties( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -483,6 +487,7 @@ async def set_properties( # pylint: disable=inconsistent-return-statements
async def set_metadata( # pylint: disable=inconsistent-return-statements
self, timeout: Optional[int] = None, metadata: Optional[Dict[str, str]] = None, **kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Updates user defined metadata for the specified directory.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -497,7 +502,7 @@ async def set_metadata( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -562,6 +567,7 @@ async def list_files_and_directories_segment(
include_extended_info: Optional[bool] = None,
**kwargs: Any
) -> _models.ListFilesAndDirectoriesSegmentResponse:
+ # pylint: disable=line-too-long
"""Returns a list of files or directories under the specified share or directory. It lists the
contents only for a single level of the directory hierarchy.
@@ -594,7 +600,7 @@ async def list_files_and_directories_segment(
:rtype: ~azure.storage.fileshare.models.ListFilesAndDirectoriesSegmentResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -663,6 +669,7 @@ async def list_handles(
recursive: Optional[bool] = None,
**kwargs: Any
) -> _models.ListHandlesResponse:
+ # pylint: disable=line-too-long
"""Lists handles for directory.
:param marker: A string value that identifies the portion of the list to be returned with the
@@ -689,7 +696,7 @@ async def list_handles(
:rtype: ~azure.storage.fileshare.models.ListHandlesResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -754,6 +761,7 @@ async def force_close_handles( # pylint: disable=inconsistent-return-statements
recursive: Optional[bool] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Closes all handles open for given directory.
:param handle_id: Specifies handle ID opened on the file or directory to be closed. Asterisk
@@ -779,7 +787,7 @@ async def force_close_handles( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -852,6 +860,7 @@ async def rename( # pylint: disable=inconsistent-return-statements
copy_file_smb_info: Optional[_models.CopyFileSmbInfo] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Renames a directory.
:param rename_source: Required. Specifies the URI-style path of the source file, up to 2 KB in
@@ -906,7 +915,7 @@ async def rename( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_file_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_file_operations.py
index 35bc561a6c07..cb72e8d4a660 100644
--- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_file_operations.py
+++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_file_operations.py
@@ -90,6 +90,7 @@ async def create( # pylint: disable=inconsistent-return-statements
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Creates a new file or replaces a file. Note it only initializes the file with no content.
:param file_content_length: Specifies the maximum size for the file, up to 4 TB. Required.
@@ -140,7 +141,7 @@ async def create( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -247,9 +248,11 @@ async def download(
timeout: Optional[int] = None,
range: Optional[str] = None,
range_get_content_md5: Optional[bool] = None,
+ structured_body_type: Optional[str] = None,
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> AsyncIterator[bytes]:
+ # pylint: disable=line-too-long
"""Reads or downloads a file from the system, including its metadata and properties.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -263,13 +266,16 @@ async def download(
Range header, the service returns the MD5 hash for the range, as long as the range is less than
or equal to 4 MB in size. Default value is None.
:type range_get_content_md5: bool
+ :param structured_body_type: Specifies the response content should be returned as a structured
+ message and specifies the message schema version and properties. Default value is None.
+ :type structured_body_type: str
:param lease_access_conditions: Parameter group. Default value is None.
:type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
:return: AsyncIterator[bytes] or the result of cls(response)
:rtype: AsyncIterator[bytes]
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -291,6 +297,7 @@ async def download(
timeout=timeout,
range=range,
range_get_content_md5=range_get_content_md5,
+ structured_body_type=structured_body_type,
lease_id=_lease_id,
allow_trailing_dot=self._config.allow_trailing_dot,
file_request_intent=self._config.file_request_intent,
@@ -367,6 +374,12 @@ async def download(
response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration"))
response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state"))
response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status"))
+ response_headers["x-ms-structured-body"] = self._deserialize(
+ "str", response.headers.get("x-ms-structured-body")
+ )
+ response_headers["x-ms-structured-content-length"] = self._deserialize(
+ "int", response.headers.get("x-ms-structured-content-length")
+ )
deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
@@ -383,6 +396,7 @@ async def get_properties( # pylint: disable=inconsistent-return-statements
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Returns all user-defined metadata, standard HTTP properties, and system properties for the
file. It does not return the content of the file.
@@ -400,7 +414,7 @@ async def get_properties( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -501,6 +515,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""removes the file from the storage account.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -514,7 +529,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -579,6 +594,7 @@ async def set_http_headers( # pylint: disable=inconsistent-return-statements
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Sets HTTP headers on the file.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -628,7 +644,7 @@ async def set_http_headers( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -736,6 +752,7 @@ async def set_metadata( # pylint: disable=inconsistent-return-statements
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Updates user-defined metadata for the specified file.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -752,7 +769,7 @@ async def set_metadata( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -817,6 +834,7 @@ async def acquire_lease( # pylint: disable=inconsistent-return-statements
request_id_parameter: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""[Update] The Lease File operation establishes and manages a lock on a file for write and delete
operations.
@@ -841,7 +859,7 @@ async def acquire_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -902,6 +920,7 @@ async def acquire_lease( # pylint: disable=inconsistent-return-statements
async def release_lease( # pylint: disable=inconsistent-return-statements
self, lease_id: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""[Update] The Lease File operation establishes and manages a lock on a file for write and delete
operations.
@@ -920,7 +939,7 @@ async def release_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -984,6 +1003,7 @@ async def change_lease( # pylint: disable=inconsistent-return-statements
request_id_parameter: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""[Update] The Lease File operation establishes and manages a lock on a file for write and delete
operations.
@@ -1006,7 +1026,7 @@ async def change_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1071,6 +1091,7 @@ async def break_lease( # pylint: disable=inconsistent-return-statements
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""[Update] The Lease File operation establishes and manages a lock on a file for write and delete
operations.
@@ -1089,7 +1110,7 @@ async def break_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1158,10 +1179,13 @@ async def upload_range( # pylint: disable=inconsistent-return-statements
file_range_write: Union[str, _models.FileRangeWriteType] = "update",
content_md5: Optional[bytes] = None,
file_last_written_mode: Optional[Union[str, _models.FileLastWrittenMode]] = None,
+ structured_body_type: Optional[str] = None,
+ structured_content_length: Optional[int] = None,
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
optionalbody: Optional[IO[bytes]] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Upload a range of bytes to a file.
:param range: Specifies the range of bytes to be written. Both the start and end of the range
@@ -1194,6 +1218,13 @@ async def upload_range( # pylint: disable=inconsistent-return-statements
:param file_last_written_mode: If the file last write time should be preserved or overwritten.
Known values are: "Now" and "Preserve". Default value is None.
:type file_last_written_mode: str or ~azure.storage.fileshare.models.FileLastWrittenMode
+ :param structured_body_type: Required if the request body is a structured message. Specifies
+ the message schema version and properties. Default value is None.
+ :type structured_body_type: str
+ :param structured_content_length: Required if the request body is a structured message.
+ Specifies the length of the blob/file content inside the message body. Will always be smaller
+ than Content-Length. Default value is None.
+ :type structured_content_length: int
:param lease_access_conditions: Parameter group. Default value is None.
:type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
:param optionalbody: Initial data. Default value is None.
@@ -1202,7 +1233,7 @@ async def upload_range( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1231,6 +1262,8 @@ async def upload_range( # pylint: disable=inconsistent-return-statements
content_md5=content_md5,
lease_id=_lease_id,
file_last_written_mode=file_last_written_mode,
+ structured_body_type=structured_body_type,
+ structured_content_length=structured_content_length,
allow_trailing_dot=self._config.allow_trailing_dot,
file_request_intent=self._config.file_request_intent,
comp=comp,
@@ -1267,6 +1300,9 @@ async def upload_range( # pylint: disable=inconsistent-return-statements
response_headers["x-ms-file-last-write-time"] = self._deserialize(
"str", response.headers.get("x-ms-file-last-write-time")
)
+ response_headers["x-ms-structured-body"] = self._deserialize(
+ "str", response.headers.get("x-ms-structured-body")
+ )
if cls:
return cls(pipeline_response, None, response_headers) # type: ignore
@@ -1286,6 +1322,7 @@ async def upload_range_from_url( # pylint: disable=inconsistent-return-statemen
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Upload a range of bytes to a file where the contents are read from a URL.
:param range: Writes data to the specified byte range in the file. Required.
@@ -1327,7 +1364,7 @@ async def upload_range_from_url( # pylint: disable=inconsistent-return-statemen
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1416,6 +1453,7 @@ async def get_range_list(
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> _models.ShareFileRangeList:
+ # pylint: disable=line-too-long
"""Returns the list of valid ranges for a file.
:param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
@@ -1445,7 +1483,7 @@ async def get_range_list(
:rtype: ~azure.storage.fileshare.models.ShareFileRangeList
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1514,11 +1552,13 @@ async def start_copy( # pylint: disable=inconsistent-return-statements
timeout: Optional[int] = None,
metadata: Optional[Dict[str, str]] = None,
file_permission: str = "inherit",
+ file_permission_format: Optional[Union[str, _models.FilePermissionFormat]] = None,
file_permission_key: Optional[str] = None,
copy_file_smb_info: Optional[_models.CopyFileSmbInfo] = None,
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Copies a blob or file to a destination file within the storage account.
:param copy_source: Specifies the URL of the source file or blob, up to 2 KB in length. To copy
@@ -1543,6 +1583,13 @@ async def start_copy( # pylint: disable=inconsistent-return-statements
input, it must have owner, group and dacl. Note: Only one of the x-ms-file-permission or
x-ms-file-permission-key should be specified. Default value is "inherit".
:type file_permission: str
+ :param file_permission_format: Optional. Available for version 2023-06-01 and later. Specifies
+ the format in which the permission is returned. Acceptable values are SDDL or binary. If
+ x-ms-file-permission-format is unspecified or explicitly set to SDDL, the permission is
+ returned in SDDL format. If x-ms-file-permission-format is explicitly set to binary, the
+ permission is returned as a base64 string representing the binary encoding of the permission.
+ Known values are: "Sddl" and "Binary". Default value is None.
+ :type file_permission_format: str or ~azure.storage.fileshare.models.FilePermissionFormat
:param file_permission_key: Key of the permission to be set for the directory/file. Note: Only
one of the x-ms-file-permission or x-ms-file-permission-key should be specified. Default value
is None.
@@ -1555,7 +1602,7 @@ async def start_copy( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1593,6 +1640,7 @@ async def start_copy( # pylint: disable=inconsistent-return-statements
timeout=timeout,
metadata=metadata,
file_permission=file_permission,
+ file_permission_format=file_permission_format,
file_permission_key=file_permission_key,
file_permission_copy_mode=_file_permission_copy_mode,
ignore_read_only=_ignore_read_only,
@@ -1643,6 +1691,7 @@ async def abort_copy( # pylint: disable=inconsistent-return-statements
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Aborts a pending Copy File operation, and leaves a destination file with zero length and full
metadata.
@@ -1660,7 +1709,7 @@ async def abort_copy( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1725,6 +1774,7 @@ async def list_handles(
sharesnapshot: Optional[str] = None,
**kwargs: Any
) -> _models.ListHandlesResponse:
+ # pylint: disable=line-too-long
"""Lists handles for file.
:param marker: A string value that identifies the portion of the list to be returned with the
@@ -1748,7 +1798,7 @@ async def list_handles(
:rtype: ~azure.storage.fileshare.models.ListHandlesResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1811,6 +1861,7 @@ async def force_close_handles( # pylint: disable=inconsistent-return-statements
sharesnapshot: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Closes all handles open for given file.
:param handle_id: Specifies handle ID opened on the file or directory to be closed. Asterisk
@@ -1833,7 +1884,7 @@ async def force_close_handles( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1906,6 +1957,7 @@ async def rename( # pylint: disable=inconsistent-return-statements
file_http_headers: Optional[_models.FileHTTPHeaders] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Renames a file.
:param rename_source: Required. Specifies the URI-style path of the source file, up to 2 KB in
@@ -1962,7 +2014,7 @@ async def rename( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_service_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_service_operations.py
index b393d21685ba..ea878f63b913 100644
--- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_service_operations.py
+++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_service_operations.py
@@ -60,6 +60,7 @@ def __init__(self, *args, **kwargs) -> None:
async def set_properties( # pylint: disable=inconsistent-return-statements
self, storage_service_properties: _models.StorageServiceProperties, timeout: Optional[int] = None, **kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Sets properties for a storage account's File service endpoint, including properties for Storage
Analytics metrics and CORS (Cross-Origin Resource Sharing) rules.
@@ -74,7 +75,7 @@ async def set_properties( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -127,6 +128,7 @@ async def set_properties( # pylint: disable=inconsistent-return-statements
@distributed_trace_async
async def get_properties(self, timeout: Optional[int] = None, **kwargs: Any) -> _models.StorageServiceProperties:
+ # pylint: disable=line-too-long
"""Gets the properties of a storage account's File service, including properties for Storage
Analytics metrics and CORS (Cross-Origin Resource Sharing) rules.
@@ -139,7 +141,7 @@ async def get_properties(self, timeout: Optional[int] = None, **kwargs: Any) ->
:rtype: ~azure.storage.fileshare.models.StorageServiceProperties
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -199,6 +201,7 @@ async def list_shares_segment(
timeout: Optional[int] = None,
**kwargs: Any
) -> _models.ListSharesResponse:
+ # pylint: disable=line-too-long
"""The List Shares Segment operation returns a list of the shares and share snapshots under the
specified account.
@@ -226,7 +229,7 @@ async def list_shares_segment(
:rtype: ~azure.storage.fileshare.models.ListSharesResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_share_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_share_operations.py
index 46d2c38171cc..ae5bcd70d4be 100644
--- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_share_operations.py
+++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_share_operations.py
@@ -84,8 +84,11 @@ async def create( # pylint: disable=inconsistent-return-statements
paid_bursting_enabled: Optional[bool] = None,
paid_bursting_max_bandwidth_mibps: Optional[int] = None,
paid_bursting_max_iops: Optional[int] = None,
+ share_provisioned_iops: Optional[int] = None,
+ share_provisioned_bandwidth_mibps: Optional[int] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Creates a new share under the specified account. If the share with the same name already
exists, the operation fails.
@@ -100,7 +103,7 @@ async def create( # pylint: disable=inconsistent-return-statements
:param quota: Specifies the maximum size of the share, in gigabytes. Default value is None.
:type quota: int
:param access_tier: Specifies the access tier of the share. Known values are:
- "TransactionOptimized", "Hot", and "Cool". Default value is None.
+ "TransactionOptimized", "Hot", "Cool", and "Premium". Default value is None.
:type access_tier: str or ~azure.storage.fileshare.models.ShareAccessTier
:param enabled_protocols: Protocols to enable on the share. Default value is None.
:type enabled_protocols: str
@@ -120,11 +123,21 @@ async def create( # pylint: disable=inconsistent-return-statements
the file share can support. Current maximum for a file share is 102,400 IOPS. Default value is
None.
:type paid_bursting_max_iops: int
+ :param share_provisioned_iops: Optional. Supported in version 2025-01-05 and later. Only
+ allowed for provisioned v2 file shares. Specifies the provisioned number of input/output
+ operations per second (IOPS) of the share. If this is not specified, the provisioned IOPS is
+ set to value calculated based on recommendation formula. Default value is None.
+ :type share_provisioned_iops: int
+ :param share_provisioned_bandwidth_mibps: Optional. Supported in version 2025-01-05 and later.
+ Only allowed for provisioned v2 file shares. Specifies the provisioned bandwidth of the share,
+ in mebibytes per second (MiBps). If this is not specified, the provisioned bandwidth is set to
+ value calculated based on recommendation formula. Default value is None.
+ :type share_provisioned_bandwidth_mibps: int
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -150,6 +163,8 @@ async def create( # pylint: disable=inconsistent-return-statements
paid_bursting_enabled=paid_bursting_enabled,
paid_bursting_max_bandwidth_mibps=paid_bursting_max_bandwidth_mibps,
paid_bursting_max_iops=paid_bursting_max_iops,
+ share_provisioned_iops=share_provisioned_iops,
+ share_provisioned_bandwidth_mibps=share_provisioned_bandwidth_mibps,
file_request_intent=self._config.file_request_intent,
restype=restype,
version=self._config.version,
@@ -176,6 +191,19 @@ async def create( # pylint: disable=inconsistent-return-statements
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-share-quota"] = self._deserialize("int", response.headers.get("x-ms-share-quota"))
+ response_headers["x-ms-share-provisioned-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-provisioned-iops")
+ )
+ response_headers["x-ms-share-provisioned-bandwidth-mibps"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-provisioned-bandwidth-mibps")
+ )
+ response_headers["x-ms-share-included-burst-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-included-burst-iops")
+ )
+ response_headers["x-ms-share-max-burst-credits-for-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-max-burst-credits-for-iops")
+ )
if cls:
return cls(pipeline_response, None, response_headers) # type: ignore
@@ -188,6 +216,7 @@ async def get_properties( # pylint: disable=inconsistent-return-statements
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Returns all user-defined metadata and system properties for the specified share or share
snapshot. The data returned does not include the share's list of files.
@@ -205,7 +234,7 @@ async def get_properties( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -297,6 +326,18 @@ async def get_properties( # pylint: disable=inconsistent-return-statements
response_headers["x-ms-share-paid-bursting-max-bandwidth-mibps"] = self._deserialize(
"int", response.headers.get("x-ms-share-paid-bursting-max-bandwidth-mibps")
)
+ response_headers["x-ms-share-included-burst-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-included-burst-iops")
+ )
+ response_headers["x-ms-share-max-burst-credits-for-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-max-burst-credits-for-iops")
+ )
+ response_headers["x-ms-share-next-allowed-provisioned-iops-downgrade-time"] = self._deserialize(
+ "rfc-1123", response.headers.get("x-ms-share-next-allowed-provisioned-iops-downgrade-time")
+ )
+ response_headers["x-ms-share-next-allowed-provisioned-bandwidth-downgrade-time"] = self._deserialize(
+ "rfc-1123", response.headers.get("x-ms-share-next-allowed-provisioned-bandwidth-downgrade-time")
+ )
if cls:
return cls(pipeline_response, None, response_headers) # type: ignore
@@ -310,6 +351,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Operation marks the specified share or share snapshot for deletion. The share or share snapshot
and any files contained within it are later deleted during garbage collection.
@@ -330,7 +372,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -378,6 +420,12 @@ async def delete( # pylint: disable=inconsistent-return-statements
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-file-share-usage-bytes"] = self._deserialize(
+ "int", response.headers.get("x-ms-file-share-usage-bytes")
+ )
+ response_headers["x-ms-file-share-snapshot-usage-bytes"] = self._deserialize(
+ "int", response.headers.get("x-ms-file-share-snapshot-usage-bytes")
+ )
if cls:
return cls(pipeline_response, None, response_headers) # type: ignore
@@ -392,6 +440,7 @@ async def acquire_lease( # pylint: disable=inconsistent-return-statements
request_id_parameter: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Lease Share operation establishes and manages a lock on a share, or the specified snapshot
for set and delete share operations.
@@ -419,7 +468,7 @@ async def acquire_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -487,6 +536,7 @@ async def release_lease( # pylint: disable=inconsistent-return-statements
request_id_parameter: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Lease Share operation establishes and manages a lock on a share, or the specified snapshot
for set and delete share operations.
@@ -508,7 +558,7 @@ async def release_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -575,6 +625,7 @@ async def change_lease( # pylint: disable=inconsistent-return-statements
request_id_parameter: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Lease Share operation establishes and manages a lock on a share, or the specified snapshot
for set and delete share operations.
@@ -600,7 +651,7 @@ async def change_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -668,6 +719,7 @@ async def renew_lease( # pylint: disable=inconsistent-return-statements
request_id_parameter: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Lease Share operation establishes and manages a lock on a share, or the specified snapshot
for set and delete share operations.
@@ -689,7 +741,7 @@ async def renew_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -757,6 +809,7 @@ async def break_lease( # pylint: disable=inconsistent-return-statements
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Lease Share operation establishes and manages a lock on a share, or the specified snapshot
for set and delete share operations.
@@ -786,7 +839,7 @@ async def break_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -854,6 +907,7 @@ async def break_lease( # pylint: disable=inconsistent-return-statements
async def create_snapshot( # pylint: disable=inconsistent-return-statements
self, timeout: Optional[int] = None, metadata: Optional[Dict[str, str]] = None, **kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Creates a read-only snapshot of a share.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -868,7 +922,7 @@ async def create_snapshot( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -928,6 +982,7 @@ async def create_permission( # pylint: disable=inconsistent-return-statements
content_type: str = "application/json",
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Create a permission (a security descriptor).
:param share_permission: A permission (a security descriptor) at the share level. Required.
@@ -954,6 +1009,7 @@ async def create_permission( # pylint: disable=inconsistent-return-statements
content_type: str = "application/json",
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Create a permission (a security descriptor).
:param share_permission: A permission (a security descriptor) at the share level. Required.
@@ -975,6 +1031,7 @@ async def create_permission( # pylint: disable=inconsistent-return-statements
async def create_permission( # pylint: disable=inconsistent-return-statements
self, share_permission: Union[_models.SharePermission, IO[bytes]], timeout: Optional[int] = None, **kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Create a permission (a security descriptor).
:param share_permission: A permission (a security descriptor) at the share level. Is either a
@@ -989,7 +1046,7 @@ async def create_permission( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1059,6 +1116,7 @@ async def get_permission(
timeout: Optional[int] = None,
**kwargs: Any
) -> _models.SharePermission:
+ # pylint: disable=line-too-long
"""Returns the permission (security descriptor) for a given key.
:param file_permission_key: Key of the permission to be set for the directory/file. Required.
@@ -1079,7 +1137,7 @@ async def get_permission(
:rtype: ~azure.storage.fileshare.models.SharePermission
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1143,9 +1201,12 @@ async def set_properties( # pylint: disable=inconsistent-return-statements
paid_bursting_enabled: Optional[bool] = None,
paid_bursting_max_bandwidth_mibps: Optional[int] = None,
paid_bursting_max_iops: Optional[int] = None,
+ share_provisioned_iops: Optional[int] = None,
+ share_provisioned_bandwidth_mibps: Optional[int] = None,
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Sets properties for the specified share.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -1156,7 +1217,7 @@ async def set_properties( # pylint: disable=inconsistent-return-statements
:param quota: Specifies the maximum size of the share, in gigabytes. Default value is None.
:type quota: int
:param access_tier: Specifies the access tier of the share. Known values are:
- "TransactionOptimized", "Hot", and "Cool". Default value is None.
+ "TransactionOptimized", "Hot", "Cool", and "Premium". Default value is None.
:type access_tier: str or ~azure.storage.fileshare.models.ShareAccessTier
:param root_squash: Root squash to set on the share. Only valid for NFS shares. Known values
are: "NoRootSquash", "RootSquash", and "AllSquash". Default value is None.
@@ -1174,13 +1235,23 @@ async def set_properties( # pylint: disable=inconsistent-return-statements
the file share can support. Current maximum for a file share is 102,400 IOPS. Default value is
None.
:type paid_bursting_max_iops: int
+ :param share_provisioned_iops: Optional. Supported in version 2025-01-05 and later. Only
+ allowed for provisioned v2 file shares. Specifies the provisioned number of input/output
+ operations per second (IOPS) of the share. If this is not specified, the provisioned IOPS is
+ set to value calculated based on recommendation formula. Default value is None.
+ :type share_provisioned_iops: int
+ :param share_provisioned_bandwidth_mibps: Optional. Supported in version 2025-01-05 and later.
+ Only allowed for provisioned v2 file shares. Specifies the provisioned bandwidth of the share,
+ in mebibytes per second (MiBps). If this is not specified, the provisioned bandwidth is set to
+ value calculated based on recommendation formula. Default value is None.
+ :type share_provisioned_bandwidth_mibps: int
:param lease_access_conditions: Parameter group. Default value is None.
:type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1210,6 +1281,8 @@ async def set_properties( # pylint: disable=inconsistent-return-statements
paid_bursting_enabled=paid_bursting_enabled,
paid_bursting_max_bandwidth_mibps=paid_bursting_max_bandwidth_mibps,
paid_bursting_max_iops=paid_bursting_max_iops,
+ share_provisioned_iops=share_provisioned_iops,
+ share_provisioned_bandwidth_mibps=share_provisioned_bandwidth_mibps,
file_request_intent=self._config.file_request_intent,
restype=restype,
comp=comp,
@@ -1237,6 +1310,28 @@ async def set_properties( # pylint: disable=inconsistent-return-statements
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-share-quota"] = self._deserialize("int", response.headers.get("x-ms-share-quota"))
+ response_headers["x-ms-share-provisioned-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-provisioned-iops")
+ )
+ response_headers["x-ms-share-provisioned-bandwidth-mibps"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-provisioned-bandwidth-mibps")
+ )
+ response_headers["x-ms-share-included-burst-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-included-burst-iops")
+ )
+ response_headers["x-ms-share-max-burst-credits-for-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-max-burst-credits-for-iops")
+ )
+ response_headers["x-ms-share-next-allowed-quota-downgrade-time"] = self._deserialize(
+ "rfc-1123", response.headers.get("x-ms-share-next-allowed-quota-downgrade-time")
+ )
+ response_headers["x-ms-share-next-allowed-provisioned-iops-downgrade-time"] = self._deserialize(
+ "rfc-1123", response.headers.get("x-ms-share-next-allowed-provisioned-iops-downgrade-time")
+ )
+ response_headers["x-ms-share-next-allowed-provisioned-bandwidth-downgrade-time"] = self._deserialize(
+ "rfc-1123", response.headers.get("x-ms-share-next-allowed-provisioned-bandwidth-downgrade-time")
+ )
if cls:
return cls(pipeline_response, None, response_headers) # type: ignore
@@ -1249,6 +1344,7 @@ async def set_metadata( # pylint: disable=inconsistent-return-statements
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Sets one or more user-defined name-value pairs for the specified share.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -1265,7 +1361,7 @@ async def set_metadata( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1327,6 +1423,7 @@ async def get_access_policy(
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> List[_models.SignedIdentifier]:
+ # pylint: disable=line-too-long
"""Returns information about stored access policies specified on the share.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -1340,7 +1437,7 @@ async def get_access_policy(
:rtype: list[~azure.storage.fileshare.models.SignedIdentifier]
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1406,6 +1503,7 @@ async def set_access_policy( # pylint: disable=inconsistent-return-statements
share_acl: Optional[List[_models.SignedIdentifier]] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Sets a stored access policy for use with shared access signatures.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -1421,7 +1519,7 @@ async def set_access_policy( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1492,6 +1590,7 @@ async def get_statistics(
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> _models.ShareStats:
+ # pylint: disable=line-too-long
"""Retrieves statistics related to the share.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -1505,7 +1604,7 @@ async def get_statistics(
:rtype: ~azure.storage.fileshare.models.ShareStats
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1572,6 +1671,7 @@ async def restore( # pylint: disable=inconsistent-return-statements
deleted_share_version: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Restores a previously deleted Share.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -1593,7 +1693,7 @@ async def restore( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1644,6 +1744,19 @@ async def restore( # pylint: disable=inconsistent-return-statements
)
response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-share-quota"] = self._deserialize("int", response.headers.get("x-ms-share-quota"))
+ response_headers["x-ms-share-provisioned-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-provisioned-iops")
+ )
+ response_headers["x-ms-share-provisioned-bandwidth-mibps"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-provisioned-bandwidth-mibps")
+ )
+ response_headers["x-ms-share-included-burst-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-included-burst-iops")
+ )
+ response_headers["x-ms-share-max-burst-credits-for-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-max-burst-credits-for-iops")
+ )
if cls:
return cls(pipeline_response, None, response_headers) # type: ignore
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/models/_azure_file_storage_enums.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/models/_azure_file_storage_enums.py
index 72de9b03134e..816fa71507cf 100644
--- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/models/_azure_file_storage_enums.py
+++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/models/_azure_file_storage_enums.py
@@ -109,6 +109,7 @@ class ShareAccessTier(str, Enum, metaclass=CaseInsensitiveEnumMeta):
TRANSACTION_OPTIMIZED = "TransactionOptimized"
HOT = "Hot"
COOL = "Cool"
+ PREMIUM = "Premium"
class ShareRootSquash(str, Enum, metaclass=CaseInsensitiveEnumMeta):
@@ -136,6 +137,8 @@ class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta):
CONDITION_HEADERS_NOT_SUPPORTED = "ConditionHeadersNotSupported"
CONDITION_NOT_MET = "ConditionNotMet"
EMPTY_METADATA_KEY = "EmptyMetadataKey"
+ FILE_SHARE_PROVISIONED_BANDWIDTH_DOWNGRADE_NOT_ALLOWED = "FileShareProvisionedBandwidthDowngradeNotAllowed"
+ FILE_SHARE_PROVISIONED_IOPS_DOWNGRADE_NOT_ALLOWED = "FileShareProvisionedIopsDowngradeNotAllowed"
INSUFFICIENT_ACCOUNT_PERMISSIONS = "InsufficientAccountPermissions"
INTERNAL_ERROR = "InternalError"
INVALID_AUTHENTICATION_INFO = "InvalidAuthenticationInfo"
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/models/_models_py3.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/models/_models_py3.py
index d31101cef648..a58bca195317 100644
--- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/models/_models_py3.py
+++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/models/_models_py3.py
@@ -1255,6 +1255,14 @@ class SharePropertiesInternal(_serialization.Model): # pylint: disable=too-many
:vartype paid_bursting_max_iops: int
:ivar paid_bursting_max_bandwidth_mibps:
:vartype paid_bursting_max_bandwidth_mibps: int
+ :ivar included_burst_iops:
+ :vartype included_burst_iops: int
+ :ivar max_burst_credits_for_iops:
+ :vartype max_burst_credits_for_iops: int
+ :ivar next_allowed_provisioned_iops_downgrade_time:
+ :vartype next_allowed_provisioned_iops_downgrade_time: ~datetime.datetime
+ :ivar next_allowed_provisioned_bandwidth_downgrade_time:
+ :vartype next_allowed_provisioned_bandwidth_downgrade_time: ~datetime.datetime
"""
_validation = {
@@ -1286,9 +1294,19 @@ class SharePropertiesInternal(_serialization.Model): # pylint: disable=too-many
"paid_bursting_enabled": {"key": "PaidBurstingEnabled", "type": "bool"},
"paid_bursting_max_iops": {"key": "PaidBurstingMaxIops", "type": "int"},
"paid_bursting_max_bandwidth_mibps": {"key": "PaidBurstingMaxBandwidthMibps", "type": "int"},
+ "included_burst_iops": {"key": "IncludedBurstIops", "type": "int"},
+ "max_burst_credits_for_iops": {"key": "MaxBurstCreditsForIops", "type": "int"},
+ "next_allowed_provisioned_iops_downgrade_time": {
+ "key": "NextAllowedProvisionedIopsDowngradeTime",
+ "type": "rfc-1123",
+ },
+ "next_allowed_provisioned_bandwidth_downgrade_time": {
+ "key": "NextAllowedProvisionedBandwidthDowngradeTime",
+ "type": "rfc-1123",
+ },
}
- def __init__(
+ def __init__( # pylint: disable=too-many-locals
self,
*,
last_modified: datetime.datetime,
@@ -1313,6 +1331,10 @@ def __init__(
paid_bursting_enabled: Optional[bool] = None,
paid_bursting_max_iops: Optional[int] = None,
paid_bursting_max_bandwidth_mibps: Optional[int] = None,
+ included_burst_iops: Optional[int] = None,
+ max_burst_credits_for_iops: Optional[int] = None,
+ next_allowed_provisioned_iops_downgrade_time: Optional[datetime.datetime] = None,
+ next_allowed_provisioned_bandwidth_downgrade_time: Optional[datetime.datetime] = None,
**kwargs: Any
) -> None:
"""
@@ -1363,6 +1385,14 @@ def __init__(
:paramtype paid_bursting_max_iops: int
:keyword paid_bursting_max_bandwidth_mibps:
:paramtype paid_bursting_max_bandwidth_mibps: int
+ :keyword included_burst_iops:
+ :paramtype included_burst_iops: int
+ :keyword max_burst_credits_for_iops:
+ :paramtype max_burst_credits_for_iops: int
+ :keyword next_allowed_provisioned_iops_downgrade_time:
+ :paramtype next_allowed_provisioned_iops_downgrade_time: ~datetime.datetime
+ :keyword next_allowed_provisioned_bandwidth_downgrade_time:
+ :paramtype next_allowed_provisioned_bandwidth_downgrade_time: ~datetime.datetime
"""
super().__init__(**kwargs)
self.last_modified = last_modified
@@ -1387,6 +1417,10 @@ def __init__(
self.paid_bursting_enabled = paid_bursting_enabled
self.paid_bursting_max_iops = paid_bursting_max_iops
self.paid_bursting_max_bandwidth_mibps = paid_bursting_max_bandwidth_mibps
+ self.included_burst_iops = included_burst_iops
+ self.max_burst_credits_for_iops = max_burst_credits_for_iops
+ self.next_allowed_provisioned_iops_downgrade_time = next_allowed_provisioned_iops_downgrade_time
+ self.next_allowed_provisioned_bandwidth_downgrade_time = next_allowed_provisioned_bandwidth_downgrade_time
class ShareProtocolSettings(_serialization.Model):
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_directory_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_directory_operations.py
index 98a8faa7d456..dcd395a2831a 100644
--- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_directory_operations.py
+++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_directory_operations.py
@@ -56,7 +56,7 @@ def build_create_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -113,7 +113,7 @@ def build_get_properties_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -154,7 +154,7 @@ def build_delete_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -201,7 +201,7 @@ def build_set_properties_request(
restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory"))
comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -258,7 +258,7 @@ def build_set_metadata_request(
restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory"))
comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -307,7 +307,7 @@ def build_list_files_and_directories_segment_request( # pylint: disable=name-to
restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory"))
comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -363,7 +363,7 @@ def build_list_handles_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
comp: Literal["listhandles"] = kwargs.pop("comp", _params.pop("comp", "listhandles"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -414,7 +414,7 @@ def build_force_close_handles_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
comp: Literal["forceclosehandles"] = kwargs.pop("comp", _params.pop("comp", "forceclosehandles"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -475,7 +475,7 @@ def build_rename_request(
restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory"))
comp: Literal["rename"] = kwargs.pop("comp", _params.pop("comp", "rename"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -569,6 +569,7 @@ def create( # pylint: disable=inconsistent-return-statements
file_change_time: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Creates a new directory under the specified share or parent directory.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -613,7 +614,7 @@ def create( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -693,6 +694,7 @@ def create( # pylint: disable=inconsistent-return-statements
def get_properties( # pylint: disable=inconsistent-return-statements
self, sharesnapshot: Optional[str] = None, timeout: Optional[int] = None, **kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Returns all system properties for the specified directory, and can also be used to check the
existence of a directory. The data returned does not include the files in the directory or any
subdirectories.
@@ -709,7 +711,7 @@ def get_properties( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -783,6 +785,7 @@ def get_properties( # pylint: disable=inconsistent-return-statements
def delete( # pylint: disable=inconsistent-return-statements
self, timeout: Optional[int] = None, **kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Removes the specified empty directory. Note that the directory must be empty before it can be
deleted.
@@ -795,7 +798,7 @@ def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -854,6 +857,7 @@ def set_properties( # pylint: disable=inconsistent-return-statements
file_change_time: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Sets properties on the directory.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -895,7 +899,7 @@ def set_properties( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -976,6 +980,7 @@ def set_properties( # pylint: disable=inconsistent-return-statements
def set_metadata( # pylint: disable=inconsistent-return-statements
self, timeout: Optional[int] = None, metadata: Optional[Dict[str, str]] = None, **kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Updates user defined metadata for the specified directory.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -990,7 +995,7 @@ def set_metadata( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1055,6 +1060,7 @@ def list_files_and_directories_segment(
include_extended_info: Optional[bool] = None,
**kwargs: Any
) -> _models.ListFilesAndDirectoriesSegmentResponse:
+ # pylint: disable=line-too-long
"""Returns a list of files or directories under the specified share or directory. It lists the
contents only for a single level of the directory hierarchy.
@@ -1087,7 +1093,7 @@ def list_files_and_directories_segment(
:rtype: ~azure.storage.fileshare.models.ListFilesAndDirectoriesSegmentResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1156,6 +1162,7 @@ def list_handles(
recursive: Optional[bool] = None,
**kwargs: Any
) -> _models.ListHandlesResponse:
+ # pylint: disable=line-too-long
"""Lists handles for directory.
:param marker: A string value that identifies the portion of the list to be returned with the
@@ -1182,7 +1189,7 @@ def list_handles(
:rtype: ~azure.storage.fileshare.models.ListHandlesResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1247,6 +1254,7 @@ def force_close_handles( # pylint: disable=inconsistent-return-statements
recursive: Optional[bool] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Closes all handles open for given directory.
:param handle_id: Specifies handle ID opened on the file or directory to be closed. Asterisk
@@ -1272,7 +1280,7 @@ def force_close_handles( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1345,6 +1353,7 @@ def rename( # pylint: disable=inconsistent-return-statements
copy_file_smb_info: Optional[_models.CopyFileSmbInfo] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Renames a directory.
:param rename_source: Required. Specifies the URI-style path of the source file, up to 2 KB in
@@ -1399,7 +1408,7 @@ def rename( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_file_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_file_operations.py
index c07fbd141e66..287270b96c65 100644
--- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_file_operations.py
+++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_file_operations.py
@@ -66,7 +66,7 @@ def build_create_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
file_type_constant: Literal["file"] = kwargs.pop("file_type_constant", _headers.pop("x-ms-type", "file"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -133,6 +133,7 @@ def build_download_request(
timeout: Optional[int] = None,
range: Optional[str] = None,
range_get_content_md5: Optional[bool] = None,
+ structured_body_type: Optional[str] = None,
lease_id: Optional[str] = None,
allow_trailing_dot: Optional[bool] = None,
file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
@@ -141,7 +142,7 @@ def build_download_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -166,6 +167,8 @@ def build_download_request(
_headers["x-ms-range-get-content-md5"] = _SERIALIZER.header(
"range_get_content_md5", range_get_content_md5, "bool"
)
+ if structured_body_type is not None:
+ _headers["x-ms-structured-body"] = _SERIALIZER.header("structured_body_type", structured_body_type, "str")
if lease_id is not None:
_headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
if file_request_intent is not None:
@@ -188,7 +191,7 @@ def build_get_properties_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -230,7 +233,7 @@ def build_delete_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -285,7 +288,7 @@ def build_set_http_headers_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -359,7 +362,7 @@ def build_set_metadata_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -406,7 +409,7 @@ def build_acquire_lease_request(
comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -455,7 +458,7 @@ def build_release_lease_request(
comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -502,7 +505,7 @@ def build_change_lease_request(
comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -550,7 +553,7 @@ def build_break_lease_request(
comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -592,6 +595,8 @@ def build_upload_range_request(
content_md5: Optional[bytes] = None,
lease_id: Optional[str] = None,
file_last_written_mode: Optional[Union[str, _models.FileLastWrittenMode]] = None,
+ structured_body_type: Optional[str] = None,
+ structured_content_length: Optional[int] = None,
content: Optional[IO[bytes]] = None,
allow_trailing_dot: Optional[bool] = None,
file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
@@ -602,7 +607,7 @@ def build_upload_range_request(
comp: Literal["range"] = kwargs.pop("comp", _params.pop("comp", "range"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -635,6 +640,12 @@ def build_upload_range_request(
_headers["x-ms-allow-trailing-dot"] = _SERIALIZER.header("allow_trailing_dot", allow_trailing_dot, "bool")
if file_request_intent is not None:
_headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ if structured_body_type is not None:
+ _headers["x-ms-structured-body"] = _SERIALIZER.header("structured_body_type", structured_body_type, "str")
+ if structured_content_length is not None:
+ _headers["x-ms-structured-content-length"] = _SERIALIZER.header(
+ "structured_content_length", structured_content_length, "int"
+ )
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
@@ -668,7 +679,7 @@ def build_upload_range_from_url_request(
file_range_write_from_url: Literal["update"] = kwargs.pop(
"file_range_write_from_url", _headers.pop("x-ms-write", "update")
)
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -744,7 +755,7 @@ def build_get_range_list_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
comp: Literal["rangelist"] = kwargs.pop("comp", _params.pop("comp", "rangelist"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -788,6 +799,7 @@ def build_start_copy_request(
timeout: Optional[int] = None,
metadata: Optional[Dict[str, str]] = None,
file_permission: str = "inherit",
+ file_permission_format: Optional[Union[str, _models.FilePermissionFormat]] = None,
file_permission_key: Optional[str] = None,
file_permission_copy_mode: Optional[Union[str, _models.PermissionCopyModeType]] = None,
ignore_read_only: Optional[bool] = None,
@@ -805,7 +817,7 @@ def build_start_copy_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -827,6 +839,10 @@ def build_start_copy_request(
_headers["x-ms-copy-source"] = _SERIALIZER.header("copy_source", copy_source, "str")
if file_permission is not None:
_headers["x-ms-file-permission"] = _SERIALIZER.header("file_permission", file_permission, "str")
+ if file_permission_format is not None:
+ _headers["x-ms-file-permission-format"] = _SERIALIZER.header(
+ "file_permission_format", file_permission_format, "str"
+ )
if file_permission_key is not None:
_headers["x-ms-file-permission-key"] = _SERIALIZER.header("file_permission_key", file_permission_key, "str")
if file_permission_copy_mode is not None:
@@ -879,7 +895,7 @@ def build_abort_copy_request(
copy_action_abort_constant: Literal["abort"] = kwargs.pop(
"copy_action_abort_constant", _headers.pop("x-ms-copy-action", "abort")
)
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -925,7 +941,7 @@ def build_list_handles_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
comp: Literal["listhandles"] = kwargs.pop("comp", _params.pop("comp", "listhandles"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -973,7 +989,7 @@ def build_force_close_handles_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
comp: Literal["forceclosehandles"] = kwargs.pop("comp", _params.pop("comp", "forceclosehandles"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -1032,7 +1048,7 @@ def build_rename_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
comp: Literal["rename"] = kwargs.pop("comp", _params.pop("comp", "rename"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -1130,6 +1146,7 @@ def create( # pylint: disable=inconsistent-return-statements
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Creates a new file or replaces a file. Note it only initializes the file with no content.
:param file_content_length: Specifies the maximum size for the file, up to 4 TB. Required.
@@ -1180,7 +1197,7 @@ def create( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1287,9 +1304,11 @@ def download(
timeout: Optional[int] = None,
range: Optional[str] = None,
range_get_content_md5: Optional[bool] = None,
+ structured_body_type: Optional[str] = None,
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> Iterator[bytes]:
+ # pylint: disable=line-too-long
"""Reads or downloads a file from the system, including its metadata and properties.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -1303,13 +1322,16 @@ def download(
Range header, the service returns the MD5 hash for the range, as long as the range is less than
or equal to 4 MB in size. Default value is None.
:type range_get_content_md5: bool
+ :param structured_body_type: Specifies that the response content should be returned as a
+ structured message and specifies the message schema version and properties. Default value is None.
+ :type structured_body_type: str
:param lease_access_conditions: Parameter group. Default value is None.
:type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
:return: Iterator[bytes] or the result of cls(response)
:rtype: Iterator[bytes]
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1331,6 +1353,7 @@ def download(
timeout=timeout,
range=range,
range_get_content_md5=range_get_content_md5,
+ structured_body_type=structured_body_type,
lease_id=_lease_id,
allow_trailing_dot=self._config.allow_trailing_dot,
file_request_intent=self._config.file_request_intent,
@@ -1407,6 +1430,12 @@ def download(
response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration"))
response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state"))
response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status"))
+ response_headers["x-ms-structured-body"] = self._deserialize(
+ "str", response.headers.get("x-ms-structured-body")
+ )
+ response_headers["x-ms-structured-content-length"] = self._deserialize(
+ "int", response.headers.get("x-ms-structured-content-length")
+ )
deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
@@ -1423,6 +1452,7 @@ def get_properties( # pylint: disable=inconsistent-return-statements
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Returns all user-defined metadata, standard HTTP properties, and system properties for the
file. It does not return the content of the file.
@@ -1440,7 +1470,7 @@ def get_properties( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1541,6 +1571,7 @@ def delete( # pylint: disable=inconsistent-return-statements
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""removes the file from the storage account.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -1554,7 +1585,7 @@ def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1619,6 +1650,7 @@ def set_http_headers( # pylint: disable=inconsistent-return-statements
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Sets HTTP headers on the file.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -1668,7 +1700,7 @@ def set_http_headers( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1776,6 +1808,7 @@ def set_metadata( # pylint: disable=inconsistent-return-statements
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Updates user-defined metadata for the specified file.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -1792,7 +1825,7 @@ def set_metadata( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1857,6 +1890,7 @@ def acquire_lease( # pylint: disable=inconsistent-return-statements
request_id_parameter: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""[Update] The Lease File operation establishes and manages a lock on a file for write and delete
operations.
@@ -1881,7 +1915,7 @@ def acquire_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1942,6 +1976,7 @@ def acquire_lease( # pylint: disable=inconsistent-return-statements
def release_lease( # pylint: disable=inconsistent-return-statements
self, lease_id: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""[Update] The Lease File operation establishes and manages a lock on a file for write and delete
operations.
@@ -1960,7 +1995,7 @@ def release_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2024,6 +2059,7 @@ def change_lease( # pylint: disable=inconsistent-return-statements
request_id_parameter: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""[Update] The Lease File operation establishes and manages a lock on a file for write and delete
operations.
@@ -2046,7 +2082,7 @@ def change_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2111,6 +2147,7 @@ def break_lease( # pylint: disable=inconsistent-return-statements
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""[Update] The Lease File operation establishes and manages a lock on a file for write and delete
operations.
@@ -2129,7 +2166,7 @@ def break_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2198,10 +2235,13 @@ def upload_range( # pylint: disable=inconsistent-return-statements
file_range_write: Union[str, _models.FileRangeWriteType] = "update",
content_md5: Optional[bytes] = None,
file_last_written_mode: Optional[Union[str, _models.FileLastWrittenMode]] = None,
+ structured_body_type: Optional[str] = None,
+ structured_content_length: Optional[int] = None,
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
optionalbody: Optional[IO[bytes]] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Upload a range of bytes to a file.
:param range: Specifies the range of bytes to be written. Both the start and end of the range
@@ -2234,6 +2274,13 @@ def upload_range( # pylint: disable=inconsistent-return-statements
:param file_last_written_mode: If the file last write time should be preserved or overwritten.
Known values are: "Now" and "Preserve". Default value is None.
:type file_last_written_mode: str or ~azure.storage.fileshare.models.FileLastWrittenMode
+ :param structured_body_type: Required if the request body is a structured message. Specifies
+ the message schema version and properties. Default value is None.
+ :type structured_body_type: str
+ :param structured_content_length: Required if the request body is a structured message.
+ Specifies the length of the blob/file content inside the message body. Will always be smaller
+ than Content-Length. Default value is None.
+ :type structured_content_length: int
:param lease_access_conditions: Parameter group. Default value is None.
:type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
:param optionalbody: Initial data. Default value is None.
@@ -2242,7 +2289,7 @@ def upload_range( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2271,6 +2318,8 @@ def upload_range( # pylint: disable=inconsistent-return-statements
content_md5=content_md5,
lease_id=_lease_id,
file_last_written_mode=file_last_written_mode,
+ structured_body_type=structured_body_type,
+ structured_content_length=structured_content_length,
allow_trailing_dot=self._config.allow_trailing_dot,
file_request_intent=self._config.file_request_intent,
comp=comp,
@@ -2307,6 +2356,9 @@ def upload_range( # pylint: disable=inconsistent-return-statements
response_headers["x-ms-file-last-write-time"] = self._deserialize(
"str", response.headers.get("x-ms-file-last-write-time")
)
+ response_headers["x-ms-structured-body"] = self._deserialize(
+ "str", response.headers.get("x-ms-structured-body")
+ )
if cls:
return cls(pipeline_response, None, response_headers) # type: ignore
@@ -2326,6 +2378,7 @@ def upload_range_from_url( # pylint: disable=inconsistent-return-statements
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Upload a range of bytes to a file where the contents are read from a URL.
:param range: Writes data to the specified byte range in the file. Required.
@@ -2367,7 +2420,7 @@ def upload_range_from_url( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2456,6 +2509,7 @@ def get_range_list(
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> _models.ShareFileRangeList:
+ # pylint: disable=line-too-long
"""Returns the list of valid ranges for a file.
:param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
@@ -2485,7 +2539,7 @@ def get_range_list(
:rtype: ~azure.storage.fileshare.models.ShareFileRangeList
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2554,11 +2608,13 @@ def start_copy( # pylint: disable=inconsistent-return-statements
timeout: Optional[int] = None,
metadata: Optional[Dict[str, str]] = None,
file_permission: str = "inherit",
+ file_permission_format: Optional[Union[str, _models.FilePermissionFormat]] = None,
file_permission_key: Optional[str] = None,
copy_file_smb_info: Optional[_models.CopyFileSmbInfo] = None,
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Copies a blob or file to a destination file within the storage account.
:param copy_source: Specifies the URL of the source file or blob, up to 2 KB in length. To copy
@@ -2583,6 +2639,13 @@ def start_copy( # pylint: disable=inconsistent-return-statements
input, it must have owner, group and dacl. Note: Only one of the x-ms-file-permission or
x-ms-file-permission-key should be specified. Default value is "inherit".
:type file_permission: str
+ :param file_permission_format: Optional. Available for version 2023-06-01 and later. Specifies
+ the format in which the permission is returned. Acceptable values are SDDL or binary. If
+ x-ms-file-permission-format is unspecified or explicitly set to SDDL, the permission is
+ returned in SDDL format. If x-ms-file-permission-format is explicitly set to binary, the
+ permission is returned as a base64 string representing the binary encoding of the permission.
+ Known values are: "Sddl" and "Binary". Default value is None.
+ :type file_permission_format: str or ~azure.storage.fileshare.models.FilePermissionFormat
:param file_permission_key: Key of the permission to be set for the directory/file. Note: Only
one of the x-ms-file-permission or x-ms-file-permission-key should be specified. Default value
is None.
@@ -2595,7 +2658,7 @@ def start_copy( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2633,6 +2696,7 @@ def start_copy( # pylint: disable=inconsistent-return-statements
timeout=timeout,
metadata=metadata,
file_permission=file_permission,
+ file_permission_format=file_permission_format,
file_permission_key=file_permission_key,
file_permission_copy_mode=_file_permission_copy_mode,
ignore_read_only=_ignore_read_only,
@@ -2683,6 +2747,7 @@ def abort_copy( # pylint: disable=inconsistent-return-statements
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Aborts a pending Copy File operation, and leaves a destination file with zero length and full
metadata.
@@ -2700,7 +2765,7 @@ def abort_copy( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2765,6 +2830,7 @@ def list_handles(
sharesnapshot: Optional[str] = None,
**kwargs: Any
) -> _models.ListHandlesResponse:
+ # pylint: disable=line-too-long
"""Lists handles for file.
:param marker: A string value that identifies the portion of the list to be returned with the
@@ -2788,7 +2854,7 @@ def list_handles(
:rtype: ~azure.storage.fileshare.models.ListHandlesResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2851,6 +2917,7 @@ def force_close_handles( # pylint: disable=inconsistent-return-statements
sharesnapshot: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Closes all handles open for given file.
:param handle_id: Specifies handle ID opened on the file or directory to be closed. Asterisk
@@ -2873,7 +2940,7 @@ def force_close_handles( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2946,6 +3013,7 @@ def rename( # pylint: disable=inconsistent-return-statements
file_http_headers: Optional[_models.FileHTTPHeaders] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Renames a file.
:param rename_source: Required. Specifies the URI-style path of the source file, up to 2 KB in
@@ -3002,7 +3070,7 @@ def rename( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_service_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_service_operations.py
index 7ff801a48b09..b7b3b10ad0fa 100644
--- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_service_operations.py
+++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_service_operations.py
@@ -50,7 +50,7 @@ def build_set_properties_request(
restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service"))
comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -90,7 +90,7 @@ def build_get_properties_request(
restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service"))
comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -131,7 +131,7 @@ def build_list_shares_segment_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -187,6 +187,7 @@ def __init__(self, *args, **kwargs):
def set_properties( # pylint: disable=inconsistent-return-statements
self, storage_service_properties: _models.StorageServiceProperties, timeout: Optional[int] = None, **kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Sets properties for a storage account's File service endpoint, including properties for Storage
Analytics metrics and CORS (Cross-Origin Resource Sharing) rules.
@@ -201,7 +202,7 @@ def set_properties( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -254,6 +255,7 @@ def set_properties( # pylint: disable=inconsistent-return-statements
@distributed_trace
def get_properties(self, timeout: Optional[int] = None, **kwargs: Any) -> _models.StorageServiceProperties:
+ # pylint: disable=line-too-long
"""Gets the properties of a storage account's File service, including properties for Storage
Analytics metrics and CORS (Cross-Origin Resource Sharing) rules.
@@ -266,7 +268,7 @@ def get_properties(self, timeout: Optional[int] = None, **kwargs: Any) -> _model
:rtype: ~azure.storage.fileshare.models.StorageServiceProperties
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -326,6 +328,7 @@ def list_shares_segment(
timeout: Optional[int] = None,
**kwargs: Any
) -> _models.ListSharesResponse:
+ # pylint: disable=line-too-long
"""The List Shares Segment operation returns a list of the shares and share snapshots under the
specified account.
@@ -353,7 +356,7 @@ def list_shares_segment(
:rtype: ~azure.storage.fileshare.models.ListSharesResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_share_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_share_operations.py
index 8639994b70b7..48ff127e0a5c 100644
--- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_share_operations.py
+++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_share_operations.py
@@ -50,6 +50,8 @@ def build_create_request(
paid_bursting_enabled: Optional[bool] = None,
paid_bursting_max_bandwidth_mibps: Optional[int] = None,
paid_bursting_max_iops: Optional[int] = None,
+ share_provisioned_iops: Optional[int] = None,
+ share_provisioned_bandwidth_mibps: Optional[int] = None,
file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
**kwargs: Any
) -> HttpRequest:
@@ -57,7 +59,7 @@ def build_create_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -103,6 +105,14 @@ def build_create_request(
)
if file_request_intent is not None:
_headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ if share_provisioned_iops is not None:
+ _headers["x-ms-share-provisioned-iops"] = _SERIALIZER.header(
+ "share_provisioned_iops", share_provisioned_iops, "int"
+ )
+ if share_provisioned_bandwidth_mibps is not None:
+ _headers["x-ms-share-provisioned-bandwidth-mibps"] = _SERIALIZER.header(
+ "share_provisioned_bandwidth_mibps", share_provisioned_bandwidth_mibps, "int"
+ )
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
@@ -121,7 +131,7 @@ def build_get_properties_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -164,7 +174,7 @@ def build_delete_request(
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -212,7 +222,7 @@ def build_acquire_lease_request(
comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire"))
restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -263,7 +273,7 @@ def build_release_lease_request(
comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release"))
restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -312,7 +322,7 @@ def build_change_lease_request(
comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change"))
restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -362,7 +372,7 @@ def build_renew_lease_request(
comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
action: Literal["renew"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "renew"))
restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -411,7 +421,7 @@ def build_break_lease_request(
comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break"))
restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -459,7 +469,7 @@ def build_create_snapshot_request(
restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
comp: Literal["snapshot"] = kwargs.pop("comp", _params.pop("comp", "snapshot"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -500,7 +510,7 @@ def build_create_permission_request(
restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
comp: Literal["filepermission"] = kwargs.pop("comp", _params.pop("comp", "filepermission"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -542,7 +552,7 @@ def build_get_permission_request(
restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
comp: Literal["filepermission"] = kwargs.pop("comp", _params.pop("comp", "filepermission"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -585,6 +595,8 @@ def build_set_properties_request(
paid_bursting_enabled: Optional[bool] = None,
paid_bursting_max_bandwidth_mibps: Optional[int] = None,
paid_bursting_max_iops: Optional[int] = None,
+ share_provisioned_iops: Optional[int] = None,
+ share_provisioned_bandwidth_mibps: Optional[int] = None,
file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
**kwargs: Any
) -> HttpRequest:
@@ -593,7 +605,7 @@ def build_set_properties_request(
restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -638,6 +650,14 @@ def build_set_properties_request(
)
if file_request_intent is not None:
_headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ if share_provisioned_iops is not None:
+ _headers["x-ms-share-provisioned-iops"] = _SERIALIZER.header(
+ "share_provisioned_iops", share_provisioned_iops, "int"
+ )
+ if share_provisioned_bandwidth_mibps is not None:
+ _headers["x-ms-share-provisioned-bandwidth-mibps"] = _SERIALIZER.header(
+ "share_provisioned_bandwidth_mibps", share_provisioned_bandwidth_mibps, "int"
+ )
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
@@ -657,7 +677,7 @@ def build_set_metadata_request(
restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -700,7 +720,7 @@ def build_get_access_policy_request(
restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -743,7 +763,7 @@ def build_set_access_policy_request(
restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -786,7 +806,7 @@ def build_get_statistics_request(
restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
comp: Literal["stats"] = kwargs.pop("comp", _params.pop("comp", "stats"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -829,7 +849,7 @@ def build_restore_request(
restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete"))
- version: Literal["2024-11-04"] = kwargs.pop("version", _headers.pop("x-ms-version", "2024-11-04"))
+ version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
accept = _headers.pop("Accept", "application/xml")
# Construct URL
@@ -895,8 +915,11 @@ def create( # pylint: disable=inconsistent-return-statements
paid_bursting_enabled: Optional[bool] = None,
paid_bursting_max_bandwidth_mibps: Optional[int] = None,
paid_bursting_max_iops: Optional[int] = None,
+ share_provisioned_iops: Optional[int] = None,
+ share_provisioned_bandwidth_mibps: Optional[int] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Creates a new share under the specified account. If the share with the same name already
exists, the operation fails.
@@ -911,7 +934,7 @@ def create( # pylint: disable=inconsistent-return-statements
:param quota: Specifies the maximum size of the share, in gigabytes. Default value is None.
:type quota: int
:param access_tier: Specifies the access tier of the share. Known values are:
- "TransactionOptimized", "Hot", and "Cool". Default value is None.
+ "TransactionOptimized", "Hot", "Cool", and "Premium". Default value is None.
:type access_tier: str or ~azure.storage.fileshare.models.ShareAccessTier
:param enabled_protocols: Protocols to enable on the share. Default value is None.
:type enabled_protocols: str
@@ -931,11 +954,21 @@ def create( # pylint: disable=inconsistent-return-statements
the file share can support. Current maximum for a file share is 102,400 IOPS. Default value is
None.
:type paid_bursting_max_iops: int
+ :param share_provisioned_iops: Optional. Supported in version 2025-01-05 and later. Only
+ allowed for provisioned v2 file shares. Specifies the provisioned number of input/output
+ operations per second (IOPS) of the share. If this is not specified, the provisioned IOPS is
+ set to value calculated based on recommendation formula. Default value is None.
+ :type share_provisioned_iops: int
+ :param share_provisioned_bandwidth_mibps: Optional. Supported in version 2025-01-05 and later.
+ Only allowed for provisioned v2 file shares. Specifies the provisioned bandwidth of the share,
+ in mebibytes per second (MiBps). If this is not specified, the provisioned bandwidth is set to
+ value calculated based on recommendation formula. Default value is None.
+ :type share_provisioned_bandwidth_mibps: int
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -961,6 +994,8 @@ def create( # pylint: disable=inconsistent-return-statements
paid_bursting_enabled=paid_bursting_enabled,
paid_bursting_max_bandwidth_mibps=paid_bursting_max_bandwidth_mibps,
paid_bursting_max_iops=paid_bursting_max_iops,
+ share_provisioned_iops=share_provisioned_iops,
+ share_provisioned_bandwidth_mibps=share_provisioned_bandwidth_mibps,
file_request_intent=self._config.file_request_intent,
restype=restype,
version=self._config.version,
@@ -987,6 +1022,19 @@ def create( # pylint: disable=inconsistent-return-statements
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-share-quota"] = self._deserialize("int", response.headers.get("x-ms-share-quota"))
+ response_headers["x-ms-share-provisioned-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-provisioned-iops")
+ )
+ response_headers["x-ms-share-provisioned-bandwidth-mibps"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-provisioned-bandwidth-mibps")
+ )
+ response_headers["x-ms-share-included-burst-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-included-burst-iops")
+ )
+ response_headers["x-ms-share-max-burst-credits-for-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-max-burst-credits-for-iops")
+ )
if cls:
return cls(pipeline_response, None, response_headers) # type: ignore
@@ -999,6 +1047,7 @@ def get_properties( # pylint: disable=inconsistent-return-statements
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Returns all user-defined metadata and system properties for the specified share or share
snapshot. The data returned does not include the share's list of files.
@@ -1016,7 +1065,7 @@ def get_properties( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1108,6 +1157,18 @@ def get_properties( # pylint: disable=inconsistent-return-statements
response_headers["x-ms-share-paid-bursting-max-bandwidth-mibps"] = self._deserialize(
"int", response.headers.get("x-ms-share-paid-bursting-max-bandwidth-mibps")
)
+ response_headers["x-ms-share-included-burst-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-included-burst-iops")
+ )
+ response_headers["x-ms-share-max-burst-credits-for-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-max-burst-credits-for-iops")
+ )
+ response_headers["x-ms-share-next-allowed-provisioned-iops-downgrade-time"] = self._deserialize(
+ "rfc-1123", response.headers.get("x-ms-share-next-allowed-provisioned-iops-downgrade-time")
+ )
+ response_headers["x-ms-share-next-allowed-provisioned-bandwidth-downgrade-time"] = self._deserialize(
+ "rfc-1123", response.headers.get("x-ms-share-next-allowed-provisioned-bandwidth-downgrade-time")
+ )
if cls:
return cls(pipeline_response, None, response_headers) # type: ignore
@@ -1121,6 +1182,7 @@ def delete( # pylint: disable=inconsistent-return-statements
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Operation marks the specified share or share snapshot for deletion. The share or share snapshot
and any files contained within it are later deleted during garbage collection.
@@ -1141,7 +1203,7 @@ def delete( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1189,6 +1251,12 @@ def delete( # pylint: disable=inconsistent-return-statements
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-file-share-usage-bytes"] = self._deserialize(
+ "int", response.headers.get("x-ms-file-share-usage-bytes")
+ )
+ response_headers["x-ms-file-share-snapshot-usage-bytes"] = self._deserialize(
+ "int", response.headers.get("x-ms-file-share-snapshot-usage-bytes")
+ )
if cls:
return cls(pipeline_response, None, response_headers) # type: ignore
@@ -1203,6 +1271,7 @@ def acquire_lease( # pylint: disable=inconsistent-return-statements
request_id_parameter: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Lease Share operation establishes and manages a lock on a share, or the specified snapshot
for set and delete share operations.
@@ -1230,7 +1299,7 @@ def acquire_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1298,6 +1367,7 @@ def release_lease( # pylint: disable=inconsistent-return-statements
request_id_parameter: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Lease Share operation establishes and manages a lock on a share, or the specified snapshot
for set and delete share operations.
@@ -1319,7 +1389,7 @@ def release_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1386,6 +1456,7 @@ def change_lease( # pylint: disable=inconsistent-return-statements
request_id_parameter: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Lease Share operation establishes and manages a lock on a share, or the specified snapshot
for set and delete share operations.
@@ -1411,7 +1482,7 @@ def change_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1479,6 +1550,7 @@ def renew_lease( # pylint: disable=inconsistent-return-statements
request_id_parameter: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Lease Share operation establishes and manages a lock on a share, or the specified snapshot
for set and delete share operations.
@@ -1500,7 +1572,7 @@ def renew_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1568,6 +1640,7 @@ def break_lease( # pylint: disable=inconsistent-return-statements
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""The Lease Share operation establishes and manages a lock on a share, or the specified snapshot
for set and delete share operations.
@@ -1597,7 +1670,7 @@ def break_lease( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1665,6 +1738,7 @@ def break_lease( # pylint: disable=inconsistent-return-statements
def create_snapshot( # pylint: disable=inconsistent-return-statements
self, timeout: Optional[int] = None, metadata: Optional[Dict[str, str]] = None, **kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Creates a read-only snapshot of a share.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -1679,7 +1753,7 @@ def create_snapshot( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1739,6 +1813,7 @@ def create_permission( # pylint: disable=inconsistent-return-statements
content_type: str = "application/json",
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Create a permission (a security descriptor).
:param share_permission: A permission (a security descriptor) at the share level. Required.
@@ -1765,6 +1840,7 @@ def create_permission( # pylint: disable=inconsistent-return-statements
content_type: str = "application/json",
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Create a permission (a security descriptor).
:param share_permission: A permission (a security descriptor) at the share level. Required.
@@ -1786,6 +1862,7 @@ def create_permission( # pylint: disable=inconsistent-return-statements
def create_permission( # pylint: disable=inconsistent-return-statements
self, share_permission: Union[_models.SharePermission, IO[bytes]], timeout: Optional[int] = None, **kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Create a permission (a security descriptor).
:param share_permission: A permission (a security descriptor) at the share level. Is either a
@@ -1800,7 +1877,7 @@ def create_permission( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1870,6 +1947,7 @@ def get_permission(
timeout: Optional[int] = None,
**kwargs: Any
) -> _models.SharePermission:
+ # pylint: disable=line-too-long
"""Returns the permission (security descriptor) for a given key.
:param file_permission_key: Key of the permission to be set for the directory/file. Required.
@@ -1890,7 +1968,7 @@ def get_permission(
:rtype: ~azure.storage.fileshare.models.SharePermission
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1954,9 +2032,12 @@ def set_properties( # pylint: disable=inconsistent-return-statements
paid_bursting_enabled: Optional[bool] = None,
paid_bursting_max_bandwidth_mibps: Optional[int] = None,
paid_bursting_max_iops: Optional[int] = None,
+ share_provisioned_iops: Optional[int] = None,
+ share_provisioned_bandwidth_mibps: Optional[int] = None,
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Sets properties for the specified share.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -1967,7 +2048,7 @@ def set_properties( # pylint: disable=inconsistent-return-statements
:param quota: Specifies the maximum size of the share, in gigabytes. Default value is None.
:type quota: int
:param access_tier: Specifies the access tier of the share. Known values are:
- "TransactionOptimized", "Hot", and "Cool". Default value is None.
+ "TransactionOptimized", "Hot", "Cool", and "Premium". Default value is None.
:type access_tier: str or ~azure.storage.fileshare.models.ShareAccessTier
:param root_squash: Root squash to set on the share. Only valid for NFS shares. Known values
are: "NoRootSquash", "RootSquash", and "AllSquash". Default value is None.
@@ -1985,13 +2066,23 @@ def set_properties( # pylint: disable=inconsistent-return-statements
the file share can support. Current maximum for a file share is 102,400 IOPS. Default value is
None.
:type paid_bursting_max_iops: int
+ :param share_provisioned_iops: Optional. Supported in version 2025-01-05 and later. Only
+ allowed for provisioned v2 file shares. Specifies the provisioned number of input/output
+ operations per second (IOPS) of the share. If this is not specified, the provisioned IOPS is
+ set to value calculated based on recommendation formula. Default value is None.
+ :type share_provisioned_iops: int
+ :param share_provisioned_bandwidth_mibps: Optional. Supported in version 2025-01-05 and later.
+ Only allowed for provisioned v2 file shares. Specifies the provisioned bandwidth of the share,
+ in mebibytes per second (MiBps). If this is not specified, the provisioned bandwidth is set to
+ value calculated based on recommendation formula. Default value is None.
+ :type share_provisioned_bandwidth_mibps: int
:param lease_access_conditions: Parameter group. Default value is None.
:type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2021,6 +2112,8 @@ def set_properties( # pylint: disable=inconsistent-return-statements
paid_bursting_enabled=paid_bursting_enabled,
paid_bursting_max_bandwidth_mibps=paid_bursting_max_bandwidth_mibps,
paid_bursting_max_iops=paid_bursting_max_iops,
+ share_provisioned_iops=share_provisioned_iops,
+ share_provisioned_bandwidth_mibps=share_provisioned_bandwidth_mibps,
file_request_intent=self._config.file_request_intent,
restype=restype,
comp=comp,
@@ -2048,6 +2141,28 @@ def set_properties( # pylint: disable=inconsistent-return-statements
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-share-quota"] = self._deserialize("int", response.headers.get("x-ms-share-quota"))
+ response_headers["x-ms-share-provisioned-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-provisioned-iops")
+ )
+ response_headers["x-ms-share-provisioned-bandwidth-mibps"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-provisioned-bandwidth-mibps")
+ )
+ response_headers["x-ms-share-included-burst-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-included-burst-iops")
+ )
+ response_headers["x-ms-share-max-burst-credits-for-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-max-burst-credits-for-iops")
+ )
+ response_headers["x-ms-share-next-allowed-quota-downgrade-time"] = self._deserialize(
+ "rfc-1123", response.headers.get("x-ms-share-next-allowed-quota-downgrade-time")
+ )
+ response_headers["x-ms-share-next-allowed-provisioned-iops-downgrade-time"] = self._deserialize(
+ "rfc-1123", response.headers.get("x-ms-share-next-allowed-provisioned-iops-downgrade-time")
+ )
+ response_headers["x-ms-share-next-allowed-provisioned-bandwidth-downgrade-time"] = self._deserialize(
+ "rfc-1123", response.headers.get("x-ms-share-next-allowed-provisioned-bandwidth-downgrade-time")
+ )
if cls:
return cls(pipeline_response, None, response_headers) # type: ignore
@@ -2060,6 +2175,7 @@ def set_metadata( # pylint: disable=inconsistent-return-statements
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Sets one or more user-defined name-value pairs for the specified share.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -2076,7 +2192,7 @@ def set_metadata( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2138,6 +2254,7 @@ def get_access_policy(
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> List[_models.SignedIdentifier]:
+ # pylint: disable=line-too-long
"""Returns information about stored access policies specified on the share.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -2151,7 +2268,7 @@ def get_access_policy(
:rtype: list[~azure.storage.fileshare.models.SignedIdentifier]
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2217,6 +2334,7 @@ def set_access_policy( # pylint: disable=inconsistent-return-statements
share_acl: Optional[List[_models.SignedIdentifier]] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Sets a stored access policy for use with shared access signatures.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -2232,7 +2350,7 @@ def set_access_policy( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2303,6 +2421,7 @@ def get_statistics(
lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
**kwargs: Any
) -> _models.ShareStats:
+ # pylint: disable=line-too-long
"""Retrieves statistics related to the share.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -2316,7 +2435,7 @@ def get_statistics(
:rtype: ~azure.storage.fileshare.models.ShareStats
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2383,6 +2502,7 @@ def restore( # pylint: disable=inconsistent-return-statements
deleted_share_version: Optional[str] = None,
**kwargs: Any
) -> None:
+ # pylint: disable=line-too-long
"""Restores a previously deleted Share.
:param timeout: The timeout parameter is expressed in seconds. For more information, see
@@ -2404,7 +2524,7 @@ def restore( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2455,6 +2575,19 @@ def restore( # pylint: disable=inconsistent-return-statements
)
response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-share-quota"] = self._deserialize("int", response.headers.get("x-ms-share-quota"))
+ response_headers["x-ms-share-provisioned-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-provisioned-iops")
+ )
+ response_headers["x-ms-share-provisioned-bandwidth-mibps"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-provisioned-bandwidth-mibps")
+ )
+ response_headers["x-ms-share-included-burst-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-included-burst-iops")
+ )
+ response_headers["x-ms-share-max-burst-credits-for-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-max-burst-credits-for-iops")
+ )
if cls:
return cls(pipeline_response, None, response_headers) # type: ignore
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_models.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_models.py
index a6f60e3c124e..b7a2bc0d8cf8 100644
--- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_models.py
+++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_models.py
@@ -538,6 +538,10 @@ class ShareProperties(DictMixin):
"""The maximum throughput the file share can support in MiB/s."""
paid_bursting_iops: Optional[int] = None
"""The maximum IOPS the file share can support."""
+ next_provisioned_iops_downgrade: Optional["datetime"]
+ """The share's next allowed provisioned IOPS downgrade time."""
+ next_provisioned_bandwidth_downgrade: Optional["datetime"]
+ """The share's next allowed provisioned bandwidth downgrade time."""
def __init__(self, **kwargs: Any) -> None:
self.name = None # type: ignore [assignment]
@@ -565,6 +569,12 @@ def __init__(self, **kwargs: Any) -> None:
self.paid_bursting_enabled = kwargs.get('x-ms-share-paid-bursting-enabled')
self.paid_bursting_bandwidth_mibps = kwargs.get('x-ms-share-paid-bursting-max-bandwidth-mibps')
self.paid_bursting_iops = kwargs.get('x-ms-share-paid-bursting-max-iops')
+ self.included_burst_iops = kwargs.get('x-ms-share-included-burst-iops')
+ self.max_burst_credits_for_iops = kwargs.get('x-ms-share-max-burst-credits-for-iops')
+ self.next_provisioned_iops_downgrade = ( # pylint: disable=name-too-long
+ kwargs.get('x-ms-share-next-allowed-provisioned-iops-downgrade-time'))
+ self.next_provisioned_bandwidth_downgrade = ( # pylint: disable=name-too-long
+ kwargs.get('x-ms-share-next-allowed-provisioned-bandwidth-downgrade-time'))
@classmethod
def _from_generated(cls, generated):
@@ -593,6 +603,12 @@ def _from_generated(cls, generated):
props.paid_bursting_enabled = generated.properties.paid_bursting_enabled
props.paid_bursting_bandwidth_mibps = generated.properties.paid_bursting_max_bandwidth_mibps
props.paid_bursting_iops = generated.properties.paid_bursting_max_iops
+ props.included_burst_iops = generated.properties.included_burst_iops
+ props.max_burst_credits_for_iops = generated.properties.max_burst_credits_for_iops
+ props.next_provisioned_iops_downgrade = ( # pylint: disable=name-too-long
+ generated.properties.next_allowed_provisioned_iops_downgrade_time)
+ props.next_provisioned_bandwidth_downgrade = ( # pylint: disable=name-too-long
+ generated.properties.next_allowed_provisioned_bandwidth_downgrade_time)
return props
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_serialize.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_serialize.py
index 3102691db726..4091f5e1ea30 100644
--- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_serialize.py
+++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_serialize.py
@@ -46,6 +46,7 @@
'2024-05-04',
'2024-08-04',
'2024-11-04',
+ '2025-01-05',
]
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_client.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_client.py
index 65f930d1bcf5..098d972b15fb 100644
--- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_client.py
+++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_client.py
@@ -320,7 +320,7 @@ def create_share(self, **kwargs: Any) -> Dict[str, Any]:
The quota to be allotted.
:keyword access_tier:
Specifies the access tier of the share.
- Possible values: 'TransactionOptimized', 'Hot', 'Cool'
+ Possible values: 'TransactionOptimized', 'Hot', 'Cool', 'Premium'
:paramtype access_tier: str or ~azure.storage.fileshare.models.ShareAccessTier
.. versionadded:: 12.4.0
@@ -341,6 +341,8 @@ def create_share(self, **kwargs: Any) -> Dict[str, Any]:
:keyword bool paid_bursting_enabled: This property enables paid bursting.
:keyword int paid_bursting_bandwidth_mibps: The maximum throughput the file share can support in MiB/s.
:keyword int paid_bursting_iops: The maximum IOPS the file share can support.
+ :keyword int provisioned_iops: The provisioned IOPS of the share, stored on the share object.
+ :keyword int provisioned_bandwidth_mibps: The provisioned throughput of the share, stored on the share object.
:returns: Share-updated property dict (Etag and last modified).
:rtype: dict[str, Any]
@@ -361,6 +363,8 @@ def create_share(self, **kwargs: Any) -> Dict[str, Any]:
protocols = kwargs.pop('protocols', None)
paid_bursting_bandwidth_mibps = kwargs.pop('paid_bursting_bandwidth_mibps', None)
paid_bursting_iops = kwargs.pop('paid_bursting_iops', None)
+ share_provisioned_iops = kwargs.pop('provisioned_iops', None)
+ share_provisioned_bandwidth_mibps = kwargs.pop('provisioned_bandwidth_mibps', None)
if protocols and protocols not in ['NFS', 'SMB', ShareProtocols.SMB, ShareProtocols.NFS]:
raise ValueError("The enabled protocol must be set to either SMB or NFS.")
if root_squash and protocols not in ['NFS', ShareProtocols.NFS]:
@@ -378,6 +382,8 @@ def create_share(self, **kwargs: Any) -> Dict[str, Any]:
enabled_protocols=protocols,
paid_bursting_max_bandwidth_mibps=paid_bursting_bandwidth_mibps,
paid_bursting_max_iops=paid_bursting_iops,
+ share_provisioned_iops=share_provisioned_iops,
+ share_provisioned_bandwidth_mibps=share_provisioned_bandwidth_mibps,
cls=return_response_headers,
headers=headers,
**kwargs))
@@ -589,7 +595,7 @@ def set_share_properties(self, **kwargs: Any) -> Dict[str, Any]:
:keyword access_tier:
Specifies the access tier of the share.
- Possible values: 'TransactionOptimized', 'Hot', and 'Cool'
+ Possible values: 'TransactionOptimized', 'Hot', 'Cool', 'Premium'
:paramtype access_tier: str or ~azure.storage.fileshare.models.ShareAccessTier
:keyword int quota:
Specifies the maximum size of the share, in gigabytes.
@@ -610,6 +616,8 @@ def set_share_properties(self, **kwargs: Any) -> Dict[str, Any]:
:keyword bool paid_bursting_enabled: This property enables paid bursting.
:keyword int paid_bursting_bandwidth_mibps: The maximum throughput the file share can support in MiB/s.
:keyword int paid_bursting_iops: The maximum IOPS the file share can support.
+ :keyword int provisioned_iops: The provisioned IOPS of the share, stored on the share object.
+ :keyword int provisioned_bandwidth_mibps: The provisioned throughput of the share, stored on the share object.
:returns: Share-updated property dict (Etag and last modified).
:rtype: dict[str, Any]
@@ -629,6 +637,8 @@ def set_share_properties(self, **kwargs: Any) -> Dict[str, Any]:
root_squash = kwargs.pop('root_squash', None)
paid_bursting_bandwidth_mibps = kwargs.pop('paid_bursting_bandwidth_mibps', None)
paid_bursting_iops = kwargs.pop('paid_bursting_iops', None)
+ share_provisioned_iops = kwargs.pop('provisioned_iops', None)
+ share_provisioned_bandwidth_mibps = kwargs.pop('provisioned_bandwidth_mibps', None)
if all(parameter is None for parameter in [access_tier, quota, root_squash]):
raise ValueError("set_share_properties should be called with at least one parameter.")
try:
@@ -640,6 +650,8 @@ def set_share_properties(self, **kwargs: Any) -> Dict[str, Any]:
lease_access_conditions=access_conditions,
paid_bursting_max_bandwidth_mibps=paid_bursting_bandwidth_mibps,
paid_bursting_max_iops=paid_bursting_iops,
+ share_provisioned_iops=share_provisioned_iops,
+ share_provisioned_bandwidth_mibps=share_provisioned_bandwidth_mibps,
cls=return_response_headers,
**kwargs))
except HttpResponseError as error:
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_service_client.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_service_client.py
index d586696ba704..4ef878f93e28 100644
--- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_service_client.py
+++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_service_client.py
@@ -343,6 +343,8 @@ def create_share(self, share_name: str, **kwargs: Any) -> ShareClient:
This value is not tracked or validated on the client. To configure client-side network timesouts
            see `here <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-file-share#optional-configuration>`__.
+ :keyword int provisioned_iops: The provisioned IOPS of the share, stored on the share object.
+ :keyword int provisioned_bandwidth_mibps: The provisioned throughput of the share, stored on the share object.
:return: A ShareClient for the newly created Share.
:rtype: ~azure.storage.fileshare.ShareClient
@@ -358,9 +360,18 @@ def create_share(self, share_name: str, **kwargs: Any) -> ShareClient:
metadata = kwargs.pop('metadata', None)
quota = kwargs.pop('quota', None)
timeout = kwargs.pop('timeout', None)
+ provisioned_iops = kwargs.pop('provisioned_iops', None)
+ provisioned_bandwidth_mibps = kwargs.pop('provisioned_bandwidth_mibps', None)
share = self.get_share_client(share_name)
kwargs.setdefault('merge_span', True)
- share.create_share(metadata=metadata, quota=quota, timeout=timeout, **kwargs)
+ share.create_share(
+ metadata=metadata,
+ quota=quota,
+ timeout=timeout,
+ provisioned_iops=provisioned_iops,
+ provisioned_bandwidth_mibps=provisioned_bandwidth_mibps,
+ **kwargs
+ )
return share
@distributed_trace
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/base_client_async.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/base_client_async.py
index 8e81643f5cce..6186b29db107 100644
--- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/base_client_async.py
+++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/base_client_async.py
@@ -127,16 +127,16 @@ def _create_pipeline(
hosts = self._hosts
policies = [
QueueMessagePolicy(),
- config.headers_policy,
config.proxy_policy,
config.user_agent_policy,
StorageContentValidation(),
- StorageRequestHook(**kwargs),
- self._credential_policy,
ContentDecodePolicy(response_encoding="utf-8"),
AsyncRedirectPolicy(**kwargs),
StorageHosts(hosts=hosts, **kwargs),
config.retry_policy,
+ config.headers_policy,
+ StorageRequestHook(**kwargs),
+ self._credential_policy,
config.logging_policy,
AsyncStorageResponseHook(**kwargs),
DistributedTracingPolicy(**kwargs),
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/models.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/models.py
index 6fa1091fdd8f..403e6b8bea37 100644
--- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/models.py
+++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/models.py
@@ -70,6 +70,7 @@ class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta):
# Blob values
APPEND_POSITION_CONDITION_NOT_MET = "AppendPositionConditionNotMet"
+ BLOB_ACCESS_TIER_NOT_SUPPORTED_FOR_ACCOUNT_TYPE = "BlobAccessTierNotSupportedForAccountType"
BLOB_ALREADY_EXISTS = "BlobAlreadyExists"
BLOB_NOT_FOUND = "BlobNotFound"
BLOB_OVERWRITTEN = "BlobOverwritten"
@@ -154,6 +155,8 @@ class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta):
DELETE_PENDING = "DeletePending"
DIRECTORY_NOT_EMPTY = "DirectoryNotEmpty"
FILE_LOCK_CONFLICT = "FileLockConflict"
+ FILE_SHARE_PROVISIONED_BANDWIDTH_DOWNGRADE_NOT_ALLOWED = "FileShareProvisionedBandwidthDowngradeNotAllowed"
+ FILE_SHARE_PROVISIONED_IOPS_DOWNGRADE_NOT_ALLOWED = "FileShareProvisionedIopsDowngradeNotAllowed"
INVALID_FILE_OR_DIRECTORY_PATH_NAME = "InvalidFileOrDirectoryPathName"
PARENT_NOT_FOUND = "ParentNotFound"
READ_ONLY_ATTRIBUTE = "ReadOnlyAttribute"
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_file_client_async.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_file_client_async.py
index 4a63cd54e211..4a5d4ccf34e2 100644
--- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_file_client_async.py
+++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_file_client_async.py
@@ -210,10 +210,10 @@ def __init__(
@classmethod
def from_file_url(
- cls, file_url: str,
- snapshot: Optional[Union[str, Dict[str, Any]]] = None,
- credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long
- **kwargs: Any
+ cls, file_url: str,
+ snapshot: Optional[Union[str, Dict[str, Any]]] = None,
+ credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long
+ **kwargs: Any
) -> Self:
"""A client to interact with a specific file, although that file may not yet exist.
@@ -249,12 +249,12 @@ def _format_url(self, hostname: str):
@classmethod
def from_connection_string(
- cls, conn_str: str,
- share_name: str,
- file_path: str,
- snapshot: Optional[Union[str, Dict[str, Any]]] = None,
- credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long
- **kwargs: Any
+ cls, conn_str: str,
+ share_name: str,
+ file_path: str,
+ snapshot: Optional[Union[str, Dict[str, Any]]] = None,
+ credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long
+ **kwargs: Any
) -> Self:
"""Create ShareFileClient from a Connection String.
@@ -352,13 +352,13 @@ async def exists(self, **kwargs: Any) -> bool:
@distributed_trace_async
async def create_file(
- self, size: int,
- file_attributes: Optional[Union[str, "NTFSAttributes"]] = "none",
- file_creation_time: Optional[Union[str, datetime]] = "now",
- file_last_write_time: Optional[Union[str, datetime]] = "now",
- file_permission: Optional[str] = None,
- permission_key: Optional[str] = None,
- **kwargs: Any
+ self, size: int,
+ file_attributes: Optional[Union[str, "NTFSAttributes"]] = "none",
+ file_creation_time: Optional[Union[str, datetime]] = "now",
+ file_last_write_time: Optional[Union[str, datetime]] = "now",
+ file_permission: Optional[str] = None,
+ permission_key: Optional[str] = None,
+ **kwargs: Any
) -> Dict[str, Any]:
"""Creates a new file.
@@ -471,14 +471,14 @@ async def create_file(
@distributed_trace_async
async def upload_file(
- self, data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[AnyStr]],
- length: Optional[int] = None,
- file_attributes: Union[str, "NTFSAttributes"] = "none",
- file_creation_time: Optional[Union[str, datetime]] = "now",
- file_last_write_time: Optional[Union[str, datetime]] = "now",
- file_permission: Optional[str] = None,
- permission_key: Optional[str] = None,
- **kwargs
+ self, data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[AnyStr]],
+ length: Optional[int] = None,
+ file_attributes: Union[str, "NTFSAttributes"] = "none",
+ file_creation_time: Optional[Union[str, datetime]] = "now",
+ file_last_write_time: Optional[Union[str, datetime]] = "now",
+ file_permission: Optional[str] = None,
+ permission_key: Optional[str] = None,
+ **kwargs
) -> Dict[str, Any]:
"""Uploads a new file.
@@ -643,6 +643,9 @@ async def start_copy_from_url(self, source_url: str, **kwargs: Any) -> Dict[str,
This parameter was introduced in API version '2019-07-07'.
+ :keyword file_permission_format:
+ Specifies the format in which the permission is set. If not specified, SDDL will be the default.
+ :paramtype file_permission_format: Literal['sddl', 'binary']
:keyword file_attributes:
This value can be set to "source" to copy file attributes from the source file to the target file,
or to clear all attributes, it can be set to "None". Otherwise it can be set to a list of attributes
@@ -786,9 +789,9 @@ async def abort_copy(self, copy_id: Union[str, FileProperties], **kwargs: Any) -
@distributed_trace_async
async def download_file(
- self, offset: Optional[int] = None,
- length: Optional[int] = None,
- **kwargs: Any
+ self, offset: Optional[int] = None,
+ length: Optional[int] = None,
+ **kwargs: Any
) -> StorageStreamDownloader:
"""Downloads a file to the StorageStreamDownloader. The readall() method must
be used to read all the content or readinto() must be used to download the file into
@@ -841,6 +844,9 @@ async def download_file(
:dedent: 16
:caption: Download a file.
"""
+ if length is not None and offset is None:
+ raise ValueError("Offset value must not be None if length is set.")
+
range_end = None
if length is not None:
if offset is None:
@@ -1065,13 +1071,13 @@ async def get_file_properties(self, **kwargs: Any) -> FileProperties:
@distributed_trace_async
async def set_http_headers(
- self, content_settings: "ContentSettings",
- file_attributes: Union[str, "NTFSAttributes"] = "preserve",
- file_creation_time: Optional[Union[str, datetime]] = "preserve",
- file_last_write_time: Optional[Union[str, datetime]] = "preserve",
- file_permission: Optional[str] = None,
- permission_key: Optional[str] = None,
- **kwargs: Any
+ self, content_settings: "ContentSettings",
+ file_attributes: Union[str, "NTFSAttributes"] = "preserve",
+ file_creation_time: Optional[Union[str, datetime]] = "preserve",
+ file_last_write_time: Optional[Union[str, datetime]] = "preserve",
+ file_permission: Optional[str] = None,
+ permission_key: Optional[str] = None,
+ **kwargs: Any
) -> Dict[str, Any]:
"""Sets HTTP headers on the file.
@@ -1200,10 +1206,10 @@ async def set_file_metadata(self, metadata: Optional[Dict[str, Any]] = None, **k
@distributed_trace_async
async def upload_range(
- self, data: bytes,
- offset: int,
- length: int,
- **kwargs: Any
+ self, data: bytes,
+ offset: int,
+ length: int,
+ **kwargs: Any
) -> Dict[str, Any]:
"""Upload a range of bytes to a file.
@@ -1276,11 +1282,11 @@ async def upload_range(
@distributed_trace_async
async def upload_range_from_url(
- self, source_url: str,
- offset: int,
- length: int,
- source_offset: int,
- **kwargs: Any
+ self, source_url: str,
+ offset: int,
+ length: int,
+ source_offset: int,
+ **kwargs: Any
) -> Dict[str, Any]:
"""
Writes the bytes from one Azure File endpoint into the specified range of another Azure File endpoint.
@@ -1363,9 +1369,9 @@ async def upload_range_from_url(
@distributed_trace_async
async def get_ranges(
- self, offset: Optional[int] = None,
- length: Optional[int] = None,
- **kwargs: Any
+ self, offset: Optional[int] = None,
+ length: Optional[int] = None,
+ **kwargs: Any
) -> List[Dict[str, int]]:
"""Returns the list of valid page ranges for a file or snapshot
of a file.
@@ -1404,12 +1410,12 @@ async def get_ranges(
@distributed_trace_async
async def get_ranges_diff(
- self, previous_sharesnapshot: Union[str, Dict[str, Any]],
- offset: Optional[int] = None,
- length: Optional[int] = None,
- *,
- include_renames: Optional[bool] = None,
- **kwargs: Any
+ self, previous_sharesnapshot: Union[str, Dict[str, Any]],
+ offset: Optional[int] = None,
+ length: Optional[int] = None,
+ *,
+ include_renames: Optional[bool] = None,
+ **kwargs: Any
) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]]:
"""Returns the list of valid page ranges for a file or snapshot
of a file.
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_client_async.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_client_async.py
index a9f814e95bc6..a7d0f5a4d2f0 100644
--- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_client_async.py
+++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_client_async.py
@@ -315,7 +315,7 @@ async def create_share(self, **kwargs: Any) -> Dict[str, Any]:
The quota to be allotted.
:keyword access_tier:
Specifies the access tier of the share.
- Possible values: 'TransactionOptimized', 'Hot', 'Cool'
+ Possible values: 'TransactionOptimized', 'Hot', 'Cool', 'Premium'
:paramtype access_tier: str or ~azure.storage.fileshare.models.ShareAccessTier
.. versionadded:: 12.4.0
@@ -336,6 +336,8 @@ async def create_share(self, **kwargs: Any) -> Dict[str, Any]:
:keyword bool paid_bursting_enabled: This property enables paid bursting.
:keyword int paid_bursting_bandwidth_mibps: The maximum throughput the file share can support in MiB/s.
:keyword int paid_bursting_iops: The maximum IOPS the file share can support.
+ :keyword int provisioned_iops: The provisioned IOPS of the share, stored on the share object.
+ :keyword int provisioned_bandwidth_mibps: The provisioned throughput of the share, stored on the share object.
:returns: Share-updated property dict (Etag and last modified).
:rtype: dict[str, Any]
@@ -356,6 +358,8 @@ async def create_share(self, **kwargs: Any) -> Dict[str, Any]:
protocols = kwargs.pop('protocols', None)
paid_bursting_bandwidth_mibps = kwargs.pop('paid_bursting_bandwidth_mibps', None)
paid_bursting_iops = kwargs.pop('paid_bursting_iops', None)
+ share_provisioned_iops = kwargs.pop('provisioned_iops', None)
+ share_provisioned_bandwidth_mibps = kwargs.pop('provisioned_bandwidth_mibps', None)
if protocols and protocols not in ['NFS', 'SMB', ShareProtocols.SMB, ShareProtocols.NFS]:
raise ValueError("The enabled protocol must be set to either SMB or NFS.")
if root_squash and protocols not in ['NFS', ShareProtocols.NFS]:
@@ -373,6 +377,8 @@ async def create_share(self, **kwargs: Any) -> Dict[str, Any]:
enabled_protocols=protocols,
paid_bursting_max_bandwidth_mibps=paid_bursting_bandwidth_mibps,
paid_bursting_max_iops=paid_bursting_iops,
+ share_provisioned_iops=share_provisioned_iops,
+ share_provisioned_bandwidth_mibps=share_provisioned_bandwidth_mibps,
cls=return_response_headers,
headers=headers,
**kwargs))
@@ -583,7 +589,7 @@ async def set_share_properties(self, **kwargs: Any) -> Dict[str, Any]:
:keyword access_tier:
Specifies the access tier of the share.
- Possible values: 'TransactionOptimized', 'Hot', and 'Cool'
+ Possible values: 'TransactionOptimized', 'Hot', 'Cool', 'Premium'
:paramtype access_tier: str or ~azure.storage.fileshare.models.ShareAccessTier
:keyword int quota:
Specifies the maximum size of the share, in gigabytes.
@@ -604,6 +610,8 @@ async def set_share_properties(self, **kwargs: Any) -> Dict[str, Any]:
:keyword bool paid_bursting_enabled: This property enables paid bursting.
:keyword int paid_bursting_bandwidth_mibps: The maximum throughput the file share can support in MiB/s.
:keyword int paid_bursting_iops: The maximum IOPS the file share can support.
+ :keyword int provisioned_iops: The provisioned IOPS of the share, stored on the share object.
+ :keyword int provisioned_bandwidth_mibps: The provisioned throughput of the share, stored on the share object.
:returns: Share-updated property dict (Etag and last modified).
:rtype: dict[str, Any]
@@ -623,6 +631,8 @@ async def set_share_properties(self, **kwargs: Any) -> Dict[str, Any]:
root_squash = kwargs.pop('root_squash', None)
paid_bursting_bandwidth_mibps = kwargs.pop('paid_bursting_bandwidth_mibps', None)
paid_bursting_iops = kwargs.pop('paid_bursting_iops', None)
+ share_provisioned_iops = kwargs.pop('provisioned_iops', None)
+ share_provisioned_bandwidth_mibps = kwargs.pop('provisioned_bandwidth_mibps', None)
if all(parameter is None for parameter in [access_tier, quota, root_squash]):
raise ValueError("set_share_properties should be called with at least one parameter.")
try:
@@ -634,6 +644,8 @@ async def set_share_properties(self, **kwargs: Any) -> Dict[str, Any]:
lease_access_conditions=access_conditions,
paid_bursting_max_bandwidth_mibps=paid_bursting_bandwidth_mibps,
paid_bursting_max_iops=paid_bursting_iops,
+ share_provisioned_iops=share_provisioned_iops,
+ share_provisioned_bandwidth_mibps=share_provisioned_bandwidth_mibps,
cls=return_response_headers,
**kwargs))
except HttpResponseError as error:
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_service_client_async.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_service_client_async.py
index 225c7ab62476..bf33ac78d12d 100644
--- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_service_client_async.py
+++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_service_client_async.py
@@ -346,6 +346,8 @@ async def create_share(self, share_name: str, **kwargs: Any) -> ShareClient:
This value is not tracked or validated on the client. To configure client-side network timesouts
            see `here <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-file-share#optional-configuration>`__.
+ :keyword int provisioned_iops: The provisioned IOPS of the share, stored on the share object.
+ :keyword int provisioned_bandwidth_mibps: The provisioned throughput of the share, stored on the share object.
:return: A ShareClient for the newly created Share.
:rtype: ~azure.storage.fileshare.aio.ShareClient
@@ -361,9 +363,18 @@ async def create_share(self, share_name: str, **kwargs: Any) -> ShareClient:
metadata = kwargs.pop('metadata', None)
quota = kwargs.pop('quota', None)
timeout = kwargs.pop('timeout', None)
+ provisioned_iops = kwargs.pop('provisioned_iops', None)
+ provisioned_bandwidth_mibps = kwargs.pop('provisioned_bandwidth_mibps', None)
share = self.get_share_client(share_name)
kwargs.setdefault('merge_span', True)
- await share.create_share(metadata=metadata, quota=quota, timeout=timeout, **kwargs)
+ await share.create_share(
+ metadata=metadata,
+ quota=quota,
+ timeout=timeout,
+ provisioned_iops=provisioned_iops,
+ provisioned_bandwidth_mibps=provisioned_bandwidth_mibps,
+ **kwargs
+ )
return share
@distributed_trace_async
diff --git a/sdk/storage/azure-storage-file-share/swagger/README.md b/sdk/storage/azure-storage-file-share/swagger/README.md
index 37a4e927030a..c2b60e4f668b 100644
--- a/sdk/storage/azure-storage-file-share/swagger/README.md
+++ b/sdk/storage/azure-storage-file-share/swagger/README.md
@@ -16,7 +16,7 @@ autorest --v3 --python
### Settings
``` yaml
-input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/storage/data-plane/Microsoft.FileStorage/stable/2024-11-04/file.json
+input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/storage/data-plane/Microsoft.FileStorage/stable/2025-01-05/file.json
output-folder: ../azure/storage/fileshare/_generated
namespace: azure.storage.fileshare
no-namespace-folders: true
diff --git a/sdk/storage/azure-storage-file-share/tests/test_file.py b/sdk/storage/azure-storage-file-share/tests/test_file.py
index 307ed37092c2..52c92bf8c0d1 100644
--- a/sdk/storage/azure-storage-file-share/tests/test_file.py
+++ b/sdk/storage/azure-storage-file-share/tests/test_file.py
@@ -3779,6 +3779,7 @@ def test_file_permission_format(self, **kwargs):
"ABAgAAAAAABSAAAAAgAgAAAAAkAKkAEgABBQAAAAAABRUAAABZUbgXZnJdJWRjOwuMmS4AAQUA"
"AAAAAAUVAAAAoGXPfnhLm1/nfIdwr/1IAQEFAAAAAAAFFQAAAKBlz354S5tf53yHcAECAAA=")
+ # Create file
source_file.create_file(
1024,
file_permission=user_given_permission_binary,
@@ -3789,6 +3790,7 @@ def test_file_permission_format(self, **kwargs):
assert props is not None
assert props.permission_key is not None
+ # Rename file
new_file = source_file.rename_file(
'file2',
file_permission=user_given_permission_binary,
@@ -3804,6 +3806,7 @@ def test_file_permission_format(self, **kwargs):
)
assert server_returned_permission == user_given_permission_binary
+ # Set HTTP headers
content_settings = ContentSettings(
content_language='spanish',
content_disposition='inline'
@@ -3825,6 +3828,23 @@ def test_file_permission_format(self, **kwargs):
)
assert server_returned_permission == user_given_permission_sddl
+ # Copy file
+ file_client = ShareFileClient(
+ self.account_url(storage_account_name, "file"),
+ share_name=self.share_name,
+ file_path='filecopy',
+ credential=storage_account_key
+ )
+ copy = file_client.start_copy_from_url(
+ new_file.url,
+ file_permission=user_given_permission_binary,
+ file_permission_format="binary"
+ )
+ assert copy is not None
+ assert copy['copy_status'] == 'success'
+ assert copy['copy_id'] is not None
+
new_file.delete_file()
+ file_client.delete_file()
# ------------------------------------------------------------------------------
diff --git a/sdk/storage/azure-storage-file-share/tests/test_file_async.py b/sdk/storage/azure-storage-file-share/tests/test_file_async.py
index 9f1edd055be4..4480ea5917f7 100644
--- a/sdk/storage/azure-storage-file-share/tests/test_file_async.py
+++ b/sdk/storage/azure-storage-file-share/tests/test_file_async.py
@@ -156,8 +156,8 @@ async def _wait_for_async_copy(self, share_name, file_path):
properties = await file_client.get_file_properties()
assert properties.copy.status == 'success'
- async def assertFileEqual(self, file_client, expected_data):
- content = await file_client.download_file()
+ async def assertFileEqual(self, file_client, expected_data, **kwargs):
+ content = await file_client.download_file(**kwargs)
actual_data = await content.readall()
assert actual_data == expected_data
@@ -3942,4 +3942,21 @@ async def test_file_permission_format(self, **kwargs):
)
assert server_returned_permission == user_given_permission_sddl
+ # Copy file
+ file_client = ShareFileClient(
+ self.account_url(storage_account_name, "file"),
+ share_name=self.share_name,
+ file_path='filecopy',
+ credential=storage_account_key
+ )
+ copy = await file_client.start_copy_from_url(
+ new_file.url,
+ file_permission=user_given_permission_binary,
+ file_permission_format="binary"
+ )
+ assert copy is not None
+ assert copy['copy_status'] == 'success'
+ assert copy['copy_id'] is not None
+
await new_file.delete_file()
+ await file_client.delete_file()
diff --git a/sdk/storage/azure-storage-file-share/tests/test_share.py b/sdk/storage/azure-storage-file-share/tests/test_share.py
index 94edec51b032..204916b93839 100644
--- a/sdk/storage/azure-storage-file-share/tests/test_share.py
+++ b/sdk/storage/azure-storage-file-share/tests/test_share.py
@@ -1785,6 +1785,82 @@ def test_share_lease_with_oauth(self, **kwargs):
lease.release()
share_client.delete_share()
+ @FileSharePreparer()
+ @recorded_by_proxy
+ def test_create_share_access_tier_premium(self, **kwargs):
+ premium_storage_file_account_name = kwargs.pop("premium_storage_file_account_name")
+ premium_storage_file_account_key = kwargs.pop("premium_storage_file_account_key")
+
+ try:
+ self._setup(premium_storage_file_account_name, premium_storage_file_account_key)
+
+ share = self._get_share_reference()
+ share.create_share(access_tier='Premium')
+ props = share.get_share_properties()
+ assert props.access_tier == 'Premium'
+ finally:
+ self._delete_shares()
+
+ @FileSharePreparer()
+ @recorded_by_proxy
+ def test_set_share_properties_access_tier_premium(self, **kwargs):
+ premium_storage_file_account_name = kwargs.pop("premium_storage_file_account_name")
+ premium_storage_file_account_key = kwargs.pop("premium_storage_file_account_key")
+
+ try:
+ self._setup(premium_storage_file_account_name, premium_storage_file_account_key)
+
+ share = self._get_share_reference()
+ share.create_share()
+ share.set_share_properties(access_tier='Premium')
+ props = share.get_share_properties()
+ assert props.access_tier == 'Premium'
+ finally:
+ self._delete_shares()
+
+ @pytest.mark.playback_test_only
+ @FileSharePreparer()
+ @recorded_by_proxy
+ def test_provisioned_billing_v2(self, **kwargs):
+ storage_account_name = kwargs.pop("storage_account_name")
+ storage_account_key = kwargs.pop("storage_account_key")
+
+ try:
+ self._setup(storage_account_name, storage_account_key)
+
+ share_name = self.get_resource_name(TEST_SHARE_PREFIX)
+ share = self.fsc.get_share_client(share_name)
+ self.test_shares.append(share_name)
+
+ share.create_share(provisioned_iops=500, provisioned_bandwidth_mibps=150)
+ props = share.get_share_properties()
+ assert props is not None
+ assert props.provisioned_iops == 500
+ assert props.provisioned_bandwidth == 150
+ assert props.included_burst_iops is not None
+ assert props.max_burst_credits_for_iops is not None
+ assert props.next_provisioned_iops_downgrade is not None
+ assert props.next_provisioned_bandwidth_downgrade is not None
+
+ share.set_share_properties(
+ access_tier="Hot",
+ provisioned_iops=3000,
+ provisioned_bandwidth_mibps=125
+ )
+
+ shares = list(self.fsc.list_shares())
+
+ assert shares is not None
+ assert len(shares) >= 1
+ assert shares[0].name == share_name
+ assert shares[0].provisioned_iops == 3000
+ assert shares[0].provisioned_bandwidth == 125
+ assert shares[0].included_burst_iops is not None
+ assert shares[0].max_burst_credits_for_iops is not None
+ assert shares[0].next_provisioned_iops_downgrade is not None
+ assert shares[0].next_provisioned_bandwidth_downgrade is not None
+ finally:
+ self._delete_shares()
# ------------------------------------------------------------------------------
if __name__ == '__main__':
diff --git a/sdk/storage/azure-storage-file-share/tests/test_share_async.py b/sdk/storage/azure-storage-file-share/tests/test_share_async.py
index fa0e2db24eb1..4929a805b9ce 100644
--- a/sdk/storage/azure-storage-file-share/tests/test_share_async.py
+++ b/sdk/storage/azure-storage-file-share/tests/test_share_async.py
@@ -1821,3 +1821,82 @@ async def test_share_lease_with_oauth(self, **kwargs):
await lease.release()
await share_client.delete_share()
+
+ @FileSharePreparer()
+ @recorded_by_proxy_async
+ async def test_create_share_access_tier_premium(self, **kwargs):
+ premium_storage_file_account_name = kwargs.pop("premium_storage_file_account_name")
+ premium_storage_file_account_key = kwargs.pop("premium_storage_file_account_key")
+
+ try:
+ self._setup(premium_storage_file_account_name, premium_storage_file_account_key)
+
+ share = self._get_share_reference()
+ await share.create_share(access_tier='Premium')
+ props = await share.get_share_properties()
+ assert props.access_tier == 'Premium'
+ finally:
+ await self._delete_shares()
+
+ @FileSharePreparer()
+ @recorded_by_proxy_async
+ async def test_set_share_properties_access_tier_premium(self, **kwargs):
+ premium_storage_file_account_name = kwargs.pop("premium_storage_file_account_name")
+ premium_storage_file_account_key = kwargs.pop("premium_storage_file_account_key")
+
+ try:
+ self._setup(premium_storage_file_account_name, premium_storage_file_account_key)
+
+ share = self._get_share_reference()
+ await share.create_share()
+ await share.set_share_properties(access_tier='Premium')
+ props = await share.get_share_properties()
+ assert props.access_tier == 'Premium'
+ finally:
+ await self._delete_shares()
+
+ @pytest.mark.playback_test_only
+ @FileSharePreparer()
+ @recorded_by_proxy_async
+ async def test_provisioned_billing_v2(self, **kwargs):
+ storage_account_name = kwargs.pop("storage_account_name")
+ storage_account_key = kwargs.pop("storage_account_key")
+
+ try:
+ self._setup(storage_account_name, storage_account_key)
+
+ share_name = self.get_resource_name(TEST_SHARE_PREFIX)
+ share = self.fsc.get_share_client(share_name)
+ self.test_shares.append(share_name)
+
+ await share.create_share(provisioned_iops=500, provisioned_bandwidth_mibps=150)
+ props = await share.get_share_properties()
+ assert props is not None
+ assert props.provisioned_iops == 500
+ assert props.provisioned_bandwidth == 150
+ assert props.included_burst_iops is not None
+ assert props.max_burst_credits_for_iops is not None
+ assert props.next_provisioned_iops_downgrade is not None
+ assert props.next_provisioned_bandwidth_downgrade is not None
+
+ await share.set_share_properties(
+ access_tier="Hot",
+ provisioned_iops=3000,
+ provisioned_bandwidth_mibps=125
+ )
+
+ shares = []
+ async for share in self.fsc.list_shares():
+ shares.append(share)
+
+ assert shares is not None
+ assert len(shares) >= 1
+ assert shares[0].name == share_name
+ assert shares[0].provisioned_iops == 3000
+ assert shares[0].provisioned_bandwidth == 125
+ assert shares[0].included_burst_iops is not None
+ assert shares[0].max_burst_credits_for_iops is not None
+ assert shares[0].next_provisioned_iops_downgrade is not None
+ assert shares[0].next_provisioned_bandwidth_downgrade is not None
+ finally:
+ await self._delete_shares()
diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/base_client_async.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/base_client_async.py
index 8e81643f5cce..6186b29db107 100644
--- a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/base_client_async.py
+++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/base_client_async.py
@@ -127,16 +127,16 @@ def _create_pipeline(
hosts = self._hosts
policies = [
QueueMessagePolicy(),
- config.headers_policy,
config.proxy_policy,
config.user_agent_policy,
StorageContentValidation(),
- StorageRequestHook(**kwargs),
- self._credential_policy,
ContentDecodePolicy(response_encoding="utf-8"),
AsyncRedirectPolicy(**kwargs),
StorageHosts(hosts=hosts, **kwargs),
config.retry_policy,
+ config.headers_policy,
+ StorageRequestHook(**kwargs),
+ self._credential_policy,
config.logging_policy,
AsyncStorageResponseHook(**kwargs),
DistributedTracingPolicy(**kwargs),
diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/models.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/models.py
index 507678b32317..c8949723449b 100644
--- a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/models.py
+++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/models.py
@@ -70,6 +70,7 @@ class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta):
# Blob values
APPEND_POSITION_CONDITION_NOT_MET = "AppendPositionConditionNotMet"
+ BLOB_ACCESS_TIER_NOT_SUPPORTED_FOR_ACCOUNT_TYPE = "BlobAccessTierNotSupportedForAccountType"
BLOB_ALREADY_EXISTS = "BlobAlreadyExists"
BLOB_NOT_FOUND = "BlobNotFound"
BLOB_OVERWRITTEN = "BlobOverwritten"
@@ -154,6 +155,8 @@ class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta):
DELETE_PENDING = "DeletePending"
DIRECTORY_NOT_EMPTY = "DirectoryNotEmpty"
FILE_LOCK_CONFLICT = "FileLockConflict"
+ FILE_SHARE_PROVISIONED_BANDWIDTH_DOWNGRADE_NOT_ALLOWED = "FileShareProvisionedBandwidthDowngradeNotAllowed"
+ FILE_SHARE_PROVISIONED_IOPS_DOWNGRADE_NOT_ALLOWED = "FileShareProvisionedIopsDowngradeNotAllowed"
INVALID_FILE_OR_DIRECTORY_PATH_NAME = "InvalidFileOrDirectoryPathName"
PARENT_NOT_FOUND = "ParentNotFound"
READ_ONLY_ATTRIBUTE = "ReadOnlyAttribute"