From 9bf2277d7651a663d9a32ed4758acd2dd1e5e450 Mon Sep 17 00:00:00 2001 From: Peter Wu Date: Mon, 14 Apr 2025 10:36:50 -0400 Subject: [PATCH 1/6] Ported the change to get_api_version over from blob client branch --- .../storage/blob/_blob_service_client.py | 23 +++++++++++-------- .../azure/storage/blob/_container_client.py | 5 +++- .../azure/storage/blob/_serialize.py | 22 +++++++++++------- .../blob/aio/_blob_service_client_async.py | 23 +++++++++++-------- .../blob/aio/_container_client_async.py | 5 +++- 5 files changed, 48 insertions(+), 30 deletions(-) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_service_client.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_service_client.py index f6e17cb756f0..749fac377db3 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_service_client.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_service_client.py @@ -121,6 +121,9 @@ class BlobServiceClient(StorageAccountHostsMixin, StorageEncryptionMixin): def __init__( self, account_url: str, credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + *, + api_version: Optional[str] = None, + # TODO **kwargs: Any ) -> None: parsed_url, sas_token = _parse_url(account_url=account_url) @@ -128,7 +131,7 @@ def __init__( self._query_str, credential = self._format_query_string(sas_token, credential) super(BlobServiceClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) self._client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline) - self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] + self._client._config.version = get_api_version(api_version) # type: ignore [assignment] self._configure_encryption(kwargs) def _format_url(self, hostname): @@ -240,7 +243,7 @@ def get_account_information(self, **kwargs: Any) -> Dict[str, str]: :caption: 
Getting account information for the blob service. """ try: - return self._client.service.get_account_info(cls=return_response_headers, **kwargs) # type: ignore + return self._client.service.get_account_info(cls=return_response_headers, **kwargs) # type: ignore except HttpResponseError as error: process_storage_error(error) @@ -284,7 +287,7 @@ def get_service_stats(self, **kwargs: Any) -> Dict[str, Any]: """ timeout = kwargs.pop('timeout', None) try: - stats = self._client.service.get_statistics( # type: ignore + stats = self._client.service.get_statistics( # type: ignore timeout=timeout, use_location=LocationMode.SECONDARY, **kwargs) return service_stats_deserialize(stats) except HttpResponseError as error: @@ -391,7 +394,7 @@ def set_service_properties( logging=analytics_logging, hour_metrics=hour_metrics, minute_metrics=minute_metrics, - cors=CorsRule._to_generated(cors), # pylint: disable=protected-access + cors=CorsRule._to_generated(cors), # pylint: disable=protected-access default_service_version=target_version, delete_retention_policy=delete_retention_policy, static_website=static_website @@ -648,7 +651,7 @@ def _rename_container(self, name: str, new_name: str, **kwargs: Any) -> Containe except AttributeError: kwargs['source_lease_id'] = lease try: - renamed_container._client.container.rename(name, **kwargs) # pylint: disable = protected-access + renamed_container._client.container.rename(name, **kwargs) # pylint: disable=protected-access return renamed_container except HttpResponseError as error: process_storage_error(error) @@ -685,7 +688,7 @@ def undelete_container( warnings.warn("`new_name` is no longer supported.", DeprecationWarning) container = self.get_container_client(new_name or deleted_container_name) try: - container._client.container.restore(deleted_container_name=deleted_container_name, # pylint: disable = protected-access + container._client.container.restore(deleted_container_name=deleted_container_name, # pylint: disable=protected-access 
deleted_container_version=deleted_container_version, timeout=kwargs.pop('timeout', None), **kwargs) return container @@ -718,8 +721,8 @@ def get_container_client(self, container: Union[ContainerProperties, str]) -> Co else: container_name = container _pipeline = Pipeline( - transport=TransportWrapper(self._pipeline._transport), # pylint: disable = protected-access - policies=self._pipeline._impl_policies # pylint: disable = protected-access + transport=TransportWrapper(self._pipeline._transport), # pylint: disable=protected-access + policies=self._pipeline._impl_policies # pylint: disable=protected-access ) return ContainerClient( self.url, container_name=container_name, @@ -776,8 +779,8 @@ def get_blob_client( else: container_name = container _pipeline = Pipeline( - transport=TransportWrapper(self._pipeline._transport), # pylint: disable = protected-access - policies=self._pipeline._impl_policies # pylint: disable = protected-access + transport=TransportWrapper(self._pipeline._transport), # pylint: disable=protected-access + policies=self._pipeline._impl_policies # pylint: disable=protected-access ) return BlobClient( self.url, container_name=container_name, blob_name=blob_name, snapshot=snapshot, diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py index 783df6bc753e..89ad69433eed 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py @@ -137,6 +137,9 @@ def __init__( self, account_url: str, container_name: str, credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + *, + api_version: Optional[str] = None, + # TODO **kwargs: Any ) -> None: parsed_url, sas_token = _parse_url(account_url=account_url, container_name=container_name) @@ -146,7 +149,7 @@ def 
__init__( self._raw_credential = credential if credential else sas_token self._query_str, credential = self._format_query_string(sas_token, credential) super(ContainerClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) - self._api_version = get_api_version(kwargs) + self._api_version = get_api_version(api_version) self._client = self._build_generated_client() self._configure_encryption(kwargs) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_serialize.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_serialize.py index 316e321cd8af..f9e6c3e14db6 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_serialize.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_serialize.py @@ -31,6 +31,7 @@ if TYPE_CHECKING: from ._lease import BlobLeaseClient + from aio._lease_async import BlobLeaseClient as AsyncBlobLeaseClient _SUPPORTED_API_VERSIONS = [ @@ -90,12 +91,16 @@ def _get_match_headers( return if_match, if_none_match -def get_access_conditions(lease: Optional[Union["BlobLeaseClient", str]]) -> Optional[LeaseAccessConditions]: - try: - lease_id = lease.id # type: ignore - except AttributeError: - lease_id = lease # type: ignore - return LeaseAccessConditions(lease_id=lease_id) if lease_id else None +def get_access_conditions( + lease: Optional[Union["BlobLeaseClient", "AsyncBlobLeaseClient", str]] +) -> Optional[LeaseAccessConditions]: + if lease is None: + return None + if hasattr(lease, "id"): + lease_id = lease.id # type: ignore + else: + lease_id = lease # type: ignore + return LeaseAccessConditions(lease_id=lease_id) def get_modify_conditions(kwargs: Dict[str, Any]) -> ModifiedAccessConditions: @@ -143,18 +148,19 @@ def get_container_cpk_scope_info(kwargs: Dict[str, Any]) -> Optional[ContainerCp return None -def get_api_version(kwargs: Dict[str, Any]) -> str: - api_version = kwargs.get('api_version', None) +def get_api_version(api_version: Optional[str] = None) -> str: if api_version and 
api_version not in _SUPPORTED_API_VERSIONS: versions = '\n'.join(_SUPPORTED_API_VERSIONS) raise ValueError(f"Unsupported API version '{api_version}'. Please select from:\n{versions}") return api_version or _SUPPORTED_API_VERSIONS[-1] + def get_version_id(self_vid: Optional[str], kwargs: Dict[str, Any]) -> Optional[str]: if 'version_id' in kwargs: return cast(str, kwargs.pop('version_id')) return self_vid + def serialize_blob_tags_header(tags: Optional[Dict[str, str]] = None) -> Optional[str]: if tags is None: return None diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_service_client_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_service_client_async.py index 8f76aa98c8cf..0eb43d3a2d95 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_service_client_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_service_client_async.py @@ -128,6 +128,9 @@ class BlobServiceClient( # type: ignore [misc] def __init__( self, account_url: str, credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + *, + api_version: Optional[str] = None, + # TODO **kwargs: Any ) -> None: kwargs['retry_policy'] = kwargs.get('retry_policy') or ExponentialRetry(**kwargs) @@ -136,7 +139,7 @@ def __init__( self._query_str, credential = self._format_query_string(sas_token, credential) super(BlobServiceClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) self._client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline) - self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] + self._client._config.version = get_api_version(api_version) # type: ignore [assignment] self._configure_encryption(kwargs) def _format_url(self, hostname): @@ -248,7 +251,7 @@ async def get_account_information(self, **kwargs: Any) -> Dict[str, str]: 
:caption: Getting account information for the blob service. """ try: - return await self._client.service.get_account_info(cls=return_response_headers, **kwargs) # type: ignore + return await self._client.service.get_account_info(cls=return_response_headers, **kwargs) # type: ignore except HttpResponseError as error: process_storage_error(error) @@ -292,7 +295,7 @@ async def get_service_stats(self, **kwargs: Any) -> Dict[str, Any]: """ timeout = kwargs.pop('timeout', None) try: - stats = await self._client.service.get_statistics( # type: ignore + stats = await self._client.service.get_statistics( # type: ignore timeout=timeout, use_location=LocationMode.SECONDARY, **kwargs) return service_stats_deserialize(stats) except HttpResponseError as error: @@ -399,7 +402,7 @@ async def set_service_properties( logging=analytics_logging, hour_metrics=hour_metrics, minute_metrics=minute_metrics, - cors=CorsRule._to_generated(cors), # pylint: disable=protected-access + cors=CorsRule._to_generated(cors), # pylint: disable=protected-access default_service_version=target_version, delete_retention_policy=delete_retention_policy, static_website=static_website @@ -656,7 +659,7 @@ async def _rename_container(self, name: str, new_name: str, **kwargs: Any) -> Co except AttributeError: kwargs['source_lease_id'] = lease try: - await renamed_container._client.container.rename(name, **kwargs) # pylint: disable = protected-access + await renamed_container._client.container.rename(name, **kwargs) # pylint: disable=protected-access return renamed_container except HttpResponseError as error: process_storage_error(error) @@ -693,7 +696,7 @@ async def undelete_container( warnings.warn("`new_name` is no longer supported.", DeprecationWarning) container = self.get_container_client(new_name or deleted_container_name) try: - await container._client.container.restore(deleted_container_name=deleted_container_name, # pylint: disable = protected-access + await 
container._client.container.restore(deleted_container_name=deleted_container_name, # pylint: disable=protected-access deleted_container_version=deleted_container_version, timeout=kwargs.pop('timeout', None), **kwargs) return container @@ -726,8 +729,8 @@ def get_container_client(self, container: Union[ContainerProperties, str]) -> Co else: container_name = container _pipeline = AsyncPipeline( - transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable = protected-access - policies=self._pipeline._impl_policies #type: ignore [arg-type] # pylint: disable = protected-access + transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable=protected-access + policies=self._pipeline._impl_policies #type: ignore [arg-type] # pylint: disable=protected-access ) return ContainerClient( self.url, container_name=container_name, @@ -786,9 +789,9 @@ def get_blob_client( else: container_name = container _pipeline = AsyncPipeline( - transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable = protected-access + transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable=protected-access policies=cast(Iterable["AsyncHTTPPolicy"], - self._pipeline._impl_policies) # pylint: disable = protected-access + self._pipeline._impl_policies) # pylint: disable=protected-access ) return BlobClient( self.url, container_name=container_name, blob_name=blob_name, snapshot=snapshot, diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_container_client_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_container_client_async.py index 306e3acf5519..eeb97625131b 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_container_client_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_container_client_async.py @@ -129,6 +129,9 @@ def __init__( self, account_url: str, container_name: str, credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", 
"AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + *, + api_version: Optional[str] = None, + # TODO **kwargs: Any ) -> None: kwargs['retry_policy'] = kwargs.get('retry_policy') or ExponentialRetry(**kwargs) @@ -139,7 +142,7 @@ def __init__( self._raw_credential = credential if credential else sas_token self._query_str, credential = self._format_query_string(sas_token, credential) super(ContainerClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) - self._api_version = get_api_version(kwargs) + self._api_version = get_api_version(api_version) self._client = self._build_generated_client() self._configure_encryption(kwargs) From 10fcbee4b05086b288150afbfe7e12015c72c7d3 Mon Sep 17 00:00:00 2001 From: Peter Wu Date: Mon, 14 Apr 2025 14:51:21 -0400 Subject: [PATCH 2/6] [WIP] Container Client Named Keywords --- .../azure/storage/blob/_blob_client.py | 1348 ++++++++++++++--- .../storage/blob/_blob_client_helpers.py | 377 +++-- .../azure/storage/blob/_container_client.py | 276 +++- .../storage/blob/_container_client_helpers.py | 53 +- .../azure/storage/blob/_serialize.py | 5 +- .../storage/blob/aio/_blob_client_async.py | 1262 ++++++++++++--- .../blob/aio/_container_client_async.py | 292 +++- 7 files changed, 2966 insertions(+), 647 deletions(-) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py index 90049ff88e32..6b4f1d3b9203 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py @@ -3,13 +3,14 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- -# pylint: disable=too-many-lines, docstring-keyword-should-match-keyword-only +# pylint: disable=too-many-lines, too-many-locals import warnings from datetime import datetime from functools import partial from typing import ( - Any, AnyStr, cast, Dict, IO, Iterable, List, Optional, overload, Tuple, Union, + Any, AnyStr, Callable, cast, Dict, IO, Iterable, + List, Literal, Optional, overload, Tuple, Union, TYPE_CHECKING ) from typing_extensions import Self @@ -20,6 +21,7 @@ from azure.core.tracing.decorator import distributed_trace from ._blob_client_helpers import ( _abort_copy_options, + _acquire_lease_options, _append_block_from_url_options, _append_block_options, _clear_page_options, @@ -31,7 +33,9 @@ _download_blob_options, _format_url, _from_blob_url, + _get_blob_properties_options, _get_blob_tags_options, + _get_block_list_options, _get_block_list_result, _get_page_ranges_options, _parse_url, @@ -41,7 +45,9 @@ _set_blob_metadata_options, _set_blob_tags_options, _set_http_headers_options, + _set_premium_page_blob_tier_options, _set_sequence_number_options, + _set_standard_blob_tier_options, _stage_block_from_url_options, _stage_block_options, _start_copy_from_url_options, @@ -59,18 +65,19 @@ from ._download import StorageStreamDownloader from ._encryption import StorageEncryptionMixin, _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION from ._generated import AzureBlobStorage -from ._generated.models import CpkInfo from ._lease import BlobLeaseClient -from ._models import BlobBlock, BlobProperties, BlobQueryError, BlobType, PageRange, PageRangePaged +from ._models import ( + BlobBlock, + BlobProperties, + BlobQueryError, + BlobType, + PageRange, + PageRangePaged +) from ._quick_query_helper import BlobQueryReader from ._shared.base_client import parse_connection_str, StorageAccountHostsMixin, TransportWrapper from ._shared.response_handlers import process_storage_error, return_response_headers 
-from ._serialize import ( - get_access_conditions, - get_api_version, - get_modify_conditions, - get_version_id -) +from ._serialize import get_api_version from ._upload_helpers import ( upload_append_blob, upload_block_blob, @@ -78,8 +85,17 @@ ) if TYPE_CHECKING: + from azure.core import MatchConditions from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential, TokenCredential - from azure.storage.blob import ContainerClient + from azure.storage.blob import ( + ContainerClient, + CustomerProvidedEncryptionKey, + DelimitedTextDialect, + DelimitedJsonDialect, + QuickQueryDialect, + ArrowDialect + ) + from ._generated.models import RehydratePriority from ._models import ( ContentSettings, ImmutabilityPolicy, @@ -165,12 +181,25 @@ def __init__( blob_name: str, snapshot: Optional[Union[str, Dict[str, Any]]] = None, credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + *, + api_version: Optional[str] = None, + secondary_hostname: Optional[str] = None, + version_id: Optional[str] = None, + audience: Optional[str] = None, + max_block_size: int = 4 * 1024 * 1024, + max_page_size: int = 4 * 1024 * 1024, + max_chunk_get_size: int = 4 * 1024 * 1024, + max_single_put_size: int = 64 * 1024 * 1024, + max_single_get_size: int = 32 * 1024 * 1024, + min_large_block_upload_threshold: int = 4 * 1024 * 1024 + 1, + use_byte_buffer: Optional[bool] = None, **kwargs: Any ) -> None: parsed_url, sas_token, path_snapshot = _parse_url( account_url=account_url, container_name=container_name, - blob_name=blob_name) + blob_name=blob_name + ) self.container_name = container_name self.blob_name = blob_name @@ -180,14 +209,28 @@ def __init__( self.snapshot = snapshot['snapshot'] else: self.snapshot = snapshot or path_snapshot - self.version_id = kwargs.pop('version_id', None) + self.version_id = version_id # This parameter is used for the hierarchy traversal. 
Give precedence to credential. self._raw_credential = credential if credential else sas_token self._query_str, credential = self._format_query_string(sas_token, credential, snapshot=self.snapshot) - super(BlobClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) + super(BlobClient, self).__init__( + parsed_url, + service='blob', + credential=credential, + secondary_hostname=secondary_hostname, + audience=audience, + max_block_size=max_block_size, + max_page_size=max_page_size, + max_chunk_get_size=max_chunk_get_size, + max_single_put_size=max_single_put_size, + max_single_get_size=max_single_get_size, + min_large_block_upload_threshold=min_large_block_upload_threshold, + use_byte_buffer=use_byte_buffer, + **kwargs + ) self._client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline) - self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] + self._client._config.version = get_api_version(api_version) # type: ignore [assignment] self._configure_encryption(kwargs) def _format_url(self, hostname: str) -> str: @@ -204,6 +247,18 @@ def from_blob_url( cls, blob_url: str, credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long snapshot: Optional[Union[str, Dict[str, Any]]] = None, + *, + api_version: Optional[str] = None, + secondary_hostname: Optional[str] = None, + version_id: Optional[str] = None, + audience: Optional[str] = None, + max_block_size: int = 4 * 1024 * 1024, + max_page_size: int = 4 * 1024 * 1024, + max_chunk_get_size: int = 4 * 1024 * 1024, + max_single_put_size: int = 64 * 1024 * 1024, + max_single_get_size: int = 32 * 1024 * 1024, + min_large_block_upload_threshold: int = 4 * 1024 * 1024 + 1, + use_byte_buffer: Optional[bool] = None, **kwargs: Any ) -> Self: """Create BlobClient from a blob url. This doesn't support customized blob url with '/' in blob name. 
@@ -226,23 +281,60 @@ def from_blob_url( ~azure.core.credentials.AzureNamedKeyCredential or ~azure.core.credentials.AzureSasCredential or ~azure.core.credentials.TokenCredential or - str or dict[str, str] or None + str or Dict[str, str] or None :param str snapshot: The optional blob snapshot on which to operate. This can be the snapshot ID string or the response returned from :func:`create_snapshot`. If specified, this will override the snapshot in the url. + :keyword str api_version: + The Storage API version to use for requests. Default value is the most recent service version that is + compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. + + .. versionadded:: 12.2.0 + + :keyword str secondary_hostname: + The hostname of the secondary endpoint. :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. :keyword str audience: The audience to use when requesting tokens for Azure Active Directory authentication. Only has an effect when credential is of type TokenCredential. The value could be https://storage.azure.com/ (default) or https://.blob.core.windows.net. + :keyword int max_block_size: The maximum chunk size for uploading a block blob in chunks. + Defaults to 4*1024*1024, or 4MB. + :keyword int max_single_put_size: + If the blob size is less than or equal max_single_put_size, then the blob will be + uploaded with only one http PUT request. If the blob size is larger than max_single_put_size, + the blob will be uploaded in chunks. Defaults to 64*1024*1024, or 64MB. + :keyword int min_large_block_upload_threshold: The minimum chunk size required to use the memory efficient + algorithm when uploading a block blob. Defaults to 4*1024*1024+1. + :keyword bool use_byte_buffer: Use a byte buffer for block blob uploads. Defaults to False. + :keyword int max_page_size: The maximum chunk size for uploading a page blob. 
Defaults to 4*1024*1024, or 4MB. + :keyword int max_single_get_size: The maximum size for a blob to be downloaded in a single call, + the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. + :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. + Defaults to 4*1024*1024, or 4MB. :returns: A Blob client. :rtype: ~azure.storage.blob.BlobClient """ account_url, container_name, blob_name, path_snapshot = _from_blob_url(blob_url=blob_url, snapshot=snapshot) return cls( - account_url, container_name=container_name, blob_name=blob_name, - snapshot=path_snapshot, credential=credential, **kwargs + account_url, + container_name=container_name, + blob_name=blob_name, + snapshot=path_snapshot, + credential=credential, + version_id=version_id, + audience=audience, + api_version=api_version, + secondary_hostname=secondary_hostname, + max_block_size=max_block_size, + max_page_size=max_page_size, + max_chunk_get_size=max_chunk_get_size, + max_single_put_size=max_single_put_size, + max_single_get_size=max_single_get_size, + min_large_block_upload_threshold=min_large_block_upload_threshold, + use_byte_buffer=use_byte_buffer, + **kwargs ) @classmethod @@ -252,6 +344,18 @@ def from_connection_string( blob_name: str, snapshot: Optional[Union[str, Dict[str, Any]]] = None, credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + *, + api_version: Optional[str] = None, + secondary_hostname: Optional[str] = None, + version_id: Optional[str] = None, + audience: Optional[str] = None, + max_block_size: int = 4 * 1024 * 1024, + max_page_size: int = 4 * 1024 * 1024, + max_chunk_get_size: int = 4 * 1024 * 1024, + max_single_put_size: int = 64 * 1024 * 1024, + max_single_get_size: int = 32 * 1024 * 1024, + min_large_block_upload_threshold: int = 4 * 1024 * 1024 + 1, + use_byte_buffer: Optional[bool] = None, **kwargs: 
Any ) -> Self: """Create BlobClient from a Connection String. @@ -278,12 +382,34 @@ def from_connection_string( ~azure.core.credentials.AzureNamedKeyCredential or ~azure.core.credentials.AzureSasCredential or ~azure.core.credentials.TokenCredential or - str or dict[str, str] or None + str or Dict[str, str] or None + :keyword str api_version: + The Storage API version to use for requests. Default value is the most recent service version that is + compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. + + .. versionadded:: 12.2.0 + + :keyword str secondary_hostname: + The hostname of the secondary endpoint. :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. :keyword str audience: The audience to use when requesting tokens for Azure Active Directory authentication. Only has an effect when credential is of type TokenCredential. The value could be https://storage.azure.com/ (default) or https://.blob.core.windows.net. + :keyword int max_block_size: The maximum chunk size for uploading a block blob in chunks. + Defaults to 4*1024*1024, or 4MB. + :keyword int max_single_put_size: + If the blob size is less than or equal max_single_put_size, then the blob will be + uploaded with only one http PUT request. If the blob size is larger than max_single_put_size, + the blob will be uploaded in chunks. Defaults to 64*1024*1024, or 64MB. + :keyword int min_large_block_upload_threshold: The minimum chunk size required to use the memory efficient + algorithm when uploading a block blob. Defaults to 4*1024*1024+1. + :keyword bool use_byte_buffer: Use a byte buffer for block blob uploads. Defaults to False. + :keyword int max_page_size: The maximum chunk size for uploading a page blob. Defaults to 4*1024*1024, or 4MB. 
+ :keyword int max_single_get_size: The maximum size for a blob to be downloaded in a single call, + the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. + :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. + Defaults to 4*1024*1024, or 4MB. :returns: A Blob client. :rtype: ~azure.storage.blob.BlobClient @@ -297,11 +423,24 @@ def from_connection_string( :caption: Creating the BlobClient from a connection string. """ account_url, secondary, credential = parse_connection_str(conn_str, credential, 'blob') - if 'secondary_hostname' not in kwargs: - kwargs['secondary_hostname'] = secondary return cls( - account_url, container_name=container_name, blob_name=blob_name, - snapshot=snapshot, credential=credential, **kwargs + account_url, + container_name=container_name, + blob_name=blob_name, + snapshot=snapshot, + credential=credential, + version_id=version_id, + audience=audience, + api_version=api_version, + secondary_hostname=secondary or secondary_hostname, + max_block_size=max_block_size, + max_page_size=max_page_size, + max_chunk_get_size=max_chunk_get_size, + max_single_put_size=max_single_put_size, + max_single_get_size=max_single_get_size, + min_large_block_upload_threshold=min_large_block_upload_threshold, + use_byte_buffer=use_byte_buffer, + **kwargs ) @distributed_trace @@ -312,7 +451,7 @@ def get_account_information(self, **kwargs: Any) -> Dict[str, str]: The keys in the returned dictionary include 'sku_name' and 'account_kind'. :returns: A dict of account information (SKU and account type). 
- :rtype: dict(str, str) + :rtype: Dict[str, str] """ try: return cast(Dict[str, str], self._client.blob.get_account_info(cls=return_response_headers, **kwargs)) @@ -324,6 +463,25 @@ def upload_blob_from_url( self, source_url: str, *, metadata: Optional[Dict[str, str]] = None, + overwrite: Optional[bool] = None, + include_source_blob_properties: bool = True, + tags: Optional[Dict[str, str]] = None, + source_content_md5: Optional[bytearray] = None, + source_if_modified_since: Optional[datetime] = None, + source_if_unmodified_since: Optional[datetime] = None, + source_etag: Optional[str] = None, + source_match_condition: Optional["MatchConditions"] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + destination_lease: Optional[Union[BlobLeaseClient, str]] = None, + timeout: Optional[int] = None, + content_settings: Optional["ContentSettings"] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + encryption_scope: Optional[str] = None, + standard_blob_tier: Optional["StandardBlobTier"] = None, + source_authorization: Optional[str] = None, **kwargs: Any ) -> Dict[str, Any]: """ @@ -342,7 +500,7 @@ def upload_blob_from_url( https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= https://otheraccount.blob.core.windows.net/mycontainer/myblob?sastoken - :keyword dict(str, str) metadata: + :keyword Dict[str, str] metadata: Name-value pairs associated with the blob as metadata. :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data. If True, upload_blob will overwrite the existing data. If set to False, the @@ -355,7 +513,7 @@ def upload_blob_from_url( and tag values must be between 0 and 256 characters. 
Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) - :paramtype tags: dict(str, str) + :paramtype tags: Dict[str, str] :keyword bytearray source_content_md5: Specify the md5 that is used to verify the integrity of the source bytes. :keyword ~datetime.datetime source_if_modified_since: @@ -425,12 +583,32 @@ def upload_blob_from_url( :returns: Blob-updated property Dict (Etag and last modified) :rtype: Dict[str, Any] """ - if kwargs.get('cpk') and self.scheme.lower() != 'https': + if cpk and self.scheme.lower() != 'https': raise ValueError("Customer provided encryption key must be used over HTTPS.") options = _upload_blob_from_url_options( source_url=source_url, metadata=metadata, - **kwargs) + overwrite=overwrite, + include_source_blob_properties=include_source_blob_properties, + tags=tags, + source_content_md5=source_content_md5, + source_if_modified_since=source_if_modified_since, + source_if_unmodified_since=source_if_unmodified_since, + source_etag=source_etag, + source_match_condition=source_match_condition, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + destination_lease=destination_lease, + timeout=timeout, + content_settings=content_settings, + cpk=cpk, + encryption_scope=encryption_scope, + standard_blob_tier=standard_blob_tier, + source_authorization=source_authorization, + **kwargs + ) try: return cast(Dict[str, Any], self._client.block_blob.put_blob_from_url(**options)) except HttpResponseError as error: @@ -442,6 +620,28 @@ def upload_blob( blob_type: Union[str, BlobType] = BlobType.BLOCKBLOB, length: Optional[int] = None, metadata: Optional[Dict[str, str]] = None, + *, + tags: Optional[Dict[str, str]] = None, + overwrite: bool = False, + content_settings: Optional["ContentSettings"] = None, + validate_content: bool = False, + lease: 
Optional[BlobLeaseClient] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + premium_page_blob_tier: Optional["PremiumPageBlobTier"] = None, + immutability_policy: Optional["ImmutabilityPolicy"] = None, + legal_hold: Optional[bool] = None, + standard_blob_tier: Optional["StandardBlobTier"] = None, + maxsize_condition: Optional[int] = None, + max_concurrency: int = 1, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + encryption_scope: Optional[str] = None, + encoding: str = 'UTF-8', + progress_hook: Optional[Callable[[int, Optional[int]], None]] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Any]: """Creates a new blob from a data source with automatic chunking. @@ -455,7 +655,7 @@ def upload_blob( should be supplied for optimal performance. :param metadata: Name-value pairs associated with the blob as metadata. - :type metadata: dict(str, str) + :type metadata: Dict[str, str] :keyword tags: Name-value pairs associated with the blob as tag. Tags are case-sensitive. The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, @@ -465,7 +665,7 @@ def upload_blob( .. versionadded:: 12.4.0 - :paramtype tags: dict(str, str) + :paramtype tags: Dict[str, str] :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data. If True, upload_blob will overwrite the existing data. If set to False, the operation will fail with ResourceExistsError. 
The exception to the above is with Append @@ -584,13 +784,34 @@ def upload_blob( """ if self.require_encryption and not self.key_encryption_key: raise ValueError("Encryption required but no key was provided.") - if kwargs.get('cpk') and self.scheme.lower() != 'https': + if cpk and self.scheme.lower() != 'https': raise ValueError("Customer provided encryption key must be used over HTTPS.") options = _upload_blob_options( data=data, blob_type=blob_type, length=length, metadata=metadata, + tags=tags, + overwrite=overwrite, + content_settings=content_settings, + validate_content=validate_content, + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + premium_page_blob_tier=premium_page_blob_tier, + immutability_policy=immutability_policy, + legal_hold=legal_hold, + standard_blob_tier=standard_blob_tier, + maxsize_condition=maxsize_condition, + max_concurrency=max_concurrency, + cpk=cpk, + encryption_scope=encryption_scope, + encoding=encoding, + progress_hook=progress_hook, + timeout=timeout, encryption_options={ 'required': self.require_encryption, 'version': self.encryption_version, @@ -600,7 +821,8 @@ def upload_blob( config=self._config, sdk_moniker=self._sdk_moniker, client=self._client, - **kwargs) + **kwargs + ) if blob_type == BlobType.BlockBlob: return upload_block_blob(**options) if blob_type == BlobType.PageBlob: @@ -612,7 +834,19 @@ def download_blob( self, offset: Optional[int] = None, length: Optional[int] = None, *, + version_id: Optional[str] = None, + validate_content: bool = False, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] 
= None, + max_concurrency: int = 1, encoding: str, + progress_hook: Optional[Callable[[int, int], None]] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> StorageStreamDownloader[str]: ... @@ -622,7 +856,19 @@ def download_blob( self, offset: Optional[int] = None, length: Optional[int] = None, *, + version_id: Optional[str] = None, + validate_content: bool = False, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + max_concurrency: int = 1, encoding: None = None, + progress_hook: Optional[Callable[[int, int], None]] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> StorageStreamDownloader[bytes]: ... @@ -632,7 +878,19 @@ def download_blob( self, offset: Optional[int] = None, length: Optional[int] = None, *, - encoding: Union[str, None] = None, + version_id: Optional[str] = None, + validate_content: bool = False, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + max_concurrency: int = 1, + encoding: Optional[str] = None, + progress_hook: Optional[Callable[[int, int], None]] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Union[StorageStreamDownloader[str], StorageStreamDownloader[bytes]]: """Downloads a blob to the StorageStreamDownloader. 
The readall() method must @@ -731,15 +989,26 @@ def download_blob( raise ValueError("Encryption required but no key was provided.") if length is not None and offset is None: raise ValueError("Offset value must not be None if length is set.") - if kwargs.get('cpk') and self.scheme.lower() != 'https': + if cpk and self.scheme.lower() != 'https': raise ValueError("Customer provided encryption key must be used over HTTPS.") options = _download_blob_options( blob_name=self.blob_name, container_name=self.container_name, - version_id=get_version_id(self.version_id, kwargs), + version_id=version_id or self.version_id, offset=offset, length=length, encoding=encoding, + validate_content=validate_content, + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + cpk=cpk, + max_concurrency=max_concurrency, + progress_hook=progress_hook, + timeout=timeout, encryption_options={ 'required': self.require_encryption, 'version': self.encryption_version, @@ -749,11 +1018,27 @@ def download_blob( config=self._config, sdk_moniker=self._sdk_moniker, client=self._client, - **kwargs) + **kwargs + ) return StorageStreamDownloader(**options) @distributed_trace - def query_blob(self, query_expression: str, **kwargs: Any) -> BlobQueryReader: + def query_blob( + self, query_expression: str, + *, + on_error: Optional[Callable[[BlobQueryError], None]] = None, + blob_format: Optional[Union["DelimitedTextDialect", "DelimitedJsonDialect", "QuickQueryDialect", str]] = None, + output_format: Optional[Union["DelimitedTextDialect", "DelimitedJsonDialect", "QuickQueryDialect", List["ArrowDialect"], str]] = None, # pylint: disable=line-too-long + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: 
Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> BlobQueryReader: """Enables users to select/project on blob/or blob snapshot data by providing simple query expressions. This operations returns a BlobQueryReader, users need to use readall() or readinto() to get query data. @@ -771,16 +1056,23 @@ def query_blob(self, query_expression: str, **kwargs: Any) -> BlobQueryReader: .. note:: "ParquetDialect" is in preview, so some features may not work as intended. - :paramtype blob_format: ~azure.storage.blob.DelimitedTextDialect or ~azure.storage.blob.DelimitedJsonDialect - or ~azure.storage.blob.QuickQueryDialect or str + :paramtype blob_format: + ~azure.storage.blob.DelimitedTextDialect or + ~azure.storage.blob.DelimitedJsonDialect or + ~azure.storage.blob.QuickQueryDialect or + str :keyword output_format: Optional. Defines the output serialization for the data stream. By default the data will be returned as it is represented in the blob (Parquet formats default to DelimitedTextDialect). By providing an output format, the blob data will be reformatted according to that profile. This value can be a DelimitedTextDialect or a DelimitedJsonDialect or ArrowDialect. These dialects can be passed through their respective classes, the QuickQueryDialect enum or as a string - :paramtype output_format: ~azure.storage.blob.DelimitedTextDialect or ~azure.storage.blob.DelimitedJsonDialect - or List[~azure.storage.blob.ArrowDialect] or ~azure.storage.blob.QuickQueryDialect or str + :paramtype output_format: + ~azure.storage.blob.DelimitedTextDialect or + ~azure.storage.blob.DelimitedJsonDialect or + List[~azure.storage.blob.ArrowDialect] or + ~azure.storage.blob.QuickQueryDialect or + str :keyword lease: Required if the blob has an active lease. Value can be a BlobLeaseClient object or the lease ID as a string. 
@@ -831,12 +1123,25 @@ def query_blob(self, query_expression: str, **kwargs: Any) -> BlobQueryReader: :dedent: 4 :caption: select/project on blob/or blob snapshot data by providing simple query expressions. """ - errors = kwargs.pop("on_error", None) error_cls = kwargs.pop("error_cls", BlobQueryError) encoding = kwargs.pop("encoding", None) - if kwargs.get('cpk') and self.scheme.lower() != 'https': + if cpk and self.scheme.lower() != 'https': raise ValueError("Customer provided encryption key must be used over HTTPS.") - options, delimiter = _quick_query_options(self.snapshot, query_expression, **kwargs) + options, delimiter = _quick_query_options( + self.snapshot, + query_expression, + blob_format=blob_format, + output_format=output_format, + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + cpk=cpk, + timeout=timeout, + **kwargs + ) try: headers, raw_response_body = self._client.blob.query(**options) except HttpResponseError as error: @@ -844,15 +1149,28 @@ def query_blob(self, query_expression: str, **kwargs: Any) -> BlobQueryReader: return BlobQueryReader( name=self.blob_name, container=self.container_name, - errors=errors, + errors=on_error, record_delimiter=delimiter, encoding=encoding, headers=headers, response=raw_response_body, - error_cls=error_cls) + error_cls=error_cls + ) @distributed_trace - def delete_blob(self, delete_snapshots: Optional[str] = None, **kwargs: Any) -> None: + def delete_blob( + self, delete_snapshots: Optional[str] = None, + *, + version_id: Optional[str] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + timeout: Optional[int] = None, + **kwargs: 
Any + ) -> None: """Marks the specified blob for deletion. The blob is later deleted during garbage collection. @@ -925,16 +1243,24 @@ def delete_blob(self, delete_snapshots: Optional[str] = None, **kwargs: Any) -> """ options = _delete_blob_options( snapshot=self.snapshot, - version_id=get_version_id(self.version_id, kwargs), + version_id=version_id or self.version_id, delete_snapshots=delete_snapshots, - **kwargs) + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + timeout=timeout, + **kwargs + ) try: self._client.blob.delete(**options) except HttpResponseError as error: process_storage_error(error) @distributed_trace - def undelete_blob(self, **kwargs: Any) -> None: + def undelete_blob(self, *, timeout: Optional[int] = None, **kwargs: Any) -> None: """Restores soft-deleted blobs or snapshots. Operation will only be successful if used within the specified number of days @@ -962,12 +1288,12 @@ def undelete_blob(self, **kwargs: Any) -> None: :caption: Undeleting a blob. """ try: - self._client.blob.undelete(timeout=kwargs.pop('timeout', None), **kwargs) + self._client.blob.undelete(timeout=timeout, **kwargs) except HttpResponseError as error: process_storage_error(error) @distributed_trace - def exists(self, **kwargs: Any) -> bool: + def exists(self, *, version_id: Optional[str] = None, timeout: Optional[int] = None, **kwargs: Any) -> bool: """ Returns True if a blob exists with the defined parameters, and returns False otherwise. 
@@ -984,12 +1310,13 @@ def exists(self, **kwargs: Any) -> bool: :returns: boolean :rtype: bool """ - version_id = get_version_id(self.version_id, kwargs) try: self._client.blob.get_properties( snapshot=self.snapshot, - version_id=version_id, - **kwargs) + version_id=version_id or self.version_id, + timeout=timeout, + **kwargs + ) return True # Encrypted with CPK except ResourceExistsError: @@ -1001,7 +1328,19 @@ def exists(self, **kwargs: Any) -> bool: return False @distributed_trace - def get_blob_properties(self, **kwargs: Any) -> BlobProperties: + def get_blob_properties( + self, *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + version_id: Optional[str] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> BlobProperties: """Returns all user-defined metadata, standard HTTP properties, and system properties for the blob. It does not return the content of the blob. @@ -1063,30 +1402,29 @@ def get_blob_properties(self, **kwargs: Any) -> BlobProperties: :dedent: 8 :caption: Getting the properties for a blob. 
""" - # TODO: extract this out as _get_blob_properties_options - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - version_id = get_version_id(self.version_id, kwargs) - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) + if cpk and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _get_blob_properties_options( + lease=lease, + version_id=version_id or self.version_id, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + cpk=cpk, + snapshot=self.snapshot, + timeout=timeout, + **kwargs + ) try: - cls_method = kwargs.pop('cls', None) + cls_method = options.pop('cls', None) if cls_method: - kwargs['cls'] = partial(deserialize_pipeline_response_into_cls, cls_method) + options['cls'] = partial(deserialize_pipeline_response_into_cls, cls_method) blob_props = cast(BlobProperties, self._client.blob.get_properties( - timeout=kwargs.pop('timeout', None), - version_id=version_id, - snapshot=self.snapshot, - lease_access_conditions=access_conditions, - modified_access_conditions=mod_conditions, - cls=kwargs.pop('cls', None) or deserialize_blob_properties, - cpk_info=cpk_info, - **kwargs)) + cls=options.pop('cls', None) or deserialize_blob_properties, + **options + )) except HttpResponseError as error: process_storage_error(error) blob_props.name = self.blob_name @@ -1096,7 +1434,18 @@ def get_blob_properties(self, **kwargs: Any) -> BlobProperties: return blob_props @distributed_trace - def set_http_headers(self, content_settings: Optional["ContentSettings"] = 
None, **kwargs: Any) -> Dict[str, Any]: + def set_http_headers( + self, content_settings: Optional["ContentSettings"] = None, + *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> Dict[str, Any]: """Sets system properties on the blob. If one property is set for the content_settings, all properties will be overridden. @@ -1140,7 +1489,17 @@ def set_http_headers(self, content_settings: Optional["ContentSettings"] = None, :returns: Blob-updated property dict (Etag and last modified) :rtype: Dict[str, Any] """ - options = _set_http_headers_options(content_settings=content_settings, **kwargs) + options = _set_http_headers_options( + content_settings=content_settings, + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + timeout=timeout, + **kwargs + ) try: return cast(Dict[str, Any], self._client.blob.set_http_headers(**options)) except HttpResponseError as error: @@ -1149,6 +1508,16 @@ def set_http_headers(self, content_settings: Optional["ContentSettings"] = None, @distributed_trace def set_blob_metadata( self, metadata: Optional[Dict[str, str]] = None, + *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + encryption_scope: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Union[str, datetime]]: """Sets 
user-defined metadata for the blob as one or more name-value pairs. @@ -1157,7 +1526,7 @@ def set_blob_metadata( Dict containing name and value pairs. Each call to this operation replaces all existing metadata attached to the blob. To remove all metadata from the blob, call this operation with no metadata headers. - :type metadata: dict(str, str) + :type metadata: Dict[str, str] :keyword lease: Required if the blob has an active lease. Value can be a BlobLeaseClient object or the lease ID as a string. @@ -1207,9 +1576,21 @@ def set_blob_metadata( :returns: Blob-updated property dict (Etag and last modified) :rtype: Dict[str, Union[str, datetime]] """ - if kwargs.get('cpk') and self.scheme.lower() != 'https': + if cpk and self.scheme.lower() != 'https': raise ValueError("Customer provided encryption key must be used over HTTPS.") - options = _set_blob_metadata_options(metadata=metadata, **kwargs) + options = _set_blob_metadata_options( + metadata=metadata, + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + cpk=cpk, + encryption_scope=encryption_scope, + timeout=timeout, + **kwargs + ) try: return cast(Dict[str, Union[str, datetime]], self._client.blob.set_metadata(**options)) except HttpResponseError as error: @@ -1218,6 +1599,9 @@ def set_blob_metadata( @distributed_trace def set_immutability_policy( self, immutability_policy: "ImmutabilityPolicy", + *, + version_id: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, str]: """The Set Immutability Policy operation sets the immutability policy on the blob. @@ -1243,15 +1627,22 @@ def set_immutability_policy( :returns: Key value pairs of blob tags. 
:rtype: Dict[str, str] """ - - version_id = get_version_id(self.version_id, kwargs) kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time kwargs['immutability_policy_mode'] = immutability_policy.policy_mode return cast(Dict[str, str], self._client.blob.set_immutability_policy( - cls=return_response_headers, version_id=version_id, **kwargs)) + cls=return_response_headers, + version_id=version_id or self.version_id, + timeout=timeout, + **kwargs + )) @distributed_trace - def delete_immutability_policy(self, **kwargs: Any) -> None: + def delete_immutability_policy( + self, *, + version_id: Optional[str] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> None: """The Delete Immutability Policy operation deletes the immutability policy on the blob. .. versionadded:: 12.10.0 @@ -1269,12 +1660,20 @@ def delete_immutability_policy(self, **kwargs: Any) -> None: :returns: Key value pairs of blob tags. :rtype: Dict[str, str] """ - - version_id = get_version_id(self.version_id, kwargs) - self._client.blob.delete_immutability_policy(version_id=version_id, **kwargs) + self._client.blob.delete_immutability_policy( + version_id=version_id or self.version_id, + timeout=timeout, + **kwargs + ) @distributed_trace - def set_legal_hold(self, legal_hold: bool, **kwargs: Any) -> Dict[str, Union[str, datetime, bool]]: + def set_legal_hold( + self, legal_hold: bool, + *, + version_id: Optional[str] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime, bool]]: """The Set Legal Hold operation sets a legal hold on the blob. .. versionadded:: 12.10.0 @@ -1294,10 +1693,13 @@ def set_legal_hold(self, legal_hold: bool, **kwargs: Any) -> Dict[str, Union[str :returns: Key value pairs of blob tags. 
:rtype: Dict[str, Union[str, datetime, bool]] """ - - version_id = get_version_id(self.version_id, kwargs) return cast(Dict[str, Union[str, datetime, bool]], self._client.blob.set_legal_hold( - legal_hold, version_id=version_id, cls=return_response_headers, **kwargs)) + legal_hold, + version_id=version_id or self.version_id, + timeout=timeout, + cls=return_response_headers, + **kwargs + )) @distributed_trace def create_page_blob( @@ -1305,6 +1707,19 @@ def create_page_blob( content_settings: Optional["ContentSettings"] = None, metadata: Optional[Dict[str, str]] = None, premium_page_blob_tier: Optional[Union[str, "PremiumPageBlobTier"]] = None, + *, + tags: Optional[Dict[str, str]] = None, + sequence_number: Optional[int] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + immutability_policy: Optional["ImmutabilityPolicy"] = None, + legal_hold: Optional[bool] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + encryption_scope: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Union[str, datetime]]: """Creates a new Page Blob of the specified size. @@ -1317,7 +1732,7 @@ def create_page_blob( language, disposition, md5, and cache control. :param metadata: Name-value pairs associated with the blob as metadata. - :type metadata: dict(str, str) + :type metadata: Dict[str, str] :param ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier: A page blob tier value to set the blob to. The tier correlates to the size of the blob and number of allowed IOPS. This is only applicable to page blobs on @@ -1331,7 +1746,7 @@ def create_page_blob( .. versionadded:: 12.4.0 - :paramtype tags: dict(str, str) + :paramtype tags: Dict[str, str] :keyword int sequence_number: Only for Page blobs. 
The sequence number is a user-controlled value that you can use to track requests. The value of the sequence number must be between 0 @@ -1389,18 +1804,31 @@ def create_page_blob( see `here `__. :returns: Blob-updated property dict (Etag and last modified). - :rtype: dict[str, Any] + :rtype: Dict[str, Any] """ if self.require_encryption or (self.key_encryption_key is not None): raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - if kwargs.get('cpk') and self.scheme.lower() != 'https': + if cpk and self.scheme.lower() != 'https': raise ValueError("Customer provided encryption key must be used over HTTPS.") options = _create_page_blob_options( size=size, content_settings=content_settings, metadata=metadata, premium_page_blob_tier=premium_page_blob_tier, - **kwargs) + tags=tags, + sequence_number=sequence_number, + lease=lease, + immutability_policy=immutability_policy, + legal_hold=legal_hold, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + cpk=cpk, + encryption_scope=encryption_scope, + timeout=timeout, + **kwargs + ) try: return cast(Dict[str, Any], self._client.page_blob.create(**options)) except HttpResponseError as error: @@ -1410,6 +1838,18 @@ def create_page_blob( def create_append_blob( self, content_settings: Optional["ContentSettings"] = None, metadata: Optional[Dict[str, str]] = None, + *, + tags: Optional[Dict[str, str]] = None, + immutability_policy: Optional["ImmutabilityPolicy"] = None, + legal_hold: Optional[bool] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + encryption_scope: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Union[str, datetime]]: """Creates a new Append 
Blob. This operation creates a new 0-length append blob. The content @@ -1421,7 +1861,7 @@ def create_append_blob( language, disposition, md5, and cache control. :param metadata: Name-value pairs associated with the blob as metadata. - :type metadata: dict(str, str) + :type metadata: Dict[str, str] :keyword tags: Name-value pairs associated with the blob as tag. Tags are case-sensitive. The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, @@ -1431,7 +1871,7 @@ def create_append_blob( .. versionadded:: 12.4.0 - :paramtype tags: dict(str, str) + :paramtype tags: Dict[str, str] :keyword lease: Required if the blob has an active lease. Value can be a BlobLeaseClient object or the lease ID as a string. @@ -1485,16 +1925,28 @@ def create_append_blob( see `here `__. :returns: Blob-updated property dict (Etag and last modified). - :rtype: dict[str, Any] + :rtype: Dict[str, Any] """ if self.require_encryption or (self.key_encryption_key is not None): raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - if kwargs.get('cpk') and self.scheme.lower() != 'https': + if cpk and self.scheme.lower() != 'https': raise ValueError("Customer provided encryption key must be used over HTTPS.") options = _create_append_blob_options( content_settings=content_settings, metadata=metadata, - **kwargs) + tags=tags, + immutability_policy=immutability_policy, + legal_hold=legal_hold, + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + cpk=cpk, + encryption_scope=encryption_scope, + timeout=timeout, + **kwargs + ) try: return cast(Dict[str, Union[str, datetime]], self._client.append_blob.create(**options)) except HttpResponseError as error: @@ -1503,6 +1955,16 @@ def create_append_blob( @distributed_trace def create_snapshot( self, metadata: Optional[Dict[str, str]] = None, + *, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: 
Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + encryption_scope: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Union[str, datetime]]: """Creates a snapshot of the blob. @@ -1517,7 +1979,7 @@ def create_snapshot( :param metadata: Name-value pairs associated with the blob as metadata. - :type metadata: dict(str, str) + :type metadata: Dict[str, str] :keyword ~datetime.datetime if_modified_since: A DateTime value. Azure expects the date value passed in to be UTC. If timezone is included, any non-UTC datetimes will be converted to UTC. @@ -1564,7 +2026,7 @@ def create_snapshot( see `here `__. :returns: Blob-updated property dict (Snapshot ID, Etag, and last modified). - :rtype: dict[str, Any] + :rtype: Dict[str, Any] .. admonition:: Example: @@ -1575,9 +2037,21 @@ def create_snapshot( :dedent: 8 :caption: Create a snapshot of the blob. 
""" - if kwargs.get('cpk') and self.scheme.lower() != 'https': + if cpk and self.scheme.lower() != 'https': raise ValueError("Customer provided encryption key must be used over HTTPS.") - options = _create_snapshot_options(metadata=metadata, **kwargs) + options = _create_snapshot_options( + metadata=metadata, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + lease=lease, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + cpk=cpk, + encryption_scope=encryption_scope, + timeout=timeout, + **kwargs + ) try: return cast(Dict[str, Any], self._client.blob.create_snapshot(**options)) except HttpResponseError as error: @@ -1588,8 +2062,32 @@ def start_copy_from_url( self, source_url: str, metadata: Optional[Dict[str, str]] = None, incremental_copy: bool = False, + *, + tags: Optional[Union[Dict[str, str], Literal["COPY"]]] = None, + immutability_policy: Optional["ImmutabilityPolicy"] = None, + legal_hold: Optional[bool] = None, + source_if_modified_since: Optional[datetime] = None, + source_if_unmodified_since: Optional[datetime] = None, + source_etag: Optional[str] = None, + source_match_condition: Optional["MatchConditions"] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + destination_lease: Optional[Union[BlobLeaseClient, str]] = None, + source_lease: Optional[Union[BlobLeaseClient, str]] = None, + premium_page_blob_tier: Optional["PremiumPageBlobTier"] = None, + standard_blob_tier: Optional["StandardBlobTier"] = None, + rehydrate_priority: Optional["RehydratePriority"] = None, + seal_destination_blob: Optional[bool] = None, + requires_sync: Optional[bool] = None, + source_authorization: Optional[str] = None, + encryption_scope: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) 
-> Dict[str, Union[str, datetime]]: + """Copies a blob from the given URL. This operation returns a dictionary containing `copy_status` and `copy_id`, @@ -1638,7 +2136,7 @@ def start_copy_from_url( source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. - :type metadata: dict(str, str) + :type metadata: Dict[str, str] :param bool incremental_copy: Copies the snapshot of the source page blob to a destination page blob. The snapshot is copied such that only the differential changes between @@ -1657,7 +2155,7 @@ def start_copy_from_url( .. versionadded:: 12.4.0 - :paramtype tags: dict(str, str) or Literal["COPY"] + :paramtype tags: Dict[str, str] or Literal["COPY"] :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: Specifies the immutability policy of a blob, blob snapshot or blob version. @@ -1687,6 +2185,12 @@ def start_copy_from_url( and act according to the condition specified by the `match_condition` parameter. :keyword ~azure.core.MatchConditions source_match_condition: The source match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + :keyword ~datetime.datetime if_modified_since: A DateTime value. Azure expects the date value passed in to be UTC. If timezone is included, any non-UTC datetimes will be converted to UTC. @@ -1717,12 +2221,6 @@ def start_copy_from_url( Specify this to perform the Copy Blob operation only if the lease ID given matches the active lease ID of the source blob. :paramtype source_lease: ~azure.storage.blob.BlobLeaseClient or str - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. 
For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. :keyword ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier: A page blob tier value to set the blob to. The tier correlates to the size of the blob and number of allowed IOPS. This is only applicable to page blobs on @@ -1754,6 +2252,12 @@ def start_copy_from_url( .. versionadded:: 12.10.0 + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: A dictionary of copy properties (etag, last_modified, copy_id, copy_status). :rtype: dict[str, Union[str, ~datetime.datetime]] @@ -1770,7 +2274,30 @@ def start_copy_from_url( source_url=source_url, metadata=metadata, incremental_copy=incremental_copy, - **kwargs) + tags=tags, + immutability_policy=immutability_policy, + legal_hold=legal_hold, + source_if_modified_since=source_if_modified_since, + source_if_unmodified_since=source_if_unmodified_since, + source_etag=source_etag, + source_match_condition=source_match_condition, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + destination_lease=destination_lease, + source_lease=source_lease, + premium_page_blob_tier=premium_page_blob_tier, + standard_blob_tier=standard_blob_tier, + rehydrate_priority=rehydrate_priority, + seal_destination_blob=seal_destination_blob, + requires_sync=requires_sync, + source_authorization=source_authorization, + encryption_scope=encryption_scope, + timeout=timeout, + **kwargs + ) try: if 
incremental_copy: return cast(Dict[str, Union[str, datetime]], self._client.page_blob.copy_incremental(**options)) @@ -1810,7 +2337,18 @@ def abort_copy( process_storage_error(error) @distributed_trace - def acquire_lease(self, lease_duration: int =-1, lease_id: Optional[str] = None, **kwargs: Any) -> BlobLeaseClient: + def acquire_lease( + self, lease_duration: int = -1, + lease_id: Optional[str] = None, + *, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> BlobLeaseClient: """Requests a new lease. If the blob does not have an active lease, the Blob @@ -1867,11 +2405,30 @@ def acquire_lease(self, lease_duration: int =-1, lease_id: Optional[str] = None, :caption: Acquiring a lease on a blob. """ lease = BlobLeaseClient(self, lease_id=lease_id) - lease.acquire(lease_duration=lease_duration, **kwargs) + options = _acquire_lease_options( + lease_duration=lease_duration, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + timeout=timeout, + **kwargs + ) + lease.acquire(**options) return lease @distributed_trace - def set_standard_blob_tier(self, standard_blob_tier: Union[str, "StandardBlobTier"], **kwargs: Any) -> None: + def set_standard_blob_tier( + self, standard_blob_tier: Union[str, "StandardBlobTier"], + *, + rehydrate_priority: Optional["RehydratePriority"] = None, + version_id: Optional[str] = None, + if_tags_match_condition: Optional[str] = None, + lease: Optional[Union["BlobLeaseClient", str]] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> None: """This operation sets the tier on a block blob. A block blob's tier determines Hot/Cool/Archive storage type. 
@@ -1900,34 +2457,30 @@ def set_standard_blob_tier(self, standard_blob_tier: Union[str, "StandardBlobTie .. versionadded:: 12.4.0 + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. This value is not tracked or validated on the client. To configure client-side network timesouts see `here `__. - :keyword lease: - Required if the blob has an active lease. Value can be a BlobLeaseClient object - or the lease ID as a string. :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str :rtype: None """ - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - version_id = get_version_id(self.version_id, kwargs) - if standard_blob_tier is None: - raise ValueError("A StandardBlobTier must be specified") - if self.snapshot and kwargs.get('version_id'): - raise ValueError("Snapshot and version_id cannot be set at the same time") + options = _set_standard_blob_tier_options( + version_id or self.version_id, + self.snapshot, + standard_blob_tier=standard_blob_tier, + timeout=timeout, + lease=lease, + rehydrate_priority=rehydrate_priority, + if_tags_match_condition=if_tags_match_condition, + **kwargs + ) try: - self._client.blob.set_tier( - tier=standard_blob_tier, - snapshot=self.snapshot, - timeout=kwargs.pop('timeout', None), - modified_access_conditions=mod_conditions, - lease_access_conditions=access_conditions, - version_id=version_id, - **kwargs) + self._client.blob.set_tier(**options) except HttpResponseError as error: process_storage_error(error) @@ -1936,6 +2489,13 @@ def stage_block( self, block_id: str, data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], length: Optional[int] = None, + *, + validate_content: Optional[bool] = 
None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + encoding: Optional[str] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + encryption_scope: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Any]: """Creates a new block to be committed as part of a blob. @@ -1981,17 +2541,24 @@ def stage_block( see `here `__. :returns: Blob property dict. - :rtype: dict[str, Any] + :rtype: Dict[str, Any] """ if self.require_encryption or (self.key_encryption_key is not None): raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - if kwargs.get('cpk') and self.scheme.lower() != 'https': + if cpk and self.scheme.lower() != 'https': raise ValueError("Customer provided encryption key must be used over HTTPS.") options = _stage_block_options( block_id=block_id, data=data, length=length, - **kwargs) + validate_content=validate_content, + lease=lease, + encoding=encoding, + cpk=cpk, + encryption_scope=encryption_scope, + timeout=timeout, + **kwargs + ) try: return cast(Dict[str, Any], self._client.block_blob.stage_block(**options)) except HttpResponseError as error: @@ -2004,6 +2571,12 @@ def stage_block_from_url( source_offset: Optional[int] = None, source_length: Optional[int] = None, source_content_md5: Optional[Union[bytes, bytearray]] = None, + *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + encryption_scope: Optional[str] = None, + source_authorization: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Any]: """Creates a new block to be committed as part of a blob where @@ -2037,19 +2610,19 @@ def stage_block_from_url( .. versionadded:: 12.2.0 + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. :keyword int timeout: Sets the server-side timeout for the operation in seconds. 
For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. This value is not tracked or validated on the client. To configure client-side network timesouts see `here `__. - :keyword str source_authorization: - Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is - the prefix of the source_authorization string. :returns: Blob property dict. - :rtype: dict[str, Any] + :rtype: Dict[str, Any] """ - if kwargs.get('cpk') and self.scheme.lower() != 'https': + if cpk and self.scheme.lower() != 'https': raise ValueError("Customer provided encryption key must be used over HTTPS.") options = _stage_block_from_url_options( block_id=block_id, @@ -2057,7 +2630,13 @@ def stage_block_from_url( source_offset=source_offset, source_length=source_length, source_content_md5=source_content_md5, - **kwargs) + lease=lease, + cpk=cpk, + encryption_scope=encryption_scope, + source_authorization=source_authorization, + timeout=timeout, + **kwargs + ) try: return cast(Dict[str, Any], self._client.block_blob.stage_block_from_url(**options)) except HttpResponseError as error: @@ -2066,6 +2645,10 @@ def stage_block_from_url( @distributed_trace def get_block_list( self, block_list_type: str = "committed", + *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_tags_match_condition: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Tuple[List[BlobBlock], List[BlobBlock]]: """The Get Block List operation retrieves the list of blocks that have @@ -2093,16 +2676,16 @@ def get_block_list( :returns: A tuple of two lists - committed and uncommitted blocks :rtype: Tuple[List[BlobBlock], List[BlobBlock]] """ - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) + options = _get_block_list_options( + block_list_type=block_list_type, + snapshot=self.snapshot, + lease=lease, + 
if_tags_match_condition=if_tags_match_condition, + timeout=timeout, + **kwargs + ) try: - blocks = self._client.block_blob.get_block_list( - list_type=block_list_type, - snapshot=self.snapshot, - timeout=kwargs.pop('timeout', None), - lease_access_conditions=access_conditions, - modified_access_conditions=mod_conditions, - **kwargs) + blocks = self._client.block_blob.get_block_list(**options) except HttpResponseError as error: process_storage_error(error) return _get_block_list_result(blocks) @@ -2112,6 +2695,21 @@ def commit_block_list( self, block_list: List[BlobBlock], content_settings: Optional["ContentSettings"] = None, metadata: Optional[Dict[str, str]] = None, + *, + tags: Optional[Dict[str, str]] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + immutability_policy: Optional["ImmutabilityPolicy"] = None, + legal_hold: Optional[bool] = None, + validate_content: Optional[bool] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + standard_blob_tier: Optional["StandardBlobTier"] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + encryption_scope: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Union[str, datetime]]: """The Commit Block List operation writes a blob by specifying the list of @@ -2124,7 +2722,7 @@ def commit_block_list( language, disposition, md5, and cache control. :param metadata: Name-value pairs associated with the blob as metadata. - :type metadata: dict[str, str] + :type metadata: Dict[str, str] :keyword tags: Name-value pairs associated with the blob as tag. Tags are case-sensitive. The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, @@ -2134,7 +2732,7 @@ def commit_block_list( .. 
versionadded:: 12.4.0 - :paramtype tags: dict(str, str) + :paramtype tags: Dict[str, str] :keyword lease: Required if the blob has an active lease. Value can be a BlobLeaseClient object or the lease ID as a string. @@ -2203,24 +2801,46 @@ def commit_block_list( see `here `__. :returns: Blob-updated property dict (Etag and last modified). - :rtype: dict(str, Any) + :rtype: Dict[str, Any] """ if self.require_encryption or (self.key_encryption_key is not None): raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - if kwargs.get('cpk') and self.scheme.lower() != 'https': + if cpk and self.scheme.lower() != 'https': raise ValueError("Customer provided encryption key must be used over HTTPS.") options = _commit_block_list_options( block_list=block_list, content_settings=content_settings, metadata=metadata, - **kwargs) + tags=tags, + lease=lease, + immutability_policy=immutability_policy, + legal_hold=legal_hold, + validate_content=validate_content, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + standard_blob_tier=standard_blob_tier, + cpk=cpk, + encryption_scope=encryption_scope, + timeout=timeout, + **kwargs + ) try: return cast(Dict[str, Any], self._client.block_blob.commit_block_list(**options)) except HttpResponseError as error: process_storage_error(error) @distributed_trace - def set_premium_page_blob_tier(self, premium_page_blob_tier: "PremiumPageBlobTier", **kwargs: Any) -> None: + def set_premium_page_blob_tier( + self, premium_page_blob_tier: "PremiumPageBlobTier", + *, + if_tags_match_condition: Optional[str] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> None: """Sets the page blob tiers on the blob. This API is only supported for page blobs on premium accounts. 
:param premium_page_blob_tier: @@ -2234,34 +2854,43 @@ def set_premium_page_blob_tier(self, premium_page_blob_tier: "PremiumPageBlobTie .. versionadded:: 12.4.0 + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. This value is not tracked or validated on the client. To configure client-side network timesouts see `here `__. - :keyword lease: - Required if the blob has an active lease. Value can be a BlobLeaseClient object - or the lease ID as a string. - :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str :rtype: None """ - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) if premium_page_blob_tier is None: raise ValueError("A PremiumPageBlobTier must be specified") + options = _set_premium_page_blob_tier_options( + premium_page_blob_tier=premium_page_blob_tier, + if_tags_match_condition=if_tags_match_condition, + lease=lease, + timeout=timeout, + **kwargs + ) try: - self._client.blob.set_tier( - tier=premium_page_blob_tier, - timeout=kwargs.pop('timeout', None), - lease_access_conditions=access_conditions, - modified_access_conditions=mod_conditions, - **kwargs) + self._client.blob.set_tier(**options) except HttpResponseError as error: process_storage_error(error) @distributed_trace - def set_blob_tags(self, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> Dict[str, Any]: + def set_blob_tags( + self, tags: Optional[Dict[str, str]] = None, + *, + version_id: Optional[str] = None, + validate_content: Optional[bool] = None, + if_tags_match_condition: Optional[str] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + timeout: 
Optional[int] = None, + **kwargs: Any + ) -> Dict[str, Any]: """The Set Tags operation enables users to set tags on a blob or specific blob version, but not snapshot. Each call to this operation replaces all existing tags attached to the blob. To remove all tags from the blob, call this operation with no tags set. @@ -2275,7 +2904,7 @@ def set_blob_tags(self, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> and tag values must be between 0 and 256 characters. Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) - :type tags: dict(str, str) + :type tags: Dict[str, str] :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to add tags to. @@ -2302,15 +2931,29 @@ def set_blob_tags(self, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> :returns: Blob-updated property dict (Etag and last modified) :rtype: Dict[str, Any] """ - version_id = get_version_id(self.version_id, kwargs) - options = _set_blob_tags_options(version_id=version_id, tags=tags, **kwargs) + options = _set_blob_tags_options( + version_id=version_id or self.version_id, + tags=tags, + validate_content=validate_content, + if_tags_match_condition=if_tags_match_condition, + lease=lease, + timeout=timeout, + **kwargs + ) try: return cast(Dict[str, Any], self._client.blob.set_tags(**options)) except HttpResponseError as error: process_storage_error(error) @distributed_trace - def get_blob_tags(self, **kwargs: Any) -> Dict[str, str]: + def get_blob_tags( + self, *, + version_id: Optional[str] = None, + if_tags_match_condition: Optional[str] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> Dict[str, str]: """The Get Tags operation enables users to get tags on a blob or specific blob version, or snapshot. .. 
versionadded:: 12.4.0 @@ -2335,8 +2978,14 @@ def get_blob_tags(self, **kwargs: Any) -> Dict[str, str]: :returns: Key value pairs of blob tags. :rtype: Dict[str, str] """ - version_id = get_version_id(self.version_id, kwargs) - options = _get_blob_tags_options(version_id=version_id, snapshot=self.snapshot, **kwargs) + options = _get_blob_tags_options( + version_id=version_id or self.version_id, + snapshot=self.snapshot, + if_tags_match_condition=if_tags_match_condition, + lease=lease, + timeout=timeout, + **kwargs + ) try: _, tags = self._client.blob.get_tags(**options) return cast(Dict[str, str], parse_tags(tags)) @@ -2348,6 +2997,14 @@ def get_page_ranges( self, offset: Optional[int] = None, length: Optional[int] = None, previous_snapshot_diff: Optional[Union[str, Dict[str, Any]]] = None, + *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]]: """DEPRECATED: Returns the list of valid page ranges for a Page Blob or snapshot @@ -2407,7 +3064,7 @@ def get_page_ranges( :returns: A tuple of two lists of page ranges as dictionaries with 'start' and 'end' keys. The first element are filled page ranges, the 2nd element is cleared page ranges. 
- :rtype: tuple(list(dict(str, str), list(dict(str, str)) + :rtype: tuple(list(Dict[str, str], list(Dict[str, str]) """ warnings.warn( "get_page_ranges is deprecated, use list_page_ranges instead", @@ -2419,7 +3076,15 @@ def get_page_ranges( offset=offset, length=length, previous_snapshot_diff=previous_snapshot_diff, - **kwargs) + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + timeout=timeout, + **kwargs + ) try: if previous_snapshot_diff: ranges = self._client.page_blob.get_page_ranges_diff(**options) @@ -2431,11 +3096,18 @@ def get_page_ranges( @distributed_trace def list_page_ranges( - self, - *, + self, *, offset: Optional[int] = None, length: Optional[int] = None, previous_snapshot: Optional[Union[str, Dict[str, Any]]] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + results_per_page: Optional[int] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> ItemPaged[PageRange]: """Returns the list of valid page ranges for a Page Blob or snapshot @@ -2500,31 +3172,49 @@ def list_page_ranges( :returns: An iterable (auto-paging) of PageRange. 
:rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.PageRange] """ - results_per_page = kwargs.pop('results_per_page', None) options = _get_page_ranges_options( snapshot=self.snapshot, offset=offset, length=length, previous_snapshot_diff=previous_snapshot, - **kwargs) + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + timeout=timeout, + **kwargs + ) if previous_snapshot: command = partial( self._client.page_blob.get_page_ranges_diff, - **options) + **options + ) else: command = partial( self._client.page_blob.get_page_ranges, - **options) + **options + ) return ItemPaged( - command, results_per_page=results_per_page, - page_iterator_class=PageRangePaged) + command, + results_per_page=results_per_page, + page_iterator_class=PageRangePaged + ) @distributed_trace def get_page_range_diff_for_managed_disk( self, previous_snapshot_url: str, offset: Optional[int] = None, - length:Optional[int] = None, + length: Optional[int] = None, + *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]]: """Returns the list of valid page ranges for a managed disk or snapshot. @@ -2583,14 +3273,21 @@ def get_page_range_diff_for_managed_disk( :returns: A tuple of two lists of page ranges as dictionaries with 'start' and 'end' keys. The first element are filled page ranges, the 2nd element is cleared page ranges. 
- :rtype: tuple(list(dict(str, str), list(dict(str, str)) + :rtype: tuple(list(Dict[str, str], list(Dict[str, str]) """ options = _get_page_ranges_options( snapshot=self.snapshot, offset=offset, length=length, prev_snapshot_url=previous_snapshot_url, - **kwargs) + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + timeout=timeout, + **kwargs + ) try: ranges = self._client.page_blob.get_page_ranges_diff(**options) except HttpResponseError as error: @@ -2601,6 +3298,14 @@ def get_page_range_diff_for_managed_disk( def set_sequence_number( self, sequence_number_action: Union[str, "SequenceNumberAction"], sequence_number: Optional[str] = None, + *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Union[str, datetime]]: """Sets the blob sequence number. @@ -2646,16 +3351,39 @@ def set_sequence_number( see `here `__. :returns: Blob-updated property dict (Etag and last modified). 
- :rtype: dict(str, Any) + :rtype: Dict[str, Any] """ - options = _set_sequence_number_options(sequence_number_action, sequence_number=sequence_number, **kwargs) + options = _set_sequence_number_options( + sequence_number_action, + sequence_number=sequence_number, + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + timeout=timeout, + **kwargs + ) try: return cast(Dict[str, Any], self._client.page_blob.update_sequence_number(**options)) except HttpResponseError as error: process_storage_error(error) @distributed_trace - def resize_blob(self, size: int, **kwargs: Any) -> Dict[str, Union[str, datetime]]: + def resize_blob( + self, size: int, + *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + premium_page_blob_tier: Optional["PremiumPageBlobTier"] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: """Resizes a page blob to the specified size. If the specified value is less than the current size of the blob, @@ -2702,11 +3430,22 @@ def resize_blob(self, size: int, **kwargs: Any) -> Dict[str, Union[str, datetime see `here `__. :returns: Blob-updated property dict (Etag and last modified). 
- :rtype: dict(str, Any) + :rtype: Dict[str, Any] """ if kwargs.get('cpk') and self.scheme.lower() != 'https': raise ValueError("Customer provided encryption key must be used over HTTPS.") - options = _resize_blob_options(size=size, **kwargs) + options = _resize_blob_options( + size=size, + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + premium_page_blob_tier=premium_page_blob_tier, + timeout=timeout, + **kwargs + ) try: return cast(Dict[str, Any], self._client.page_blob.resize(**options)) except HttpResponseError as error: @@ -2717,6 +3456,21 @@ def upload_page( self, page: bytes, offset: int, length: int, + *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + validate_content: Optional[bool] = None, + if_sequence_number_lte: Optional[int] = None, + if_sequence_number_lt: Optional[int] = None, + if_sequence_number_eq: Optional[int] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + encryption_scope: Optional[str] = None, + encoding: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Union[str, datetime]]: """The Upload Pages operation writes a range of pages to a page blob. @@ -2798,17 +3552,32 @@ def upload_page( see `here `__. :returns: Blob-updated property dict (Etag and last modified). 
- :rtype: dict(str, Any) + :rtype: Dict[str, Any] """ if self.require_encryption or (self.key_encryption_key is not None): raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - if kwargs.get('cpk') and self.scheme.lower() != 'https': + if cpk and self.scheme.lower() != 'https': raise ValueError("Customer provided encryption key must be used over HTTPS.") options = _upload_page_options( page=page, offset=offset, length=length, - **kwargs) + lease=lease, + validate_content=validate_content, + if_sequence_number_lte=if_sequence_number_lte, + if_sequence_number_lt=if_sequence_number_lt, + if_sequence_number_eq=if_sequence_number_eq, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + cpk=cpk, + encryption_scope=encryption_scope, + encoding=encoding, + timeout=timeout, + **kwargs + ) try: return cast(Dict[str, Any], self._client.page_blob.upload_pages(**options)) except HttpResponseError as error: @@ -2820,6 +3589,25 @@ def upload_pages_from_url( offset: int, length: int, source_offset: int, + *, + source_content_md5: Optional[bytes] = None, + source_if_modified_since: Optional[datetime] = None, + source_if_unmodified_since: Optional[datetime] = None, + source_etag: Optional[str] = None, + source_match_condition: Optional["MatchConditions"] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_sequence_number_lte: Optional[int] = None, + if_sequence_number_lt: Optional[int] = None, + if_sequence_number_eq: Optional[int] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + encryption_scope: Optional[str] = None, + source_authorization: Optional[str] = None, + timeout: 
Optional[int] = None, **kwargs: Any ) -> Dict[str, Any]: """ @@ -2910,27 +3698,45 @@ def upload_pages_from_url( .. versionadded:: 12.2.0 + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. This value is not tracked or validated on the client. To configure client-side network timesouts see `here `__. - :keyword str source_authorization: - Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is - the prefix of the source_authorization string. :returns: Response after uploading pages from specified URL. :rtype: Dict[str, Any] """ if self.require_encryption or (self.key_encryption_key is not None): raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - if kwargs.get('cpk') and self.scheme.lower() != 'https': + if cpk and self.scheme.lower() != 'https': raise ValueError("Customer provided encryption key must be used over HTTPS.") options = _upload_pages_from_url_options( source_url=source_url, offset=offset, length=length, source_offset=source_offset, + source_content_md5=source_content_md5, + source_if_modified_since=source_if_modified_since, + source_if_unmodified_since=source_if_unmodified_since, + source_etag=source_etag, + source_match_condition=source_match_condition, + lease=lease, + if_sequence_number_lte=if_sequence_number_lte, + if_sequence_number_lt=if_sequence_number_lt, + if_sequence_number_eq=if_sequence_number_eq, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + cpk=cpk, + encryption_scope=encryption_scope, + 
source_authorization=source_authorization, + timeout=timeout, **kwargs ) try: @@ -2939,7 +3745,23 @@ def upload_pages_from_url( process_storage_error(error) @distributed_trace - def clear_page(self, offset: int, length: int, **kwargs: Any) -> Dict[str, Union[str, datetime]]: + def clear_page( + self, offset: int, + length: int, + *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_sequence_number_lte: Optional[int] = None, + if_sequence_number_lt: Optional[int] = None, + if_sequence_number_eq: Optional[int] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: """Clears a range of pages. :param int offset: @@ -3000,15 +3822,26 @@ def clear_page(self, offset: int, length: int, **kwargs: Any) -> Dict[str, Union see `here `__. :returns: Blob-updated property dict (Etag and last modified). 
- :rtype: dict(str, Any) + :rtype: Dict[str, Any] """ if self.require_encryption or (self.key_encryption_key is not None): raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - if kwargs.get('cpk') and self.scheme.lower() != 'https': + if cpk and self.scheme.lower() != 'https': raise ValueError("Customer provided encryption key must be used over HTTPS.") options = _clear_page_options( offset=offset, length=length, + lease=lease, + if_sequence_number_lte=if_sequence_number_lte, + if_sequence_number_lt=if_sequence_number_lt, + if_sequence_number_eq=if_sequence_number_eq, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + cpk=cpk, + timeout=timeout, **kwargs ) try: @@ -3020,6 +3853,20 @@ def clear_page(self, offset: int, length: int, **kwargs: Any) -> Dict[str, Union def append_block( self, data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], length: Optional[int] = None, + *, + validate_content: Optional[bool] = None, + maxsize_condition: Optional[int] = None, + appendpos_condition: Optional[int] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + encoding: Optional[str] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + encryption_scope: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Union[str, datetime, int]]: """Commits a new block of data to the end of the existing append blob. @@ -3097,15 +3944,28 @@ def append_block( see `here `__. :returns: Blob-updated property dict (Etag, last modified, append offset, committed block count). 
- :rtype: dict(str, Any) + :rtype: Dict[str, Any] """ if self.require_encryption or (self.key_encryption_key is not None): raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - if kwargs.get('cpk') and self.scheme.lower() != 'https': + if cpk and self.scheme.lower() != 'https': raise ValueError("Customer provided encryption key must be used over HTTPS.") options = _append_block_options( data=data, length=length, + validate_content=validate_content, + maxsize_condition=maxsize_condition, + appendpos_condition=appendpos_condition, + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + encoding=encoding, + cpk=cpk, + encryption_scope=encryption_scope, + timeout=timeout, **kwargs ) try: @@ -3118,6 +3978,24 @@ def append_block_from_url( self, copy_source_url: str, source_offset: Optional[int] = None, source_length: Optional[int] = None, + *, + source_content_md5: Optional[bytearray] = None, + maxsize_condition: Optional[int] = None, + appendpos_condition: Optional[int] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + source_if_modified_since: Optional[datetime] = None, + source_if_unmodified_since: Optional[datetime] = None, + source_etag: Optional[str] = None, + source_match_condition: Optional["MatchConditions"] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + encryption_scope: Optional[str] = None, + source_authorization: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Union[str, datetime, int]]: """ @@ -3202,26 +4080,43 @@ def append_block_from_url( .. 
versionadded:: 12.2.0 + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. This value is not tracked or validated on the client. To configure client-side network timesouts see `here `__. - :keyword str source_authorization: - Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is - the prefix of the source_authorization string. :returns: Result after appending a new block. :rtype: Dict[str, Union[str, datetime, int]] """ if self.require_encryption or (self.key_encryption_key is not None): raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - if kwargs.get('cpk') and self.scheme.lower() != 'https': + if cpk and self.scheme.lower() != 'https': raise ValueError("Customer provided encryption key must be used over HTTPS.") options = _append_block_from_url_options( copy_source_url=copy_source_url, source_offset=source_offset, source_length=source_length, + source_content_md5=source_content_md5, + maxsize_condition=maxsize_condition, + appendpos_condition=appendpos_condition, + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + source_if_modified_since=source_if_modified_since, + source_if_unmodified_since=source_if_unmodified_since, + source_etag=source_etag, + source_match_condition=source_match_condition, + cpk=cpk, + encryption_scope=encryption_scope, + source_authorization=source_authorization, + timeout=timeout, **kwargs ) try: @@ -3231,7 +4126,17 @@ def append_block_from_url( process_storage_error(error) @distributed_trace - def 
seal_append_blob(self, **kwargs: Any) -> Dict[str, Union[str, datetime, int]]: + def seal_append_blob( + self, *, + appendpos_condition: Optional[int] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime, int]]: """The Seal operation seals the Append Blob to make it read-only. .. versionadded:: 12.4.0 @@ -3270,11 +4175,20 @@ def seal_append_blob(self, **kwargs: Any) -> Dict[str, Union[str, datetime, int] see `here `__. :returns: Blob-updated property dict (Etag, last modified, append offset, committed block count). - :rtype: dict(str, Any) + :rtype: Dict[str, Any] """ if self.require_encryption or (self.key_encryption_key is not None): raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - options = _seal_append_blob_options(**kwargs) + options = _seal_append_blob_options( + appendpos_condition=appendpos_condition, + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + timeout=timeout, + **kwargs + ) try: return cast(Dict[str, Any], self._client.append_blob.seal(**options)) except HttpResponseError as error: diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client_helpers.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client_helpers.py index a04f0ea02525..f00ae4089921 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client_helpers.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client_helpers.py @@ -84,11 +84,13 @@ def _parse_url( return parsed_url, sas_token, path_snapshot + def _format_url(container_name: Union[bytes, str], scheme: str, blob_name: str, query_str: str, hostname: str) -> str: if isinstance(container_name, 
str): container_name = container_name.encode('UTF-8') return f"{scheme}://{hostname}/{quote(container_name)}/{quote(blob_name, safe='~/')}{query_str}" + def _encode_source_url(source_url: str) -> str: parsed_source_url = urlparse(source_url) source_scheme = parsed_source_url.scheme @@ -100,6 +102,7 @@ def _encode_source_url(source_url: str) -> str: result.append(source_query) return '?'.join(result) + def _upload_blob_options( # pylint:disable=too-many-statements data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[bytes]], blob_type: Union[str, BlobType], @@ -111,7 +114,7 @@ def _upload_blob_options( # pylint:disable=too-many-statements client: "AzureBlobStorage", **kwargs: Any ) -> Dict[str, Any]: - encoding = kwargs.pop('encoding', 'UTF-8') + encoding = kwargs.pop('encoding') or 'UTF-8' if isinstance(data, str): data = data.encode(encoding) if length is None: @@ -131,15 +134,18 @@ def _upload_blob_options( # pylint:disable=too-many-statements else: raise TypeError(f"Unsupported data type: {type(data)}") - validate_content = kwargs.pop('validate_content', False) - content_settings = kwargs.pop('content_settings', None) - overwrite = kwargs.pop('overwrite', False) - max_concurrency = kwargs.pop('max_concurrency', 1) - cpk = kwargs.pop('cpk', None) + validate_content = kwargs.pop('validate_content') or False + content_settings = kwargs.pop('content_settings') or None + overwrite = kwargs.pop('overwrite') or False + max_concurrency = kwargs.pop('max_concurrency') or 1 + cpk = kwargs.pop('cpk') or None cpk_info = None if cpk: - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) + cpk_info = CpkInfo( + encryption_key=cpk.key_value, + encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm + ) kwargs['cpk_info'] = cpk_info headers = kwargs.pop('headers', {}) @@ -171,7 +177,8 @@ def _upload_blob_options( # pylint:disable=too-many-statements 
config.user_agent_policy.user_agent, sdk_moniker, encryption_options['version'], - kwargs) + kwargs + ) if blob_type == BlobType.BlockBlob: kwargs['client'] = client.block_blob @@ -186,7 +193,8 @@ def _upload_blob_options( # pylint:disable=too-many-statements kwargs['client'] = client.append_blob else: raise ValueError(f"Unsupported BlobType: {blob_type}") - return kwargs + return {k: v for k, v in kwargs.items() if v is not None} + def _upload_blob_from_url_options(source_url: str, **kwargs: Any) -> Dict[str, Any]: metadata = kwargs.pop('metadata', None) @@ -209,13 +217,16 @@ def _upload_blob_from_url_options(source_url: str, **kwargs: Any) -> Dict[str, A cpk = kwargs.pop('cpk', None) cpk_info = None if cpk: - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) + cpk_info = CpkInfo( + encryption_key=cpk.key_value, + encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm + ) options = { 'copy_source_authorization': source_authorization, 'content_length': 0, - 'copy_source_blob_properties': kwargs.pop('include_source_blob_properties', True), + 'copy_source_blob_properties': kwargs.pop('include_source_blob_properties'), 'source_content_md5': kwargs.pop('source_content_md5', None), 'copy_source': source_url, 'modified_access_conditions': get_modify_conditions(kwargs), @@ -228,11 +239,12 @@ def _upload_blob_from_url_options(source_url: str, **kwargs: Any) -> Dict[str, A 'cpk_scope_info': get_cpk_scope_info(kwargs), 'headers': headers, } - options.update(kwargs) + options.update({k: v for k, v in kwargs.items() if v is not None}) if not overwrite and not _any_conditions(**options): options['modified_access_conditions'].if_none_match = '*' return options + def _download_blob_options( blob_name: str, container_name: str, @@ -276,15 +288,18 @@ def _download_blob_options( raise ValueError("Offset must be provided if length is provided.") length = offset + length - 1 # Service actually 
uses an end-range inclusive index - validate_content = kwargs.pop('validate_content', False) + validate_content = kwargs.pop('validate_content') or False access_conditions = get_access_conditions(kwargs.pop('lease', None)) mod_conditions = get_modify_conditions(kwargs) cpk = kwargs.pop('cpk', None) cpk_info = None if cpk: - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) + cpk_info = CpkInfo( + encryption_key=cpk.key_value, + encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm + ) # Add feature flag to user agent for encryption if encryption_options['key'] or encryption_options['resolver']: @@ -292,7 +307,8 @@ def _download_blob_options( config.user_agent_policy.user_agent, sdk_moniker, encryption_options['version'], - kwargs) + kwargs + ) options = { 'clients': client, @@ -309,15 +325,17 @@ def _download_blob_options( 'modified_access_conditions': mod_conditions, 'cpk_info': cpk_info, 'download_cls': kwargs.pop('cls', None) or deserialize_blob_stream, - 'max_concurrency':kwargs.pop('max_concurrency', 1), + 'max_concurrency': kwargs.pop('max_concurrency') or 1, 'encoding': encoding, 'timeout': kwargs.pop('timeout', None), 'name': blob_name, - 'container': container_name} - options.update(kwargs) + 'container': container_name + } + options.update({k: v for k, v in kwargs.items() if v is not None}) return options -def _quick_query_options(snapshot: Optional[str], query_expression: str, **kwargs: Any ) -> Tuple[Dict[str, Any], str]: + +def _quick_query_options(snapshot: Optional[str], query_expression: str, **kwargs: Any) -> Tuple[Dict[str, Any], str]: delimiter = '\n' input_format = kwargs.pop('blob_format', None) if input_format == QuickQueryDialect.DelimitedJson: @@ -376,9 +394,10 @@ def _quick_query_options(snapshot: Optional[str], query_expression: str, **kwarg 'timeout': kwargs.pop('timeout', None), 'cls': return_headers_and_deserialized, } - options.update(kwargs) + 
options.update({k: v for k, v in kwargs.items() if v is not None}) return options, delimiter + def _generic_delete_blob_options(delete_snapshots: Optional[str] = None, **kwargs: Any) -> Dict[str, Any]: access_conditions = get_access_conditions(kwargs.pop('lease', None)) mod_conditions = get_modify_conditions(kwargs) @@ -389,10 +408,12 @@ def _generic_delete_blob_options(delete_snapshots: Optional[str] = None, **kwarg 'snapshot': kwargs.pop('snapshot', None), # this is added for delete_blobs 'delete_snapshots': delete_snapshots or None, 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions} - options.update(kwargs) + 'modified_access_conditions': mod_conditions + } + options.update({k: v for k, v in kwargs.items() if v is not None}) return options + def _delete_blob_options( snapshot: Optional[str], version_id: Optional[str], @@ -407,6 +428,7 @@ def _delete_blob_options( options['blob_delete_type'] = kwargs.pop('blob_delete_type', None) return options + def _set_http_headers_options(content_settings: Optional["ContentSettings"] = None, **kwargs: Any) -> Dict[str, Any]: access_conditions = get_access_conditions(kwargs.pop('lease', None)) mod_conditions = get_modify_conditions(kwargs) @@ -425,10 +447,12 @@ def _set_http_headers_options(content_settings: Optional["ContentSettings"] = No 'blob_http_headers': blob_headers, 'lease_access_conditions': access_conditions, 'modified_access_conditions': mod_conditions, - 'cls': return_response_headers} - options.update(kwargs) + 'cls': return_response_headers + } + options.update({k: v for k, v in kwargs.items() if v is not None}) return options + def _set_blob_metadata_options(metadata: Optional[Dict[str, str]] = None, **kwargs: Any): headers = kwargs.pop('headers', {}) headers.update(add_metadata_headers(metadata)) @@ -439,8 +463,11 @@ def _set_blob_metadata_options(metadata: Optional[Dict[str, str]] = None, **kwar cpk = kwargs.pop('cpk', None) cpk_info = None if cpk: - cpk_info = 
CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) + cpk_info = CpkInfo( + encryption_key=cpk.key_value, + encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm + ) options = { 'timeout': kwargs.pop('timeout', None), 'lease_access_conditions': access_conditions, @@ -448,10 +475,12 @@ def _set_blob_metadata_options(metadata: Optional[Dict[str, str]] = None, **kwar 'cpk_scope_info': cpk_scope_info, 'cpk_info': cpk_info, 'cls': return_response_headers, - 'headers': headers} - options.update(kwargs) + 'headers': headers + } + options.update({k: v for k, v in kwargs.items() if v is not None}) return options + def _create_page_blob_options( size: int, content_settings: Optional["ContentSettings"] = None, @@ -479,8 +508,11 @@ def _create_page_blob_options( cpk = kwargs.pop('cpk', None) cpk_info = None if cpk: - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) + cpk_info = CpkInfo( + encryption_key=cpk.key_value, + encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm + ) immutability_policy = kwargs.pop('immutability_policy', None) if immutability_policy: @@ -509,10 +541,12 @@ def _create_page_blob_options( 'blob_tags_string': blob_tags_string, 'cls': return_response_headers, "tier": tier, - 'headers': headers} - options.update(kwargs) + 'headers': headers + } + options.update({k: v for k, v in kwargs.items() if v is not None}) return options + def _create_append_blob_options( content_settings: Optional["ContentSettings"] = None, metadata: Optional[Dict[str, str]] = None, @@ -537,8 +571,11 @@ def _create_append_blob_options( cpk = kwargs.pop('cpk', None) cpk_info = None if cpk: - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) + cpk_info = CpkInfo( + encryption_key=cpk.key_value, + encryption_key_sha256=cpk.key_hash, + 
encryption_algorithm=cpk.algorithm + ) immutability_policy = kwargs.pop('immutability_policy', None) if immutability_policy: @@ -557,10 +594,12 @@ def _create_append_blob_options( 'cpk_info': cpk_info, 'blob_tags_string': blob_tags_string, 'cls': return_response_headers, - 'headers': headers} - options.update(kwargs) + 'headers': headers + } + options.update({k: v for k, v in kwargs.items() if v is not None}) return options + def _create_snapshot_options(metadata: Optional[Dict[str, str]] = None, **kwargs: Any) -> Dict[str, Any]: headers = kwargs.pop('headers', {}) headers.update(add_metadata_headers(metadata)) @@ -570,8 +609,11 @@ def _create_snapshot_options(metadata: Optional[Dict[str, str]] = None, **kwargs cpk = kwargs.pop('cpk', None) cpk_info = None if cpk: - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) + cpk_info = CpkInfo( + encryption_key=cpk.key_value, + encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm + ) options = { 'timeout': kwargs.pop('timeout', None), @@ -580,10 +622,12 @@ def _create_snapshot_options(metadata: Optional[Dict[str, str]] = None, **kwargs 'cpk_scope_info': cpk_scope_info, 'cpk_info': cpk_info, 'cls': return_response_headers, - 'headers': headers} - options.update(kwargs) + 'headers': headers + } + options.update({k: v for k, v in kwargs.items() if v is not None}) return options + def _start_copy_from_url_options( # pylint:disable=too-many-statements source_url: str, metadata: Optional[Dict[str, str]] = None, @@ -593,7 +637,7 @@ def _start_copy_from_url_options( # pylint:disable=too-many-statements source_url = _encode_source_url(source_url=source_url) headers = kwargs.pop('headers', {}) headers.update(add_metadata_headers(metadata)) - if 'source_lease' in kwargs: + if kwargs.get('source_lease') is not None: source_lease = kwargs.pop('source_lease') try: headers['x-ms-source-lease-id'] = source_lease.id @@ -663,9 +707,10 @@ def 
_start_copy_from_url_options( # pylint:disable=too-many-statements options['source_modified_access_conditions'] = source_mod_conditions options['lease_access_conditions'] = dest_access_conditions options['tier'] = tier.value if tier else None - options.update(kwargs) + options.update({k: v for k, v in kwargs.items() if v is not None}) return options + def _abort_copy_options(copy_id: Union[str, Dict[str, Any], BlobProperties], **kwargs: Any) -> Dict[str, Any]: access_conditions = get_access_conditions(kwargs.pop('lease', None)) if isinstance(copy_id, BlobProperties): @@ -675,10 +720,12 @@ def _abort_copy_options(copy_id: Union[str, Dict[str, Any], BlobProperties], **k options = { 'copy_id': copy_id, 'lease_access_conditions': access_conditions, - 'timeout': kwargs.pop('timeout', None)} - options.update(kwargs) + 'timeout': kwargs.pop('timeout', None) + } + options.update({k: v for k, v in kwargs.items() if v is not None}) return options + def _stage_block_options( block_id: str, data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], @@ -687,7 +734,7 @@ def _stage_block_options( ) -> Dict[str, Any]: block_id = encode_base64(str(block_id)) if isinstance(data, str): - data = data.encode(kwargs.pop('encoding', 'UTF-8')) # type: ignore + data = data.encode(kwargs.pop('encoding') or 'UTF-8') # type: ignore access_conditions = get_access_conditions(kwargs.pop('lease', None)) if length is None: length = get_length(data) @@ -696,13 +743,16 @@ def _stage_block_options( if isinstance(data, bytes): data = data[:length] - validate_content = kwargs.pop('validate_content', False) + validate_content = kwargs.pop('validate_content') or False cpk_scope_info = get_cpk_scope_info(kwargs) cpk = kwargs.pop('cpk', None) cpk_info = None if cpk: - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) + cpk_info = CpkInfo( + encryption_key=cpk.key_value, + encryption_key_sha256=cpk.key_hash, + 
encryption_algorithm=cpk.algorithm + ) options = { 'block_id': block_id, @@ -716,9 +766,10 @@ def _stage_block_options( 'cpk_info': cpk_info, 'cls': return_response_headers, } - options.update(kwargs) + options.update({k: v for k, v in kwargs.items() if v is not None}) return options + def _stage_block_from_url_options( block_id: str, source_url: str, @@ -743,8 +794,11 @@ def _stage_block_from_url_options( cpk = kwargs.pop('cpk', None) cpk_info = None if cpk: - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) + cpk_info = CpkInfo( + encryption_key=cpk.key_value, + encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm + ) options = { 'copy_source_authorization': source_authorization, 'block_id': block_id, @@ -758,9 +812,10 @@ def _stage_block_from_url_options( 'cpk_info': cpk_info, 'cls': return_response_headers, } - options.update(kwargs) + options.update({k: v for k, v in kwargs.items() if v is not None}) return options + def _get_block_list_result(blocks: BlockList) -> Tuple[List[BlobBlock], List[BlobBlock]]: committed = [] uncommitted = [] @@ -770,6 +825,7 @@ def _get_block_list_result(blocks: BlockList) -> Tuple[List[BlobBlock], List[Blo uncommitted = [BlobBlock._from_generated(b) for b in blocks.uncommitted_blocks] # pylint: disable=protected-access return committed, uncommitted + def _commit_block_list_options( block_list: List[BlobBlock], content_settings: Optional["ContentSettings"] = None, @@ -802,13 +858,16 @@ def _commit_block_list_options( blob_content_disposition=content_settings.content_disposition ) - validate_content = kwargs.pop('validate_content', False) + validate_content = kwargs.pop('validate_content') or False cpk_scope_info = get_cpk_scope_info(kwargs) cpk = kwargs.pop('cpk', None) cpk_info = None if cpk: - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) + cpk_info = CpkInfo( + 
encryption_key=cpk.key_value, + encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm + ) immutability_policy = kwargs.pop('immutability_policy', None) if immutability_policy: @@ -832,14 +891,15 @@ def _commit_block_list_options( 'blob_tags_string': blob_tags_string, 'headers': headers } - options.update(kwargs) + options.update({k: v for k, v in kwargs.items() if v is not None}) return options + def _set_blob_tags_options( version_id: Optional[str], tags: Optional[Dict[str, str]] = None, **kwargs: Any -)-> Dict[str, Any]: +) -> Dict[str, Any]: serialized_tags = serialize_blob_tags(tags) access_conditions = get_access_conditions(kwargs.pop('lease', None)) mod_conditions = get_modify_conditions(kwargs) @@ -849,10 +909,12 @@ def _set_blob_tags_options( 'lease_access_conditions': access_conditions, 'modified_access_conditions': mod_conditions, 'version_id': version_id, - 'cls': return_response_headers} - options.update(kwargs) + 'cls': return_response_headers + } + options.update({k: v for k, v in kwargs.items() if v is not None}) return options + def _get_blob_tags_options(version_id: Optional[str], snapshot: Optional[str], **kwargs: Any) -> Dict[str, Any]: access_conditions = get_access_conditions(kwargs.pop('lease', None)) mod_conditions = get_modify_conditions(kwargs) @@ -863,9 +925,11 @@ def _get_blob_tags_options(version_id: Optional[str], snapshot: Optional[str], * 'lease_access_conditions': access_conditions, 'modified_access_conditions': mod_conditions, 'timeout': kwargs.pop('timeout', None), - 'cls': return_headers_and_deserialized} + 'cls': return_headers_and_deserialized + } return options + def _get_page_ranges_options( snapshot: Optional[str], offset: Optional[int] = None, @@ -887,7 +951,8 @@ def _get_page_ranges_options( 'lease_access_conditions': access_conditions, 'modified_access_conditions': mod_conditions, 'timeout': kwargs.pop('timeout', None), - 'range': page_range} + 'range': page_range + } if previous_snapshot_diff: try: 
options['prevsnapshot'] = previous_snapshot_diff.snapshot # type: ignore @@ -896,9 +961,10 @@ def _get_page_ranges_options( options['prevsnapshot'] = previous_snapshot_diff['snapshot'] # type: ignore except TypeError: options['prevsnapshot'] = previous_snapshot_diff - options.update(kwargs) + options.update({k: v for k, v in kwargs.items() if v is not None}) return options + def _set_sequence_number_options( sequence_number_action: str, sequence_number: Optional[str] = None, @@ -914,10 +980,12 @@ def _set_sequence_number_options( 'blob_sequence_number': sequence_number, 'lease_access_conditions': access_conditions, 'modified_access_conditions': mod_conditions, - 'cls': return_response_headers} - options.update(kwargs) + 'cls': return_response_headers + } + options.update({k: v for k, v in kwargs.items() if v is not None}) return options + def _resize_blob_options(size: int, **kwargs: Any) -> Dict[str, Any]: access_conditions = get_access_conditions(kwargs.pop('lease', None)) mod_conditions = get_modify_conditions(kwargs) @@ -927,18 +995,23 @@ def _resize_blob_options(size: int, **kwargs: Any) -> Dict[str, Any]: cpk = kwargs.pop('cpk', None) cpk_info = None if cpk: - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) + cpk_info = CpkInfo( + encryption_key=cpk.key_value, + encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm + ) options = { 'blob_content_length': size, 'timeout': kwargs.pop('timeout', None), 'lease_access_conditions': access_conditions, 'modified_access_conditions': mod_conditions, 'cpk_info': cpk_info, - 'cls': return_response_headers} - options.update(kwargs) + 'cls': return_response_headers + } + options.update({k: v for k, v in kwargs.items() if v is not None}) return options + def _upload_page_options( page: bytes, offset: int, @@ -946,13 +1019,13 @@ def _upload_page_options( **kwargs: Any ) -> Dict[str, Any]: if isinstance(page, str): - page = 
page.encode(kwargs.pop('encoding', 'UTF-8')) + page = page.encode(kwargs.pop('encoding') or 'UTF-8') if offset is None or offset % 512 != 0: raise ValueError("offset must be an integer that aligns with 512 page size") if length is None or length % 512 != 0: raise ValueError("length must be an integer that aligns with 512 page size") end_range = offset + length - 1 # Reformat to an inclusive range index - content_range = f'bytes={offset}-{end_range}' # type: ignore + content_range = f'bytes={offset}-{end_range}' # type: ignore access_conditions = get_access_conditions(kwargs.pop('lease', None)) seq_conditions = SequenceNumberAccessConditions( if_sequence_number_less_than_or_equal_to=kwargs.pop('if_sequence_number_lte', None), @@ -961,12 +1034,15 @@ def _upload_page_options( ) mod_conditions = get_modify_conditions(kwargs) cpk_scope_info = get_cpk_scope_info(kwargs) - validate_content = kwargs.pop('validate_content', False) + validate_content = kwargs.pop('validate_content') or False cpk = kwargs.pop('cpk', None) cpk_info = None if cpk: - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) + cpk_info = CpkInfo( + encryption_key=cpk.key_value, + encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm + ) options = { 'body': page[:length], 'content_length': length, @@ -979,10 +1055,12 @@ def _upload_page_options( 'validate_content': validate_content, 'cpk_scope_info': cpk_scope_info, 'cpk_info': cpk_info, - 'cls': return_response_headers} - options.update(kwargs) + 'cls': return_response_headers + } + options.update({k: v for k, v in kwargs.items() if v is not None}) return options + def _upload_pages_from_url_options( source_url: str, offset: int, @@ -1018,8 +1096,11 @@ def _upload_pages_from_url_options( cpk = kwargs.pop('cpk', None) cpk_info = None if cpk: - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) 
+ cpk_info = CpkInfo( + encryption_key=cpk.key_value, + encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm + ) options = { 'copy_source_authorization': source_authorization, @@ -1035,10 +1116,12 @@ def _upload_pages_from_url_options( 'source_modified_access_conditions': source_mod_conditions, 'cpk_scope_info': cpk_scope_info, 'cpk_info': cpk_info, - 'cls': return_response_headers} - options.update(kwargs) + 'cls': return_response_headers + } + options.update({k: v for k, v in kwargs.items() if v is not None}) return options + def _clear_page_options( offset: int, length: int, @@ -1061,8 +1144,11 @@ def _clear_page_options( cpk = kwargs.pop('cpk', None) cpk_info = None if cpk: - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) + cpk_info = CpkInfo( + encryption_key=cpk.key_value, + encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm + ) options = { 'content_length': 0, @@ -1072,17 +1158,19 @@ def _clear_page_options( 'sequence_number_access_conditions': seq_conditions, 'modified_access_conditions': mod_conditions, 'cpk_info': cpk_info, - 'cls': return_response_headers} - options.update(kwargs) + 'cls': return_response_headers + } + options.update({k: v for k, v in kwargs.items() if v is not None}) return options + def _append_block_options( data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], length: Optional[int] = None, **kwargs: Any ) -> Dict[str, Any]: if isinstance(data, str): - data = data.encode(kwargs.pop('encoding', 'UTF-8')) + data = data.encode(kwargs.pop('encoding') or 'UTF-8') if length is None: length = get_length(data) if length is None: @@ -1094,7 +1182,7 @@ def _append_block_options( appendpos_condition = kwargs.pop('appendpos_condition', None) maxsize_condition = kwargs.pop('maxsize_condition', None) - validate_content = kwargs.pop('validate_content', False) + validate_content = kwargs.pop('validate_content') or False 
append_conditions = None if maxsize_condition or appendpos_condition is not None: append_conditions = AppendPositionAccessConditions( @@ -1107,8 +1195,11 @@ def _append_block_options( cpk = kwargs.pop('cpk', None) cpk_info = None if cpk: - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) + cpk_info = CpkInfo( + encryption_key=cpk.key_value, + encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm + ) options = { 'body': data, 'content_length': length, @@ -1120,10 +1211,12 @@ def _append_block_options( 'validate_content': validate_content, 'cpk_scope_info': cpk_scope_info, 'cpk_info': cpk_info, - 'cls': return_response_headers} - options.update(kwargs) + 'cls': return_response_headers + } + options.update({k: v for k, v in kwargs.items() if v is not None}) return options + def _append_block_from_url_options( copy_source_url: str, source_offset: Optional[int] = None, @@ -1159,8 +1252,11 @@ def _append_block_from_url_options( cpk = kwargs.pop('cpk', None) cpk_info = None if cpk: - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) + cpk_info = CpkInfo( + encryption_key=cpk.key_value, + encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm + ) options = { 'copy_source_authorization': source_authorization, @@ -1176,10 +1272,12 @@ def _append_block_from_url_options( 'cpk_scope_info': cpk_scope_info, 'cpk_info': cpk_info, 'cls': return_response_headers, - 'timeout': kwargs.pop('timeout', None)} - options.update(kwargs) + 'timeout': kwargs.pop('timeout', None) + } + options.update({k: v for k, v in kwargs.items() if v is not None}) return options + def _seal_append_blob_options(**kwargs: Any) -> Dict[str, Any]: appendpos_condition = kwargs.pop('appendpos_condition', None) append_conditions = None @@ -1195,10 +1293,12 @@ def _seal_append_blob_options(**kwargs: Any) -> Dict[str, Any]: 
'lease_access_conditions': access_conditions, 'append_position_access_conditions': append_conditions, 'modified_access_conditions': mod_conditions, - 'cls': return_response_headers} - options.update(kwargs) + 'cls': return_response_headers + } + options.update({k: v for k, v in kwargs.items() if v is not None}) return options + def _from_blob_url( blob_url: str, snapshot: Optional[Union[BlobProperties, str, Dict[str, Any]]] @@ -1243,4 +1343,83 @@ def _from_blob_url( path_snapshot = snapshot['snapshot'] else: path_snapshot = snapshot - return (account_url, container_name, blob_name, path_snapshot) + return account_url, container_name, blob_name, path_snapshot + + +def _get_blob_properties_options(version_id: Optional[str], snapshot: Optional[str], **kwargs: Any) -> Dict[str, Any]: + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo( + encryption_key=cpk.key_value, + encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm + ) + options = { + 'version_id': version_id, + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'snapshot': snapshot, + 'cpk_info': cpk_info, + } + options.update({k: v for k, v in kwargs.items() if v is not None}) + return options + + +def _set_standard_blob_tier_options( + version_id: Optional[str], + snapshot: Optional[str], + **kwargs: Any +) -> Dict[str, Any]: + access_conditions = get_access_conditions(kwargs.pop("lease", None)) + mod_conditions = get_modify_conditions(kwargs) + standard_blob_tier = kwargs.pop("standard_blob_tier", None) + if standard_blob_tier is None: + raise ValueError("A StandardBlobTier must be specified") + if snapshot and version_id: + raise ValueError("Snapshot and version_id cannot be set at the same time") + options = { + 'version_id': version_id, + 'tier': standard_blob_tier, + 'timeout': 
kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'snapshot': snapshot, + } + options.update({k: v for k, v in kwargs.items() if v is not None}) + return options + + +def _acquire_lease_options(**kwargs: Any): + return {k: v for k, v in kwargs.items() if v is not None} + + +def _get_block_list_options(block_list_type: str, snapshot: Optional[str], **kwargs: Any) -> Dict[str, Any]: + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + + options = { + 'list_type': block_list_type, + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'snapshot': snapshot, + } + options.update({k: v for k, v in kwargs.items() if v is not None}) + return options + + +def _set_premium_page_blob_tier_options(**kwargs: Any) -> Dict[str, Any]: + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + + options = { + 'tier': kwargs.pop('premium_page_blob_tier'), + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + } + options.update({k: v for k, v in kwargs.items() if v is not None}) + return options diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py index 89ad69433eed..f84b9f4e2dad 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py @@ -21,10 +21,12 @@ from azure.core.tracing.decorator import distributed_trace from ._blob_client import BlobClient from ._container_client_helpers import ( + _delete_container_options, _format_url, _generate_delete_blobs_options, _generate_set_tiers_options, - _parse_url + 
_parse_url, + _set_container_metadata_options ) from ._deserialize import deserialize_container_properties from ._download import StorageStreamDownloader @@ -55,11 +57,13 @@ ) if TYPE_CHECKING: + from azure.core import MatchConditions from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential, TokenCredential from azure.core.pipeline.transport import HttpResponse # pylint: disable=C4756 from azure.storage.blob import BlobServiceClient from ._models import ( AccessPolicy, + ContainerEncryptionScope, PremiumPageBlobTier, PublicAccess, StandardBlobTier @@ -139,7 +143,15 @@ def __init__( credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long *, api_version: Optional[str] = None, - # TODO + secondary_hostname: Optional[str] = None, + audience: Optional[str] = None, + max_block_size: int = 4 * 1024 * 1024, + max_page_size: int = 4 * 1024 * 1024, + max_chunk_get_size: int = 4 * 1024 * 1024, + max_single_put_size: int = 64 * 1024 * 1024, + max_single_get_size: int = 32 * 1024 * 1024, + min_large_block_upload_threshold: int = 4 * 1024 * 1024 + 1, + use_byte_buffer: Optional[bool] = None, **kwargs: Any ) -> None: parsed_url, sas_token = _parse_url(account_url=account_url, container_name=container_name) @@ -148,7 +160,21 @@ def __init__( # This parameter is used for the hierarchy traversal. Give precedence to credential. 
self._raw_credential = credential if credential else sas_token self._query_str, credential = self._format_query_string(sas_token, credential) - super(ContainerClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) + super(ContainerClient, self).__init__( + parsed_url, + service='blob', + credential=credential, + secondary_hostname=secondary_hostname, + audience=audience, + max_block_size=max_block_size, + max_page_size=max_page_size, + max_chunk_get_size=max_chunk_get_size, + max_single_put_size=max_single_put_size, + max_single_get_size=max_single_get_size, + min_large_block_upload_threshold=min_large_block_upload_threshold, + use_byte_buffer=use_byte_buffer, + **kwargs + ) self._api_version = get_api_version(api_version) self._client = self._build_generated_client() self._configure_encryption(kwargs) @@ -158,7 +184,7 @@ def _build_generated_client(self) -> AzureBlobStorage: client._config.version = self._api_version # type: ignore [assignment] # pylint: disable=protected-access return client - def _format_url(self, hostname): + def _format_url(self, hostname: str) -> str: return _format_url( container_name=self.container_name, hostname=hostname, @@ -170,6 +196,17 @@ def _format_url(self, hostname): def from_container_url( cls, container_url: str, credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + *, + api_version: Optional[str] = None, + secondary_hostname: Optional[str] = None, + audience: Optional[str] = None, + max_block_size: int = 4 * 1024 * 1024, + max_page_size: int = 4 * 1024 * 1024, + max_chunk_get_size: int = 4 * 1024 * 1024, + max_single_put_size: int = 64 * 1024 * 1024, + max_single_get_size: int = 32 * 1024 * 1024, + min_large_block_upload_threshold: int = 4 * 1024 * 1024 + 1, + use_byte_buffer: Optional[bool] = None, **kwargs: Any ) -> Self: """Create ContainerClient from a container url. 
@@ -193,6 +230,28 @@ def from_container_url( ~azure.core.credentials.AzureSasCredential or ~azure.core.credentials.TokenCredential or str or dict[str, str] or None + :keyword str api_version: + The Storage API version to use for requests. Default value is the most recent service version that is + compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. + + .. versionadded:: 12.2.0 + + :keyword str secondary_hostname: + The hostname of the secondary endpoint. + :keyword int max_block_size: The maximum chunk size for uploading a block blob in chunks. + Defaults to 4*1024*1024, or 4MB. + :keyword int max_single_put_size: If the blob size is less than or equal max_single_put_size, + then the blob will be uploaded with only one http PUT request. + If the blob size is larger than max_single_put_size, + the blob will be uploaded in chunks. Defaults to 64*1024*1024, or 64MB. + :keyword int min_large_block_upload_threshold: The minimum chunk size required to use the memory efficient + algorithm when uploading a block blob. Defaults to 4*1024*1024+1. + :keyword bool use_byte_buffer: Use a byte buffer for block blob uploads. Defaults to False. + :keyword int max_page_size: The maximum chunk size for uploading a page blob. Defaults to 4*1024*1024, or 4MB. + :keyword int max_single_get_size: The maximum size for a blob to be downloaded in a single call, + the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. + :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024, + or 4MB. :keyword str audience: The audience to use when requesting tokens for Azure Active Directory authentication. Only has an effect when credential is of type TokenCredential. The value could be https://storage.azure.com/ (default) or https://.blob.core.windows.net. 
@@ -216,13 +275,39 @@ def from_container_url( container_name = unquote(container_path[-1]) if not container_name: raise ValueError("Invalid URL. Please provide a URL with a valid container name") - return cls(account_url, container_name=container_name, credential=credential, **kwargs) + return cls( + account_url, + container_name=container_name, + credential=credential, + api_version=api_version, + audience=audience, + secondary_hostname=secondary_hostname, + max_block_size=max_block_size, + max_page_size=max_page_size, + max_chunk_get_size=max_chunk_get_size, + max_single_put_size=max_single_put_size, + max_single_get_size=max_single_get_size, + min_large_block_upload_threshold=min_large_block_upload_threshold, + use_byte_buffer=use_byte_buffer, + **kwargs + ) @classmethod def from_connection_string( cls, conn_str: str, container_name: str, credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + *, + api_version: Optional[str] = None, + secondary_hostname: Optional[str] = None, + audience: Optional[str] = None, + max_block_size: int = 4 * 1024 * 1024, + max_page_size: int = 4 * 1024 * 1024, + max_chunk_get_size: int = 4 * 1024 * 1024, + max_single_put_size: int = 64 * 1024 * 1024, + max_single_get_size: int = 32 * 1024 * 1024, + min_large_block_upload_threshold: int = 4 * 1024 * 1024 + 1, + use_byte_buffer: Optional[bool] = None, **kwargs: Any ) -> Self: """Create ContainerClient from a Connection String. @@ -246,6 +331,28 @@ def from_connection_string( ~azure.core.credentials.AzureSasCredential or ~azure.core.credentials.TokenCredential or str or dict[str, str] or None + :keyword str api_version: + The Storage API version to use for requests. Default value is the most recent service version that is + compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. + + .. 
versionadded:: 12.2.0 + + :keyword str secondary_hostname: + The hostname of the secondary endpoint. + :keyword int max_block_size: The maximum chunk size for uploading a block blob in chunks. + Defaults to 4*1024*1024, or 4MB. + :keyword int max_single_put_size: If the blob size is less than or equal max_single_put_size, + then the blob will be uploaded with only one http PUT request. + If the blob size is larger than max_single_put_size, + the blob will be uploaded in chunks. Defaults to 64*1024*1024, or 64MB. + :keyword int min_large_block_upload_threshold: The minimum chunk size required to use the memory efficient + algorithm when uploading a block blob. Defaults to 4*1024*1024+1. + :keyword bool use_byte_buffer: Use a byte buffer for block blob uploads. Defaults to False. + :keyword int max_page_size: The maximum chunk size for uploading a page blob. Defaults to 4*1024*1024, or 4MB. + :keyword int max_single_get_size: The maximum size for a blob to be downloaded in a single call, + the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. + :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024, + or 4MB. :keyword str audience: The audience to use when requesting tokens for Azure Active Directory authentication. Only has an effect when credential is of type TokenCredential. The value could be https://storage.azure.com/ (default) or https://.blob.core.windows.net. @@ -262,15 +369,30 @@ def from_connection_string( :caption: Creating the ContainerClient from a connection string. 
""" account_url, secondary, credential = parse_connection_str(conn_str, credential, 'blob') - if 'secondary_hostname' not in kwargs: - kwargs['secondary_hostname'] = secondary return cls( - account_url, container_name=container_name, credential=credential, **kwargs) + account_url, + container_name=container_name, + credential=credential, + api_version=api_version, + audience=audience, + secondary_hostname=secondary_hostname or secondary, + max_block_size=max_block_size, + max_page_size=max_page_size, + max_chunk_get_size=max_chunk_get_size, + max_single_put_size=max_single_put_size, + max_single_get_size=max_single_get_size, + min_large_block_upload_threshold=min_large_block_upload_threshold, + use_byte_buffer=use_byte_buffer, + **kwargs + ) @distributed_trace def create_container( self, metadata: Optional[Dict[str, str]] = None, public_access: Optional[Union["PublicAccess", str]] = None, + *, + container_encryption_scope: Optional[Union[Dict[str, Any], "ContainerEncryptionScope"]] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Union[str, "datetime"]]: """ @@ -289,7 +411,7 @@ def create_container( .. versionadded:: 12.2.0 - :paramtype container_encryption_scope: dict or ~azure.storage.blob.ContainerEncryptionScope + :paramtype container_encryption_scope: Dict[str, Any] or ~azure.storage.blob.ContainerEncryptionScope :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. @@ -309,22 +431,28 @@ def create_container( :caption: Creating a container to store blobs. 
""" headers = kwargs.pop('headers', {}) - timeout = kwargs.pop('timeout', None) - headers.update(add_metadata_headers(metadata)) # type: ignore - container_cpk_scope_info = get_container_cpk_scope_info(kwargs) + headers.update(add_metadata_headers(metadata)) # type: ignore + container_cpk_scope_info = get_container_cpk_scope_info(container_encryption_scope) try: - return self._client.container.create( # type: ignore + return self._client.container.create( # type: ignore timeout=timeout, access=public_access, container_cpk_scope_info=container_cpk_scope_info, cls=return_response_headers, headers=headers, - **kwargs) + **kwargs + ) except HttpResponseError as error: process_storage_error(error) @distributed_trace - def _rename_container(self, new_name: str, **kwargs: Any) -> "ContainerClient": + def _rename_container( + self, new_name: str, + *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> "ContainerClient": """Renames a container. Operation is successful only if the source container exists. @@ -344,10 +472,9 @@ def _rename_container(self, new_name: str, **kwargs: Any) -> "ContainerClient": :returns: The renamed container client. 
:rtype: ~azure.storage.blob.ContainerClient """ - lease = kwargs.pop('lease', None) - try: + if lease and hasattr(lease, "id"): kwargs['source_lease_id'] = lease.id - except AttributeError: + else: kwargs['source_lease_id'] = lease try: renamed_container = ContainerClient( @@ -356,13 +483,22 @@ def _rename_container(self, new_name: str, **kwargs: Any) -> "ContainerClient": _pipeline=self._pipeline, _location_mode=self._location_mode, _hosts=self._hosts, require_encryption=self.require_encryption, encryption_version=self.encryption_version, key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function) - renamed_container._client.container.rename(self.container_name, **kwargs) # pylint: disable = protected-access + renamed_container._client.container.rename(self.container_name, timeout=timeout, **kwargs) # pylint: disable = protected-access return renamed_container except HttpResponseError as error: process_storage_error(error) @distributed_trace - def delete_container(self, **kwargs: Any) -> None: + def delete_container( + self, *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> None: """ Marks the specified container for deletion. The container and any blobs contained within it are later deleted during garbage collection. @@ -406,23 +542,30 @@ def delete_container(self, **kwargs: Any) -> None: :dedent: 12 :caption: Delete a container. 
""" - lease = kwargs.pop('lease', None) - access_conditions = get_access_conditions(lease) - mod_conditions = get_modify_conditions(kwargs) - timeout = kwargs.pop('timeout', None) + options = _delete_container_options( + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + timeout=timeout, + **kwargs + ) try: - self._client.container.delete( - timeout=timeout, - lease_access_conditions=access_conditions, - modified_access_conditions=mod_conditions, - **kwargs) + self._client.container.delete(**options) except HttpResponseError as error: process_storage_error(error) @distributed_trace def acquire_lease( - self, lease_duration: int =-1, + self, lease_duration: int = -1, lease_id: Optional[str] = None, + *, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> BlobLeaseClient: """ @@ -473,10 +616,18 @@ def acquire_lease( :dedent: 8 :caption: Acquiring a lease on the container. """ - lease = BlobLeaseClient(self, lease_id=lease_id) # type: ignore + lease = BlobLeaseClient(self, lease_id=lease_id) # type: ignore kwargs.setdefault('merge_span', True) - timeout = kwargs.pop('timeout', None) - lease.acquire(lease_duration=lease_duration, timeout=timeout, **kwargs) + if etag is not None: + kwargs['etag'] = etag + lease.acquire( + lease_duration=lease_duration, + timeout=timeout, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + match_condition=match_condition, + **kwargs + ) return lease @distributed_trace @@ -487,7 +638,7 @@ def get_account_information(self, **kwargs: Any) -> Dict[str, str]: The keys in the returned dictionary include 'sku_name' and 'account_kind'. :returns: A dict of account information (SKU and account type). 
- :rtype: dict(str, str) + :rtype: Dict[str, str] """ try: return self._client.container.get_account_info(cls=return_response_headers, **kwargs) # type: ignore @@ -495,7 +646,12 @@ def get_account_information(self, **kwargs: Any) -> Dict[str, str]: process_storage_error(error) @distributed_trace - def get_container_properties(self, **kwargs: Any) -> ContainerProperties: + def get_container_properties( + self, *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> ContainerProperties: """Returns all user-defined metadata and system properties for the specified container. The data returned does not include the container's list of blobs. @@ -521,22 +677,21 @@ def get_container_properties(self, **kwargs: Any) -> ContainerProperties: :dedent: 12 :caption: Getting properties on the container. """ - lease = kwargs.pop('lease', None) access_conditions = get_access_conditions(lease) - timeout = kwargs.pop('timeout', None) try: response = self._client.container.get_properties( timeout=timeout, lease_access_conditions=access_conditions, cls=deserialize_container_properties, - **kwargs) + **kwargs + ) except HttpResponseError as error: process_storage_error(error) response.name = self.container_name - return response # type: ignore + return response # type: ignore @distributed_trace - def exists(self, **kwargs: Any) -> bool: + def exists(self, *, timeout: Optional[int] = None, **kwargs: Any) -> bool: """ Returns True if a container exists and returns False otherwise. 
@@ -550,7 +705,7 @@ def exists(self, **kwargs: Any) -> bool: :rtype: bool """ try: - self._client.container.get_properties(**kwargs) + self._client.container.get_properties(timeout=timeout, **kwargs) return True except HttpResponseError as error: try: @@ -561,8 +716,15 @@ def exists(self, **kwargs: Any) -> bool: @distributed_trace def set_container_metadata( self, metadata: Optional[Dict[str, str]] = None, + *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + timeout: Optional[int] = None, **kwargs: Any - ) -> Dict[str, Union[str, "datetime"]]: + ) -> Dict[str, Union[str, datetime]]: """Sets one or more user-defined name-value pairs for the specified container. Each call to this operation replaces all existing metadata attached to the container. To remove all metadata from the container, @@ -609,20 +771,18 @@ def set_container_metadata( :dedent: 12 :caption: Setting metadata on the container. 
""" - headers = kwargs.pop('headers', {}) - headers.update(add_metadata_headers(metadata)) - lease = kwargs.pop('lease', None) - access_conditions = get_access_conditions(lease) - mod_conditions = get_modify_conditions(kwargs) - timeout = kwargs.pop('timeout', None) + options = _set_container_metadata_options( + metadata, + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + timeout=timeout, + **kwargs + ) try: - return self._client.container.set_metadata( # type: ignore - timeout=timeout, - lease_access_conditions=access_conditions, - modified_access_conditions=mod_conditions, - cls=return_response_headers, - headers=headers, - **kwargs) + return self._client.container.set_metadata(**options) except HttpResponseError as error: process_storage_error(error) @@ -992,7 +1152,7 @@ def upload_blob( should be supplied for optimal performance. :param metadata: Name-value pairs associated with the blob as metadata. - :type metadata: dict(str, str) + :type metadata: Dict[str, str] :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data. If True, upload_blob will overwrite the existing data. If set to False, the operation will fail with ResourceExistsError. 
The exception to the above is with Append @@ -1478,7 +1638,7 @@ def set_standard_blob_tier_blobs( timeout for subrequest: key: 'timeout', value type: int - :type blobs: str or dict(str, Any) or ~azure.storage.blob.BlobProperties + :type blobs: str or Dict[str, Any] or ~azure.storage.blob.BlobProperties :keyword ~azure.storage.blob.RehydratePriority rehydrate_priority: Indicates the priority with which to rehydrate an archived blob :keyword str if_tags_match_condition: @@ -1547,7 +1707,7 @@ def set_premium_page_blob_tier_blobs( timeout for subrequest: key: 'timeout', value type: int - :type blobs: str or dict(str, Any) or ~azure.storage.blob.BlobProperties + :type blobs: str or Dict[str, Any] or ~azure.storage.blob.BlobProperties :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client_helpers.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client_helpers.py index 82edd48dffb8..dfcb1c272617 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client_helpers.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client_helpers.py @@ -12,12 +12,18 @@ from ._blob_client_helpers import _generic_delete_blob_options from ._generated import AzureBlobStorage from ._models import BlobProperties +from ._serialize import get_access_conditions, get_modify_conditions from ._shared.base_client import parse_query +from ._shared.request_handlers import add_metadata_headers +from ._shared.response_handlers import return_response_headers if TYPE_CHECKING: - from azure.storage.blob import RehydratePriority from urllib.parse import ParseResult - from ._generated.models import LeaseAccessConditions, ModifiedAccessConditions + from ._generated.models import ( + LeaseAccessConditions, + ModifiedAccessConditions, + 
RehydratePriority + ) from ._models import PremiumPageBlobTier, StandardBlobTier @@ -227,12 +233,13 @@ def _generate_set_tiers_options( rehydrate_priority = kwargs.pop('rehydrate_priority', None) if_tags = kwargs.pop('if_tags_match_condition', None) url_prepend = kwargs.pop('url_prepend', None) - kwargs.update({'raise_on_any_failure': raise_on_any_failure, - 'sas': query_str.replace('?', '&'), - 'timeout': '&timeout=' + str(timeout) if timeout else "", - 'path': container_name, - 'restype': 'restype=container&' - }) + kwargs.update({ + 'raise_on_any_failure': raise_on_any_failure, + 'sas': query_str.replace('?', '&'), + 'timeout': '&timeout=' + str(timeout) if timeout else "", + 'path': container_name, + 'restype': 'restype=container&' + }) reqs = [] for blob in blobs: @@ -264,3 +271,33 @@ def _generate_set_tiers_options( reqs.append(req) return reqs, kwargs + + +def _delete_container_options(**kwargs: Any) -> Dict[str, Any]: + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + + options = { + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + } + options.update({k: v for k, v in kwargs.items() if v is not None}) + return options + + +def _set_container_metadata_options(metadata: Optional[Dict[str, str]], **kwargs: Any) -> Dict[str, Any]: + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + + options = { + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cls': return_response_headers, + 'headers': headers + } + options.update({k: v for k, v in kwargs.items() if v is not None}) + return options diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_serialize.py 
b/sdk/storage/azure-storage-blob/azure/storage/blob/_serialize.py index f9e6c3e14db6..1e7d7a651f94 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_serialize.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_serialize.py @@ -131,8 +131,9 @@ def get_cpk_scope_info(kwargs: Dict[str, Any]) -> Optional[CpkScopeInfo]: return None -def get_container_cpk_scope_info(kwargs: Dict[str, Any]) -> Optional[ContainerCpkScopeInfo]: - encryption_scope = kwargs.pop('container_encryption_scope', None) +def get_container_cpk_scope_info( + encryption_scope: Optional[Union[Dict[str, Any], ContainerEncryptionScope]] +) -> Optional[ContainerCpkScopeInfo]: if encryption_scope: if isinstance(encryption_scope, ContainerEncryptionScope): return ContainerCpkScopeInfo( diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py index 7cb074487f58..21f34fb47119 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py @@ -3,13 +3,14 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- -# pylint: disable=too-many-lines, docstring-keyword-should-match-keyword-only +# pylint: disable=too-many-lines, too-many-locals import warnings from datetime import datetime from functools import partial from typing import ( - Any, AnyStr, AsyncIterable, cast, Dict, IO, Iterable, List, Optional, overload, Tuple, Union, + Any, AnyStr, AsyncIterable, Awaitable, Callable, cast, Dict, + IO, Iterable, List, Literal, Optional, overload, Tuple, Union, TYPE_CHECKING ) from typing_extensions import Self @@ -31,6 +32,7 @@ from .._blob_client import StorageAccountHostsMixin from .._blob_client_helpers import ( _abort_copy_options, + _acquire_lease_options, _append_block_from_url_options, _append_block_options, _clear_page_options, @@ -42,7 +44,9 @@ _download_blob_options, _format_url, _from_blob_url, + _get_blob_properties_options, _get_blob_tags_options, + _get_block_list_options, _get_block_list_result, _get_page_ranges_options, _parse_url, @@ -51,7 +55,9 @@ _set_blob_metadata_options, _set_blob_tags_options, _set_http_headers_options, + _set_premium_page_blob_tier_options, _set_sequence_number_options, + _set_standard_blob_tier_options, _stage_block_from_url_options, _stage_block_options, _start_copy_from_url_options, @@ -68,18 +74,20 @@ ) from .._encryption import StorageEncryptionMixin, _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION from .._generated.aio import AzureBlobStorage -from .._generated.models import CpkInfo from .._models import BlobType, BlobBlock, BlobProperties, PageRange -from .._serialize import get_access_conditions, get_api_version, get_modify_conditions, get_version_id +from .._serialize import get_api_version from .._shared.base_client_async import AsyncStorageAccountHostsMixin, AsyncTransportWrapper, parse_connection_str from .._shared.policies_async import ExponentialRetry from .._shared.response_handlers import process_storage_error, return_response_headers if TYPE_CHECKING: + 
from azure.core import MatchConditions from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential from azure.core.credentials_async import AsyncTokenCredential from azure.core.pipeline.policies import AsyncHTTPPolicy + from azure.storage.blob import CustomerProvidedEncryptionKey from azure.storage.blob.aio import ContainerClient + from .._generated.models import RehydratePriority from .._models import ( ContentSettings, ImmutabilityPolicy, @@ -156,18 +164,31 @@ class BlobClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin, Storag :caption: Creating the BlobClient from a SAS URL to a blob. """ def __init__( - self, account_url: str, - container_name: str, - blob_name: str, - snapshot: Optional[Union[str, Dict[str, Any]]] = None, - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long - **kwargs: Any + self, account_url: str, + container_name: str, + blob_name: str, + snapshot: Optional[Union[str, Dict[str, Any]]] = None, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + *, + api_version: Optional[str] = None, + secondary_hostname: Optional[str] = None, + version_id: Optional[str] = None, + audience: Optional[str] = None, + max_block_size: int = 4 * 1024 * 1024, + max_page_size: int = 4 * 1024 * 1024, + max_chunk_get_size: int = 4 * 1024 * 1024, + max_single_put_size: int = 64 * 1024 * 1024, + max_single_get_size: int = 32 * 1024 * 1024, + min_large_block_upload_threshold: int = 4 * 1024 * 1024 + 1, + use_byte_buffer: Optional[bool] = None, + **kwargs: Any ) -> None: kwargs['retry_policy'] = kwargs.get('retry_policy') or ExponentialRetry(**kwargs) parsed_url, sas_token, path_snapshot = _parse_url( account_url=account_url, container_name=container_name, - blob_name=blob_name) + blob_name=blob_name + ) 
self.container_name = container_name self.blob_name = blob_name @@ -177,14 +198,28 @@ def __init__( self.snapshot = snapshot['snapshot'] else: self.snapshot = snapshot or path_snapshot - self.version_id = kwargs.pop('version_id', None) + self.version_id = version_id # This parameter is used for the hierarchy traversal. Give precedence to credential. self._raw_credential = credential if credential else sas_token self._query_str, credential = self._format_query_string(sas_token, credential, snapshot=self.snapshot) - super(BlobClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) + super(BlobClient, self).__init__( + parsed_url, + service='blob', + credential=credential, + secondary_hostname=secondary_hostname, + audience=audience, + max_block_size=max_block_size, + max_page_size=max_page_size, + max_chunk_get_size=max_chunk_get_size, + max_single_put_size=max_single_put_size, + max_single_get_size=max_single_get_size, + min_large_block_upload_threshold=min_large_block_upload_threshold, + use_byte_buffer=use_byte_buffer, + **kwargs + ) self._client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline) - self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] + self._client._config.version = get_api_version(api_version) # type: ignore [assignment] self._configure_encryption(kwargs) def _format_url(self, hostname: str) -> str: @@ -201,6 +236,18 @@ def from_blob_url( cls, blob_url: str, credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long snapshot: Optional[Union[str, Dict[str, Any]]] = None, + *, + api_version: Optional[str] = None, + secondary_hostname: Optional[str] = None, + version_id: Optional[str] = None, + audience: Optional[str] = None, + max_block_size: int = 4 * 1024 * 1024, + max_page_size: int = 4 * 1024 * 1024, + max_chunk_get_size: int = 4 * 1024 * 1024, + 
max_single_put_size: int = 64 * 1024 * 1024, + max_single_get_size: int = 32 * 1024 * 1024, + min_large_block_upload_threshold: int = 4 * 1024 * 1024 + 1, + use_byte_buffer: Optional[bool] = None, **kwargs: Any ) -> Self: """Create BlobClient from a blob url. This doesn't support customized blob url with '/' in blob name. @@ -224,18 +271,55 @@ def from_blob_url( The optional blob snapshot on which to operate. This can be the snapshot ID string or the response returned from :func:`create_snapshot`. If specified, this will override the snapshot in the url. + :keyword str api_version: + The Storage API version to use for requests. Default value is the most recent service version that is + compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. + + .. versionadded:: 12.2.0 + + :keyword str secondary_hostname: + The hostname of the secondary endpoint. :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. :keyword str audience: The audience to use when requesting tokens for Azure Active Directory authentication. Only has an effect when credential is of type TokenCredential. The value could be https://storage.azure.com/ (default) or https://.blob.core.windows.net. + :keyword int max_block_size: The maximum chunk size for uploading a block blob in chunks. + Defaults to 4*1024*1024, or 4MB. + :keyword int max_single_put_size: + If the blob size is less than or equal max_single_put_size, then the blob will be + uploaded with only one http PUT request. If the blob size is larger than max_single_put_size, + the blob will be uploaded in chunks. Defaults to 64*1024*1024, or 64MB. + :keyword int min_large_block_upload_threshold: The minimum chunk size required to use the memory efficient + algorithm when uploading a block blob. Defaults to 4*1024*1024+1. + :keyword bool use_byte_buffer: Use a byte buffer for block blob uploads. 
Defaults to False. + :keyword int max_page_size: The maximum chunk size for uploading a page blob. Defaults to 4*1024*1024, or 4MB. + :keyword int max_single_get_size: The maximum size for a blob to be downloaded in a single call, + the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. + :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. + Defaults to 4*1024*1024, or 4MB. :returns: A Blob client. :rtype: ~azure.storage.blob.BlobClient """ account_url, container_name, blob_name, path_snapshot = _from_blob_url(blob_url=blob_url, snapshot=snapshot) return cls( - account_url, container_name=container_name, blob_name=blob_name, - snapshot=path_snapshot, credential=credential, **kwargs + account_url, + container_name=container_name, + blob_name=blob_name, + snapshot=path_snapshot, + credential=credential, + version_id=version_id, + audience=audience, + api_version=api_version, + secondary_hostname=secondary_hostname, + max_block_size=max_block_size, + max_page_size=max_page_size, + max_chunk_get_size=max_chunk_get_size, + max_single_put_size=max_single_put_size, + max_single_get_size=max_single_get_size, + min_large_block_upload_threshold=min_large_block_upload_threshold, + use_byte_buffer=use_byte_buffer, + **kwargs ) @classmethod @@ -245,6 +329,18 @@ def from_connection_string( blob_name: str, snapshot: Optional[Union[str, Dict[str, Any]]] = None, credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + *, + api_version: Optional[str] = None, + secondary_hostname: Optional[str] = None, + version_id: Optional[str] = None, + audience: Optional[str] = None, + max_block_size: int = 4 * 1024 * 1024, + max_page_size: int = 4 * 1024 * 1024, + max_chunk_get_size: int = 4 * 1024 * 1024, + max_single_put_size: int = 64 * 1024 * 1024, + max_single_get_size: int = 32 * 1024 * 1024, + 
min_large_block_upload_threshold: int = 4 * 1024 * 1024 + 1, + use_byte_buffer: Optional[bool] = None, **kwargs: Any ) -> Self: """Create BlobClient from a Connection String. @@ -268,11 +364,33 @@ def from_connection_string( If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" should be the storage account key. :type credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] # pylint: disable=line-too-long + :keyword str api_version: + The Storage API version to use for requests. Default value is the most recent service version that is + compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. + + .. versionadded:: 12.2.0 + + :keyword str secondary_hostname: + The hostname of the secondary endpoint. :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. :keyword str audience: The audience to use when requesting tokens for Azure Active Directory authentication. Only has an effect when credential is of type TokenCredential. The value could be https://storage.azure.com/ (default) or https://.blob.core.windows.net. + :keyword int max_block_size: The maximum chunk size for uploading a block blob in chunks. + Defaults to 4*1024*1024, or 4MB. + :keyword int max_single_put_size: + If the blob size is less than or equal max_single_put_size, then the blob will be + uploaded with only one http PUT request. If the blob size is larger than max_single_put_size, + the blob will be uploaded in chunks. Defaults to 64*1024*1024, or 64MB. + :keyword int min_large_block_upload_threshold: The minimum chunk size required to use the memory efficient + algorithm when uploading a block blob. Defaults to 4*1024*1024+1. + :keyword bool use_byte_buffer: Use a byte buffer for block blob uploads. Defaults to False. 
+ :keyword int max_page_size: The maximum chunk size for uploading a page blob. Defaults to 4*1024*1024, or 4MB. + :keyword int max_single_get_size: The maximum size for a blob to be downloaded in a single call, + the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. + :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. + Defaults to 4*1024*1024, or 4MB. :returns: A Blob client. :rtype: ~azure.storage.blob.BlobClient @@ -286,11 +404,24 @@ def from_connection_string( :caption: Creating the BlobClient from a connection string. """ account_url, secondary, credential = parse_connection_str(conn_str, credential, 'blob') - if 'secondary_hostname' not in kwargs: - kwargs['secondary_hostname'] = secondary return cls( - account_url, container_name=container_name, blob_name=blob_name, - snapshot=snapshot, credential=credential, **kwargs + account_url, + container_name=container_name, + blob_name=blob_name, + snapshot=snapshot, + credential=credential, + version_id=version_id, + audience=audience, + api_version=api_version, + secondary_hostname=secondary or secondary_hostname, + max_block_size=max_block_size, + max_page_size=max_page_size, + max_chunk_get_size=max_chunk_get_size, + max_single_put_size=max_single_put_size, + max_single_get_size=max_single_get_size, + min_large_block_upload_threshold=min_large_block_upload_threshold, + use_byte_buffer=use_byte_buffer, + **kwargs ) @distributed_trace_async @@ -301,11 +432,13 @@ async def get_account_information(self, **kwargs: Any) -> Dict[str, str]: The keys in the returned dictionary include 'sku_name' and 'account_kind'. :returns: A dict of account information (SKU and account type). 
- :rtype: dict(str, str) + :rtype: Dict[str, str] """ try: - return cast(Dict[str, str], - await self._client.blob.get_account_info(cls=return_response_headers, **kwargs)) + return cast( + Dict[str, str], + await self._client.blob.get_account_info(cls=return_response_headers, **kwargs) + ) except HttpResponseError as error: process_storage_error(error) @@ -314,6 +447,25 @@ async def upload_blob_from_url( self, source_url: str, *, metadata: Optional[Dict[str, str]] = None, + overwrite: Optional[bool] = None, + include_source_blob_properties: bool = True, + tags: Optional[Dict[str, str]] = None, + source_content_md5: Optional[bytearray] = None, + source_if_modified_since: Optional[datetime] = None, + source_if_unmodified_since: Optional[datetime] = None, + source_etag: Optional[str] = None, + source_match_condition: Optional["MatchConditions"] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + destination_lease: Optional[Union[BlobLeaseClient, str]] = None, + timeout: Optional[int] = None, + content_settings: Optional["ContentSettings"] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + encryption_scope: Optional[str] = None, + standard_blob_tier: Optional["StandardBlobTier"] = None, + source_authorization: Optional[str] = None, **kwargs: Any ) -> Dict[str, Any]: """ @@ -332,7 +484,7 @@ async def upload_blob_from_url( https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= https://otheraccount.blob.core.windows.net/mycontainer/myblob?sastoken - :keyword dict(str, str) metadata: + :keyword Dict[str, str] metadata: Name-value pairs associated with the blob as metadata. :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data. If True, upload_blob will overwrite the existing data. 
If set to False, the @@ -345,7 +497,7 @@ async def upload_blob_from_url( and tag values must be between 0 and 256 characters. Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) - :paramtype tags: dict(str, str) + :paramtype tags: Dict[str, str] :keyword bytearray source_content_md5: Specify the md5 that is used to verify the integrity of the source bytes. :keyword ~datetime.datetime source_if_modified_since: @@ -415,12 +567,32 @@ async def upload_blob_from_url( :returns: Response from creating a new block blob for a given URL. :rtype: Dict[str, Any] """ - if kwargs.get('cpk') and self.scheme.lower() != 'https': + if cpk and self.scheme.lower() != 'https': raise ValueError("Customer provided encryption key must be used over HTTPS.") options = _upload_blob_from_url_options( source_url=source_url, metadata=metadata, - **kwargs) + overwrite=overwrite, + include_source_blob_properties=include_source_blob_properties, + tags=tags, + source_content_md5=source_content_md5, + source_if_modified_since=source_if_modified_since, + source_if_unmodified_since=source_if_unmodified_since, + source_etag=source_etag, + source_match_condition=source_match_condition, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + destination_lease=destination_lease, + timeout=timeout, + content_settings=content_settings, + cpk=cpk, + encryption_scope=encryption_scope, + standard_blob_tier=standard_blob_tier, + source_authorization=source_authorization, + **kwargs + ) try: return cast(Dict[str, Any], await self._client.block_blob.put_blob_from_url(**options)) except HttpResponseError as error: @@ -432,6 +604,27 @@ async def upload_blob( blob_type: Union[str, BlobType] = BlobType.BLOCKBLOB, length: Optional[int] = None, metadata: Optional[Dict[str, str]] = None, + tags: 
Optional[Dict[str, str]] = None, + overwrite: bool = False, + content_settings: Optional["ContentSettings"] = None, + validate_content: bool = False, + lease: Optional[BlobLeaseClient] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_conditions: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + premium_page_blob_tier: Optional["PremiumPageBlobTier"] = None, + immutability_policy: Optional["ImmutabilityPolicy"] = None, + legal_hold: Optional[bool] = None, + standard_blob_tier: Optional["StandardBlobTier"] = None, + maxsize_condition: Optional[int] = None, + max_concurrency: int = 1, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + encryption_scope: Optional[str] = None, + encoding: str = 'UTF-8', + progress_hook: Optional[Callable[[int, Optional[int]], Awaitable[None]]] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Any]: """Creates a new blob from a data source with automatic chunking. @@ -445,7 +638,7 @@ async def upload_blob( should be supplied for optimal performance. :param metadata: Name-value pairs associated with the blob as metadata. - :type metadata: dict(str, str) + :type metadata: Dict[str, str] :keyword tags: Name-value pairs associated with the blob as tag. Tags are case-sensitive. The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, @@ -455,7 +648,7 @@ async def upload_blob( .. versionadded:: 12.4.0 - :paramtype tags: dict(str, str) + :paramtype tags: Dict[str, str] :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data. If True, upload_blob will overwrite the existing data. If set to False, the operation will fail with ResourceExistsError. The exception to the above is with Append @@ -563,7 +756,7 @@ async def upload_blob( multiple calls to the Azure service and the timeout will apply to each call individually. 
:returns: Blob-updated property dict (Etag and last modified) - :rtype: dict[str, Any] + :rtype: Dict[str, Any] .. admonition:: Example: @@ -576,13 +769,34 @@ async def upload_blob( """ if self.require_encryption and not self.key_encryption_key: raise ValueError("Encryption required but no key was provided.") - if kwargs.get('cpk') and self.scheme.lower() != 'https': + if cpk and self.scheme.lower() != 'https': raise ValueError("Customer provided encryption key must be used over HTTPS.") options = _upload_blob_options( data=data, blob_type=blob_type, length=length, metadata=metadata, + tags=tags, + overwrite=overwrite, + content_settings=content_settings, + validate_content=validate_content, + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_conditions=match_conditions, + if_tags_match_condition=if_tags_match_condition, + premium_page_blob_tier=premium_page_blob_tier, + immutability_policy=immutability_policy, + legal_hold=legal_hold, + standard_blob_tier=standard_blob_tier, + maxsize_condition=maxsize_condition, + max_concurrency=max_concurrency, + cpk=cpk, + encryption_scope=encryption_scope, + encoding=encoding, + progress_hook=progress_hook, + timeout=timeout, encryption_options={ 'required': self.require_encryption, 'version': self.encryption_version, @@ -592,7 +806,8 @@ async def upload_blob( config=self._config, sdk_moniker=self._sdk_moniker, client=self._client, - **kwargs) + **kwargs + ) if blob_type == BlobType.BlockBlob: return cast(Dict[str, Any], await upload_block_blob(**options)) if blob_type == BlobType.PageBlob: @@ -604,7 +819,19 @@ async def download_blob( self, offset: Optional[int] = None, length: Optional[int] = None, *, + version_id: Optional[str] = None, + validate_content: bool = False, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + 
match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + max_concurrency: int = 1, encoding: str, + progress_hook: Optional[Callable[[int, int], Awaitable[None]]] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> StorageStreamDownloader[str]: ... @@ -614,7 +841,19 @@ async def download_blob( self, offset: Optional[int] = None, length: Optional[int] = None, *, + version_id: Optional[str] = None, + validate_content: bool = False, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + max_concurrency: int = 1, encoding: None = None, + progress_hook: Optional[Callable[[int, int], Awaitable[None]]] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> StorageStreamDownloader[bytes]: ... @@ -624,7 +863,19 @@ async def download_blob( self, offset: Optional[int] = None, length: Optional[int] = None, *, - encoding: Union[str, None] = None, + version_id: Optional[str] = None, + validate_content: bool = False, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + max_concurrency: int = 1, + encoding: Optional[str] = None, + progress_hook: Optional[Callable[[int, int], Awaitable[None]]] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Union[StorageStreamDownloader[str], StorageStreamDownloader[bytes]]: """Downloads a blob to the StorageStreamDownloader. 
The readall() method must @@ -728,10 +979,21 @@ async def download_blob( options = _download_blob_options( blob_name=self.blob_name, container_name=self.container_name, - version_id=get_version_id(self.version_id, kwargs), + version_id=version_id or self.version_id, offset=offset, length=length, encoding=encoding, + validate_content=validate_content, + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + cpk=cpk, + max_concurrency=max_concurrency, + progress_hook=progress_hook, + timeout=timeout, encryption_options={ 'required': self.require_encryption, 'version': self.encryption_version, @@ -741,13 +1003,26 @@ async def download_blob( config=self._config, sdk_moniker=self._sdk_moniker, client=self._client, - **kwargs) + **kwargs + ) downloader = StorageStreamDownloader(**options) await downloader._setup() # pylint: disable=protected-access return downloader @distributed_trace_async - async def delete_blob(self, delete_snapshots: Optional[str] = None, **kwargs: Any) -> None: + async def delete_blob( + self, delete_snapshots: Optional[str] = None, + *, + version_id: Optional[str] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> None: """Marks the specified blob for deletion. The blob is later deleted during garbage collection. 
@@ -820,16 +1095,24 @@ async def delete_blob(self, delete_snapshots: Optional[str] = None, **kwargs: An """ options = _delete_blob_options( snapshot=self.snapshot, - version_id=get_version_id(self.version_id, kwargs), + version_id=version_id or self.version_id, delete_snapshots=delete_snapshots, - **kwargs) + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + timeout=timeout, + **kwargs + ) try: await self._client.blob.delete(**options) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async - async def undelete_blob(self, **kwargs: Any) -> None: + async def undelete_blob(self, *, timeout: Optional[int] = None, **kwargs: Any) -> None: """Restores soft-deleted blobs or snapshots. Operation will only be successful if used within the specified number of days @@ -857,12 +1140,12 @@ async def undelete_blob(self, **kwargs: Any) -> None: :caption: Undeleting a blob. """ try: - await self._client.blob.undelete(timeout=kwargs.pop('timeout', None), **kwargs) + await self._client.blob.undelete(timeout=timeout, **kwargs) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async - async def exists(self, **kwargs: Any) -> bool: + async def exists(self, *, version_id: Optional[str] = None, timeout: Optional[int] = None, **kwargs: Any) -> bool: """ Returns True if a blob exists with the defined parameters, and returns False otherwise. 
@@ -879,12 +1162,13 @@ async def exists(self, **kwargs: Any) -> bool: :returns: boolean :rtype: bool """ - version_id = get_version_id(self.version_id, kwargs) try: await self._client.blob.get_properties( snapshot=self.snapshot, - version_id=version_id, - **kwargs) + version_id=version_id or self.version_id, + timeout=timeout, + **kwargs + ) return True # Encrypted with CPK except ResourceExistsError: @@ -896,7 +1180,19 @@ async def exists(self, **kwargs: Any) -> bool: return False @distributed_trace_async - async def get_blob_properties(self, **kwargs: Any) -> BlobProperties: + async def get_blob_properties( + self, *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + version_id: Optional[str] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> BlobProperties: """Returns all user-defined metadata, standard HTTP properties, and system properties for the blob. It does not return the content of the blob. @@ -958,29 +1254,29 @@ async def get_blob_properties(self, **kwargs: Any) -> BlobProperties: :dedent: 12 :caption: Getting the properties for a blob. 
""" - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - version_id = get_version_id(self.version_id, kwargs) - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) + if cpk and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _get_blob_properties_options( + lease=lease, + version_id=version_id or self.version_id, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + cpk=cpk, + snapshot=self.snapshot, + timeout=timeout, + **kwargs + ) try: - cls_method = kwargs.pop('cls', None) + cls_method = options.pop('cls', None) if cls_method: - kwargs['cls'] = partial(deserialize_pipeline_response_into_cls, cls_method) + options['cls'] = partial(deserialize_pipeline_response_into_cls, cls_method) blob_props = await self._client.blob.get_properties( - timeout=kwargs.pop('timeout', None), - version_id=version_id, - snapshot=self.snapshot, - lease_access_conditions=access_conditions, - modified_access_conditions=mod_conditions, - cls=kwargs.pop('cls', None) or deserialize_blob_properties, - cpk_info=cpk_info, - **kwargs) + cls=options.pop('cls', None) or deserialize_blob_properties, + **options + ) except HttpResponseError as error: process_storage_error(error) blob_props.name = self.blob_name @@ -992,6 +1288,14 @@ async def get_blob_properties(self, **kwargs: Any) -> BlobProperties: @distributed_trace_async async def set_http_headers( self, content_settings: Optional["ContentSettings"] = None, + *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + 
if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Any]: """Sets system properties on the blob. @@ -1037,7 +1341,17 @@ async def set_http_headers( :returns: Blob-updated property dict (Etag and last modified) :rtype: Dict[str, Any] """ - options = _set_http_headers_options(content_settings=content_settings, **kwargs) + options = _set_http_headers_options( + content_settings=content_settings, + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + timeout=timeout, + **kwargs + ) try: return cast(Dict[str, Any], await self._client.blob.set_http_headers(**options)) except HttpResponseError as error: @@ -1046,6 +1360,16 @@ async def set_http_headers( @distributed_trace_async async def set_blob_metadata( self, metadata: Optional[Dict[str, str]] = None, + *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + encryption_scope: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Union[str, datetime]]: """Sets user-defined metadata for the blob as one or more name-value pairs. @@ -1054,7 +1378,7 @@ async def set_blob_metadata( Dict containing name and value pairs. Each call to this operation replaces all existing metadata attached to the blob. To remove all metadata from the blob, call this operation with no metadata headers. 
- :type metadata: dict(str, str) + :type metadata: Dict[str, str] :keyword lease: Required if the blob has an active lease. Value can be a BlobLeaseClient object or the lease ID as a string. @@ -1104,9 +1428,21 @@ async def set_blob_metadata( :returns: Blob-updated property dict (Etag and last modified) :rtype: Dict[str, Union[str, datetime]] """ - if kwargs.get('cpk') and self.scheme.lower() != 'https': + if cpk and self.scheme.lower() != 'https': raise ValueError("Customer provided encryption key must be used over HTTPS.") - options = _set_blob_metadata_options(metadata=metadata, **kwargs) + options = _set_blob_metadata_options( + metadata=metadata, + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + cpk=cpk, + encryption_scope=encryption_scope, + timeout=timeout, + **kwargs + ) try: return cast(Dict[str, Union[str, datetime]], await self._client.blob.set_metadata(**options)) except HttpResponseError as error: @@ -1115,6 +1451,9 @@ async def set_blob_metadata( @distributed_trace_async async def set_immutability_policy( self, immutability_policy: "ImmutabilityPolicy", + *, + version_id: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, str]: """The Set Immutability Policy operation sets the immutability policy on the blob. @@ -1140,15 +1479,22 @@ async def set_immutability_policy( :returns: Key value pairs of blob tags. 
:rtype: Dict[str, str] """ - - version_id = get_version_id(self.version_id, kwargs) kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time kwargs['immutability_policy_mode'] = immutability_policy.policy_mode return cast(Dict[str, str], await self._client.blob.set_immutability_policy( - cls=return_response_headers,version_id=version_id, **kwargs)) + cls=return_response_headers, + version_id=version_id or self.version_id, + timeout=timeout, + **kwargs + )) @distributed_trace_async - async def delete_immutability_policy(self, **kwargs: Any) -> None: + async def delete_immutability_policy( + self, *, + version_id: Optional[str] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> None: """The Delete Immutability Policy operation deletes the immutability policy on the blob. .. versionadded:: 12.10.0 @@ -1166,12 +1512,20 @@ async def delete_immutability_policy(self, **kwargs: Any) -> None: :returns: Key value pairs of blob tags. :rtype: Dict[str, str] """ - - version_id = get_version_id(self.version_id, kwargs) - await self._client.blob.delete_immutability_policy(version_id=version_id, **kwargs) + await self._client.blob.delete_immutability_policy( + version_id=version_id or self.version_id, + timeout=timeout, + **kwargs + ) @distributed_trace_async - async def set_legal_hold(self, legal_hold: bool, **kwargs: Any) -> Dict[str, Union[str, datetime, bool]]: + async def set_legal_hold( + self, legal_hold: bool, + *, + version_id: Optional[str] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime, bool]]: """The Set Legal Hold operation sets a legal hold on the blob. .. versionadded:: 12.10.0 @@ -1191,10 +1545,13 @@ async def set_legal_hold(self, legal_hold: bool, **kwargs: Any) -> Dict[str, Uni :returns: Key value pairs of blob tags. 
:rtype: Dict[str, Union[str, datetime, bool]] """ - - version_id = get_version_id(self.version_id, kwargs) return cast(Dict[str, Union[str, datetime, bool]], await self._client.blob.set_legal_hold( - legal_hold, version_id=version_id, cls=return_response_headers, **kwargs)) + legal_hold, + version_id=version_id or self.version_id, + timeout=timeout, + cls=return_response_headers, + **kwargs + )) @distributed_trace_async async def create_page_blob( @@ -1202,6 +1559,19 @@ async def create_page_blob( content_settings: Optional["ContentSettings"] = None, metadata: Optional[Dict[str, str]] = None, premium_page_blob_tier: Optional[Union[str, "PremiumPageBlobTier"]] = None, + *, + tags: Optional[Dict[str, str]] = None, + sequence_number: Optional[int] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + immutability_policy: Optional["ImmutabilityPolicy"] = None, + legal_hold: Optional[bool] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + encryption_scope: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Union[str, datetime]]: """Creates a new Page Blob of the specified size. @@ -1214,7 +1584,7 @@ async def create_page_blob( language, disposition, md5, and cache control. :param metadata: Name-value pairs associated with the blob as metadata. - :type metadata: dict(str, str) + :type metadata: Dict[str, str] :param ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier: A page blob tier value to set the blob to. The tier correlates to the size of the blob and number of allowed IOPS. This is only applicable to page blobs on @@ -1228,7 +1598,7 @@ async def create_page_blob( .. versionadded:: 12.4.0 - :paramtype tags: dict(str, str) + :paramtype tags: Dict[str, str] :keyword int sequence_number: Only for Page blobs. 
The sequence number is a user-controlled value that you can use to track requests. The value of the sequence number must be between 0 @@ -1286,18 +1656,31 @@ async def create_page_blob( see `here `__. :returns: Blob-updated property dict (Etag and last modified). - :rtype: dict[str, Any] + :rtype: Dict[str, Any] """ if self.require_encryption or (self.key_encryption_key is not None): raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - if kwargs.get('cpk') and self.scheme.lower() != 'https': + if cpk and self.scheme.lower() != 'https': raise ValueError("Customer provided encryption key must be used over HTTPS.") options = _create_page_blob_options( size=size, content_settings=content_settings, metadata=metadata, premium_page_blob_tier=premium_page_blob_tier, - **kwargs) + tags=tags, + sequence_number=sequence_number, + lease=lease, + immutability_policy=immutability_policy, + legal_hold=legal_hold, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + cpk=cpk, + encryption_scope=encryption_scope, + timeout=timeout, + **kwargs + ) try: return cast(Dict[str, Any], await self._client.page_blob.create(**options)) except HttpResponseError as error: @@ -1307,6 +1690,18 @@ async def create_page_blob( async def create_append_blob( self, content_settings: Optional["ContentSettings"] = None, metadata: Optional[Dict[str, str]] = None, + *, + tags: Optional[Dict[str, str]] = None, + immutability_policy: Optional["ImmutabilityPolicy"] = None, + legal_hold: Optional[bool] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + encryption_scope: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Union[str, datetime]]: 
"""Creates a new Append Blob. This operation creates a new 0-length append blob. The content @@ -1318,7 +1713,7 @@ async def create_append_blob( language, disposition, md5, and cache control. :param metadata: Name-value pairs associated with the blob as metadata. - :type metadata: dict(str, str) + :type metadata: Dict[str, str] :keyword tags: Name-value pairs associated with the blob as tag. Tags are case-sensitive. The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, @@ -1328,7 +1723,7 @@ async def create_append_blob( .. versionadded:: 12.4.0 - :paramtype tags: dict(str, str) + :paramtype tags: Dict[str, str] :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: Specifies the immutability policy of a blob, blob snapshot or blob version. @@ -1382,16 +1777,28 @@ async def create_append_blob( see `here `__. :returns: Blob-updated property dict (Etag and last modified). - :rtype: dict[str, Any] + :rtype: Dict[str, Any] """ if self.require_encryption or (self.key_encryption_key is not None): raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - if kwargs.get('cpk') and self.scheme.lower() != 'https': + if cpk and self.scheme.lower() != 'https': raise ValueError("Customer provided encryption key must be used over HTTPS.") options = _create_append_blob_options( content_settings=content_settings, metadata=metadata, - **kwargs) + tags=tags, + immutability_policy=immutability_policy, + legal_hold=legal_hold, + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + cpk=cpk, + encryption_scope=encryption_scope, + timeout=timeout, + **kwargs + ) try: return cast(Dict[str, Union[str, datetime]], await self._client.append_blob.create(**options)) except HttpResponseError as error: @@ -1400,6 +1807,16 @@ async def create_append_blob( @distributed_trace_async async def create_snapshot( self, metadata: Optional[Dict[str, str]] = None, + 
*, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + encryption_scope: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Union[str, datetime]]: """Creates a snapshot of the blob. @@ -1414,7 +1831,7 @@ async def create_snapshot( :param metadata: Name-value pairs associated with the blob as metadata. - :type metadata: dict(str, str) + :type metadata: Dict[str, str] :keyword ~datetime.datetime if_modified_since: A DateTime value. Azure expects the date value passed in to be UTC. If timezone is included, any non-UTC datetimes will be converted to UTC. @@ -1462,7 +1879,7 @@ async def create_snapshot( see `here `__. :returns: Blob-updated property dict (Snapshot ID, Etag, and last modified). - :rtype: dict[str, Any] + :rtype: Dict[str, Any] .. admonition:: Example: @@ -1473,9 +1890,21 @@ async def create_snapshot( :dedent: 12 :caption: Create a snapshot of the blob. 
""" - if kwargs.get('cpk') and self.scheme.lower() != 'https': + if cpk and self.scheme.lower() != 'https': raise ValueError("Customer provided encryption key must be used over HTTPS.") - options = _create_snapshot_options(metadata=metadata, **kwargs) + options = _create_snapshot_options( + metadata=metadata, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + lease=lease, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + cpk=cpk, + encryption_scope=encryption_scope, + timeout=timeout, + **kwargs + ) try: return cast(Dict[str, Any], await self._client.blob.create_snapshot(**options)) except HttpResponseError as error: @@ -1486,6 +1915,29 @@ async def start_copy_from_url( self, source_url: str, metadata: Optional[Dict[str, str]] = None, incremental_copy: bool = False, + *, + tags: Optional[Union[Dict[str, str], Literal["COPY"]]] = None, + immutability_policy: Optional["ImmutabilityPolicy"] = None, + legal_hold: Optional[bool] = None, + source_if_modified_since: Optional[datetime] = None, + source_if_unmodified_since: Optional[datetime] = None, + source_etag: Optional[str] = None, + source_match_condition: Optional["MatchConditions"] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + destination_lease: Optional[Union[BlobLeaseClient, str]] = None, + source_lease: Optional[Union[BlobLeaseClient, str]] = None, + premium_page_blob_tier: Optional["PremiumPageBlobTier"] = None, + standard_blob_tier: Optional["StandardBlobTier"] = None, + rehydrate_priority: Optional["RehydratePriority"] = None, + seal_destination_blob: Optional[bool] = None, + requires_sync: Optional[bool] = None, + source_authorization: Optional[str] = None, + encryption_scope: Optional[str] = None, + timeout: Optional[int] = None, 
**kwargs: Any ) -> Dict[str, Union[str, datetime]]: """Copies a blob from the given URL. @@ -1536,7 +1988,7 @@ async def start_copy_from_url( source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. - :type metadata: dict(str, str) + :type metadata: Dict[str, str] :param bool incremental_copy: Copies the snapshot of the source page blob to a destination page blob. The snapshot is copied such that only the differential changes between @@ -1555,7 +2007,7 @@ async def start_copy_from_url( .. versionadded:: 12.4.0 - :paramtype tags: dict(str, str) or Literal["COPY"] + :paramtype tags: Dict[str, str] or Literal["COPY"] :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: Specifies the immutability policy of a blob, blob snapshot or blob version. @@ -1621,12 +2073,6 @@ async def start_copy_from_url( Specify this to perform the Copy Blob operation only if the lease ID given matches the active lease ID of the source blob. :paramtype source_lease: ~azure.storage.blob.aio.BlobLeaseClient or str - :keyword int timeout: - Sets the server-side timeout for the operation in seconds. For more details see - https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. - This value is not tracked or validated on the client. To configure client-side network timesouts - see `here `__. :keyword ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier: A page blob tier value to set the blob to. The tier correlates to the size of the blob and number of allowed IOPS. This is only applicable to page blobs on @@ -1658,6 +2104,12 @@ async def start_copy_from_url( .. versionadded:: 12.10.0 + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. 
For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: A dictionary of copy properties (etag, last_modified, copy_id, copy_status). :rtype: dict[str, Union[str, ~datetime.datetime]] @@ -1674,7 +2126,30 @@ async def start_copy_from_url( source_url=source_url, metadata=metadata, incremental_copy=incremental_copy, - **kwargs) + tags=tags, + immutability_policy=immutability_policy, + legal_hold=legal_hold, + source_if_modified_since=source_if_modified_since, + source_if_unmodified_since=source_if_unmodified_since, + source_etag=source_etag, + source_match_condition=source_match_condition, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + destination_lease=destination_lease, + source_lease=source_lease, + premium_page_blob_tier=premium_page_blob_tier, + standard_blob_tier=standard_blob_tier, + rehydrate_priority=rehydrate_priority, + seal_destination_blob=seal_destination_blob, + requires_sync=requires_sync, + source_authorization=source_authorization, + encryption_scope=encryption_scope, + timeout=timeout, + **kwargs + ) try: if incremental_copy: return cast(Dict[str, Union[str, datetime]], await self._client.page_blob.copy_incremental(**options)) @@ -1715,8 +2190,15 @@ async def abort_copy( @distributed_trace_async async def acquire_lease( - self, lease_duration: int =-1, + self, lease_duration: int = -1, lease_id: Optional[str] = None, + *, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> BlobLeaseClient: """Requests 
a new lease. @@ -1775,11 +2257,30 @@ async def acquire_lease( :caption: Acquiring a lease on a blob. """ lease = BlobLeaseClient(self, lease_id=lease_id) - await lease.acquire(lease_duration=lease_duration, **kwargs) + options = _acquire_lease_options( + lease_duration=lease_duration, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + timeout=timeout, + **kwargs + ) + await lease.acquire(**options) return lease @distributed_trace_async - async def set_standard_blob_tier(self, standard_blob_tier: Union[str, "StandardBlobTier"], **kwargs: Any) -> None: + async def set_standard_blob_tier( + self, standard_blob_tier: Union[str, "StandardBlobTier"], + *, + rehydrate_priority: Optional["RehydratePriority"] = None, + version_id: Optional[str] = None, + if_tags_match_condition: Optional[str] = None, + lease: Optional[Union["BlobLeaseClient", str]] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> None: """This operation sets the tier on a block blob. A block blob's tier determines Hot/Cool/Archive storage type. @@ -1795,37 +2296,43 @@ async def set_standard_blob_tier(self, standard_blob_tier: Union[str, "StandardB :type standard_blob_tier: str or ~azure.storage.blob.StandardBlobTier :keyword ~azure.storage.blob.RehydratePriority rehydrate_priority: Indicates the priority with which to rehydrate an archived blob + :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to download. + + .. versionadded:: 12.4.0 + + This keyword argument was introduced in API version '2019-12-12'. :keyword str if_tags_match_condition: Specify a SQL where clause on blob tags to operate only on blob with a matching value. eg. ``\"\\\"tagname\\\"='my tag'\"`` .. versionadded:: 12.4.0 + :keyword lease: + Required if the blob has an active lease. 
Value can be a BlobLeaseClient object + or the lease ID as a string. :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. This value is not tracked or validated on the client. To configure client-side network timesouts see `here `__. - :keyword lease: - Required if the blob has an active lease. Value can be a BlobLeaseClient object - or the lease ID as a string. :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str :rtype: None """ - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - version_id = get_version_id(self.version_id, kwargs) - if standard_blob_tier is None: - raise ValueError("A StandardBlobTier must be specified") + options = _set_standard_blob_tier_options( + version_id or self.version_id, + self.snapshot, + standard_blob_tier=standard_blob_tier, + timeout=timeout, + lease=lease, + rehydrate_priority=rehydrate_priority, + if_tags_match_condition=if_tags_match_condition, + **kwargs + ) try: - await self._client.blob.set_tier( - tier=standard_blob_tier, - timeout=kwargs.pop('timeout', None), - modified_access_conditions=mod_conditions, - lease_access_conditions=access_conditions, - version_id=version_id, - **kwargs) + await self._client.blob.set_tier(**options) except HttpResponseError as error: process_storage_error(error) @@ -1834,6 +2341,13 @@ async def stage_block( self, block_id: str, data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], length: Optional[int] = None, + *, + validate_content: Optional[bool] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + encoding: Optional[str] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + encryption_scope: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Any]: """Creates a new block to be committed as part of a blob. 
@@ -1883,13 +2397,20 @@ async def stage_block( """ if self.require_encryption or (self.key_encryption_key is not None): raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - if kwargs.get('cpk') and self.scheme.lower() != 'https': + if cpk and self.scheme.lower() != 'https': raise ValueError("Customer provided encryption key must be used over HTTPS.") options = _stage_block_options( block_id=block_id, data=data, length=length, - **kwargs) + validate_content=validate_content, + lease=lease, + encoding=encoding, + cpk=cpk, + encryption_scope=encryption_scope, + timeout=timeout, + **kwargs + ) try: return cast(Dict[str, Any], await self._client.block_blob.stage_block(**options)) except HttpResponseError as error: @@ -1902,6 +2423,12 @@ async def stage_block_from_url( source_offset: Optional[int] = None, source_length: Optional[int] = None, source_content_md5: Optional[Union[bytes, bytearray]] = None, + *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + encryption_scope: Optional[str] = None, + source_authorization: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Any]: """Creates a new block to be committed as part of a blob where @@ -1935,15 +2462,15 @@ async def stage_block_from_url( .. versionadded:: 12.2.0 + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. This value is not tracked or validated on the client. To configure client-side network timesouts see `here `__. - :keyword str source_authorization: - Authenticate as a service principal using a client secret to access a source blob. 
Ensure "bearer " is - the prefix of the source_authorization string. :returns: Blob property dict. :rtype: Dict[str, Any] """ @@ -1955,7 +2482,13 @@ async def stage_block_from_url( source_offset=source_offset, source_length=source_length, source_content_md5=source_content_md5, - **kwargs) + lease=lease, + cpk=cpk, + encryption_scope=encryption_scope, + source_authorization=source_authorization, + timeout=timeout, + **kwargs + ) try: return cast(Dict[str, Any], await self._client.block_blob.stage_block_from_url(**options)) except HttpResponseError as error: @@ -1964,6 +2497,10 @@ async def stage_block_from_url( @distributed_trace_async async def get_block_list( self, block_list_type: str = "committed", + *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_tags_match_condition: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Tuple[List[BlobBlock], List[BlobBlock]]: """The Get Block List operation retrieves the list of blocks that have @@ -1992,16 +2529,16 @@ async def get_block_list( :returns: A tuple of two lists - committed and uncommitted blocks :rtype: Tuple[List[BlobBlock], List[BlobBlock]] """ - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) + options = _get_block_list_options( + block_list_type=block_list_type, + snapshot=self.snapshot, + lease=lease, + if_tags_match_condition=if_tags_match_condition, + timeout=timeout, + **kwargs + ) try: - blocks = await self._client.block_blob.get_block_list( - list_type=block_list_type, - snapshot=self.snapshot, - timeout=kwargs.pop('timeout', None), - lease_access_conditions=access_conditions, - modified_access_conditions=mod_conditions, - **kwargs) + blocks = await self._client.block_blob.get_block_list(**options) except HttpResponseError as error: process_storage_error(error) return _get_block_list_result(blocks) @@ -2011,6 +2548,21 @@ async def commit_block_list( self, block_list: List[BlobBlock], 
content_settings: Optional["ContentSettings"] = None, metadata: Optional[Dict[str, str]] = None, + *, + tags: Optional[Dict[str, str]] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + immutability_policy: Optional["ImmutabilityPolicy"] = None, + legal_hold: Optional[bool] = None, + validate_content: Optional[bool] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + standard_blob_tier: Optional["StandardBlobTier"] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + encryption_scope: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Union[str, datetime]]: """The Commit Block List operation writes a blob by specifying the list of @@ -2023,7 +2575,7 @@ async def commit_block_list( language, disposition, md5, and cache control. :param metadata: Name-value pairs associated with the blob as metadata. - :type metadata: dict[str, str] + :type metadata: Dict[str, str] :keyword tags: Name-value pairs associated with the blob as tag. Tags are case-sensitive. The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, @@ -2033,7 +2585,7 @@ async def commit_block_list( .. versionadded:: 12.4.0 - :paramtype tags: dict(str, str) + :paramtype tags: Dict[str, str] :keyword lease: Required if the blob has an active lease. Value can be a BlobLeaseClient object or the lease ID as a string. @@ -2103,24 +2655,46 @@ async def commit_block_list( see `here `__. :returns: Blob-updated property dict (Etag and last modified). 
- :rtype: dict(str, Any) + :rtype: Dict[str, Any] """ if self.require_encryption or (self.key_encryption_key is not None): raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - if kwargs.get('cpk') and self.scheme.lower() != 'https': + if cpk and self.scheme.lower() != 'https': raise ValueError("Customer provided encryption key must be used over HTTPS.") options = _commit_block_list_options( block_list=block_list, content_settings=content_settings, metadata=metadata, - **kwargs) + tags=tags, + lease=lease, + immutability_policy=immutability_policy, + legal_hold=legal_hold, + validate_content=validate_content, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + standard_blob_tier=standard_blob_tier, + cpk=cpk, + encryption_scope=encryption_scope, + timeout=timeout, + **kwargs + ) try: return cast(Dict[str, Any], await self._client.block_blob.commit_block_list(**options)) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async - async def set_premium_page_blob_tier(self, premium_page_blob_tier: "PremiumPageBlobTier", **kwargs: Any) -> None: + async def set_premium_page_blob_tier( + self, premium_page_blob_tier: "PremiumPageBlobTier", + *, + if_tags_match_condition: Optional[str] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> None: """Sets the page blob tiers on the blob. This API is only supported for page blobs on premium accounts. :param premium_page_blob_tier: @@ -2134,34 +2708,43 @@ async def set_premium_page_blob_tier(self, premium_page_blob_tier: "PremiumPageB .. versionadded:: 12.4.0 + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. 
+ :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. This value is not tracked or validated on the client. To configure client-side network timesouts see `here `__. - :keyword lease: - Required if the blob has an active lease. Value can be a BlobLeaseClient object - or the lease ID as a string. - :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str :rtype: None """ - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) if premium_page_blob_tier is None: raise ValueError("A PremiumPageBlobTiermust be specified") + options = _set_premium_page_blob_tier_options( + premium_page_blob_tier=premium_page_blob_tier, + if_tags_match_condition=if_tags_match_condition, + lease=lease, + timeout=timeout, + **kwargs + ) try: - await self._client.blob.set_tier( - tier=premium_page_blob_tier, - timeout=kwargs.pop('timeout', None), - lease_access_conditions=access_conditions, - modified_access_conditions=mod_conditions, - **kwargs) + await self._client.blob.set_tier(**options) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async - async def set_blob_tags(self, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> Dict[str, Any]: + async def set_blob_tags( + self, tags: Optional[Dict[str, str]] = None, + *, + version_id: Optional[str] = None, + validate_content: Optional[bool] = None, + if_tags_match_condition: Optional[str] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> Dict[str, Any]: """The Set Tags operation enables users to set tags on a blob or specific blob version, but not snapshot. Each call to this operation replaces all existing tags attached to the blob. 
To remove all tags from the blob, call this operation with no tags set. @@ -2175,7 +2758,7 @@ async def set_blob_tags(self, tags: Optional[Dict[str, str]] = None, **kwargs: A and tag values must be between 0 and 256 characters. Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) - :type tags: dict(str, str) + :type tags: Dict[str, str] :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to delete. @@ -2202,15 +2785,29 @@ async def set_blob_tags(self, tags: Optional[Dict[str, str]] = None, **kwargs: A :returns: Blob-updated property dict (Etag and last modified) :rtype: Dict[str, Any] """ - version_id = get_version_id(self.version_id, kwargs) - options = _set_blob_tags_options(version_id=version_id, tags=tags, **kwargs) + options = _set_blob_tags_options( + version_id=version_id or self.version_id, + tags=tags, + validate_content=validate_content, + if_tags_match_condition=if_tags_match_condition, + lease=lease, + timeout=timeout, + **kwargs + ) try: return cast(Dict[str, Any], await self._client.blob.set_tags(**options)) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async - async def get_blob_tags(self, **kwargs: Any) -> Dict[str, str]: + async def get_blob_tags( + self, *, + version_id: Optional[str] = None, + if_tags_match_condition: Optional[str] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> Dict[str, str]: """The Get Tags operation enables users to get tags on a blob or specific blob version, but not snapshot. .. versionadded:: 12.4.0 @@ -2235,8 +2832,14 @@ async def get_blob_tags(self, **kwargs: Any) -> Dict[str, str]: :returns: Key value pairs of blob tags. 
:rtype: Dict[str, str] """ - version_id = get_version_id(self.version_id, kwargs) - options = _get_blob_tags_options(version_id=version_id, snapshot=self.snapshot, **kwargs) + options = _get_blob_tags_options( + version_id=version_id or self.version_id, + snapshot=self.snapshot, + if_tags_match_condition=if_tags_match_condition, + lease=lease, + timeout=timeout, + **kwargs + ) try: _, tags = await self._client.blob.get_tags(**options) return cast(Dict[str, str], parse_tags(tags)) @@ -2248,6 +2851,14 @@ async def get_page_ranges( self, offset: Optional[int] = None, length: Optional[int] = None, previous_snapshot_diff: Optional[Union[str, Dict[str, Any]]] = None, + *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]]: """DEPRECATED: Returns the list of valid page ranges for a Page Blob or snapshot @@ -2307,7 +2918,7 @@ async def get_page_ranges( :returns: A tuple of two lists of page ranges as dictionaries with 'start' and 'end' keys. The first element are filled page ranges, the 2nd element is cleared page ranges. 
- :rtype: tuple(list(dict(str, str), list(dict(str, str)) + :rtype: tuple(list(Dict[str, str], list(Dict[str, str]) """ warnings.warn( "get_page_ranges is deprecated, use list_page_ranges instead", @@ -2319,7 +2930,15 @@ async def get_page_ranges( offset=offset, length=length, previous_snapshot_diff=previous_snapshot_diff, - **kwargs) + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + timeout=timeout, + **kwargs + ) try: if previous_snapshot_diff: ranges = await self._client.page_blob.get_page_ranges_diff(**options) @@ -2331,11 +2950,18 @@ async def get_page_ranges( @distributed_trace def list_page_ranges( - self, - *, + self, *, offset: Optional[int] = None, length: Optional[int] = None, previous_snapshot: Optional[Union[str, Dict[str, Any]]] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + results_per_page: Optional[int] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> AsyncItemPaged[PageRange]: """Returns the list of valid page ranges for a Page Blob or snapshot @@ -2400,31 +3026,49 @@ def list_page_ranges( :returns: An iterable (auto-paging) of PageRange. 
:rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.PageRange] """ - results_per_page = kwargs.pop('results_per_page', None) options = _get_page_ranges_options( snapshot=self.snapshot, offset=offset, length=length, previous_snapshot_diff=previous_snapshot, - **kwargs) + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + timeout=timeout, + **kwargs + ) if previous_snapshot: command = partial( self._client.page_blob.get_page_ranges_diff, - **options) + **options + ) else: command = partial( self._client.page_blob.get_page_ranges, - **options) + **options + ) return AsyncItemPaged( - command, results_per_page=results_per_page, - page_iterator_class=PageRangePaged) + command, + results_per_page=results_per_page, + page_iterator_class=PageRangePaged + ) @distributed_trace_async async def get_page_range_diff_for_managed_disk( self, previous_snapshot_url: str, offset: Optional[int] = None, length: Optional[int] = None, + *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]]: """Returns the list of valid page ranges for a managed disk or snapshot. @@ -2483,14 +3127,21 @@ async def get_page_range_diff_for_managed_disk( :returns: A tuple of two lists of page ranges as dictionaries with 'start' and 'end' keys. The first element are filled page ranges, the 2nd element is cleared page ranges. 
- :rtype: tuple(list(dict(str, str), list(dict(str, str)) + :rtype: tuple(list(Dict[str, str], list(Dict[str, str]) """ options = _get_page_ranges_options( snapshot=self.snapshot, offset=offset, length=length, prev_snapshot_url=previous_snapshot_url, - **kwargs) + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + timeout=timeout, + **kwargs + ) try: ranges = await self._client.page_blob.get_page_ranges_diff(**options) except HttpResponseError as error: @@ -2501,6 +3152,14 @@ async def get_page_range_diff_for_managed_disk( async def set_sequence_number( self, sequence_number_action: Union[str, "SequenceNumberAction"], sequence_number: Optional[str] = None, + *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Union[str, datetime]]: """Sets the blob sequence number. @@ -2546,16 +3205,39 @@ async def set_sequence_number( see `here `__. :returns: Blob-updated property dict (Etag and last modified). 
- :rtype: dict(str, Any) + :rtype: Dict[str, Any] """ - options = _set_sequence_number_options(sequence_number_action, sequence_number=sequence_number, **kwargs) + options = _set_sequence_number_options( + sequence_number_action, + sequence_number=sequence_number, + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + timeout=timeout, + **kwargs + ) try: return cast(Dict[str, Any], await self._client.page_blob.update_sequence_number(**options)) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async - async def resize_blob(self, size: int, **kwargs: Any) -> Dict[str, Union[str, datetime]]: + async def resize_blob( + self, size: int, + *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + premium_page_blob_tier: Optional["PremiumPageBlobTier"] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: """Resizes a page blob to the specified size. If the specified value is less than the current size of the blob, @@ -2602,11 +3284,22 @@ async def resize_blob(self, size: int, **kwargs: Any) -> Dict[str, Union[str, da see `here `__. :returns: Blob-updated property dict (Etag and last modified). 
- :rtype: dict(str, Any) + :rtype: Dict[str, Any] """ if kwargs.get('cpk') and self.scheme.lower() != 'https': raise ValueError("Customer provided encryption key must be used over HTTPS.") - options = _resize_blob_options(size=size, **kwargs) + options = _resize_blob_options( + size=size, + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + premium_page_blob_tier=premium_page_blob_tier, + timeout=timeout, + **kwargs + ) try: return cast(Dict[str, Any], await self._client.page_blob.resize(**options)) except HttpResponseError as error: @@ -2617,6 +3310,21 @@ async def upload_page( self, page: bytes, offset: int, length: int, + *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + validate_content: Optional[bool] = None, + if_sequence_number_lte: Optional[int] = None, + if_sequence_number_lt: Optional[int] = None, + if_sequence_number_eq: Optional[int] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + encryption_scope: Optional[str] = None, + encoding: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Union[str, datetime]]: """The Upload Pages operation writes a range of pages to a page blob. @@ -2698,17 +3406,32 @@ async def upload_page( see `here `__. :returns: Blob-updated property dict (Etag and last modified). 
- :rtype: dict(str, Any) + :rtype: Dict[str, Any] """ if self.require_encryption or (self.key_encryption_key is not None): raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - if kwargs.get('cpk') and self.scheme.lower() != 'https': + if cpk and self.scheme.lower() != 'https': raise ValueError("Customer provided encryption key must be used over HTTPS.") options = _upload_page_options( page=page, offset=offset, length=length, - **kwargs) + lease=lease, + validate_content=validate_content, + if_sequence_number_lte=if_sequence_number_lte, + if_sequence_number_lt=if_sequence_number_lt, + if_sequence_number_eq=if_sequence_number_eq, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + cpk=cpk, + encryption_scope=encryption_scope, + encoding=encoding, + timeout=timeout, + **kwargs + ) try: return cast(Dict[str, Any], await self._client.page_blob.upload_pages(**options)) except HttpResponseError as error: @@ -2720,6 +3443,25 @@ async def upload_pages_from_url( offset: int, length: int, source_offset: int, + *, + source_content_md5: Optional[bytes] = None, + source_if_modified_since: Optional[datetime] = None, + source_if_unmodified_since: Optional[datetime] = None, + source_etag: Optional[str] = None, + source_match_condition: Optional["MatchConditions"] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_sequence_number_lte: Optional[int] = None, + if_sequence_number_lt: Optional[int] = None, + if_sequence_number_eq: Optional[int] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + encryption_scope: Optional[str] = None, + source_authorization: Optional[str] = None, + timeout: 
Optional[int] = None, **kwargs: Any ) -> Dict[str, Any]: """ @@ -2810,28 +3552,46 @@ async def upload_pages_from_url( .. versionadded:: 12.2.0 + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. This value is not tracked or validated on the client. To configure client-side network timesouts see `here `__. - :keyword str source_authorization: - Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is - the prefix of the source_authorization string. :returns: Response after uploading pages from specified URL. :rtype: Dict[str, Any] """ if self.require_encryption or (self.key_encryption_key is not None): raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - if kwargs.get('cpk') and self.scheme.lower() != 'https': + if cpk and self.scheme.lower() != 'https': raise ValueError("Customer provided encryption key must be used over HTTPS.") options = _upload_pages_from_url_options( source_url=source_url, offset=offset, length=length, source_offset=source_offset, + source_content_md5=source_content_md5, + source_if_modified_since=source_if_modified_since, + source_if_unmodified_since=source_if_unmodified_since, + source_etag=source_etag, + source_match_condition=source_match_condition, + lease=lease, + if_sequence_number_lte=if_sequence_number_lte, + if_sequence_number_lt=if_sequence_number_lt, + if_sequence_number_eq=if_sequence_number_eq, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + cpk=cpk, + encryption_scope=encryption_scope, + 
source_authorization=source_authorization, + timeout=timeout, **kwargs ) try: @@ -2840,7 +3600,23 @@ async def upload_pages_from_url( process_storage_error(error) @distributed_trace_async - async def clear_page(self, offset: int, length: int, **kwargs: Any) -> Dict[str, Union[str, datetime]]: + async def clear_page( + self, offset: int, + length: int, + *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_sequence_number_lte: Optional[int] = None, + if_sequence_number_lt: Optional[int] = None, + if_sequence_number_eq: Optional[int] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: """Clears a range of pages. :param int offset: @@ -2901,15 +3677,26 @@ async def clear_page(self, offset: int, length: int, **kwargs: Any) -> Dict[str, see `here `__. :returns: Blob-updated property dict (Etag and last modified). 
- :rtype: dict(str, Any) + :rtype: Dict[str, Any] """ if self.require_encryption or (self.key_encryption_key is not None): raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - if kwargs.get('cpk') and self.scheme.lower() != 'https': + if cpk and self.scheme.lower() != 'https': raise ValueError("Customer provided encryption key must be used over HTTPS.") options = _clear_page_options( offset=offset, length=length, + lease=lease, + if_sequence_number_lte=if_sequence_number_lte, + if_sequence_number_lt=if_sequence_number_lt, + if_sequence_number_eq=if_sequence_number_eq, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + cpk=cpk, + timeout=timeout, **kwargs ) try: @@ -2921,6 +3708,20 @@ async def clear_page(self, offset: int, length: int, **kwargs: Any) -> Dict[str, async def append_block( self, data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], length: Optional[int] = None, + *, + validate_content: Optional[bool] = None, + maxsize_condition: Optional[int] = None, + appendpos_condition: Optional[int] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + encoding: Optional[str] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + encryption_scope: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Union[str, datetime, int]]: """Commits a new block of data to the end of the existing append blob. @@ -2998,15 +3799,28 @@ async def append_block( see `here `__. :returns: Blob-updated property dict (Etag, last modified, append offset, committed block count). 
- :rtype: dict(str, Any) + :rtype: Dict[str, Any] """ if self.require_encryption or (self.key_encryption_key is not None): raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - if kwargs.get('cpk') and self.scheme.lower() != 'https': + if cpk and self.scheme.lower() != 'https': raise ValueError("Customer provided encryption key must be used over HTTPS.") options = _append_block_options( data=data, length=length, + validate_content=validate_content, + maxsize_condition=maxsize_condition, + appendpos_condition=appendpos_condition, + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + encoding=encoding, + cpk=cpk, + encryption_scope=encryption_scope, + timeout=timeout, **kwargs ) try: @@ -3019,6 +3833,24 @@ async def append_block_from_url( self, copy_source_url: str, source_offset: Optional[int] = None, source_length: Optional[int] = None, + *, + source_content_md5: Optional[bytearray] = None, + maxsize_condition: Optional[int] = None, + appendpos_condition: Optional[int] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + source_if_modified_since: Optional[datetime] = None, + source_if_unmodified_since: Optional[datetime] = None, + source_etag: Optional[str] = None, + source_match_condition: Optional["MatchConditions"] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + encryption_scope: Optional[str] = None, + source_authorization: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Union[str, datetime, int]]: """ @@ -3103,26 +3935,43 @@ async def append_block_from_url( .. 
versionadded:: 12.2.0 + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. This value is not tracked or validated on the client. To configure client-side network timesouts see `here `__. - :keyword str source_authorization: - Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is - the prefix of the source_authorization string. :returns: Result after appending a new block. :rtype: Dict[str, Union[str, datetime, int]] """ if self.require_encryption or (self.key_encryption_key is not None): raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - if kwargs.get('cpk') and self.scheme.lower() != 'https': + if cpk and self.scheme.lower() != 'https': raise ValueError("Customer provided encryption key must be used over HTTPS.") options = _append_block_from_url_options( copy_source_url=copy_source_url, source_offset=source_offset, source_length=source_length, + source_content_md5=source_content_md5, + maxsize_condition=maxsize_condition, + appendpos_condition=appendpos_condition, + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + if_tags_match_condition=if_tags_match_condition, + source_if_modified_since=source_if_modified_since, + source_if_unmodified_since=source_if_unmodified_since, + source_etag=source_etag, + source_match_condition=source_match_condition, + cpk=cpk, + encryption_scope=encryption_scope, + source_authorization=source_authorization, + timeout=timeout, **kwargs ) try: @@ -3132,7 +3981,17 @@ async def append_block_from_url( process_storage_error(error) @distributed_trace_async - 
async def seal_append_blob(self, **kwargs: Any) -> Dict[str, Union[str, datetime, int]]: + async def seal_append_blob( + self, *, + appendpos_condition: Optional[int] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime, int]]: """The Seal operation seals the Append Blob to make it read-only. .. versionadded:: 12.4.0 @@ -3171,11 +4030,20 @@ async def seal_append_blob(self, **kwargs: Any) -> Dict[str, Union[str, datetime see `here `__. :returns: Blob-updated property dict (Etag, last modified, append offset, committed block count). - :rtype: dict(str, Any) + :rtype: Dict[str, Any] """ if self.require_encryption or (self.key_encryption_key is not None): raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - options = _seal_append_blob_options(**kwargs) + options = _seal_append_blob_options( + appendpos_condition=appendpos_condition, + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + timeout=timeout, + **kwargs + ) try: return cast(Dict[str, Any], await self._client.append_blob.seal(**options)) except HttpResponseError as error: diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_container_client_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_container_client_async.py index eeb97625131b..62c04272e97c 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_container_client_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_container_client_async.py @@ -18,7 +18,6 @@ from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import HttpResponseError, ResourceNotFoundError from azure.core.pipeline 
import AsyncPipeline -from azure.core.pipeline.transport import AsyncHttpResponse # pylint: disable=C4756 from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async @@ -28,10 +27,12 @@ from ._list_blobs_helper import BlobNamesPaged, BlobPropertiesPaged, BlobPrefix from ._models import FilteredBlobPaged from .._container_client_helpers import ( + _delete_container_options, _format_url, _generate_delete_blobs_options, _generate_set_tiers_options, - _parse_url + _parse_url, + _set_container_metadata_options ) from .._deserialize import deserialize_container_properties from .._encryption import StorageEncryptionMixin @@ -51,14 +52,17 @@ ) if TYPE_CHECKING: + from azure.core import MatchConditions from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential from azure.core.credentials_async import AsyncTokenCredential + from azure.core.pipeline.transport import AsyncHttpResponse # pylint: disable=C4756 from ._blob_service_client_async import BlobServiceClient from .._models import ( AccessPolicy, - StandardBlobTier, + ContainerEncryptionScope, PremiumPageBlobTier, - PublicAccess + PublicAccess, + StandardBlobTier, ) @@ -131,7 +135,15 @@ def __init__( credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long *, api_version: Optional[str] = None, - # TODO + secondary_hostname: Optional[str] = None, + audience: Optional[str] = None, + max_block_size: int = 4 * 1024 * 1024, + max_page_size: int = 4 * 1024 * 1024, + max_chunk_get_size: int = 4 * 1024 * 1024, + max_single_put_size: int = 64 * 1024 * 1024, + max_single_get_size: int = 32 * 1024 * 1024, + min_large_block_upload_threshold: int = 4 * 1024 * 1024 + 1, + use_byte_buffer: Optional[bool] = None, **kwargs: Any ) -> None: kwargs['retry_policy'] = kwargs.get('retry_policy') or ExponentialRetry(**kwargs) @@ -141,7 +153,21 @@ def 
__init__( # This parameter is used for the hierarchy traversal. Give precedence to credential. self._raw_credential = credential if credential else sas_token self._query_str, credential = self._format_query_string(sas_token, credential) - super(ContainerClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) + super(ContainerClient, self).__init__( + parsed_url, + service='blob', + credential=credential, + secondary_hostname=secondary_hostname, + audience=audience, + max_block_size=max_block_size, + max_page_size=max_page_size, + max_chunk_get_size=max_chunk_get_size, + max_single_put_size=max_single_put_size, + max_single_get_size=max_single_get_size, + min_large_block_upload_threshold=min_large_block_upload_threshold, + use_byte_buffer=use_byte_buffer, + **kwargs + ) self._api_version = get_api_version(api_version) self._client = self._build_generated_client() self._configure_encryption(kwargs) @@ -151,7 +177,7 @@ def _build_generated_client(self) -> AzureBlobStorage: client._config.version = self._api_version # type: ignore [assignment] # pylint: disable=protected-access return client - def _format_url(self, hostname): + def _format_url(self, hostname: str) -> str: return _format_url( container_name=self.container_name, hostname=hostname, @@ -163,6 +189,17 @@ def _format_url(self, hostname): def from_container_url( cls, container_url: str, credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + *, + api_version: Optional[str] = None, + secondary_hostname: Optional[str] = None, + audience: Optional[str] = None, + max_block_size: int = 4 * 1024 * 1024, + max_page_size: int = 4 * 1024 * 1024, + max_chunk_get_size: int = 4 * 1024 * 1024, + max_single_put_size: int = 64 * 1024 * 1024, + max_single_get_size: int = 32 * 1024 * 1024, + min_large_block_upload_threshold: int = 4 * 1024 * 1024 + 1, + use_byte_buffer: Optional[bool] = None, 
**kwargs: Any ) -> Self: """Create ContainerClient from a container url. @@ -186,6 +223,28 @@ def from_container_url( ~azure.core.credentials.AzureSasCredential or ~azure.core.credentials_async.AsyncTokenCredential or str or dict[str, str] or None + :keyword str api_version: + The Storage API version to use for requests. Default value is the most recent service version that is + compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. + + .. versionadded:: 12.2.0 + + :keyword str secondary_hostname: + The hostname of the secondary endpoint. + :keyword int max_block_size: The maximum chunk size for uploading a block blob in chunks. + Defaults to 4*1024*1024, or 4MB. + :keyword int max_single_put_size: If the blob size is less than or equal max_single_put_size, + then the blob will be uploaded with only one http PUT request. + If the blob size is larger than max_single_put_size, + the blob will be uploaded in chunks. Defaults to 64*1024*1024, or 64MB. + :keyword int min_large_block_upload_threshold: The minimum chunk size required to use the memory efficient + algorithm when uploading a block blob. Defaults to 4*1024*1024+1. + :keyword bool use_byte_buffer: Use a byte buffer for block blob uploads. Defaults to False. + :keyword int max_page_size: The maximum chunk size for uploading a page blob. Defaults to 4*1024*1024, or 4MB. + :keyword int max_single_get_size: The maximum size for a blob to be downloaded in a single call, + the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. + :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024, + or 4MB. :keyword str audience: The audience to use when requesting tokens for Azure Active Directory authentication. Only has an effect when credential is of type TokenCredential. The value could be https://storage.azure.com/ (default) or https://.blob.core.windows.net. 
@@ -209,13 +268,39 @@ def from_container_url( container_name = unquote(container_path[-1]) if not container_name: raise ValueError("Invalid URL. Please provide a URL with a valid container name") - return cls(account_url, container_name=container_name, credential=credential, **kwargs) + return cls( + account_url, + container_name=container_name, + credential=credential, + api_version=api_version, + audience=audience, + secondary_hostname=secondary_hostname, + max_block_size=max_block_size, + max_page_size=max_page_size, + max_chunk_get_size=max_chunk_get_size, + max_single_put_size=max_single_put_size, + max_single_get_size=max_single_get_size, + min_large_block_upload_threshold=min_large_block_upload_threshold, + use_byte_buffer=use_byte_buffer, + **kwargs + ) @classmethod def from_connection_string( cls, conn_str: str, container_name: str, credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + *, + api_version: Optional[str] = None, + secondary_hostname: Optional[str] = None, + audience: Optional[str] = None, + max_block_size: int = 4 * 1024 * 1024, + max_page_size: int = 4 * 1024 * 1024, + max_chunk_get_size: int = 4 * 1024 * 1024, + max_single_put_size: int = 64 * 1024 * 1024, + max_single_get_size: int = 32 * 1024 * 1024, + min_large_block_upload_threshold: int = 4 * 1024 * 1024 + 1, + use_byte_buffer: Optional[bool] = None, **kwargs: Any ) -> Self: """Create ContainerClient from a Connection String. @@ -239,6 +324,28 @@ def from_connection_string( ~azure.core.credentials.AzureSasCredential or ~azure.core.credentials_async.AsyncTokenCredential or str or dict[str, str] or None + :keyword str api_version: + The Storage API version to use for requests. Default value is the most recent service version that is + compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. + + .. 
versionadded:: 12.2.0 + + :keyword str secondary_hostname: + The hostname of the secondary endpoint. + :keyword int max_block_size: The maximum chunk size for uploading a block blob in chunks. + Defaults to 4*1024*1024, or 4MB. + :keyword int max_single_put_size: If the blob size is less than or equal max_single_put_size, + then the blob will be uploaded with only one http PUT request. + If the blob size is larger than max_single_put_size, + the blob will be uploaded in chunks. Defaults to 64*1024*1024, or 64MB. + :keyword int min_large_block_upload_threshold: The minimum chunk size required to use the memory efficient + algorithm when uploading a block blob. Defaults to 4*1024*1024+1. + :keyword bool use_byte_buffer: Use a byte buffer for block blob uploads. Defaults to False. + :keyword int max_page_size: The maximum chunk size for uploading a page blob. Defaults to 4*1024*1024, or 4MB. + :keyword int max_single_get_size: The maximum size for a blob to be downloaded in a single call, + the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. + :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024, + or 4MB. :keyword str audience: The audience to use when requesting tokens for Azure Active Directory authentication. Only has an effect when credential is of type TokenCredential. The value could be https://storage.azure.com/ (default) or https://.blob.core.windows.net. @@ -255,15 +362,30 @@ def from_connection_string( :caption: Creating the ContainerClient from a connection string. 
""" account_url, secondary, credential = parse_connection_str(conn_str, credential, 'blob') - if 'secondary_hostname' not in kwargs: - kwargs['secondary_hostname'] = secondary return cls( - account_url, container_name=container_name, credential=credential, **kwargs) + account_url, + container_name=container_name, + credential=credential, + api_version=api_version, + audience=audience, + secondary_hostname=secondary_hostname or secondary, + max_block_size=max_block_size, + max_page_size=max_page_size, + max_chunk_get_size=max_chunk_get_size, + max_single_put_size=max_single_put_size, + max_single_get_size=max_single_get_size, + min_large_block_upload_threshold=min_large_block_upload_threshold, + use_byte_buffer=use_byte_buffer, + **kwargs + ) @distributed_trace_async async def create_container( self, metadata: Optional[Dict[str, str]] = None, public_access: Optional[Union["PublicAccess", str]] = None, + *, + container_encryption_scope: Optional[Union[Dict[str, Any], "ContainerEncryptionScope"]] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Union[str, datetime]]: """ @@ -282,7 +404,7 @@ async def create_container( .. versionadded:: 12.2.0 - :paramtype container_encryption_scope: dict or ~azure.storage.blob.ContainerEncryptionScope + :paramtype container_encryption_scope: Dict[str, Any] or ~azure.storage.blob.ContainerEncryptionScope :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. @@ -302,22 +424,28 @@ async def create_container( :caption: Creating a container to store blobs. 
""" headers = kwargs.pop('headers', {}) - headers.update(add_metadata_headers(metadata)) # type: ignore - timeout = kwargs.pop('timeout', None) - container_cpk_scope_info = get_container_cpk_scope_info(kwargs) + headers.update(add_metadata_headers(metadata)) # type: ignore + container_cpk_scope_info = get_container_cpk_scope_info(container_encryption_scope) try: - return await self._client.container.create( # type: ignore + return await self._client.container.create( # type: ignore timeout=timeout, access=public_access, container_cpk_scope_info=container_cpk_scope_info, cls=return_response_headers, headers=headers, - **kwargs) + **kwargs + ) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async - async def _rename_container(self, new_name: str, **kwargs: Any) -> "ContainerClient": + async def _rename_container( + self, new_name: str, + *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> "ContainerClient": """Renames a container. Operation is successful only if the source container exists. @@ -337,10 +465,9 @@ async def _rename_container(self, new_name: str, **kwargs: Any) -> "ContainerCli :returns: The renamed container. 
:rtype: ~azure.storage.blob.ContainerClient """ - lease = kwargs.pop('lease', None) - try: + if lease and hasattr(lease, "id"): kwargs['source_lease_id'] = lease.id - except AttributeError: + else: kwargs['source_lease_id'] = lease try: renamed_container = ContainerClient( @@ -349,13 +476,22 @@ async def _rename_container(self, new_name: str, **kwargs: Any) -> "ContainerCli _pipeline=self._pipeline, _location_mode=self._location_mode, _hosts=self._hosts, require_encryption=self.require_encryption, encryption_version=self.encryption_version, key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function) - await renamed_container._client.container.rename(self.container_name, **kwargs) # pylint: disable = protected-access + await renamed_container._client.container.rename(self.container_name, timeout=timeout, **kwargs) # pylint: disable = protected-access return renamed_container except HttpResponseError as error: process_storage_error(error) @distributed_trace_async - async def delete_container(self, **kwargs: Any) -> None: + async def delete_container( + self, *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> None: """ Marks the specified container for deletion. The container and any blobs contained within it are later deleted during garbage collection. @@ -399,23 +535,30 @@ async def delete_container(self, **kwargs: Any) -> None: :dedent: 16 :caption: Delete a container. 
""" - lease = kwargs.pop('lease', None) - access_conditions = get_access_conditions(lease) - mod_conditions = get_modify_conditions(kwargs) - timeout = kwargs.pop('timeout', None) + options = _delete_container_options( + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + timeout=timeout, + **kwargs + ) try: - await self._client.container.delete( - timeout=timeout, - lease_access_conditions=access_conditions, - modified_access_conditions=mod_conditions, - **kwargs) + await self._client.container.delete(**options) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async async def acquire_lease( - self, lease_duration: int =-1, + self, lease_duration: int = -1, lease_id: Optional[str] = None, + *, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> BlobLeaseClient: """ @@ -466,10 +609,18 @@ async def acquire_lease( :dedent: 12 :caption: Acquiring a lease on the container. """ - lease = BlobLeaseClient(self, lease_id=lease_id) # type: ignore + lease = BlobLeaseClient(self, lease_id=lease_id) # type: ignore kwargs.setdefault('merge_span', True) - timeout = kwargs.pop('timeout', None) - await lease.acquire(lease_duration=lease_duration, timeout=timeout, **kwargs) + if etag is not None: + kwargs['etag'] = etag + await lease.acquire( + lease_duration=lease_duration, + timeout=timeout, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + match_condition=match_condition, + **kwargs + ) return lease @distributed_trace_async @@ -480,7 +631,7 @@ async def get_account_information(self, **kwargs: Any) -> Dict[str, str]: The keys in the returned dictionary include 'sku_name' and 'account_kind'. 
:returns: A dict of account information (SKU and account type). - :rtype: dict(str, str) + :rtype: Dict[str, str] """ try: return await self._client.container.get_account_info(cls=return_response_headers, **kwargs) # type: ignore @@ -488,7 +639,12 @@ async def get_account_information(self, **kwargs: Any) -> Dict[str, str]: process_storage_error(error) @distributed_trace_async - async def get_container_properties(self, **kwargs: Any) -> ContainerProperties: + async def get_container_properties( + self, *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> ContainerProperties: """Returns all user-defined metadata and system properties for the specified container. The data returned does not include the container's list of blobs. @@ -514,22 +670,21 @@ async def get_container_properties(self, **kwargs: Any) -> ContainerProperties: :dedent: 16 :caption: Getting properties on the container. """ - lease = kwargs.pop('lease', None) access_conditions = get_access_conditions(lease) - timeout = kwargs.pop('timeout', None) try: response = await self._client.container.get_properties( timeout=timeout, lease_access_conditions=access_conditions, cls=deserialize_container_properties, - **kwargs) + **kwargs + ) except HttpResponseError as error: process_storage_error(error) response.name = self.container_name - return response # type: ignore + return response # type: ignore @distributed_trace_async - async def exists(self, **kwargs: Any) -> bool: + async def exists(self, *, timeout: Optional[int] = None, **kwargs: Any) -> bool: """ Returns True if a container exists and returns False otherwise. 
@@ -543,7 +698,7 @@ async def exists(self, **kwargs: Any) -> bool: :rtype: bool """ try: - await self._client.container.get_properties(**kwargs) + await self._client.container.get_properties(timeout=timeout, **kwargs) return True except HttpResponseError as error: try: @@ -554,6 +709,13 @@ async def exists(self, **kwargs: Any) -> bool: @distributed_trace_async async def set_container_metadata( self, metadata: Optional[Dict[str, str]] = None, + *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Union[str, datetime]]: """Sets one or more user-defined name-value pairs for the specified @@ -593,20 +755,18 @@ async def set_container_metadata( :dedent: 16 :caption: Setting metadata on the container. """ - headers = kwargs.pop('headers', {}) - headers.update(add_metadata_headers(metadata)) - lease = kwargs.pop('lease', None) - access_conditions = get_access_conditions(lease) - mod_conditions = get_modify_conditions(kwargs) - timeout = kwargs.pop('timeout', None) + options = _set_container_metadata_options( + metadata, + lease=lease, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + timeout=timeout, + **kwargs + ) try: - return await self._client.container.set_metadata( # type: ignore - timeout=timeout, - lease_access_conditions=access_conditions, - modified_access_conditions=mod_conditions, - cls=return_response_headers, - headers=headers, - **kwargs) + return await self._client.container.set_metadata(**options) except HttpResponseError as error: process_storage_error(error) @@ -983,7 +1143,7 @@ async def upload_blob( should be supplied for optimal performance. :param metadata: Name-value pairs associated with the blob as metadata. 
- :type metadata: dict(str, str) + :type metadata: Dict[str, str] :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data. If True, upload_blob will overwrite the existing data. If set to False, the operation will fail with ResourceExistsError. The exception to the above is with Append @@ -1319,7 +1479,7 @@ async def download_blob( async def delete_blobs( self, *blobs: Union[str, Dict[str, Any], BlobProperties], **kwargs: Any - ) -> AsyncIterator[AsyncHttpResponse]: + ) -> AsyncIterator["AsyncHttpResponse"]: """Marks the specified blobs or snapshots for deletion. The blobs are later deleted during garbage collection. @@ -1420,14 +1580,14 @@ async def delete_blobs( **kwargs ) - return cast(AsyncIterator[AsyncHttpResponse], await self._batch_send(*reqs, **options)) + return cast(AsyncIterator["AsyncHttpResponse"], await self._batch_send(*reqs, **options)) @distributed_trace_async async def set_standard_blob_tier_blobs( self, standard_blob_tier: Union[str, 'StandardBlobTier'], *blobs: Union[str, Dict[str, Any], BlobProperties], **kwargs: Any - ) -> AsyncIterator[AsyncHttpResponse]: + ) -> AsyncIterator["AsyncHttpResponse"]: """This operation sets the tier on block blobs. A block blob's tier determines Hot/Cool/Archive storage type. 
@@ -1468,7 +1628,7 @@ async def set_standard_blob_tier_blobs( timeout for subrequest: key: 'timeout', value type: int - :type blobs: str or dict(str, Any) or ~azure.storage.blob.BlobProperties + :type blobs: str or Dict[str, Any] or ~azure.storage.blob.BlobProperties :keyword ~azure.storage.blob.RehydratePriority rehydrate_priority: Indicates the priority with which to rehydrate an archived blob :keyword str if_tags_match_condition: @@ -1500,14 +1660,14 @@ async def set_standard_blob_tier_blobs( *blobs, **kwargs) - return cast(AsyncIterator[AsyncHttpResponse], await self._batch_send(*reqs, **options)) + return cast(AsyncIterator["AsyncHttpResponse"], await self._batch_send(*reqs, **options)) @distributed_trace_async async def set_premium_page_blob_tier_blobs( self, premium_page_blob_tier: Union[str, 'PremiumPageBlobTier'], *blobs: Union[str, Dict[str, Any], BlobProperties], **kwargs: Any - ) -> AsyncIterator[AsyncHttpResponse]: + ) -> AsyncIterator["AsyncHttpResponse"]: """Sets the page blob tiers on the blobs. This API is only supported for page blobs on premium accounts. The maximum number of blobs that can be updated in a single request is 256. @@ -1537,7 +1697,7 @@ async def set_premium_page_blob_tier_blobs( timeout for subrequest: key: 'timeout', value type: int - :type blobs: str or dict(str, Any) or ~azure.storage.blob.BlobProperties + :type blobs: str or Dict[str, Any] or ~azure.storage.blob.BlobProperties :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. 
@@ -1561,7 +1721,7 @@ async def set_premium_page_blob_tier_blobs( *blobs, **kwargs) - return cast(AsyncIterator[AsyncHttpResponse], await self._batch_send(*reqs, **options)) + return cast(AsyncIterator["AsyncHttpResponse"], await self._batch_send(*reqs, **options)) def get_blob_client( self, blob: str, From 3283d0e44758fb61ebf4bfc299a2c991b33a1404 Mon Sep 17 00:00:00 2001 From: Peter Wu Date: Wed, 23 Apr 2025 15:30:17 -0400 Subject: [PATCH 3/6] Fixed pylint/mypy --- .../azure/storage/blob/_blob_client.py | 10 ++++++++++ .../azure/storage/blob/aio/_blob_client_async.py | 13 +++++++++++-- 2 files changed, 21 insertions(+), 2 deletions(-) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py index 18a4236f4b1a..d88a2eec9406 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py @@ -482,6 +482,7 @@ def upload_blob_from_url( encryption_scope: Optional[str] = None, standard_blob_tier: Optional["StandardBlobTier"] = None, source_authorization: Optional[str] = None, + source_token_intent: Optional[Literal["backup"]] = None, **kwargs: Any ) -> Dict[str, Any]: """ @@ -616,6 +617,7 @@ def upload_blob_from_url( encryption_scope=encryption_scope, standard_blob_tier=standard_blob_tier, source_authorization=source_authorization, + source_token_intent=source_token_intent, **kwargs ) try: @@ -2092,6 +2094,7 @@ def start_copy_from_url( seal_destination_blob: Optional[bool] = None, requires_sync: Optional[bool] = None, source_authorization: Optional[str] = None, + source_token_intent: Optional[Literal["backup"]] = None, encryption_scope: Optional[str] = None, timeout: Optional[int] = None, **kwargs: Any @@ -2312,6 +2315,7 @@ def start_copy_from_url( seal_destination_blob=seal_destination_blob, requires_sync=requires_sync, source_authorization=source_authorization, + 
source_token_intent=source_token_intent, encryption_scope=encryption_scope, timeout=timeout, **kwargs @@ -2594,6 +2598,7 @@ def stage_block_from_url( cpk: Optional["CustomerProvidedEncryptionKey"] = None, encryption_scope: Optional[str] = None, source_authorization: Optional[str] = None, + source_token_intent: Optional[Literal["backup"]] = None, timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Any]: @@ -2664,6 +2669,7 @@ def stage_block_from_url( cpk=cpk, encryption_scope=encryption_scope, source_authorization=source_authorization, + source_token_intent=source_token_intent, timeout=timeout, **kwargs ) @@ -3637,6 +3643,7 @@ def upload_pages_from_url( cpk: Optional["CustomerProvidedEncryptionKey"] = None, encryption_scope: Optional[str] = None, source_authorization: Optional[str] = None, + source_token_intent: Optional[Literal["backup"]] = None, timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Any]: @@ -3778,6 +3785,7 @@ def upload_pages_from_url( cpk=cpk, encryption_scope=encryption_scope, source_authorization=source_authorization, + source_token_intent=source_token_intent, timeout=timeout, **kwargs ) @@ -4037,6 +4045,7 @@ def append_block_from_url( cpk: Optional["CustomerProvidedEncryptionKey"] = None, encryption_scope: Optional[str] = None, source_authorization: Optional[str] = None, + source_token_intent: Optional[Literal["backup"]] = None, timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Union[str, datetime, int]]: @@ -4170,6 +4179,7 @@ def append_block_from_url( cpk=cpk, encryption_scope=encryption_scope, source_authorization=source_authorization, + source_token_intent=source_token_intent, timeout=timeout, **kwargs ) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py index 9b1290a55367..15e081fdc3e0 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py +++ 
b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py @@ -76,9 +76,8 @@ ) from .._encryption import StorageEncryptionMixin, _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION from .._generated.aio import AzureBlobStorage -from .._generated.models import CpkInfo from .._models import BlobType, BlobBlock, BlobProperties, BlobQueryError, PageRange -from .._serialize import get_access_conditions, get_api_version, get_modify_conditions, get_version_id +from .._serialize import get_api_version from .._shared.base_client_async import AsyncStorageAccountHostsMixin, AsyncTransportWrapper, parse_connection_str from .._shared.policies_async import ExponentialRetry from .._shared.response_handlers import process_storage_error, return_response_headers @@ -473,6 +472,7 @@ async def upload_blob_from_url( encryption_scope: Optional[str] = None, standard_blob_tier: Optional["StandardBlobTier"] = None, source_authorization: Optional[str] = None, + source_token_intent: Optional[Literal["backup"]] = None, **kwargs: Any ) -> Dict[str, Any]: """ @@ -607,6 +607,7 @@ async def upload_blob_from_url( encryption_scope=encryption_scope, standard_blob_tier=standard_blob_tier, source_authorization=source_authorization, + source_token_intent=source_token_intent, **kwargs ) try: @@ -2079,6 +2080,7 @@ async def start_copy_from_url( seal_destination_blob: Optional[bool] = None, requires_sync: Optional[bool] = None, source_authorization: Optional[str] = None, + source_token_intent: Optional[Literal["backup"]] = None, encryption_scope: Optional[str] = None, timeout: Optional[int] = None, **kwargs: Any @@ -2298,6 +2300,7 @@ async def start_copy_from_url( seal_destination_blob=seal_destination_blob, requires_sync=requires_sync, source_authorization=source_authorization, + source_token_intent=source_token_intent, encryption_scope=encryption_scope, timeout=timeout, **kwargs @@ -2580,6 +2583,7 @@ async def stage_block_from_url( cpk: Optional["CustomerProvidedEncryptionKey"] = None, 
encryption_scope: Optional[str] = None, source_authorization: Optional[str] = None, + source_token_intent: Optional[Literal["backup"]] = None, timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Any]: @@ -2650,6 +2654,7 @@ async def stage_block_from_url( cpk=cpk, encryption_scope=encryption_scope, source_authorization=source_authorization, + source_token_intent=source_token_intent, timeout=timeout, **kwargs ) @@ -3625,6 +3630,7 @@ async def upload_pages_from_url( cpk: Optional["CustomerProvidedEncryptionKey"] = None, encryption_scope: Optional[str] = None, source_authorization: Optional[str] = None, + source_token_intent: Optional[Literal["backup"]] = None, timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Any]: @@ -3767,6 +3773,7 @@ async def upload_pages_from_url( cpk=cpk, encryption_scope=encryption_scope, source_authorization=source_authorization, + source_token_intent=source_token_intent, timeout=timeout, **kwargs ) @@ -4026,6 +4033,7 @@ async def append_block_from_url( cpk: Optional["CustomerProvidedEncryptionKey"] = None, encryption_scope: Optional[str] = None, source_authorization: Optional[str] = None, + source_token_intent: Optional[Literal["backup"]] = None, timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Union[str, datetime, int]]: @@ -4159,6 +4167,7 @@ async def append_block_from_url( cpk=cpk, encryption_scope=encryption_scope, source_authorization=source_authorization, + source_token_intent=source_token_intent, timeout=timeout, **kwargs ) From 2793137c4f58bea76767ed2cc8c2d7c867e2bec2 Mon Sep 17 00:00:00 2001 From: Peter Wu Date: Wed, 23 Apr 2025 16:48:56 -0400 Subject: [PATCH 4/6] up to upload_blob --- .../azure/storage/blob/_container_client.py | 146 +++++++++++++----- .../blob/aio/_container_client_async.py | 136 +++++++++++----- 2 files changed, 207 insertions(+), 75 deletions(-) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py 
b/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py index 839796c07ff9..cb9d1c864fe0 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py @@ -9,7 +9,8 @@ import warnings from datetime import datetime from typing import ( - Any, AnyStr, cast, Dict, List, IO, Iterable, Iterator, Optional, overload, Union, + Any, AnyStr, Callable, cast, Dict, List, IO, + Iterable, Iterator, Optional, overload, Union, TYPE_CHECKING ) from urllib.parse import unquote, urlparse @@ -64,6 +65,8 @@ from ._models import ( AccessPolicy, ContainerEncryptionScope, + ContentSettings, + CustomerProvidedEncryptionKey, PremiumPageBlobTier, PublicAccess, StandardBlobTier @@ -822,7 +825,12 @@ def _get_blob_service_client(self) -> "BlobServiceClient": key_resolver_function=self.key_resolver_function, _pipeline=_pipeline) @distributed_trace - def get_container_access_policy(self, **kwargs: Any) -> Dict[str, Any]: + def get_container_access_policy( + self, *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> Dict[str, Any]: """Gets the permissions for the specified container. The permissions indicate whether container data may be accessed publicly. @@ -848,15 +856,14 @@ def get_container_access_policy(self, **kwargs: Any) -> Dict[str, Any]: :dedent: 12 :caption: Getting the access policy on the container. 
""" - lease = kwargs.pop('lease', None) access_conditions = get_access_conditions(lease) - timeout = kwargs.pop('timeout', None) try: response, identifiers = self._client.container.get_access_policy( timeout=timeout, lease_access_conditions=access_conditions, cls=return_headers_and_deserialized, - **kwargs) + **kwargs + ) except HttpResponseError as error: process_storage_error(error) return { @@ -868,6 +875,11 @@ def get_container_access_policy(self, **kwargs: Any) -> Dict[str, Any]: def set_container_access_policy( self, signed_identifiers: Dict[str, "AccessPolicy"], public_access: Optional[Union[str, "PublicAccess"]] = None, + *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Union[str, datetime]]: """Sets the permissions for the specified container or stored access @@ -924,12 +936,13 @@ def set_container_access_policy( if value: value.start = serialize_iso(value.start) value.expiry = serialize_iso(value.expiry) - identifiers.append(SignedIdentifier(id=key, access_policy=value)) # type: ignore - signed_identifiers = identifiers # type: ignore - lease = kwargs.pop('lease', None) - mod_conditions = get_modify_conditions(kwargs) + identifiers.append(SignedIdentifier(id=key, access_policy=value)) # type: ignore + signed_identifiers = identifiers # type: ignore + mod_conditions = get_modify_conditions({ + 'if_modified_since': if_modified_since, + 'if_unmodified_since': if_unmodified_since + }) access_conditions = get_access_conditions(lease) - timeout = kwargs.pop('timeout', None) try: return cast(Dict[str, Union[str, datetime]], self._client.container.set_access_policy( container_acl=signed_identifiers or None, @@ -938,7 +951,8 @@ def set_container_access_policy( lease_access_conditions=access_conditions, modified_access_conditions=mod_conditions, cls=return_response_headers, - **kwargs)) + **kwargs + 
)) except HttpResponseError as error: process_storage_error(error) @@ -946,6 +960,8 @@ def set_container_access_policy( def list_blobs( self, name_starts_with: Optional[str] = None, include: Optional[Union[str, List[str]]] = None, + *, + timeout: Optional[int] = None, **kwargs: Any ) -> ItemPaged[BlobProperties]: """Returns a generator to list the blobs under the specified container. @@ -959,7 +975,7 @@ def list_blobs( Specifies one or more additional datasets to include in the response. Options include: 'snapshots', 'metadata', 'uncommittedblobs', 'copy', 'deleted', 'deletedwithversions', 'tags', 'versions', 'immutabilitypolicy', 'legalhold'. - :type include: list[str] or str + :type include: List[str] or str :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. @@ -986,18 +1002,27 @@ def list_blobs( include = [include] results_per_page = kwargs.pop('results_per_page', None) - timeout = kwargs.pop('timeout', None) command = functools.partial( self._client.container.list_blob_flat_segment, include=include, timeout=timeout, - **kwargs) + **kwargs + ) return ItemPaged( - command, prefix=name_starts_with, results_per_page=results_per_page, container=self.container_name, - page_iterator_class=BlobPropertiesPaged) + command, + prefix=name_starts_with, + results_per_page=results_per_page, + container=self.container_name, + page_iterator_class=BlobPropertiesPaged + ) @distributed_trace - def list_blob_names(self, **kwargs: Any) -> ItemPaged[str]: + def list_blob_names( + self, *, + name_starts_with: Optional[str] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> ItemPaged[str]: """Returns a generator to list the names of blobs under the specified container. The generator will lazily follow the continuation tokens returned by the service. 
@@ -1022,9 +1047,7 @@ def list_blob_names(self, **kwargs: Any) -> ItemPaged[str]: raise ValueError("Passing 'prefix' has no effect on filtering, " + "please use the 'name_starts_with' parameter instead.") - name_starts_with = kwargs.pop('name_starts_with', None) results_per_page = kwargs.pop('results_per_page', None) - timeout = kwargs.pop('timeout', None) # For listing only names we need to create a one-off generated client and # override its deserializer to prevent deserialization of the full response. @@ -1034,19 +1057,23 @@ def list_blob_names(self, **kwargs: Any) -> ItemPaged[str]: command = functools.partial( client.container.list_blob_flat_segment, timeout=timeout, - **kwargs) + **kwargs + ) return ItemPaged( command, prefix=name_starts_with, results_per_page=results_per_page, container=self.container_name, - page_iterator_class=BlobNamesPaged) + page_iterator_class=BlobNamesPaged + ) @distributed_trace def walk_blobs( self, name_starts_with: Optional[str] = None, include: Optional[Union[List[str], str]] = None, delimiter: str = "/", + *, + timeout: Optional[int] = None, **kwargs: Any ) -> ItemPaged[BlobProperties]: """Returns a generator to list the blobs under the specified container. @@ -1061,7 +1088,7 @@ def walk_blobs( Specifies one or more additional datasets to include in the response. Options include: 'snapshots', 'metadata', 'uncommittedblobs', 'copy', 'deleted', 'deletedwithversions', 'tags', 'versions', 'immutabilitypolicy', 'legalhold'. 
- :type include: list[str] or str + :type include: List[str] or str :param str delimiter: When the request includes this parameter, the operation returns a BlobPrefix element in the response body that acts as a placeholder for all blobs whose @@ -1084,23 +1111,27 @@ def walk_blobs( include = [include] results_per_page = kwargs.pop('results_per_page', None) - timeout = kwargs.pop('timeout', None) command = functools.partial( self._client.container.list_blob_hierarchy_segment, delimiter=delimiter, include=include, timeout=timeout, - **kwargs) + **kwargs + ) return BlobPrefix( command, prefix=name_starts_with, results_per_page=results_per_page, container=self.container_name, - delimiter=delimiter) + delimiter=delimiter + ) @distributed_trace def find_blobs_by_tags( self, filter_expression: str, + *, + results_per_page: Optional[int] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> ItemPaged[FilteredBlob]: """Returns a generator to list the blobs under the specified container whose tags @@ -1122,16 +1153,18 @@ def find_blobs_by_tags( :returns: An iterable (auto-paging) response of FilteredBlob. 
:rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.BlobProperties] """ - results_per_page = kwargs.pop('results_per_page', None) - timeout = kwargs.pop('timeout', None) command = functools.partial( self._client.container.filter_blobs, timeout=timeout, where=filter_expression, - **kwargs) + **kwargs + ) return ItemPaged( - command, results_per_page=results_per_page, container=self.container_name, - page_iterator_class=FilteredBlobPaged) + command, + results_per_page=results_per_page, + container=self.container_name, + page_iterator_class=FilteredBlobPaged + ) @distributed_trace def upload_blob( @@ -1140,7 +1173,26 @@ def upload_blob( blob_type: Union[str, BlobType] = BlobType.BLOCKBLOB, length: Optional[int] = None, metadata: Optional[Dict[str, str]] = None, - **kwargs + *, + overwrite: Optional[bool] = None, + content_settings: Optional["ContentSettings"] = None, + validate_content: Optional[bool] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + timeout: Optional[int] = None, + premium_page_blob_tier: Optional["PremiumPageBlobTier"] = None, + standard_blob_tier: Optional["StandardBlobTier"] = None, + maxsize_condition: Optional[int] = None, + max_concurrency: Optional[int] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + encryption_scope: Optional[str] = None, + encoding: Optional[str] = None, + progress_hook: Optional[Callable[[int, Optional[int]], None]] = None, + **kwargs: Any ) -> BlobClient: """Creates a new blob from a data source with automatic chunking. 
@@ -1263,16 +1315,33 @@ def upload_blob( ) blob = self.get_blob_client(name) kwargs.setdefault('merge_span', True) - timeout = kwargs.pop('timeout', None) - encoding = kwargs.pop('encoding', 'UTF-8') + kwargs.update({ + 'overwrite': overwrite, + 'content_settings': content_settings, + 'validate_content': validate_content, + 'lease': lease, + 'if_modified_since': if_modified_since, + 'if_unmodified_since': if_unmodified_since, + 'etag': etag, + 'match_condition': match_condition, + 'if_tags_match_condition': if_tags_match_condition, + 'premium_page_blob_tier': premium_page_blob_tier, + 'standard_blob_tier': standard_blob_tier, + 'maxsize_condition': maxsize_condition, + 'max_concurrency': max_concurrency, + 'cpk': cpk, + 'encryption_scope': encryption_scope, + 'encoding': encoding or 'UTF-8', + 'progress_hook': progress_hook + }) + options = {k: v for k, v in kwargs.items() if v is not None} blob.upload_blob( data, blob_type=blob_type, length=length, metadata=metadata, timeout=timeout, - encoding=encoding, - **kwargs + **options ) return blob @@ -1349,13 +1418,14 @@ def delete_blob( "Please use 'BlobProperties.name' or any other str input type instead.", DeprecationWarning ) - blob_client = self.get_blob_client(blob) # type: ignore + blob_client = self.get_blob_client(blob) # type: ignore kwargs.setdefault('merge_span', True) timeout = kwargs.pop('timeout', None) - blob_client.delete_blob( # type: ignore + blob_client.delete_blob( # type: ignore delete_snapshots=delete_snapshots, timeout=timeout, - **kwargs) + **kwargs + ) @overload def download_blob( diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_container_client_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_container_client_async.py index 62c04272e97c..3c1c2ff2ae3f 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_container_client_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_container_client_async.py @@ -9,7 +9,8 @@ import warnings 
from datetime import datetime from typing import ( - Any, AnyStr, AsyncIterable, AsyncIterator, cast, Dict, List, IO, Iterable, Optional, overload, Union, + Any, AnyStr, AsyncIterable, AsyncIterator, Awaitable, Callable, + cast, Dict, List, IO, Iterable, Optional, overload, Union, TYPE_CHECKING ) from urllib.parse import unquote, urlparse @@ -60,6 +61,8 @@ from .._models import ( AccessPolicy, ContainerEncryptionScope, + ContentSettings, + CustomerProvidedEncryptionKey, PremiumPageBlobTier, PublicAccess, StandardBlobTier, @@ -803,9 +806,13 @@ def _get_blob_service_client(self) -> "BlobServiceClient": encryption_version=self.encryption_version, key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function, _pipeline=_pipeline) - @distributed_trace_async - async def get_container_access_policy(self, **kwargs: Any) -> Dict[str, Any]: + async def get_container_access_policy( + self, *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> Dict[str, Any]: """Gets the permissions for the specified container. The permissions indicate whether container data may be accessed publicly. @@ -831,15 +838,14 @@ async def get_container_access_policy(self, **kwargs: Any) -> Dict[str, Any]: :dedent: 16 :caption: Getting the access policy on the container. 
""" - lease = kwargs.pop('lease', None) access_conditions = get_access_conditions(lease) - timeout = kwargs.pop('timeout', None) try: response, identifiers = await self._client.container.get_access_policy( timeout=timeout, lease_access_conditions=access_conditions, cls=return_headers_and_deserialized, - **kwargs) + **kwargs + ) except HttpResponseError as error: process_storage_error(error) return { @@ -851,6 +857,11 @@ async def get_container_access_policy(self, **kwargs: Any) -> Dict[str, Any]: async def set_container_access_policy( self, signed_identifiers: Dict[str, "AccessPolicy"], public_access: Optional[Union[str, "PublicAccess"]] = None, + *, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Union[str, datetime]]: """Sets the permissions for the specified container or stored access @@ -898,8 +909,6 @@ async def set_container_access_policy( :dedent: 16 :caption: Setting access policy on the container. """ - timeout = kwargs.pop('timeout', None) - lease = kwargs.pop('lease', None) if len(signed_identifiers) > 5: raise ValueError( 'Too many access policies provided. 
The server does not support setting ' @@ -909,10 +918,12 @@ async def set_container_access_policy( if value: value.start = serialize_iso(value.start) value.expiry = serialize_iso(value.expiry) - identifiers.append(SignedIdentifier(id=key, access_policy=value)) # type: ignore - signed_identifiers = identifiers # type: ignore - - mod_conditions = get_modify_conditions(kwargs) + identifiers.append(SignedIdentifier(id=key, access_policy=value)) # type: ignore + signed_identifiers = identifiers # type: ignore + mod_conditions = get_modify_conditions({ + 'if_modified_since': if_modified_since, + 'if_unmodified_since': if_unmodified_since + }) access_conditions = get_access_conditions(lease) try: return cast(Dict[str, Union[str, datetime]], await self._client.container.set_access_policy( @@ -922,7 +933,8 @@ async def set_container_access_policy( lease_access_conditions=access_conditions, modified_access_conditions=mod_conditions, cls=return_response_headers, - **kwargs)) + **kwargs + )) except HttpResponseError as error: process_storage_error(error) @@ -930,6 +942,8 @@ async def set_container_access_policy( def list_blobs( self, name_starts_with: Optional[str] = None, include: Optional[Union[str, List[str]]] = None, + *, + timeout: Optional[int] = None, **kwargs: Any ) -> AsyncItemPaged[BlobProperties]: """Returns a generator to list the blobs under the specified container. @@ -943,7 +957,7 @@ def list_blobs( Specifies one or more additional datasets to include in the response. Options include: 'snapshots', 'metadata', 'uncommittedblobs', 'copy', 'deleted', 'deletedwithversions', 'tags', 'versions', 'immutabilitypolicy', 'legalhold'. - :type include: list[str] or str + :type include: List[str] or str :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. 
@@ -970,12 +984,12 @@ def list_blobs( include = [include] results_per_page = kwargs.pop('results_per_page', None) - timeout = kwargs.pop('timeout', None) command = functools.partial( self._client.container.list_blob_flat_segment, include=include, timeout=timeout, - **kwargs) + **kwargs + ) return AsyncItemPaged( command, prefix=name_starts_with, @@ -985,7 +999,12 @@ def list_blobs( ) @distributed_trace - def list_blob_names(self, **kwargs: Any) -> AsyncItemPaged[str]: + def list_blob_names( + self, *, + name_starts_with: Optional[str] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> AsyncItemPaged[str]: """Returns a generator to list the names of blobs under the specified container. The generator will lazily follow the continuation tokens returned by the service. @@ -1010,9 +1029,7 @@ def list_blob_names(self, **kwargs: Any) -> AsyncItemPaged[str]: raise ValueError("Passing 'prefix' has no effect on filtering, " + "please use the 'name_starts_with' parameter instead.") - name_starts_with = kwargs.pop('name_starts_with', None) results_per_page = kwargs.pop('results_per_page', None) - timeout = kwargs.pop('timeout', None) # For listing only names we need to create a one-off generated client and # override its deserializer to prevent deserialization of the full response. @@ -1022,13 +1039,15 @@ def list_blob_names(self, **kwargs: Any) -> AsyncItemPaged[str]: command = functools.partial( client.container.list_blob_flat_segment, timeout=timeout, - **kwargs) + **kwargs + ) return AsyncItemPaged( command, prefix=name_starts_with, results_per_page=results_per_page, container=self.container_name, - page_iterator_class=BlobNamesPaged) + page_iterator_class=BlobNamesPaged + ) @distributed_trace def walk_blobs( @@ -1049,7 +1068,7 @@ def walk_blobs( Specifies one or more additional datasets to include in the response. 
Options include: 'snapshots', 'metadata', 'uncommittedblobs', 'copy', 'deleted', 'deletedwithversions', 'tags', 'versions', 'immutabilitypolicy', 'legalhold'. - :type include: list[str] or str + :type include: List[str] or str :param str delimiter: When the request includes this parameter, the operation returns a BlobPrefix element in the response body that acts as a placeholder for all blobs whose @@ -1078,17 +1097,22 @@ def walk_blobs( delimiter=delimiter, include=include, timeout=timeout, - **kwargs) + **kwargs + ) return BlobPrefix( command, prefix=name_starts_with, results_per_page=results_per_page, container=self.container_name, - delimiter=delimiter) + delimiter=delimiter + ) @distributed_trace def find_blobs_by_tags( self, filter_expression: str, + *, + results_per_page: Optional[int] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> AsyncItemPaged[FilteredBlob]: """Returns a generator to list the blobs under the specified container whose tags @@ -1110,17 +1134,18 @@ def find_blobs_by_tags( :returns: An iterable (auto-paging) response of FilteredBlob. 
:rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.BlobProperties] """ - results_per_page = kwargs.pop('results_per_page', None) - timeout = kwargs.pop('timeout', None) command = functools.partial( self._client.container.filter_blobs, timeout=timeout, where=filter_expression, - **kwargs) + **kwargs + ) return AsyncItemPaged( - command, results_per_page=results_per_page, + command, + results_per_page=results_per_page, container=self.container_name, - page_iterator_class=FilteredBlobPaged) + page_iterator_class=FilteredBlobPaged + ) @distributed_trace_async async def upload_blob( @@ -1129,7 +1154,26 @@ async def upload_blob( blob_type: Union[str, BlobType] = BlobType.BLOCKBLOB, length: Optional[int] = None, metadata: Optional[Dict[str, str]] = None, - **kwargs + *, + overwrite: Optional[bool] = None, + content_settings: Optional["ContentSettings"] = None, + validate_content: Optional[bool] = None, + lease: Optional[Union["BlobLeaseClient", str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + timeout: Optional[int] = None, + premium_page_blob_tier: Optional["PremiumPageBlobTier"] = None, + standard_blob_tier: Optional["StandardBlobTier"] = None, + maxsize_condition: Optional[int] = None, + max_concurrency: Optional[int] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + encryption_scope: Optional[str] = None, + encoding: Optional[str] = None, + progress_hook: Optional[Callable[[int, Optional[int]], Awaitable[None]]] = None, + **kwargs: Any ) -> BlobClient: """Creates a new blob from a data source with automatic chunking. 
@@ -1254,16 +1298,33 @@ async def upload_blob( ) blob = self.get_blob_client(name) kwargs.setdefault('merge_span', True) - timeout = kwargs.pop('timeout', None) - encoding = kwargs.pop('encoding', 'UTF-8') + kwargs.update({ + 'overwrite': overwrite, + 'content_settings': content_settings, + 'validate_content': validate_content, + 'lease': lease, + 'if_modified_since': if_modified_since, + 'if_unmodified_since': if_unmodified_since, + 'etag': etag, + 'match_condition': match_condition, + 'if_tags_match_condition': if_tags_match_condition, + 'premium_page_blob_tier': premium_page_blob_tier, + 'standard_blob_tier': standard_blob_tier, + 'maxsize_condition': maxsize_condition, + 'max_concurrency': max_concurrency, + 'cpk': cpk, + 'encryption_scope': encryption_scope, + 'encoding': encoding or 'UTF-8', + 'progress_hook': progress_hook + }) + options = {k: v for k, v in kwargs.items() if v is not None} await blob.upload_blob( data, blob_type=blob_type, length=length, metadata=metadata, timeout=timeout, - encoding=encoding, - **kwargs + **options ) return blob @@ -1340,13 +1401,14 @@ async def delete_blob( "Please use 'BlobProperties.name' or any other str input type instead.", DeprecationWarning ) - blob = self.get_blob_client(blob) # type: ignore + blob = self.get_blob_client(blob) # type: ignore kwargs.setdefault('merge_span', True) timeout = kwargs.pop('timeout', None) - await blob.delete_blob( # type: ignore + await blob.delete_blob( # type: ignore delete_snapshots=delete_snapshots, timeout=timeout, - **kwargs) + **kwargs + ) @overload async def download_blob( From 401132248002d6fda9673d981debb9eb52a85661 Mon Sep 17 00:00:00 2001 From: Peter Wu Date: Wed, 23 Apr 2025 17:38:34 -0400 Subject: [PATCH 5/6] Container Client Named Keywords --- .../azure/storage/blob/_container_client.py | 128 ++++++++++++++++-- .../storage/blob/_container_client_helpers.py | 13 +- .../blob/aio/_container_client_async.py | 126 +++++++++++++++-- 3 files changed, 231 insertions(+), 36 
deletions(-) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py index cb9d1c864fe0..a1648fc368ac 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py @@ -62,6 +62,7 @@ from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential, TokenCredential from azure.core.pipeline.transport import HttpResponse # pylint: disable=C4756 from azure.storage.blob import BlobServiceClient + from ._generated.models import RehydratePriority from ._models import ( AccessPolicy, ContainerEncryptionScope, @@ -1167,7 +1168,7 @@ def find_blobs_by_tags( ) @distributed_trace - def upload_blob( + def upload_blob( # pylint: disable=too-many-locals self, name: str, data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], blob_type: Union[str, BlobType] = BlobType.BLOCKBLOB, @@ -1349,6 +1350,15 @@ def upload_blob( def delete_blob( self, blob: str, delete_snapshots: Optional[str] = None, + *, + version_id: Optional[str] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> None: """Marks the specified blob or snapshot for deletion. 
@@ -1420,11 +1430,20 @@ def delete_blob( ) blob_client = self.get_blob_client(blob) # type: ignore kwargs.setdefault('merge_span', True) - timeout = kwargs.pop('timeout', None) + kwargs.update({ + 'version_id': version_id, + 'lease': lease, + 'if_modified_since': if_modified_since, + 'if_unmodified_since': if_unmodified_since, + 'etag': etag, + 'match_condition': match_condition, + 'if_tags_match_condition': if_tags_match_condition, + }) + options = {k: v for k, v in kwargs.items() if v is not None} blob_client.delete_blob( # type: ignore delete_snapshots=delete_snapshots, timeout=timeout, - **kwargs + **options ) @overload @@ -1433,8 +1452,19 @@ def download_blob( offset: Optional[int] = None, length: Optional[int] = None, *, + version_id: Optional[str] = None, + validate_content: Optional[bool] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + max_concurrency: Optional[int] = None, encoding: str, - **kwargs: Any + progress_hook: Optional[Callable[[int, int], None]] = None, + timeout: Optional[int] = None, ) -> StorageStreamDownloader[str]: ... 
@@ -1444,7 +1474,19 @@ def download_blob( offset: Optional[int] = None, length: Optional[int] = None, *, + version_id: Optional[str] = None, + validate_content: Optional[bool] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + max_concurrency: Optional[int] = None, encoding: None = None, + progress_hook: Optional[Callable[[int, int], None]] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> StorageStreamDownloader[bytes]: ... @@ -1455,7 +1497,19 @@ def download_blob( offset: Optional[int] = None, length: Optional[int] = None, *, - encoding: Union[str, None] = None, + version_id: Optional[str] = None, + validate_content: Optional[bool] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + max_concurrency: Optional[int] = None, + encoding: Optional[str] = None, + progress_hook: Optional[Callable[[int, int], None]] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Union[StorageStreamDownloader[str], StorageStreamDownloader[bytes]]: """Downloads a blob to the StorageStreamDownloader. 
The readall() method must @@ -1546,17 +1600,39 @@ def download_blob( "Please use 'BlobProperties.name' or any other str input type instead.", DeprecationWarning ) - blob_client = self.get_blob_client(blob) # type: ignore + blob_client = self.get_blob_client(blob) # type: ignore kwargs.setdefault('merge_span', True) + kwargs.update({ + 'version_id': version_id, + 'validate_content': validate_content, + 'lease': lease, + 'if_modified_since': if_modified_since, + 'if_unmodified_since': if_unmodified_since, + 'etag': etag, + 'match_condition': match_condition, + 'if_tags_match_condition': if_tags_match_condition, + 'cpk': cpk, + 'max_concurrency': max_concurrency, + 'progress_hook': progress_hook, + 'timeout': timeout, + }) + options = {k: v for k, v in kwargs.items() if v is not None} return blob_client.download_blob( offset=offset, length=length, encoding=encoding, - **kwargs) + **options + ) @distributed_trace def delete_blobs( # pylint: disable=delete-operation-wrong-return-type self, *blobs: Union[str, Dict[str, Any], BlobProperties], + delete_snapshots: Optional[str] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + if_tags_match_condition: Optional[str] = None, + raise_on_any_failure: bool = True, + timeout: Optional[int] = None, **kwargs: Any ) -> Iterator["HttpResponse"]: """Marks the specified blobs or snapshots for deletion. 
@@ -1654,6 +1730,12 @@ def delete_blobs( # pylint: disable=delete-operation-wrong-return-type self._query_str, self.container_name, self._client, + delete_snapshots=delete_snapshots, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags_match_condition=if_tags_match_condition, + raise_on_any_failure=raise_on_any_failure, + timeout=timeout, *blobs, **kwargs ) @@ -1664,6 +1746,10 @@ def delete_blobs( # pylint: disable=delete-operation-wrong-return-type def set_standard_blob_tier_blobs( self, standard_blob_tier: Optional[Union[str, "StandardBlobTier"]], *blobs: Union[str, Dict[str, Any], BlobProperties], + rehydrate_priority: Optional["RehydratePriority"] = None, + if_tags_match_condition: Optional[str] = None, + raise_on_any_failure: bool = True, + timeout: Optional[int] = None, **kwargs: Any ) -> Iterator["HttpResponse"]: """This operation sets the tier on block blobs. @@ -1719,15 +1805,16 @@ def set_standard_blob_tier_blobs( .. versionadded:: 12.4.0 + :keyword bool raise_on_any_failure: + This is a boolean param which defaults to True. When this is set, an exception + is raised even if there is a single operation failure. For optimal performance, + this should be set to False. :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. This value is not tracked or validated on the client. To configure client-side network timesouts see `here `__. - :keyword bool raise_on_any_failure: - This is a boolean param which defaults to True. When this is set, an exception - is raised even if there is a single operation failure. 
:return: An iterator of responses, one for each blob in order :rtype: Iterator[~azure.core.pipeline.transport.HttpResponse] """ @@ -1738,8 +1825,13 @@ def set_standard_blob_tier_blobs( self.container_name, standard_blob_tier, self._client, + rehydrate_priority=rehydrate_priority, + if_tags_match_condition=if_tags_match_condition, + timeout=timeout, + raise_on_any_failure=raise_on_any_failure, *blobs, - **kwargs) + **kwargs + ) return self._batch_send(*reqs, **options) @@ -1747,6 +1839,8 @@ def set_standard_blob_tier_blobs( def set_premium_page_blob_tier_blobs( self, premium_page_blob_tier: Optional[Union[str, "PremiumPageBlobTier"]], *blobs: Union[str, Dict[str, Any], BlobProperties], + raise_on_any_failure: bool = True, + timeout: Optional[int] = None, **kwargs: Any ) -> Iterator["HttpResponse"]: """Sets the page blob tiers on all blobs. This API is only supported for page blobs on premium accounts. @@ -1780,15 +1874,16 @@ def set_premium_page_blob_tier_blobs( key: 'timeout', value type: int :type blobs: str or Dict[str, Any] or ~azure.storage.blob.BlobProperties + :keyword bool raise_on_any_failure: + This is a boolean param which defaults to True. When this is set, an exception + is raised even if there is a single operation failure. For optimal performance, + this should be set to False. :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. This value is not tracked or validated on the client. To configure client-side network timesouts see `here `__. - :keyword bool raise_on_any_failure: - This is a boolean param which defaults to True. When this is set, an exception - is raised even if there is a single operation failure. 
:return: An iterator of responses, one for each blob in order :rtype: Iterator[~azure.core.pipeline.transport.HttpResponse] """ @@ -1799,8 +1894,11 @@ def set_premium_page_blob_tier_blobs( self.container_name, premium_page_blob_tier, self._client, + raise_on_any_failure=raise_on_any_failure, + timeout=timeout, *blobs, - **kwargs) + **kwargs + ) return self._batch_send(*reqs, **options) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client_helpers.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client_helpers.py index dfcb1c272617..76a1fcbb5307 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client_helpers.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client_helpers.py @@ -127,12 +127,13 @@ def _generate_delete_blobs_options( if_unmodified_since = kwargs.pop('if_unmodified_since', None) if_tags_match_condition = kwargs.pop('if_tags_match_condition', None) url_prepend = kwargs.pop('url_prepend', None) - kwargs.update({'raise_on_any_failure': raise_on_any_failure, - 'sas': query_str.replace('?', '&'), - 'timeout': '&timeout=' + str(timeout) if timeout else "", - 'path': container_name, - 'restype': 'restype=container&' - }) + kwargs.update({ + 'raise_on_any_failure': raise_on_any_failure, + 'sas': query_str.replace('?', '&'), + 'timeout': '&timeout=' + str(timeout) if timeout else "", + 'path': container_name, + 'restype': 'restype=container&' + }) reqs = [] for blob in blobs: diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_container_client_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_container_client_async.py index 3c1c2ff2ae3f..a0551175ebe8 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_container_client_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_container_client_async.py @@ -58,6 +58,7 @@ from azure.core.credentials_async import AsyncTokenCredential from azure.core.pipeline.transport 
import AsyncHttpResponse # pylint: disable=C4756 from ._blob_service_client_async import BlobServiceClient + from .._generated.models import RehydratePriority from .._models import ( AccessPolicy, ContainerEncryptionScope, @@ -1148,7 +1149,7 @@ def find_blobs_by_tags( ) @distributed_trace_async - async def upload_blob( + async def upload_blob( # pylint: disable=too-many-locals self, name: str, data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[AnyStr]], blob_type: Union[str, BlobType] = BlobType.BLOCKBLOB, @@ -1332,6 +1333,15 @@ async def upload_blob( async def delete_blob( self, blob: str, delete_snapshots: Optional[str] = None, + *, + version_id: Optional[str] = None, + lease: Optional[Union[BlobLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> None: """Marks the specified blob or snapshot for deletion. 
@@ -1403,11 +1413,20 @@ async def delete_blob( ) blob = self.get_blob_client(blob) # type: ignore kwargs.setdefault('merge_span', True) - timeout = kwargs.pop('timeout', None) + kwargs.update({ + 'version_id': version_id, + 'lease': lease, + 'if_modified_since': if_modified_since, + 'if_unmodified_since': if_unmodified_since, + 'etag': etag, + 'match_condition': match_condition, + 'if_tags_match_condition': if_tags_match_condition, + }) + options = {k: v for k, v in kwargs.items() if v is not None} await blob.delete_blob( # type: ignore delete_snapshots=delete_snapshots, timeout=timeout, - **kwargs + **options ) @overload @@ -1416,7 +1435,19 @@ async def download_blob( offset: Optional[int] = None, length: Optional[int] = None, *, + version_id: Optional[str] = None, + validate_content: Optional[bool] = None, + lease: Optional[Union["BlobLeaseClient", str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + max_concurrency: Optional[int] = None, encoding: str, + progress_hook: Optional[Callable[[int, int], Awaitable[None]]] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> StorageStreamDownloader[str]: ... 
@@ -1427,7 +1458,19 @@ async def download_blob( offset: Optional[int] = None, length: Optional[int] = None, *, + version_id: Optional[str] = None, + validate_content: Optional[bool] = None, + lease: Optional[Union["BlobLeaseClient", str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + max_concurrency: Optional[int] = None, encoding: None = None, + progress_hook: Optional[Callable[[int, int], Awaitable[None]]] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> StorageStreamDownloader[bytes]: ... @@ -1438,7 +1481,19 @@ async def download_blob( offset: Optional[int] = None, length: Optional[int] = None, *, - encoding: Union[str, None] = None, + version_id: Optional[str] = None, + validate_content: Optional[bool] = None, + lease: Optional[Union["BlobLeaseClient", str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional["MatchConditions"] = None, + if_tags_match_condition: Optional[str] = None, + cpk: Optional["CustomerProvidedEncryptionKey"] = None, + max_concurrency: Optional[int] = None, + encoding: Optional[str] = None, + progress_hook: Optional[Callable[[int, int], Awaitable[None]]] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Union[StorageStreamDownloader[str], StorageStreamDownloader[bytes]]: """Downloads a blob to the StorageStreamDownloader. 
The readall() method must @@ -1529,17 +1584,39 @@ async def download_blob( "Please use 'BlobProperties.name' or any other str input type instead.", DeprecationWarning ) - blob_client = self.get_blob_client(blob) # type: ignore + blob_client = self.get_blob_client(blob) # type: ignore kwargs.setdefault('merge_span', True) + kwargs.update({ + 'version_id': version_id, + 'validate_content': validate_content, + 'lease': lease, + 'if_modified_since': if_modified_since, + 'if_unmodified_since': if_unmodified_since, + 'etag': etag, + 'match_condition': match_condition, + 'if_tags_match_condition': if_tags_match_condition, + 'cpk': cpk, + 'max_concurrency': max_concurrency, + 'progress_hook': progress_hook, + 'timeout': timeout, + }) + options = {k: v for k, v in kwargs.items() if v is not None} return await blob_client.download_blob( offset=offset, length=length, encoding=encoding, - **kwargs) + **options + ) @distributed_trace_async async def delete_blobs( self, *blobs: Union[str, Dict[str, Any], BlobProperties], + delete_snapshots: Optional[str] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + if_tags_match_condition: Optional[str] = None, + raise_on_any_failure: bool = True, + timeout: Optional[int] = None, **kwargs: Any ) -> AsyncIterator["AsyncHttpResponse"]: """Marks the specified blobs or snapshots for deletion. 
@@ -1638,6 +1715,12 @@ async def delete_blobs( self._query_str, self.container_name, self._client, + delete_snapshots=delete_snapshots, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags_match_condition=if_tags_match_condition, + raise_on_any_failure=raise_on_any_failure, + timeout=timeout, *blobs, **kwargs ) @@ -1648,6 +1731,10 @@ async def delete_blobs( async def set_standard_blob_tier_blobs( self, standard_blob_tier: Union[str, 'StandardBlobTier'], *blobs: Union[str, Dict[str, Any], BlobProperties], + rehydrate_priority: Optional["RehydratePriority"] = None, + if_tags_match_condition: Optional[str] = None, + raise_on_any_failure: bool = True, + timeout: Optional[int] = None, **kwargs: Any ) -> AsyncIterator["AsyncHttpResponse"]: """This operation sets the tier on block blobs. @@ -1699,16 +1786,16 @@ async def set_standard_blob_tier_blobs( .. versionadded:: 12.4.0 + :keyword bool raise_on_any_failure: + This is a boolean param which defaults to True. When this is set, an exception + is raised even if there is a single operation failure. For optimal performance, + this should be set to False. :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. This value is not tracked or validated on the client. To configure client-side network timesouts see `here `__. - :keyword bool raise_on_any_failure: - This is a boolean param which defaults to True. When this is set, an exception - is raised even if there is a single operation failure. For optimal performance, - this should be set to False. 
:return: An async iterator of responses, one for each blob in order :rtype: asynciterator[~azure.core.pipeline.transport.AsyncHttpResponse] """ @@ -1719,8 +1806,13 @@ async def set_standard_blob_tier_blobs( self.container_name, standard_blob_tier, self._client, + rehydrate_priority=rehydrate_priority, + if_tags_match_condition=if_tags_match_condition, + timeout=timeout, + raise_on_any_failure=raise_on_any_failure, *blobs, - **kwargs) + **kwargs + ) return cast(AsyncIterator["AsyncHttpResponse"], await self._batch_send(*reqs, **options)) @@ -1728,6 +1820,8 @@ async def set_standard_blob_tier_blobs( async def set_premium_page_blob_tier_blobs( self, premium_page_blob_tier: Union[str, 'PremiumPageBlobTier'], *blobs: Union[str, Dict[str, Any], BlobProperties], + raise_on_any_failure: bool = True, + timeout: Optional[int] = None, **kwargs: Any ) -> AsyncIterator["AsyncHttpResponse"]: """Sets the page blob tiers on the blobs. This API is only supported for page blobs on premium accounts. @@ -1760,16 +1854,16 @@ async def set_premium_page_blob_tier_blobs( key: 'timeout', value type: int :type blobs: str or Dict[str, any] or ~azure.storage.blob.BlobProperties + :keyword bool raise_on_any_failure: + This is a boolean param which defaults to True. When this is set, an exception + is raised even if there is a single operation failure. For optimal performance, + this should be set to False. :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. This value is not tracked or validated on the client. To configure client-side network timesouts see `here `__. - :keyword bool raise_on_any_failure: - This is a boolean param which defaults to True. When this is set, an exception - is raised even if there is a single operation failure. For optimal performance, - this should be set to False. 
:return: An async iterator of responses, one for each blob in order :rtype: asynciterator[~azure.core.pipeline.transport.AsyncHttpResponse] """ @@ -1780,6 +1874,8 @@ async def set_premium_page_blob_tier_blobs( self.container_name, premium_page_blob_tier, self._client, + raise_on_any_failure=raise_on_any_failure, + timeout=timeout, *blobs, **kwargs) From 73ffdb4d0bf097d39edcf8332e8e2794df8b6bb2 Mon Sep 17 00:00:00 2001 From: Peter Wu Date: Wed, 23 Apr 2025 18:57:45 -0400 Subject: [PATCH 6/6] Duplicated super call --- .../azure-storage-blob/azure/storage/blob/_container_client.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py index a1648fc368ac..a50435cd67b5 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py @@ -180,8 +180,6 @@ def __init__( **kwargs ) self._api_version = get_api_version(api_version) - super(ContainerClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) - self._api_version = get_api_version(api_version) self._client = self._build_generated_client() self._configure_encryption(kwargs)