Skip to content

Commit 9d43eb1

Browse files
Juliehzl and Ubuntu authored
[azure.multiapi.storagev2.blob] Add api version 2019-12-12 (#34)
* add new api version for blob * add new version: Co-authored-by: Ubuntu <zunli@zuhvm.etyrgwjlsqfeplvzbzef2qjagg.cbnx.internal.cloudapp.net>
1 parent e1fed8a commit 9d43eb1

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

72 files changed

+43483
-1
lines changed

README.rst

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,10 @@ Handles multi-API versions of Azure Storage Data Plane originally from https://g
1717

1818
Change Log
1919
----------
20+
0.4.0
21+
+++++
22+
* Add v2019-12-12 for azure.multiapi.storagev2.blob
23+
2024
0.3.7
2125
+++++
2226
* Fix syntax warning in python 3.8
Lines changed: 223 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,223 @@
1+
# -------------------------------------------------------------------------
2+
# Copyright (c) Microsoft Corporation. All rights reserved.
3+
# Licensed under the MIT License. See License.txt in the project root for
4+
# license information.
5+
# --------------------------------------------------------------------------
6+
import os
7+
8+
from typing import Union, Iterable, AnyStr, IO, Any, Dict # pylint: disable=unused-import
9+
from ._version import VERSION
10+
from ._blob_client import BlobClient
11+
from ._container_client import ContainerClient
12+
from ._blob_service_client import BlobServiceClient
13+
from ._lease import BlobLeaseClient
14+
from ._download import StorageStreamDownloader
15+
from ._quick_query_helper import BlobQueryReader
16+
from ._shared_access_signature import generate_account_sas, generate_container_sas, generate_blob_sas
17+
from ._shared.policies import ExponentialRetry, LinearRetry
18+
from ._shared.response_handlers import PartialBatchErrorException
19+
from ._shared.models import(
20+
LocationMode,
21+
ResourceTypes,
22+
AccountSasPermissions,
23+
StorageErrorCode,
24+
UserDelegationKey
25+
)
26+
from ._generated.models import (
27+
RehydratePriority
28+
)
29+
from ._models import (
30+
BlobType,
31+
BlockState,
32+
StandardBlobTier,
33+
PremiumPageBlobTier,
34+
SequenceNumberAction,
35+
PublicAccess,
36+
BlobAnalyticsLogging,
37+
Metrics,
38+
RetentionPolicy,
39+
StaticWebsite,
40+
CorsRule,
41+
ContainerProperties,
42+
BlobProperties,
43+
FilteredBlob,
44+
LeaseProperties,
45+
ContentSettings,
46+
CopyProperties,
47+
BlobBlock,
48+
PageRange,
49+
AccessPolicy,
50+
ContainerSasPermissions,
51+
BlobSasPermissions,
52+
CustomerProvidedEncryptionKey,
53+
ContainerEncryptionScope,
54+
BlobQueryError,
55+
DelimitedJSON,
56+
DelimitedTextDialect,
57+
ObjectReplicationPolicy,
58+
ObjectReplicationRule
59+
)
60+
61+
__version__ = VERSION
62+
63+
64+
def upload_blob_to_url(
65+
blob_url, # type: str
66+
data, # type: Union[Iterable[AnyStr], IO[AnyStr]]
67+
credential=None, # type: Any
68+
**kwargs):
69+
# type: (...) -> Dict[str, Any]
70+
"""Upload data to a given URL
71+
72+
The data will be uploaded as a block blob.
73+
74+
:param str blob_url:
75+
The full URI to the blob. This can also include a SAS token.
76+
:param data:
77+
The data to upload. This can be bytes, text, an iterable or a file-like object.
78+
:type data: bytes or str or Iterable
79+
:param credential:
80+
The credentials with which to authenticate. This is optional if the
81+
blob URL already has a SAS token. The value can be a SAS token string, an account
82+
shared access key, or an instance of a TokenCredentials class from azure.identity.
83+
If the URL already has a SAS token, specifying an explicit credential will take priority.
84+
:keyword bool overwrite:
85+
Whether the blob to be uploaded should overwrite the current data.
86+
If True, upload_blob_to_url will overwrite any existing data. If set to False, the
87+
operation will fail with a ResourceExistsError.
88+
:keyword int max_concurrency:
89+
The number of parallel connections with which to download.
90+
:keyword int length:
91+
Number of bytes to read from the stream. This is optional, but
92+
should be supplied for optimal performance.
93+
:keyword dict(str,str) metadata:
94+
Name-value pairs associated with the blob as metadata.
95+
:keyword bool validate_content:
96+
If true, calculates an MD5 hash for each chunk of the blob. The storage
97+
service checks the hash of the content that has arrived with the hash
98+
that was sent. This is primarily valuable for detecting bitflips on
99+
the wire if using http instead of https as https (the default) will
100+
already validate. Note that this MD5 hash is not stored with the
101+
blob. Also note that if enabled, the memory-efficient upload algorithm
102+
will not be used, because computing the MD5 hash requires buffering
103+
entire blocks, and doing so defeats the purpose of the memory-efficient algorithm.
104+
:keyword str encoding:
105+
Encoding to use if text is supplied as input. Defaults to UTF-8.
106+
:returns: Blob-updated property dict (Etag and last modified)
107+
:rtype: dict(str, Any)
108+
"""
109+
with BlobClient.from_blob_url(blob_url, credential=credential) as client:
110+
return client.upload_blob(data=data, blob_type=BlobType.BlockBlob, **kwargs)
111+
112+
113+
def _download_to_stream(client, handle, **kwargs):
114+
"""Download data to specified open file-handle."""
115+
stream = client.download_blob(**kwargs)
116+
stream.readinto(handle)
117+
118+
119+
def download_blob_from_url(
120+
blob_url, # type: str
121+
output, # type: str
122+
credential=None, # type: Any
123+
**kwargs):
124+
# type: (...) -> None
125+
"""Download the contents of a blob to a local file or stream.
126+
127+
:param str blob_url:
128+
The full URI to the blob. This can also include a SAS token.
129+
:param output:
130+
Where the data should be downloaded to. This could be either a file path to write to,
131+
or an open IO handle to write to.
132+
:type output: str or writable stream.
133+
:param credential:
134+
The credentials with which to authenticate. This is optional if the
135+
blob URL already has a SAS token or the blob is public. The value can be a SAS token string,
136+
an account shared access key, or an instance of a TokenCredentials class from azure.identity.
137+
If the URL already has a SAS token, specifying an explicit credential will take priority.
138+
:keyword bool overwrite:
139+
Whether the local file should be overwritten if it already exists. The default value is
140+
`False` - in which case a ValueError will be raised if the file already exists. If set to
141+
`True`, an attempt will be made to write to the existing file. If a stream handle is passed
142+
in, this value is ignored.
143+
:keyword int max_concurrency:
144+
The number of parallel connections with which to download.
145+
:keyword int offset:
146+
Start of byte range to use for downloading a section of the blob.
147+
Must be set if length is provided.
148+
:keyword int length:
149+
Number of bytes to read from the stream. This is optional, but
150+
should be supplied for optimal performance.
151+
:keyword bool validate_content:
152+
If true, calculates an MD5 hash for each chunk of the blob. The storage
153+
service checks the hash of the content that has arrived with the hash
154+
that was sent. This is primarily valuable for detecting bitflips on
155+
the wire if using http instead of https as https (the default) will
156+
already validate. Note that this MD5 hash is not stored with the
157+
blob. Also note that if enabled, the memory-efficient upload algorithm
158+
will not be used, because computing the MD5 hash requires buffering
159+
entire blocks, and doing so defeats the purpose of the memory-efficient algorithm.
160+
:rtype: None
161+
"""
162+
overwrite = kwargs.pop('overwrite', False)
163+
with BlobClient.from_blob_url(blob_url, credential=credential) as client:
164+
if hasattr(output, 'write'):
165+
_download_to_stream(client, output, **kwargs)
166+
else:
167+
if not overwrite and os.path.isfile(output):
168+
raise ValueError("The file '{}' already exists.".format(output))
169+
with open(output, 'wb') as file_handle:
170+
_download_to_stream(client, file_handle, **kwargs)
171+
172+
173+
__all__ = [
174+
'upload_blob_to_url',
175+
'download_blob_from_url',
176+
'BlobServiceClient',
177+
'ContainerClient',
178+
'BlobClient',
179+
'BlobType',
180+
'BlobLeaseClient',
181+
'StorageErrorCode',
182+
'UserDelegationKey',
183+
'ExponentialRetry',
184+
'LinearRetry',
185+
'LocationMode',
186+
'BlockState',
187+
'StandardBlobTier',
188+
'PremiumPageBlobTier',
189+
'SequenceNumberAction',
190+
'PublicAccess',
191+
'BlobAnalyticsLogging',
192+
'Metrics',
193+
'RetentionPolicy',
194+
'StaticWebsite',
195+
'CorsRule',
196+
'ContainerProperties',
197+
'BlobProperties',
198+
'FilteredBlob',
199+
'LeaseProperties',
200+
'ContentSettings',
201+
'CopyProperties',
202+
'BlobBlock',
203+
'PageRange',
204+
'AccessPolicy',
205+
'ContainerSasPermissions',
206+
'BlobSasPermissions',
207+
'ResourceTypes',
208+
'AccountSasPermissions',
209+
'StorageStreamDownloader',
210+
'CustomerProvidedEncryptionKey',
211+
'RehydratePriority',
212+
'generate_account_sas',
213+
'generate_container_sas',
214+
'generate_blob_sas',
215+
'PartialBatchErrorException',
216+
'ContainerEncryptionScope',
217+
'BlobQueryError',
218+
'DelimitedJSON',
219+
'DelimitedTextDialect',
220+
'BlobQueryReader',
221+
'ObjectReplicationPolicy',
222+
'ObjectReplicationRule'
223+
]

0 commit comments

Comments
 (0)