Skip to content

Commit b98ad40

Browse files
authored
[0.8.0] Upgrade (#49)
* upgrade all * upgrade version * history notes
1 parent 4750947 commit b98ad40

File tree

202 files changed

+96043
-88
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

202 files changed

+96043
-88
lines changed

README.rst

+10
Original file line number | Diff line number | Diff line change
@@ -17,6 +17,16 @@ Handles multi-API versions of Azure Storage Data Plane originally from https://g
1717

1818
Change Log
1919
----------
20+
0.8.0
21+
+++++
22+
* blob:
23+
- Support v2021-04-10(12.10.0)
24+
* fileshare:
25+
- Support v2021-04-10(12.7.0)
26+
* filedatalake:
27+
- Support v2020-10-02(12.6.0)
28+
* queue: Minor fix for 2018-03-28(12.2.0)
29+
2030
0.7.0
2131
+++++
2232
* blob:
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,239 @@
1+
# -------------------------------------------------------------------------
2+
# Copyright (c) Microsoft Corporation. All rights reserved.
3+
# Licensed under the MIT License. See License.txt in the project root for
4+
# license information.
5+
# --------------------------------------------------------------------------
6+
import os
7+
8+
from typing import Union, Iterable, AnyStr, IO, Any, Dict # pylint: disable=unused-import
9+
from ._version import VERSION
10+
from ._blob_client import BlobClient
11+
from ._container_client import ContainerClient
12+
from ._blob_service_client import BlobServiceClient
13+
from ._lease import BlobLeaseClient
14+
from ._download import StorageStreamDownloader
15+
from ._quick_query_helper import BlobQueryReader
16+
from ._shared_access_signature import generate_account_sas, generate_container_sas, generate_blob_sas
17+
from ._shared.policies import ExponentialRetry, LinearRetry
18+
from ._shared.response_handlers import PartialBatchErrorException
19+
from ._shared.models import(
20+
LocationMode,
21+
ResourceTypes,
22+
AccountSasPermissions,
23+
StorageErrorCode,
24+
UserDelegationKey
25+
)
26+
from ._generated.models import (
27+
RehydratePriority,
28+
)
29+
from ._models import (
30+
BlobType,
31+
BlockState,
32+
StandardBlobTier,
33+
PremiumPageBlobTier,
34+
BlobImmutabilityPolicyMode,
35+
SequenceNumberAction,
36+
PublicAccess,
37+
BlobAnalyticsLogging,
38+
Metrics,
39+
RetentionPolicy,
40+
StaticWebsite,
41+
CorsRule,
42+
ContainerProperties,
43+
BlobProperties,
44+
FilteredBlob,
45+
LeaseProperties,
46+
ContentSettings,
47+
CopyProperties,
48+
BlobBlock,
49+
PageRange,
50+
AccessPolicy,
51+
ContainerSasPermissions,
52+
BlobSasPermissions,
53+
CustomerProvidedEncryptionKey,
54+
ContainerEncryptionScope,
55+
BlobQueryError,
56+
DelimitedJsonDialect,
57+
DelimitedTextDialect,
58+
QuickQueryDialect,
59+
ArrowDialect,
60+
ArrowType,
61+
ObjectReplicationPolicy,
62+
ObjectReplicationRule,
63+
ImmutabilityPolicy
64+
)
65+
from ._list_blobs_helper import BlobPrefix
66+
67+
# Public package version string, single-sourced from the ._version module.
__version__ = VERSION
68+
69+
70+
def upload_blob_to_url(
        blob_url,  # type: str
        data,  # type: Union[Iterable[AnyStr], IO[AnyStr]]
        credential=None,  # type: Any
        **kwargs):
    # type: (...) -> Dict[str, Any]
    """Upload *data* to the blob at *blob_url* as a block blob.

    :param str blob_url:
        The full URI of the target blob. May also carry a SAS token.
    :param data:
        The payload to upload: bytes, text, an iterable, or a file-like object.
    :type data: bytes or str or Iterable
    :param credential:
        The credentials with which to authenticate. Optional when the blob URL
        already carries a SAS token. May be a SAS token string, an
        AzureSasCredential from azure.core.credentials, an account shared
        access key, or a TokenCredentials instance from azure.identity.
        An explicit credential takes precedence over a SAS token embedded in
        the URL — except for AzureSasCredential, where the conflicting SAS
        tokens will raise a ValueError.
    :keyword bool overwrite:
        Whether the uploaded blob should overwrite existing data. If True, any
        existing data is replaced; if False, the operation fails with a
        ResourceExistsError.
    :keyword int max_concurrency:
        The number of parallel connections with which to download.
    :keyword int length:
        Number of bytes to read from the stream. Optional, but should be
        supplied for optimal performance.
    :keyword dict(str,str) metadata:
        Name-value pairs associated with the blob as metadata.
    :keyword bool validate_content:
        If true, an MD5 hash is calculated for each chunk and checked by the
        storage service against the content that arrived — primarily valuable
        for detecting bitflips on the wire when using http instead of https
        (https, the default, already validates). The MD5 hash is not stored
        with the blob, and enabling this disables the memory-efficient upload
        algorithm, since hashing requires buffering entire blocks.
    :keyword str encoding:
        Encoding to use if text is supplied as input. Defaults to UTF-8.
    :returns: Blob-updated property dict (Etag and last modified)
    :rtype: dict(str, Any)
    """
    # The client is used as a context manager so its transport is closed
    # deterministically once the upload completes.
    client = BlobClient.from_blob_url(blob_url, credential=credential)
    with client:
        return client.upload_blob(data=data, blob_type=BlobType.BlockBlob, **kwargs)
121+
def _download_to_stream(client, handle, **kwargs):
122+
"""Download data to specified open file-handle."""
123+
stream = client.download_blob(**kwargs)
124+
stream.readinto(handle)
125+
126+
127+
def download_blob_from_url(
        blob_url,  # type: str
        output,  # type: str
        credential=None,  # type: Any
        **kwargs):
    # type: (...) -> None
    """Download the contents of a blob to a local file or stream.

    :param str blob_url:
        The full URI to the blob. This can also include a SAS token.
    :param output:
        Where to download the data: either a file path to write to, or an open
        IO handle to write to.
    :type output: str or writable stream.
    :param credential:
        The credentials with which to authenticate. Optional when the blob URL
        already carries a SAS token or the blob is public. May be a SAS token
        string, an AzureSasCredential from azure.core.credentials, an account
        shared access key, or a TokenCredentials instance from azure.identity.
        An explicit credential takes precedence over a SAS token embedded in
        the URL — except for AzureSasCredential, where the conflicting SAS
        tokens will raise a ValueError.
    :keyword bool overwrite:
        Whether to overwrite the local file if it already exists. Defaults to
        `False`, in which case a ValueError is raised if the file exists. If
        `True`, the existing file is written to. Ignored when a stream handle
        is passed in.
    :keyword int max_concurrency:
        The number of parallel connections with which to download.
    :keyword int offset:
        Start of byte range to use for downloading a section of the blob.
        Must be set if length is provided.
    :keyword int length:
        Number of bytes to read from the stream. Optional, but should be
        supplied for optimal performance.
    :keyword bool validate_content:
        If true, an MD5 hash is calculated for each chunk and checked by the
        storage service against the content that arrived — primarily valuable
        for detecting bitflips on the wire when using http instead of https
        (https, the default, already validates). The MD5 hash is not stored
        with the blob, and enabling this disables the memory-efficient upload
        algorithm, since hashing requires buffering entire blocks.
    :rtype: None
    """
    # Pop 'overwrite' before the remaining kwargs are forwarded to download.
    allow_overwrite = kwargs.pop('overwrite', False)
    client = BlobClient.from_blob_url(blob_url, credential=credential)
    with client:
        # A writable stream handle is used directly; otherwise treat
        # *output* as a file path.
        if hasattr(output, 'write'):
            _download_to_stream(client, output, **kwargs)
            return
        if not allow_overwrite and os.path.isfile(output):
            raise ValueError("The file '{}' already exists.".format(output))
        with open(output, 'wb') as file_handle:
            _download_to_stream(client, file_handle, **kwargs)
183+
# Names re-exported as the public API of this package; consumers should
# import these from the package root rather than from the private modules.
__all__ = [
    'upload_blob_to_url',
    'download_blob_from_url',
    'BlobServiceClient',
    'ContainerClient',
    'BlobClient',
    'BlobType',
    'BlobLeaseClient',
    'StorageErrorCode',
    'UserDelegationKey',
    'ExponentialRetry',
    'LinearRetry',
    'LocationMode',
    'BlockState',
    'StandardBlobTier',
    'PremiumPageBlobTier',
    'SequenceNumberAction',
    'BlobImmutabilityPolicyMode',
    'ImmutabilityPolicy',
    'PublicAccess',
    'BlobAnalyticsLogging',
    'Metrics',
    'RetentionPolicy',
    'StaticWebsite',
    'CorsRule',
    'ContainerProperties',
    'BlobProperties',
    'BlobPrefix',
    'FilteredBlob',
    'LeaseProperties',
    'ContentSettings',
    'CopyProperties',
    'BlobBlock',
    'PageRange',
    'AccessPolicy',
    'QuickQueryDialect',
    'ContainerSasPermissions',
    'BlobSasPermissions',
    'ResourceTypes',
    'AccountSasPermissions',
    'StorageStreamDownloader',
    'CustomerProvidedEncryptionKey',
    'RehydratePriority',
    'generate_account_sas',
    'generate_container_sas',
    'generate_blob_sas',
    'PartialBatchErrorException',
    'ContainerEncryptionScope',
    'BlobQueryError',
    'DelimitedJsonDialect',
    'DelimitedTextDialect',
    'ArrowDialect',
    'ArrowType',
    'BlobQueryReader',
    'ObjectReplicationPolicy',
    'ObjectReplicationRule'
]

0 commit comments

Comments
 (0)