Commit fee20f3

[azure-multiadpi-storagev2] Add support for storage data plane track2 SDK (#21)
* add new script
* update script
* add storagev2 for track2
* add release note
* update version
* add more SDK
* change folder structure
* refine readme
* Update README.rst
* replace current namespace
* fix namespace
1 parent 2351d60 commit fee20f3

File tree

345 files changed: +143633 -2 lines


README.rst

Lines changed: 13 additions & 0 deletions
@@ -17,6 +17,19 @@ Handles multi-API versions of Azure Storage Data Plane originally from https://g

Change Log
----------

0.3.0
+++++

* Add support for storage data plane track2 SDK with new package azure-multiapi-storagev2:

  - blob

    - v2019-02-02
    - v2019-07-07

  - filedatalake

    - v2018-11-09

  - fileshare

    - v2019-02-02
    - v2019-07-07

  - queue

    - v2018-03-28

0.2.4
+++++
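
For context, the 0.3.0 entry above is consumed by importing a pinned API version from the new azure-multiapi-storagev2 namespace. A minimal sketch, assuming the blob/v2019-07-07 layout listed in the changelog; the connection string and container name are hypothetical placeholders:

    # Hedged sketch: the import path assumes the versioned namespace added by this commit.
    from azure.multiapi.storagev2.blob.v2019_07_07 import BlobServiceClient

    service = BlobServiceClient.from_connection_string("<connection-string>")
    container = service.get_container_client("mycontainer")  # hypothetical container name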
Lines changed: 208 additions & 0 deletions
@@ -0,0 +1,208 @@
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------

import os

from typing import Union, Iterable, AnyStr, IO, Any, Dict  # pylint: disable=unused-import
from ._version import VERSION
from ._blob_client import BlobClient
from ._container_client import ContainerClient
from ._blob_service_client import BlobServiceClient
from ._lease import BlobLeaseClient
from ._download import StorageStreamDownloader
from ._shared_access_signature import generate_account_sas, generate_container_sas, generate_blob_sas
from ._shared.policies import ExponentialRetry, LinearRetry
from ._shared.response_handlers import PartialBatchErrorException
from ._shared.models import (
    LocationMode,
    ResourceTypes,
    AccountSasPermissions,
    StorageErrorCode,
    UserDelegationKey
)
from ._generated.models import (
    RehydratePriority
)
from ._models import (
    BlobType,
    BlockState,
    StandardBlobTier,
    PremiumPageBlobTier,
    SequenceNumberAction,
    PublicAccess,
    BlobAnalyticsLogging,
    Metrics,
    RetentionPolicy,
    StaticWebsite,
    CorsRule,
    ContainerProperties,
    BlobProperties,
    LeaseProperties,
    ContentSettings,
    CopyProperties,
    BlobBlock,
    PageRange,
    AccessPolicy,
    ContainerSasPermissions,
    BlobSasPermissions,
    CustomerProvidedEncryptionKey,
)

__version__ = VERSION


def upload_blob_to_url(
        blob_url,  # type: str
        data,  # type: Union[Iterable[AnyStr], IO[AnyStr]]
        credential=None,  # type: Any
        **kwargs):
    # type: (...) -> Dict[str, Any]
    """Upload data to a given URL.

    The data will be uploaded as a block blob.

    :param str blob_url:
        The full URI to the blob. This can also include a SAS token.
    :param data:
        The data to upload. This can be bytes, text, an iterable, or a file-like object.
    :type data: bytes or str or Iterable
    :param credential:
        The credentials with which to authenticate. This is optional if the
        blob URL already has a SAS token. The value can be a SAS token string, an account
        shared access key, or an instance of a TokenCredentials class from azure.identity.
        If the URL already has a SAS token, specifying an explicit credential will take priority.
    :keyword bool overwrite:
        Whether the blob to be uploaded should overwrite the current data.
        If True, upload_blob_to_url will overwrite any existing data. If set to False, the
        operation will fail with a ResourceExistsError.
    :keyword int max_concurrency:
        The number of parallel connections with which to upload.
    :keyword int length:
        Number of bytes to read from the stream. This is optional, but
        should be supplied for optimal performance.
    :keyword dict(str,str) metadata:
        Name-value pairs associated with the blob as metadata.
    :keyword bool validate_content:
        If true, calculates an MD5 hash for each chunk of the blob. The storage
        service checks the hash of the content that has arrived with the hash
        that was sent. This is primarily valuable for detecting bit flips on
        the wire if using http instead of https, as https (the default) will
        already validate. Note that this MD5 hash is not stored with the
        blob. Also note that if enabled, the memory-efficient upload algorithm
        will not be used, because computing the MD5 hash requires buffering
        entire blocks, and doing so defeats the purpose of the memory-efficient algorithm.
    :keyword str encoding:
        Encoding to use if text is supplied as input. Defaults to UTF-8.
    :returns: Blob-updated property dict (Etag and last modified).
    :rtype: dict(str, Any)
    """
    with BlobClient.from_blob_url(blob_url, credential=credential) as client:
        return client.upload_blob(data=data, blob_type=BlobType.BlockBlob, **kwargs)
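
# A hedged usage sketch (not part of the original module): upload a byte
# string as a block blob. The account URL and SAS token are hypothetical
# placeholders.
#
#     upload_blob_to_url(
#         "https://myaccount.blob.core.windows.net/mycontainer/myblob?<sas-token>",
#         b"hello, world",
#         overwrite=True)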


def _download_to_stream(client, handle, **kwargs):
    """Download data to the specified open file handle."""
    stream = client.download_blob(**kwargs)
    stream.readinto(handle)


def download_blob_from_url(
        blob_url,  # type: str
        output,  # type: str
        credential=None,  # type: Any
        **kwargs):
    # type: (...) -> None
    """Download the contents of a blob to a local file or stream.

    :param str blob_url:
        The full URI to the blob. This can also include a SAS token.
    :param output:
        Where the data should be downloaded to. This could be either a file path to write to,
        or an open IO handle to write to.
    :type output: str or writable stream
    :param credential:
        The credentials with which to authenticate. This is optional if the
        blob URL already has a SAS token or the blob is public. The value can be a SAS token string,
        an account shared access key, or an instance of a TokenCredentials class from azure.identity.
        If the URL already has a SAS token, specifying an explicit credential will take priority.
    :keyword bool overwrite:
        Whether the local file should be overwritten if it already exists. The default value is
        `False`, in which case a ValueError will be raised if the file already exists. If set to
        `True`, an attempt will be made to write to the existing file. If a stream handle is passed
        in, this value is ignored.
    :keyword int max_concurrency:
        The number of parallel connections with which to download.
    :keyword int offset:
        Start of byte range to use for downloading a section of the blob.
        Must be set if length is provided.
    :keyword int length:
        Number of bytes to read from the stream. This is optional, but
        should be supplied for optimal performance.
    :keyword bool validate_content:
        If true, calculates an MD5 hash for each chunk of the blob. The storage
        service checks the hash of the content that has arrived with the hash
        that was sent. This is primarily valuable for detecting bit flips on
        the wire if using http instead of https, as https (the default) will
        already validate. Note that this MD5 hash is not stored with the
        blob. Also note that if enabled, the memory-efficient download algorithm
        will not be used, because computing the MD5 hash requires buffering
        entire blocks, and doing so defeats the purpose of the memory-efficient algorithm.
    :rtype: None
    """
    overwrite = kwargs.pop('overwrite', False)
    with BlobClient.from_blob_url(blob_url, credential=credential) as client:
        if hasattr(output, 'write'):
            _download_to_stream(client, output, **kwargs)
        else:
            if not overwrite and os.path.isfile(output):
                raise ValueError("The file '{}' already exists.".format(output))
            with open(output, 'wb') as file_handle:
                _download_to_stream(client, file_handle, **kwargs)
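
# A hedged usage sketch (not part of the original module): download a blob to
# a local file. The blob URL and local path are hypothetical placeholders;
# output may also be any writable stream.
#
#     download_blob_from_url(
#         "https://myaccount.blob.core.windows.net/mycontainer/myblob?<sas-token>",
#         "/tmp/myblob.bin",
#         overwrite=True)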


__all__ = [
    'upload_blob_to_url',
    'download_blob_from_url',
    'BlobServiceClient',
    'ContainerClient',
    'BlobClient',
    'BlobType',
    'BlobLeaseClient',
    'StorageErrorCode',
    'UserDelegationKey',
    'ExponentialRetry',
    'LinearRetry',
    'LocationMode',
    'BlockState',
    'StandardBlobTier',
    'PremiumPageBlobTier',
    'SequenceNumberAction',
    'PublicAccess',
    'BlobAnalyticsLogging',
    'Metrics',
    'RetentionPolicy',
    'StaticWebsite',
    'CorsRule',
    'ContainerProperties',
    'BlobProperties',
    'LeaseProperties',
    'ContentSettings',
    'CopyProperties',
    'BlobBlock',
    'PageRange',
    'AccessPolicy',
    'ContainerSasPermissions',
    'BlobSasPermissions',
    'ResourceTypes',
    'AccountSasPermissions',
    'StorageStreamDownloader',
    'CustomerProvidedEncryptionKey',
    'RehydratePriority',
    'generate_account_sas',
    'generate_container_sas',
    'generate_blob_sas',
    'PartialBatchErrorException'
]
