Skip to content

Commit 18890c0

Browse files
Merge pull request #563 from Backblaze/stream-upload-retries
Fix closed stream failure when retrying small file uploads
2 parents d476d7e + 6e219d7 commit 18890c0

File tree

3 files changed

+22
-17
lines changed

3 files changed

+22
-17
lines changed

b2sdk/_internal/transfer/outbound/upload_manager.py

Lines changed: 16 additions & 17 deletions
Original file line number · Diff line number · Diff line change
@@ -204,18 +204,17 @@ def _upload_small_file(
204204
content_length = upload_source.get_content_length()
205205
exception_info_list = []
206206
progress_listener.set_total_bytes(content_length)
207-
for _ in range(self.MAX_UPLOAD_ATTEMPTS):
208-
try:
209-
with upload_source.open() as file:
210-
input_stream = ReadingStreamWithProgress(
211-
file, progress_listener, length=content_length
212-
)
213-
if upload_source.is_sha1_known():
214-
content_sha1 = upload_source.get_content_sha1()
215-
else:
216-
input_stream = StreamWithHash(input_stream, stream_length=content_length)
217-
content_sha1 = HEX_DIGITS_AT_END
218-
# it is important that `len()` works on `input_stream`
207+
with upload_source.open() as file:
208+
input_stream = ReadingStreamWithProgress(file, progress_listener, length=content_length)
209+
if upload_source.is_sha1_known():
210+
content_sha1 = upload_source.get_content_sha1()
211+
else:
212+
input_stream = StreamWithHash(input_stream, stream_length=content_length)
213+
content_sha1 = HEX_DIGITS_AT_END
214+
# it is important that `len()` works on `input_stream`
215+
216+
for _ in range(self.MAX_UPLOAD_ATTEMPTS):
217+
try:
219218
response = self.services.session.upload_file(
220219
bucket_id,
221220
file_name,
@@ -236,10 +235,10 @@ def _upload_small_file(
236235
), '{} != {}'.format(content_sha1, response['contentSha1'])
237236
return self.services.api.file_version_factory.from_api_response(response)
238237

239-
except B2Error as e:
240-
if not e.should_retry_upload():
241-
raise
242-
exception_info_list.append(e)
243-
self.account_info.clear_bucket_upload_data(bucket_id)
238+
except B2Error as e:
239+
if not e.should_retry_upload():
240+
raise
241+
exception_info_list.append(e)
242+
self.account_info.clear_bucket_upload_data(bucket_id)
244243

245244
raise MaxRetriesExceeded(self.MAX_UPLOAD_ATTEMPTS, exception_info_list)
Lines changed: 1 addition & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -0,0 +1 @@
1+
Fixed a retry bug in `upload_unbound_stream()` small-file uploads where a retryable upload error could cause a one-shot buffered stream to be reopened after it was closed, raising `ValueError: I/O operation on closed file`.

test/unit/bucket/test_bucket.py

Lines changed: 5 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -1918,6 +1918,11 @@ def test_upload_one_retryable_error(self):
19181918
data = b'hello world'
19191919
self.bucket.upload_bytes(data, 'file1')
19201920

1921+
def test_upload_unbound_stream_one_retryable_error(self):
1922+
self.simulator.set_upload_errors([CanRetry(True)])
1923+
data = b'hello world'
1924+
self.bucket.upload_unbound_stream(io.BytesIO(data), 'file1')
1925+
19211926
def test_upload_timeout(self):
19221927
self.simulator.set_upload_errors([B2RequestTimeoutDuringUpload()])
19231928
data = b'hello world'

0 commit comments

Comments (0)