From 8dd35cbb5c7e971a59fbd9095c852e18c2eceb3c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Nowacki?= Date: Tue, 11 May 2021 13:23:21 +0200 Subject: [PATCH 01/33] dataclasses for file lock and retention --- b2sdk/file_lock.py | 64 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 64 insertions(+) create mode 100644 b2sdk/file_lock.py diff --git a/b2sdk/file_lock.py b/b2sdk/file_lock.py new file mode 100644 index 000000000..a91cdcb82 --- /dev/null +++ b/b2sdk/file_lock.py @@ -0,0 +1,64 @@ +import enum + + +@enum.unique +class RetentionMode(enum.Enum): + COMPLIANCE = "compliance" # TODO: docs + GOVERNANCE = "governance" # TODO: docs + + +class RetentionPeriod: + """ + "period": { + "duration": 2, + "unit": "years" + } + """ + def __init__(self, *, years=None, days=None): + assert (years is None) != (days is None) + if years is not None: + self.duration = years + self.unit = 'years' + else: + self.duration = days + self.unit = 'days' + + +class RetentionSetting: + """ + "defaultRetention": { + "mode": "compliance", + "period": { + "duration": 7, + "unit": "days" + } + } + """ + + +class FileRetention: + pass + + +class FileLockConfiguration: + """ + "fileLockConfiguration": { + "isClientAuthorizedToRead": true, + "value": { + "defaultRetention": { + "mode": "governance", + "period": { + "duration": 2, + "unit": "years" + } + }, + "isFileLockEnabled": true + } + } + + "fileLockConfiguration": { + "isClientAuthorizedToRead": false, + "value": null + } + """ + pass From 12310ea453c9e0e86b475a1133ae1abae27ec879 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Nowacki?= Date: Tue, 11 May 2021 15:54:56 +0200 Subject: [PATCH 02/33] file lock related objects serilizers and deserializers --- b2sdk/file_lock.py | 176 +++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 171 insertions(+), 5 deletions(-) diff --git a/b2sdk/file_lock.py b/b2sdk/file_lock.py index a91cdcb82..9ad94e057 100644 --- a/b2sdk/file_lock.py +++ b/b2sdk/file_lock.py @@ -1,3 +1,14 @@ +###################################################################### +# +# File: b2sdk/file_lock.py +# +# Copyright 2021 Backblaze Inc. All Rights Reserved. 
+# +# License https://www.backblaze.com/using_b2_code.html +# +###################################################################### + +from typing import Optional import enum @@ -5,6 +16,11 @@ class RetentionMode(enum.Enum): COMPLIANCE = "compliance" # TODO: docs GOVERNANCE = "governance" # TODO: docs + NONE = None + UNKNOWN = "unknown" + + +RETENTION_MODES_REQUIRING_PERIODS = frozenset({RetentionMode.COMPLIANCE, RetentionMode.GOVERNANCE}) class RetentionPeriod: @@ -14,7 +30,8 @@ class RetentionPeriod: "unit": "years" } """ - def __init__(self, *, years=None, days=None): + + def __init__(self, years: Optional[int] = None, days: Optional[int] = None): assert (years is None) != (days is None) if years is not None: self.duration = years @@ -23,8 +40,104 @@ def __init__(self, *, years=None, days=None): self.duration = days self.unit = 'days' + @classmethod + def from_period_dict(cls, period_dict): + """ + { + "duration": 2, + "unit": "years" + } + """ + return cls(**{period_dict['unit']: period_dict['duration']}) + + def as_dict(self): + return { + "duration": self.duration, + "unit": self.unit, + } + + +class FileRetentionSetting: + def __init__(self, mode: RetentionMode, retain_until: Optional[int]): + if mode in RETENTION_MODES_REQUIRING_PERIODS and retain_until is None: + raise ValueError('must specify retain_until for retention mode %s' % (mode,)) + self.mode = mode + self.retain_until = retain_until + + @classmethod + def from_file_retention_dict(cls, retention_dict: dict): + """ + Returns FileRetentionSetting for the given retention_dict retrieved from the api. E.g. + + .. code-block :: + + { + "isClientAuthorizedToRead": false, + "value": null + } + + { + "isClientAuthorizedToRead": true, + "value": { + "mode": "governance", + "retainUntilTimestamp": 1628942493000 + } + } + """ + if retention_dict['value'] is None: + return cls(RetentionMode.UNKNOWN, None) + return cls( + RetentionMode(retention_dict['value']['mode'] or 'none'), + retention_dict['value']['retainUntilTimestamp'], + ) + + def serialize_to_json_for_request(self): + if self.mode is RetentionMode.UNKNOWN: + raise ValueError('cannot use an unknown file retention setting in requests') + return self.as_dict() + + def as_dict(self): + return { + "mode": self.mode.value, + "retainUntilTimestamp": self.retain_until, + } -class RetentionSetting: + def add_to_to_upload_headers(self, headers): + if self.mode is RetentionMode.UNKNOWN: + raise ValueError('cannot use an unknown file retention setting in requests') + + headers['X-Bz-File-Retention-Mode'] = str( + self.mode.value + ) # mode = NONE is not supported by the server at the + # moment, but it should be + headers['X-Bz-File-Retention-Retain-Until-Timestamp'] = self.retain_until + + +class LegalHoldSerializer: + @classmethod + def from_server(cls, legal_hold_dict) -> Optional[bool]: + if legal_hold_dict['value'] is None: + return None + if legal_hold_dict['value'] == 'on': + return True + elif legal_hold_dict['value'] == 'off': + return False + raise ValueError('Unknown legal hold value: %s' % (legal_hold_dict['value'],)) + + @classmethod + def to_server(cls, bool_value: Optional[bool]) -> str: + if bool_value is None: + raise ValueError('Cannot use unknown legal hold in requests') + if bool_value: + return 'on' + return 'off' + + @classmethod + def add_to_upload_headers(cls, bool_value: Optional[bool], headers): + headers['X-Bz-File-Legal-Hold'] = cls.to_server(bool_value) + + +class BucketRetentionSetting: """ "defaultRetention": { "mode": "compliance", @@ -35,9 +148,28 @@ 
class RetentionSetting: } """ + def __init__(self, mode: RetentionMode, period: Optional[RetentionPeriod]): + if mode in RETENTION_MODES_REQUIRING_PERIODS and period is None: + raise ValueError('must specify period for retention mode %s' % (mode,)) + self.mode = mode + self.period = period -class FileRetention: - pass + @classmethod + def from_bucket_retention_dict(cls, retention_dict: dict): + period = retention_dict['period'] + if period is not None: + period = RetentionPeriod.from_period_dict(period) + return cls(RetentionMode(retention_dict['mode'] or 'none'), period) + + def as_dict(self): + if self.period is None: + period_repr = None + else: + period_repr = self.period.as_dict() + return { + 'mode': self.mode.value, + 'period': period_repr, + } class FileLockConfiguration: @@ -61,4 +193,38 @@ class FileLockConfiguration: "value": null } """ - pass + + def __init__( + self, + default_retention: BucketRetentionSetting, + is_file_lock_enabled: Optional[bool], + ): + self.default_retention = default_retention + self.is_file_lock_enabled = is_file_lock_enabled + + @classmethod + def from_bucket_dict(cls, bucket_dict): + if bucket_dict['fileLockConfiguration']['value'] is None: + return cls(UNKNOWN_BUCKET_RETENTION, None) + retention = BucketRetentionSetting.from_bucket_retention_dict( + bucket_dict['fileLockConfiguration']['value']['defaultRetention'] + ) + is_file_lock_enabled = bucket_dict['fileLockConfiguration']['value']['isFileLockEnabled'] + return cls(retention, is_file_lock_enabled) + + def serialize_to_json_for_request(self): + if self.is_file_lock_enabled is None: + raise ValueError('cannot use an unknown file lock configuration in requests') + return self.as_dict() + + def as_dict(self): + return { + "defaultRetention": self.default_retention.as_dict(), + "isFileLockEnabled": self.is_file_lock_enabled + } + + +UNKNOWN_BUCKET_RETENTION = BucketRetentionSetting(RetentionMode.UNKNOWN, None) +UNKNOWN_FILE_LOCK_CONFIGURATION = FileLockConfiguration(UNKNOWN_BUCKET_RETENTION, None) +NO_RETENTION_BUCKET_SETTING = BucketRetentionSetting(RetentionMode.NONE, None) +NO_RETENTION_FILE_SETTING = FileRetentionSetting(RetentionMode.NONE, None) From f2c40f8d3e7160a4c1ed241396b209a532ae13a7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Nowacki?= Date: Tue, 11 May 2021 16:33:55 +0200 Subject: [PATCH 03/33] serializers finished --- b2sdk/file_lock.py | 112 ++++++++++++++++++++++++--------------------- 1 file changed, 59 insertions(+), 53 deletions(-) diff --git a/b2sdk/file_lock.py b/b2sdk/file_lock.py index 9ad94e057..9f605768d 100644 --- a/b2sdk/file_lock.py +++ b/b2sdk/file_lock.py @@ -11,6 +11,8 @@ from typing import Optional import enum +# TODO: write __repr__ and __eq__ methods for the classes below + @enum.unique class RetentionMode(enum.Enum): @@ -24,13 +26,6 @@ class RetentionMode(enum.Enum): class RetentionPeriod: - """ - "period": { - "duration": 2, - "unit": "years" - } - """ - def __init__(self, years: Optional[int] = None, days: Optional[int] = None): assert (years is None) != (days is None) if years is not None: @@ -43,10 +38,14 @@ def __init__(self, years: Optional[int] = None, days: Optional[int] = None): @classmethod def from_period_dict(cls, period_dict): """ - { - "duration": 2, - "unit": "years" - } + Build a RetentionPeriod from an object returned by the server, such as: + + .. 
code-block :: + + { + "duration": 2, + "unit": "years" + } """ return cls(**{period_dict['unit']: period_dict['duration']}) @@ -138,16 +137,6 @@ def add_to_upload_headers(cls, bool_value: Optional[bool], headers): class BucketRetentionSetting: - """ - "defaultRetention": { - "mode": "compliance", - "period": { - "duration": 7, - "unit": "days" - } - } - """ - def __init__(self, mode: RetentionMode, period: Optional[RetentionPeriod]): if mode in RETENTION_MODES_REQUIRING_PERIODS and period is None: raise ValueError('must specify period for retention mode %s' % (mode,)) @@ -156,44 +145,40 @@ def __init__(self, mode: RetentionMode, period: Optional[RetentionPeriod]): @classmethod def from_bucket_retention_dict(cls, retention_dict: dict): + """ + Build a BucketRetentionSetting from an object returned by the server, such as: + + .. code-block:: + + { + "mode": "compliance", + "period": { + "duration": 7, + "unit": "days" + } + } + + """ period = retention_dict['period'] if period is not None: period = RetentionPeriod.from_period_dict(period) return cls(RetentionMode(retention_dict['mode'] or 'none'), period) def as_dict(self): - if self.period is None: - period_repr = None - else: - period_repr = self.period.as_dict() - return { + result = { 'mode': self.mode.value, - 'period': period_repr, } + if self.period is not None: + result['period'] = self.period.as_dict() + return result + def serialize_to_json_for_request(self): + if self.mode == RetentionMode.UNKNOWN: + raise ValueError('cannot use an unknown file lock configuration in requests') + return self.as_dict() -class FileLockConfiguration: - """ - "fileLockConfiguration": { - "isClientAuthorizedToRead": true, - "value": { - "defaultRetention": { - "mode": "governance", - "period": { - "duration": 2, - "unit": "years" - } - }, - "isFileLockEnabled": true - } - } - - "fileLockConfiguration": { - "isClientAuthorizedToRead": false, - "value": null - } - """ +class FileLockConfiguration: def __init__( self, default_retention: BucketRetentionSetting, @@ -204,6 +189,32 @@ def __init__( @classmethod def from_bucket_dict(cls, bucket_dict): + """ + Build a FileLockConfiguration from an object returned by server, such as: + + .. 
code-block:: + { + "isClientAuthorizedToRead": true, + "value": { + "defaultRetention": { + "mode": "governance", + "period": { + "duration": 2, + "unit": "years" + } + }, + "isFileLockEnabled": true + } + } + + or + + { + "isClientAuthorizedToRead": false, + "value": null + } + """ + if bucket_dict['fileLockConfiguration']['value'] is None: return cls(UNKNOWN_BUCKET_RETENTION, None) retention = BucketRetentionSetting.from_bucket_retention_dict( @@ -212,11 +223,6 @@ def from_bucket_dict(cls, bucket_dict): is_file_lock_enabled = bucket_dict['fileLockConfiguration']['value']['isFileLockEnabled'] return cls(retention, is_file_lock_enabled) - def serialize_to_json_for_request(self): - if self.is_file_lock_enabled is None: - raise ValueError('cannot use an unknown file lock configuration in requests') - return self.as_dict() - def as_dict(self): return { "defaultRetention": self.default_retention.as_dict(), From 9aaf60a7ca60c9a64cf674cf5f9989077027c75d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Nowacki?= Date: Tue, 11 May 2021 16:48:31 +0200 Subject: [PATCH 04/33] lsing files works --- b2sdk/file_lock.py | 6 +++--- b2sdk/file_version.py | 29 +++++++++++++++++++++++++++-- 2 files changed, 30 insertions(+), 5 deletions(-) diff --git a/b2sdk/file_lock.py b/b2sdk/file_lock.py index 9f605768d..056bb2e50 100644 --- a/b2sdk/file_lock.py +++ b/b2sdk/file_lock.py @@ -86,7 +86,7 @@ def from_file_retention_dict(cls, retention_dict: dict): if retention_dict['value'] is None: return cls(RetentionMode.UNKNOWN, None) return cls( - RetentionMode(retention_dict['value']['mode'] or 'none'), + RetentionMode(retention_dict['value']['mode']), retention_dict['value']['retainUntilTimestamp'], ) @@ -162,7 +162,7 @@ def from_bucket_retention_dict(cls, retention_dict: dict): period = retention_dict['period'] if period is not None: period = RetentionPeriod.from_period_dict(period) - return cls(RetentionMode(retention_dict['mode'] or 'none'), period) + return cls(RetentionMode(retention_dict['mode']), period) def as_dict(self): result = { @@ -203,7 +203,7 @@ def from_bucket_dict(cls, bucket_dict): "unit": "years" } }, - "isFileLockEnabled": true + "isFileLockEnabled": true } } diff --git a/b2sdk/file_version.py b/b2sdk/file_version.py index 0887ac315..9a8550deb 100644 --- a/b2sdk/file_version.py +++ b/b2sdk/file_version.py @@ -12,6 +12,7 @@ import datetime from .encryption.setting import EncryptionSetting, EncryptionSettingFactory +from .file_lock import FileRetentionSetting, LegalHoldSerializer class FileVersionInfo(object): @@ -35,8 +36,18 @@ class FileVersionInfo(object): LS_ENTRY_TEMPLATE = '%83s %6s %10s %8s %9d %s' # order is file_id, action, date, time, size, name __slots__ = [ - 'id_', 'file_name', 'size', 'content_type', 'content_sha1', 'content_md5', 'file_info', - 'upload_timestamp', 'action', 'server_side_encryption' + 'id_', + 'file_name', + 'size', + 'content_type', + 'content_sha1', + 'content_md5', + 'file_info', + 'upload_timestamp', + 'action', + 'server_side_encryption', + 'legal_hold', + 'file_retention', ] def __init__( @@ -51,6 +62,8 @@ def __init__( action, content_md5=None, server_side_encryption: Optional[EncryptionSetting] = None, # TODO: make it mandatory in v2 + legal_hold: Optional[bool] = None, + file_retention: Optional[FileRetentionSetting] = None, ): self.id_ = id_ self.file_name = file_name @@ -62,6 +75,8 @@ def __init__( self.upload_timestamp = upload_timestamp self.action = action self.server_side_encryption = server_side_encryption + self.legal_hold = legal_hold + 
self.file_retention = file_retention def as_dict(self): """ represents the object as a dict which looks almost exactly like the raw api output for upload/list """ @@ -69,6 +84,7 @@ def as_dict(self): 'fileId': self.id_, 'fileName': self.file_name, 'fileInfo': self.file_info, + 'legalHold': self.legal_hold, } if self.size is not None: result['size'] = self.size @@ -84,6 +100,8 @@ def as_dict(self): result['contentMd5'] = self.content_md5 if self.server_side_encryption is not None: # this is for backward compatibility of interface only, b2sdk always sets it result['serverSideEncryption'] = self.server_side_encryption.as_dict() + if self.file_retention is not None: # this is for backward compatibility of interface only, b2sdk always sets it + result['fileRetention'] = self.file_retention.as_dict() return result def format_ls_entry(self): @@ -170,6 +188,11 @@ def from_api_response(cls, file_info_dict, force_action=None): content_md5 = file_info_dict.get('contentMd5') file_info = file_info_dict.get('fileInfo') server_side_encryption = EncryptionSettingFactory.from_file_version_dict(file_info_dict) + file_retention = FileRetentionSetting.from_file_retention_dict( + file_info_dict.get('fileRetention') + ) + + legal_hold = LegalHoldSerializer.from_server(file_info_dict.get('legalHold')) return FileVersionInfo( id_, @@ -182,6 +205,8 @@ def from_api_response(cls, file_info_dict, force_action=None): action, content_md5, server_side_encryption, + legal_hold, + file_retention, ) @classmethod From 0a157f5df2e1da7ba034f58e51ee8c5aba4ffb97 Mon Sep 17 00:00:00 2001 From: Pawel Polewicz Date: Tue, 11 May 2021 16:58:20 +0200 Subject: [PATCH 05/33] Add file retention parameters to raw_api signatures --- b2sdk/raw_api.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/b2sdk/raw_api.py b/b2sdk/raw_api.py index ae6db091e..8f6d33e06 100644 --- a/b2sdk/raw_api.py +++ b/b2sdk/raw_api.py @@ -24,6 +24,7 @@ from .b2http import B2Http from .exception import FileOrBucketNotFound, ResourceNotFound, UnusableFileName, InvalidMetadataDirective, WrongEncryptionModeForBucketDefault from .encryption.setting import EncryptionAlgorithm, EncryptionMode, EncryptionSetting +from .file_lock import BucketRetentionSetting, FileRetentionSetting, RetentionMode, RetentionPeriod from .utils import b2_url_encode, hex_sha1_of_stream # All possible capabilities @@ -118,6 +119,7 @@ def create_bucket( cors_rules=None, lifecycle_rules=None, default_server_side_encryption: Optional[EncryptionSetting] = None, + is_file_lock_enabled: Optional[bool] = None, ): pass @@ -272,6 +274,7 @@ def update_bucket( lifecycle_rules=None, if_revision_is=None, default_server_side_encryption: Optional[EncryptionSetting] = None, + default_retention: Optional[BucketRetentionSetting] = None, ): pass @@ -364,7 +367,8 @@ def create_bucket( bucket_info=None, cors_rules=None, lifecycle_rules=None, - default_server_side_encryption=None, + default_server_side_encryption: Optional[EncryptionSetting] = None, + is_file_lock_enabled: Optional[bool] = None, ): kwargs = dict( accountId=account_id, @@ -382,6 +386,8 @@ def create_bucket( raise WrongEncryptionModeForBucketDefault(default_server_side_encryption.mode) kwargs['defaultServerSideEncryption' ] = default_server_side_encryption.serialize_to_json_for_request() + if is_file_lock_enabled is not None: + kwargs['fileLockConfiguration'] = {'isFileLockEnabled': is_file_lock_enabled} return self._post_json( api_url, 'b2_create_bucket', @@ -653,7 +659,8 @@ def update_bucket( cors_rules=None, 
lifecycle_rules=None, if_revision_is=None, - default_server_side_encryption=None, + default_server_side_encryption: Optional[EncryptionSetting] = None, + default_retention: Optional[BucketRetentionSetting] = None, ): assert bucket_info is not None or bucket_type is not None @@ -674,6 +681,8 @@ def update_bucket( raise WrongEncryptionModeForBucketDefault(default_server_side_encryption.mode) kwargs['defaultServerSideEncryption' ] = default_server_side_encryption.serialize_to_json_for_request() + if default_retention is not None: + kwargs['defaultRetention'] = default_retention.serialize_to_json_for_request() return self._post_json( api_url, From 70191e5b51f266ffad6445f2ec39f7de82e87606 Mon Sep 17 00:00:00 2001 From: Pawel Polewicz Date: Tue, 11 May 2021 16:59:11 +0200 Subject: [PATCH 06/33] Fix double 'if' in raw_api encryption handling --- b2sdk/raw_api.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/b2sdk/raw_api.py b/b2sdk/raw_api.py index 8f6d33e06..5b48d2b91 100644 --- a/b2sdk/raw_api.py +++ b/b2sdk/raw_api.py @@ -676,11 +676,10 @@ def update_bucket( if lifecycle_rules is not None: kwargs['lifecycleRules'] = lifecycle_rules if default_server_side_encryption is not None: - if default_server_side_encryption is not None: - if not default_server_side_encryption.mode.can_be_set_as_bucket_default(): - raise WrongEncryptionModeForBucketDefault(default_server_side_encryption.mode) - kwargs['defaultServerSideEncryption' - ] = default_server_side_encryption.serialize_to_json_for_request() + if not default_server_side_encryption.mode.can_be_set_as_bucket_default(): + raise WrongEncryptionModeForBucketDefault(default_server_side_encryption.mode) + kwargs['defaultServerSideEncryption' + ] = default_server_side_encryption.serialize_to_json_for_request() if default_retention is not None: kwargs['defaultRetention'] = default_retention.serialize_to_json_for_request() From f439e80ab82381bf70b7025a139f3d2877604e1e Mon Sep 17 00:00:00 2001 From: Pawel Polewicz Date: Tue, 11 May 2021 17:03:37 +0200 Subject: [PATCH 07/33] Refactor permission checking in raw_simulator --- b2sdk/raw_simulator.py | 21 ++++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/b2sdk/raw_simulator.py b/b2sdk/raw_simulator.py index ed99cb8aa..fa5f5b738 100644 --- a/b2sdk/raw_simulator.py +++ b/b2sdk/raw_simulator.py @@ -414,21 +414,28 @@ def __init__( default_server_side_encryption = EncryptionSetting(mode=EncryptionMode.NONE) self.default_server_side_encryption = default_server_side_encryption + def _check_capability(self, capability) + return capability in self.api.auth_token_to_key[account_auth_token].get_allowed()['capabilities'] + + @property + def is_allowed_to_read_bucket_encryption_setting(self): + return self._check_capability('readBucketEncryption') + + @property + def is_allowed_to_read_bucket_file_retention(self): + return self._check_capability('readBucketRetentions') + def bucket_dict(self, account_auth_token): default_sse = {'isClientAuthorizedToRead': False} - is_allowed_to_read_bucket_encryption_setting = 'readBucketEncryption' in self.api.auth_token_to_key[ - account_auth_token].get_allowed()['capabilities'] logger.debug( 'authtoken %s is %sallowed to read encryption setting of %s' % ( account_auth_token, - not is_allowed_to_read_bucket_encryption_setting and 'not ' or '', + not self.is_allowed_to_read_bucket_encryption_setting and 'not ' or '', self, ) ) - if is_allowed_to_read_bucket_encryption_setting: - default_sse = { - 'isClientAuthorizedToRead': True, 
- } + if self.is_allowed_to_read_bucket_encryption_setting: + default_sse['isClientAuthorizedToRead'] = True default_sse['value'] = {'mode': self.default_server_side_encryption.mode.value} if self.default_server_side_encryption.algorithm is not None: default_sse['value']['algorithm' From 0395bbd21ce1cd186719f411d8fba204e62e29ab Mon Sep 17 00:00:00 2001 From: Pawel Polewicz Date: Wed, 12 May 2021 04:43:23 +0200 Subject: [PATCH 08/33] Fix for empty period in NONE mode retention settings --- b2sdk/file_lock.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/b2sdk/file_lock.py b/b2sdk/file_lock.py index 056bb2e50..a784c229c 100644 --- a/b2sdk/file_lock.py +++ b/b2sdk/file_lock.py @@ -57,7 +57,7 @@ def as_dict(self): class FileRetentionSetting: - def __init__(self, mode: RetentionMode, retain_until: Optional[int]): + def __init__(self, mode: RetentionMode, retain_until: Optional[int] = None): if mode in RETENTION_MODES_REQUIRING_PERIODS and retain_until is None: raise ValueError('must specify retain_until for retention mode %s' % (mode,)) self.mode = mode @@ -137,7 +137,7 @@ def add_to_upload_headers(cls, bool_value: Optional[bool], headers): class BucketRetentionSetting: - def __init__(self, mode: RetentionMode, period: Optional[RetentionPeriod]): + def __init__(self, mode: RetentionMode, period: Optional[RetentionPeriod] = None): if mode in RETENTION_MODES_REQUIRING_PERIODS and period is None: raise ValueError('must specify period for retention mode %s' % (mode,)) self.mode = mode @@ -230,7 +230,7 @@ def as_dict(self): } -UNKNOWN_BUCKET_RETENTION = BucketRetentionSetting(RetentionMode.UNKNOWN, None) +UNKNOWN_BUCKET_RETENTION = BucketRetentionSetting(RetentionMode.UNKNOWN) UNKNOWN_FILE_LOCK_CONFIGURATION = FileLockConfiguration(UNKNOWN_BUCKET_RETENTION, None) -NO_RETENTION_BUCKET_SETTING = BucketRetentionSetting(RetentionMode.NONE, None) -NO_RETENTION_FILE_SETTING = FileRetentionSetting(RetentionMode.NONE, None) +NO_RETENTION_BUCKET_SETTING = BucketRetentionSetting(RetentionMode.NONE) +NO_RETENTION_FILE_SETTING = FileRetentionSetting(RetentionMode.NONE) From c33b978be660c459378e3859ea871d295985b6de Mon Sep 17 00:00:00 2001 From: Pawel Polewicz Date: Wed, 12 May 2021 04:45:36 +0200 Subject: [PATCH 09/33] Fix bucket creation, ignore the docs for now --- b2sdk/raw_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/b2sdk/raw_api.py b/b2sdk/raw_api.py index 5b48d2b91..2106afe47 100644 --- a/b2sdk/raw_api.py +++ b/b2sdk/raw_api.py @@ -387,7 +387,7 @@ def create_bucket( kwargs['defaultServerSideEncryption' ] = default_server_side_encryption.serialize_to_json_for_request() if is_file_lock_enabled is not None: - kwargs['fileLockConfiguration'] = {'isFileLockEnabled': is_file_lock_enabled} + kwargs['fileLockEnabled'] = is_file_lock_enabled return self._post_json( api_url, 'b2_create_bucket', From fb5b68c45bf693919cae9aaa854dce26fe4efe27 Mon Sep 17 00:00:00 2001 From: Pawel Polewicz Date: Wed, 12 May 2021 04:46:14 +0200 Subject: [PATCH 10/33] Add all new capabilities (key permissions) --- b2sdk/raw_api.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/b2sdk/raw_api.py b/b2sdk/raw_api.py index 2106afe47..3b31b6cb8 100644 --- a/b2sdk/raw_api.py +++ b/b2sdk/raw_api.py @@ -37,6 +37,12 @@ 'deleteBuckets', 'readBucketEncryption', 'writeBucketEncryption', + 'readBucketRetentions', + 'writeBucketRetentions', + 'writeFileRetentions', + 'writeFileLegalHolds', + 'readFileRetentions', + 'readFileLegalHolds', 'listFiles', 'readFiles', 'shareFiles', From 
61ff868adb86b1e52f7315b71dd4b7d4bf11683f Mon Sep 17 00:00:00 2001 From: Pawel Polewicz Date: Wed, 12 May 2021 04:54:36 +0200 Subject: [PATCH 11/33] Add raw_api.update_file_retention --- b2sdk/raw_api.py | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/b2sdk/raw_api.py b/b2sdk/raw_api.py index 3b31b6cb8..b9e574c3d 100644 --- a/b2sdk/raw_api.py +++ b/b2sdk/raw_api.py @@ -284,6 +284,18 @@ def update_bucket( ): pass + @abstractmethod + def update_file_retention( + self, + api_url, + account_auth_token, + file_id, + file_name, + file_retention: FileRetentionSetting, + bypass_governance: bool = False, + ): + pass + @abstractmethod def upload_file( self, @@ -698,6 +710,27 @@ def update_bucket( **kwargs ) + def update_file_retention( + self, + api_url, + account_auth_token, + file_id, + file_name, + file_retention, + bypass_governance=False + ): + kwargs = {} + kwargs['fileRetention'] = file_retention.serialize_to_json_for_request() + return self._post_json( + api_url, + 'b2_update_file_retention', + account_auth_token, + fileId=file_id, + fileName=file_name, + bypassGovernance=bypass_governance, + **kwargs + ) + def unprintable_to_hex(self, string): """ Replace unprintable chars in string with a hex representation. From 9205666f097439814587d21f576c761e7a8807d3 Mon Sep 17 00:00:00 2001 From: Pawel Polewicz Date: Wed, 12 May 2021 04:56:00 +0200 Subject: [PATCH 12/33] Add basic retention scenario to test_raw_api --- b2sdk/raw_api.py | 35 +++++++++++++++++++++++++++++------ 1 file changed, 29 insertions(+), 6 deletions(-) diff --git a/b2sdk/raw_api.py b/b2sdk/raw_api.py index b9e574c3d..1dfaba84d 100644 --- a/b2sdk/raw_api.py +++ b/b2sdk/raw_api.py @@ -24,7 +24,7 @@ from .b2http import B2Http from .exception import FileOrBucketNotFound, ResourceNotFound, UnusableFileName, InvalidMetadataDirective, WrongEncryptionModeForBucketDefault from .encryption.setting import EncryptionAlgorithm, EncryptionMode, EncryptionSetting -from .file_lock import BucketRetentionSetting, FileRetentionSetting, RetentionMode, RetentionPeriod +from .file_lock import BucketRetentionSetting, FileRetentionSetting, NO_RETENTION_FILE_SETTING, RetentionMode, RetentionPeriod from .utils import b2_url_encode, hex_sha1_of_stream # All possible capabilities @@ -1027,7 +1027,12 @@ def test_raw_api_helper(raw_api): account_id, int(time.time()), random.randint(1000, 9999) ) bucket_dict = raw_api.create_bucket( - api_url, account_auth_token, account_id, bucket_name, 'allPublic' + api_url, + account_auth_token, + account_id, + bucket_name, + 'allPublic', + is_file_lock_enabled=True, ) bucket_id = bucket_dict['bucketId'] first_bucket_revision = bucket_dict['revision'] @@ -1039,9 +1044,13 @@ def test_raw_api_helper(raw_api): algorithm=EncryptionAlgorithm.AES256, ) sse_none = EncryptionSetting(mode=EncryptionMode.NONE) - for encryption_setting in [ - sse_none, - sse_b2_aes, + for encryption_setting, default_retention in [ + ( + sse_none, + BucketRetentionSetting(mode=RetentionMode.GOVERNANCE, period=RetentionPeriod(days=1)) + ), + (sse_b2_aes, None), + (sse_b2_aes, BucketRetentionSetting(RetentionMode.NONE)), ]: bucket_dict = raw_api.update_bucket( api_url, @@ -1050,6 +1059,7 @@ def test_raw_api_helper(raw_api): bucket_id, 'allPublic', default_server_side_encryption=encryption_setting, + default_retention=default_retention, ) # b2_list_buckets @@ -1233,7 +1243,10 @@ def test_raw_api_helper(raw_api): account_id, bucket_id, 'allPrivate', - bucket_info={'color': 'blue'} + bucket_info={'color': 'blue'}, + 
default_retention=BucketRetentionSetting( + mode=RetentionMode.GOVERNANCE, period=RetentionPeriod(days=1) + ), ) assert first_bucket_revision < updated_bucket['revision'] @@ -1259,6 +1272,16 @@ def _clean_and_delete_bucket(raw_api, api_url, account_auth_token, account_id, b action = version_dict['action'] if action in ['hide', 'upload']: print('b2_delete_file', file_name, action) + if action == 'upload' and version_dict[ + 'fileRetention'] and version_dict['fileRetention']['value']['mode'] is not None: + raw_api.update_file_retention( + api_url, + account_auth_token, + file_id, + file_name, + NO_RETENTION_FILE_SETTING, + bypass_governance=True + ) raw_api.delete_file_version(api_url, account_auth_token, file_id, file_name) else: print('b2_cancel_large_file', file_name) From 7d68336b9723212fd9ac92998508817acc3779d9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Nowacki?= Date: Wed, 12 May 2021 12:16:20 +0200 Subject: [PATCH 13/33] is_file_lock_enabled and default_retention are not stored in buckets and represented in as_dict() --- b2sdk/bucket.py | 29 ++++++++++++++++++++++++++++- 1 file changed, 28 insertions(+), 1 deletion(-) diff --git a/b2sdk/bucket.py b/b2sdk/bucket.py index 101bbd2bd..0189ba64f 100644 --- a/b2sdk/bucket.py +++ b/b2sdk/bucket.py @@ -14,6 +14,7 @@ from .encryption.setting import EncryptionSetting, EncryptionSettingFactory from .encryption.types import EncryptionMode from .exception import FileNotPresent, FileOrBucketNotFound, UnexpectedCloudBehaviour, UnrecognizedBucketType +from .file_lock import BucketRetentionSetting, UNKNOWN_BUCKET_RETENTION, FileLockConfiguration from .file_version import FileVersionInfo, FileVersionInfoFactory from .progress import DoNothingProgressListener from .transfer.emerge.executor import AUTO_CONTENT_TYPE @@ -48,6 +49,8 @@ def __init__( default_server_side_encryption: EncryptionSetting = EncryptionSetting( EncryptionMode.UNKNOWN ), + default_retention: BucketRetentionSetting = UNKNOWN_BUCKET_RETENTION, + is_file_lock_enabled: Optional[bool] = None, ): """ :param b2sdk.v1.B2Api api: an API object @@ -61,6 +64,8 @@ def __init__( :param dict bucket_dict: a dictionary which contains bucket parameters :param set options_set: set of bucket options strings :param b2sdk.v1.EncryptionSetting default_server_side_encryption: default server side encryption settings + :param b2sdk.v1.BucketRetentionSetting default_retention: default retention setting + :param bool is_file_lock_enabled: whether file locking is enabled or not """ self.api = api self.id_ = id_ @@ -73,6 +78,8 @@ def __init__( self.bucket_dict = bucket_dict or {} self.options_set = options_set or set() self.default_server_side_encryption = default_server_side_encryption + self.default_retention = default_retention + self.is_file_lock_enabled = is_file_lock_enabled def get_id(self): """ @@ -107,6 +114,8 @@ def update( lifecycle_rules=None, if_revision_is=None, default_server_side_encryption: Optional[EncryptionSetting] = None, + default_retention: Optional[BucketRetentionSetting] = None, + # is_file_lock_enabled = None, TODO: establish how to properly send it to B2 cloud ): """ Update various bucket parameters. 
@@ -129,6 +138,8 @@ def update( lifecycle_rules=lifecycle_rules, if_revision_is=if_revision_is, default_server_side_encryption=default_server_side_encryption, + # default_retention=default_retention, TODO: implement in session and raw_api + # is_file_lock_enabled=is_file_lock_enabled, TODO: establish how to properly send it to B2 cloud and implement in session and raw_api ) def cancel_large_file(self, file_id): @@ -828,6 +839,9 @@ def as_dict(self): result['revision'] = self.revision result['options'] = self.options_set result['defaultServerSideEncryption'] = self.default_server_side_encryption.as_dict() + result['isFileLockEnabled'] = self.is_file_lock_enabled + result['defaultRetention'] = self.default_retention.as_dict() + return result def __repr__(self): @@ -872,7 +886,17 @@ def from_api_bucket_dict(cls, api, bucket_dict): "algorithm" : "AES256", "mode" : "SSE-B2" } - } + }, + "fileLockConfiguration": { + "isClientAuthorizedToRead": true, + "value": { + "defaultRetention": { + "mode": null, + "period": null + }, + "isFileLockEnabled": false + } + } } into a Bucket object. @@ -896,6 +920,7 @@ def from_api_bucket_dict(cls, api, bucket_dict): if 'defaultServerSideEncryption' not in bucket_dict: raise UnexpectedCloudBehaviour('server did not provide `defaultServerSideEncryption`') default_server_side_encryption = EncryptionSettingFactory.from_bucket_dict(bucket_dict) + file_lock_configuration = FileLockConfiguration.from_bucket_dict(bucket_dict) return cls.BUCKET_CLASS( api, bucket_id, @@ -908,4 +933,6 @@ def from_api_bucket_dict(cls, api, bucket_dict): bucket_dict, options, default_server_side_encryption, + file_lock_configuration.default_retention, + file_lock_configuration.is_file_lock_enabled, ) From f6b6691c0cd6672feb6afbe1d3c531d9d9543cf9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Nowacki?= Date: Wed, 12 May 2021 12:26:16 +0200 Subject: [PATCH 14/33] missing colon causing a SyntaxError fixed --- b2sdk/raw_simulator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/b2sdk/raw_simulator.py b/b2sdk/raw_simulator.py index fa5f5b738..415054747 100644 --- a/b2sdk/raw_simulator.py +++ b/b2sdk/raw_simulator.py @@ -414,7 +414,7 @@ def __init__( default_server_side_encryption = EncryptionSetting(mode=EncryptionMode.NONE) self.default_server_side_encryption = default_server_side_encryption - def _check_capability(self, capability) + def _check_capability(self, capability): return capability in self.api.auth_token_to_key[account_auth_token].get_allowed()['capabilities'] @property From 18bd0f7c24a4ec08c7c2266a9c6889badde5fdc7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Nowacki?= Date: Wed, 12 May 2021 19:06:51 +0200 Subject: [PATCH 15/33] missing file lock metadata (legalHold and fileRetention) for hide markers and folders are now working --- b2sdk/file_lock.py | 49 ++++++++++++++++++++++++++++++++++--------- b2sdk/file_version.py | 6 ++---- 2 files changed, 41 insertions(+), 14 deletions(-) diff --git a/b2sdk/file_lock.py b/b2sdk/file_lock.py index a784c229c..b10f42e25 100644 --- a/b2sdk/file_lock.py +++ b/b2sdk/file_lock.py @@ -11,8 +11,11 @@ from typing import Optional import enum +from .exception import UnexpectedCloudBehaviour # TODO: write __repr__ and __eq__ methods for the classes below +ACTIONS_WITHOUT_LOCK_SETTINGS = frozenset(['hide', 'folder']) + @enum.unique class RetentionMode(enum.Enum): @@ -64,25 +67,43 @@ def __init__(self, mode: RetentionMode, retain_until: Optional[int] = None): self.retain_until = retain_until @classmethod - def 
from_file_retention_dict(cls, retention_dict: dict): + def from_file_version_dict(cls, file_version_dict: dict): """ - Returns FileRetentionSetting for the given retention_dict retrieved from the api. E.g. + Returns FileRetentionSetting for the given file_version_dict retrieved from the api. E.g. .. code-block :: { - "isClientAuthorizedToRead": false, - "value": null + "action": "upload", + "fileRetention": { + "isClientAuthorizedToRead": false, + "value": null + }, + ... } { - "isClientAuthorizedToRead": true, - "value": { - "mode": "governance", - "retainUntilTimestamp": 1628942493000 - } + "action": "upload", + "fileRetention": { + "isClientAuthorizedToRead": true, + "value": { + "mode": "governance", + "retainUntilTimestamp": 1628942493000 + } + }, + ... } """ + if 'fileRetention' not in file_version_dict: + if file_version_dict['action'] not in ACTIONS_WITHOUT_LOCK_SETTINGS: + raise UnexpectedCloudBehaviour( + 'No fileRetention provided for file version with action=%s' % + (file_version_dict['action']) + ) + return NO_RETENTION_FILE_SETTING + + retention_dict = file_version_dict['fileRetention'] + if retention_dict['value'] is None: return cls(RetentionMode.UNKNOWN, None) return cls( @@ -114,7 +135,15 @@ def add_to_to_upload_headers(self, headers): class LegalHoldSerializer: @classmethod - def from_server(cls, legal_hold_dict) -> Optional[bool]: + def from_server(cls, file_version_dict) -> Optional[bool]: + if 'legalHold' not in file_version_dict: + if file_version_dict['action'] not in ACTIONS_WITHOUT_LOCK_SETTINGS: + raise UnexpectedCloudBehaviour( + 'legalHold not provided for file version with action=%s' % + (file_version_dict['action']) + ) + return None + legal_hold_dict = file_version_dict['legalHold'] if legal_hold_dict['value'] is None: return None if legal_hold_dict['value'] == 'on': diff --git a/b2sdk/file_version.py b/b2sdk/file_version.py index 9a8550deb..49dbbc1f9 100644 --- a/b2sdk/file_version.py +++ b/b2sdk/file_version.py @@ -188,11 +188,9 @@ def from_api_response(cls, file_info_dict, force_action=None): content_md5 = file_info_dict.get('contentMd5') file_info = file_info_dict.get('fileInfo') server_side_encryption = EncryptionSettingFactory.from_file_version_dict(file_info_dict) - file_retention = FileRetentionSetting.from_file_retention_dict( - file_info_dict.get('fileRetention') - ) + file_retention = FileRetentionSetting.from_file_version_dict(file_info_dict) - legal_hold = LegalHoldSerializer.from_server(file_info_dict.get('legalHold')) + legal_hold = LegalHoldSerializer.from_server(file_info_dict) return FileVersionInfo( id_, From ac05c2771947f7b5b277acbb5faf938fc1161ea2 Mon Sep 17 00:00:00 2001 From: Pawel Polewicz Date: Thu, 13 May 2021 03:35:18 +0200 Subject: [PATCH 16/33] Add type hint for raw_api.update_file_retention --- b2sdk/raw_api.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/b2sdk/raw_api.py b/b2sdk/raw_api.py index 1dfaba84d..35f342759 100644 --- a/b2sdk/raw_api.py +++ b/b2sdk/raw_api.py @@ -716,8 +716,8 @@ def update_file_retention( account_auth_token, file_id, file_name, - file_retention, - bypass_governance=False + file_retention: FileRetentionSetting, + bypass_governance: bool = False, ): kwargs = {} kwargs['fileRetention'] = file_retention.serialize_to_json_for_request() From af265ddbc7b5340cbff727abe1621af4348b7b37 Mon Sep 17 00:00:00 2001 From: Pawel Polewicz Date: Thu, 13 May 2021 04:33:22 +0200 Subject: [PATCH 17/33] Fix retention deserialization so that it works when unauthorized too --- b2sdk/file_lock.py | 11 
++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/b2sdk/file_lock.py b/b2sdk/file_lock.py index b10f42e25..d1b276203 100644 --- a/b2sdk/file_lock.py +++ b/b2sdk/file_lock.py @@ -104,10 +104,15 @@ def from_file_version_dict(cls, file_version_dict: dict): retention_dict = file_version_dict['fileRetention'] - if retention_dict['value'] is None: + if not retention_dict['isClientAuthorizedToRead']: return cls(RetentionMode.UNKNOWN, None) + + mode = retention_dict['value']['mode'] + if mode is None: + return NO_RETENTION_FILE_SETTING + return cls( - RetentionMode(retention_dict['value']['mode']), + RetentionMode(mode), retention_dict['value']['retainUntilTimestamp'], ) @@ -244,7 +249,7 @@ def from_bucket_dict(cls, bucket_dict): } """ - if bucket_dict['fileLockConfiguration']['value'] is None: + if not bucket_dict['fileLockConfiguration']['isClientAuthorizedToRead']: return cls(UNKNOWN_BUCKET_RETENTION, None) retention = BucketRetentionSetting.from_bucket_retention_dict( bucket_dict['fileLockConfiguration']['value']['defaultRetention'] From 03c40efa6491057f2918f6fc357486a5eefe9c8c Mon Sep 17 00:00:00 2001 From: Pawel Polewicz Date: Thu, 13 May 2021 04:34:23 +0200 Subject: [PATCH 18/33] Add snippets for raw_api integration test debugging --- b2sdk/raw_api.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/b2sdk/raw_api.py b/b2sdk/raw_api.py index 35f342759..b6c30aef2 100644 --- a/b2sdk/raw_api.py +++ b/b2sdk/raw_api.py @@ -1026,6 +1026,10 @@ def test_raw_api_helper(raw_api): bucket_name = 'test-raw-api-%s-%d-%d' % ( account_id, int(time.time()), random.randint(1000, 9999) ) + + # very verbose http debug + #import http.client; http.client.HTTPConnection.debuglevel = 1 + bucket_dict = raw_api.create_bucket( api_url, account_auth_token, @@ -1065,6 +1069,7 @@ def test_raw_api_helper(raw_api): # b2_list_buckets print('b2_list_buckets') bucket_list_dict = raw_api.list_buckets(api_url, account_auth_token, account_id) + #print(bucket_list_dict) # b2_get_upload_url print('b2_get_upload_url') From 77169c225cc4eee83244ac5974e9d81c53e54f96 Mon Sep 17 00:00:00 2001 From: Pawel Polewicz Date: Thu, 13 May 2021 04:41:00 +0200 Subject: [PATCH 19/33] Change FileSimulator bucket_id field to BucketSimulator type This is needed so that it is possible to access api simulator fields, specifically those related to token/capability registry, so that the FileSimulator can determine how much information it should return to the caller based on the capabilities. 
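A minimal, self-contained sketch of the intent, using simplified stand-ins for
the simulator classes (the _check_capability and
is_allowed_to_read_file_retention helpers shown here are the ones this series
adds in a later commit):

    class KeySimulator:
        def __init__(self, capabilities):
            self._capabilities = capabilities

        def get_allowed(self):
            return {'capabilities': self._capabilities}


    class ApiSimulator:
        def __init__(self):
            self.auth_token_to_key = {}  # auth token -> KeySimulator


    class BucketSimulator:
        def __init__(self, api):
            self.api = api

        def _check_capability(self, account_auth_token, capability):
            key = self.api.auth_token_to_key[account_auth_token]
            return capability in key.get_allowed()['capabilities']


    class FileSimulator:
        def __init__(self, bucket):
            # holding the bucket (not just its id) gives access to bucket.api
            self.bucket = bucket

        def is_allowed_to_read_file_retention(self, account_auth_token):
            return self.bucket._check_capability(account_auth_token, 'readFileRetentions')

    # usage:
    #   api = ApiSimulator()
    #   api.auth_token_to_key['token-1'] = KeySimulator(['readFileRetentions'])
    #   file_sim = FileSimulator(BucketSimulator(api))
    #   file_sim.is_allowed_to_read_file_retention('token-1')  # True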
--- b2sdk/raw_simulator.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/b2sdk/raw_simulator.py b/b2sdk/raw_simulator.py index 415054747..e8f30c86a 100644 --- a/b2sdk/raw_simulator.py +++ b/b2sdk/raw_simulator.py @@ -141,7 +141,7 @@ class FileSimulator(object): def __init__( self, account_id, - bucket_id, + bucket, file_id, action, name, @@ -159,7 +159,7 @@ def __init__( else: assert server_side_encryption is not None self.account_id = account_id - self.bucket_id = bucket_id + self.bucket = bucket self.file_id = file_id self.action = action self.name = name @@ -223,7 +223,7 @@ def as_upload_result(self): fileId=self.file_id, fileName=self.name, accountId=self.account_id, - bucketId=self.bucket_id, + bucketId=self.bucket.bucket_id, contentLength=len(self.data_bytes) if self.data_bytes is not None else 0, contentType=self.content_type, contentSha1=self.content_sha1, @@ -257,7 +257,7 @@ def as_start_large_file_result(self): fileId=self.file_id, fileName=self.name, accountId=self.account_id, - bucketId=self.bucket_id, + bucketId=self.bucket.bucket_id, contentType=self.content_type, fileInfo=self.file_info, uploadTimestamp=self.upload_timestamp, @@ -524,7 +524,7 @@ def get_upload_part_url(self, file_id): def hide_file(self, file_name): file_id = self._next_file_id() file_sim = self.FILE_SIMULATOR_CLASS( - self.account_id, self.bucket_id, file_id, 'hide', file_name, None, "none", {}, b'', + self.account_id, self, file_id, 'hide', file_name, None, "none", {}, b'', next(self.upload_timestamp_counter) ) self.file_id_to_file[file_id] = file_sim @@ -562,12 +562,12 @@ def copy_file( data_bytes = get_bytes_range(file_sim.data_bytes, bytes_range) - destination_bucket_id = destination_bucket_id or self.bucket_id + destination_bucket = self.api.bucket_id_to_bucket.get(destination_bucket_id, self) sse = destination_server_side_encryption or self.default_server_side_encryption logger.debug('setting encryption to %s', sse) copy_file_sim = self.FILE_SIMULATOR_CLASS( self.account_id, - destination_bucket_id, + destination_bucket, new_file_id, 'copy', new_file_name, @@ -657,7 +657,7 @@ def list_unfinished_large_files(self, start_file_id=None, max_file_count=None, p fileId=file_sim.file_id, fileName=file_sim.name, accountId=file_sim.account_id, - bucketId=file_sim.bucket_id, + bucketId=file_sim.bucket.bucket_id, contentType=file_sim.content_type, fileInfo=file_sim.file_info ) @@ -683,7 +683,7 @@ def start_large_file( file_info = sse.add_key_id_to_file_info(file_info) logger.debug('setting encryption to %s', sse) file_sim = self.FILE_SIMULATOR_CLASS( - self.account_id, self.bucket_id, file_id, 'start', file_name, content_type, 'none', + self.account_id, self, file_id, 'start', file_name, content_type, 'none', file_info, None, next(self.upload_timestamp_counter), server_side_encryption=sse, ) # yapf: disable self.file_id_to_file[file_id] = file_sim @@ -747,7 +747,7 @@ def upload_file( file_sim = self.FILE_SIMULATOR_CLASS( self.account_id, - self.bucket_id, + self, file_id, 'upload', file_name, From a7c22477c8ca235fcb18a091f126932bc5721b08 Mon Sep 17 00:00:00 2001 From: Pawel Polewicz Date: Thu, 13 May 2021 04:46:11 +0200 Subject: [PATCH 20/33] Remove debug logs related to sse from raw_simulator They won't be useful anymore now that SSE-C support is released --- b2sdk/raw_simulator.py | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/b2sdk/raw_simulator.py b/b2sdk/raw_simulator.py index e8f30c86a..2299b8969 100644 --- a/b2sdk/raw_simulator.py +++ 
b/b2sdk/raw_simulator.py @@ -153,7 +153,6 @@ def __init__( range_=None, server_side_encryption: Optional[EncryptionSetting] = None, ): - logger.debug('FileSimulator called with sse=%s', server_side_encryption) if action == 'hide': assert server_side_encryption is None else: @@ -427,13 +426,6 @@ def is_allowed_to_read_bucket_file_retention(self): def bucket_dict(self, account_auth_token): default_sse = {'isClientAuthorizedToRead': False} - logger.debug( - 'authtoken %s is %sallowed to read encryption setting of %s' % ( - account_auth_token, - not self.is_allowed_to_read_bucket_encryption_setting and 'not ' or '', - self, - ) - ) if self.is_allowed_to_read_bucket_encryption_setting: default_sse['isClientAuthorizedToRead'] = True default_sse['value'] = {'mode': self.default_server_side_encryption.mode.value} @@ -564,7 +556,6 @@ def copy_file( destination_bucket = self.api.bucket_id_to_bucket.get(destination_bucket_id, self) sse = destination_server_side_encryption or self.default_server_side_encryption - logger.debug('setting encryption to %s', sse) copy_file_sim = self.FILE_SIMULATOR_CLASS( self.account_id, destination_bucket, @@ -681,7 +672,6 @@ def start_large_file( sse = server_side_encryption or self.default_server_side_encryption if sse: # FIXME: remove this part when RawApi<->Encryption adapters are implemented properly file_info = sse.add_key_id_to_file_info(file_info) - logger.debug('setting encryption to %s', sse) file_sim = self.FILE_SIMULATOR_CLASS( self.account_id, self, file_id, 'start', file_name, content_type, 'none', file_info, None, next(self.upload_timestamp_counter), server_side_encryption=sse, @@ -743,7 +733,6 @@ def upload_file( encryption = server_side_encryption or self.default_server_side_encryption if encryption: # FIXME: remove this part when RawApi<->Encryption adapters are implemented properly file_infos = encryption.add_key_id_to_file_info(file_infos) - logger.debug('setting encryption to %s', encryption) file_sim = self.FILE_SIMULATOR_CLASS( self.account_id, From a9574faf35c361db0cdcdcff98ecf6e2f1a13e60 Mon Sep 17 00:00:00 2001 From: Pawel Polewicz Date: Thu, 13 May 2021 05:24:30 +0200 Subject: [PATCH 21/33] Code formatting --- b2sdk/file_lock.py | 4 ++-- b2sdk/raw_simulator.py | 7 ++++++- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/b2sdk/file_lock.py b/b2sdk/file_lock.py index d1b276203..5cf6eecb9 100644 --- a/b2sdk/file_lock.py +++ b/b2sdk/file_lock.py @@ -44,7 +44,7 @@ def from_period_dict(cls, period_dict): Build a RetentionPeriod from an object returned by the server, such as: .. 
code-block :: - + { "duration": 2, "unit": "years" @@ -260,7 +260,7 @@ def from_bucket_dict(cls, bucket_dict): def as_dict(self): return { "defaultRetention": self.default_retention.as_dict(), - "isFileLockEnabled": self.is_file_lock_enabled + "isFileLockEnabled": self.is_file_lock_enabled, } diff --git a/b2sdk/raw_simulator.py b/b2sdk/raw_simulator.py index 2299b8969..bbc2eb288 100644 --- a/b2sdk/raw_simulator.py +++ b/b2sdk/raw_simulator.py @@ -576,7 +576,12 @@ def copy_file( return copy_file_sim - def list_file_names(self, start_file_name=None, max_file_count=None, prefix=None): + def list_file_names( + self, + start_file_name=None, + max_file_count=None, + prefix=None, + ): assert prefix is None or start_file_name is None or start_file_name.startswith(prefix ), locals() start_file_name = start_file_name or '' From 52a5bc603f36b41c32c16775a726b456fd08ea61 Mon Sep 17 00:00:00 2001 From: Pawel Polewicz Date: Thu, 13 May 2021 05:36:40 +0200 Subject: [PATCH 22/33] Change capability checkers in raw_simulator to take auth token into account --- b2sdk/raw_simulator.py | 52 +++++++++++++++++++++++++++++------------- 1 file changed, 36 insertions(+), 16 deletions(-) diff --git a/b2sdk/raw_simulator.py b/b2sdk/raw_simulator.py index bbc2eb288..47591193b 100644 --- a/b2sdk/raw_simulator.py +++ b/b2sdk/raw_simulator.py @@ -37,6 +37,7 @@ UnsatisfiableRange, SSECKeyError, ) +from .file_lock import BucketRetentionSetting, FileRetentionSetting, NO_RETENTION_BUCKET_SETTING, UNKNOWN_FILE_LOCK_CONFIGURATION from .raw_api import AbstractRawApi, HEX_DIGITS_AT_END, MetadataDirectiveMode, ALL_CAPABILITIES from .utils import ( b2_url_decode, @@ -251,6 +252,12 @@ def as_list_files_dict(self): ] = self.server_side_encryption.serialize_to_json_for_request() return result + def is_allowed_to_read_file_retention(self, account_auth_token): + return self.bucket._check_capability(account_auth_token, 'readFileRetentions') + + def is_allowed_to_read_file_legal_hold(self, account_auth_token): + return self.bucket._check_capability(account_auth_token, 'readFileLegalHolds') + def as_start_large_file_result(self): result = dict( fileId=self.file_id, @@ -412,21 +419,30 @@ def __init__( if default_server_side_encryption is None: default_server_side_encryption = EncryptionSetting(mode=EncryptionMode.NONE) self.default_server_side_encryption = default_server_side_encryption - - def _check_capability(self, capability): - return capability in self.api.auth_token_to_key[account_auth_token].get_allowed()['capabilities'] - - @property - def is_allowed_to_read_bucket_encryption_setting(self): - return self._check_capability('readBucketEncryption') - - @property - def is_allowed_to_read_bucket_file_retention(self): - return self._check_capability('readBucketRetentions') + self.is_file_lock_enabled = is_file_lock_enabled + self.default_retention = NO_RETENTION_BUCKET_SETTING + + def is_allowed_to_read_bucket_encryption_setting(self, account_auth_token): + return self._check_capability(account_auth_token, 'readBucketEncryption') + + def is_allowed_to_read_bucket_retention(self, account_auth_token): + return self._check_capability(account_auth_token, 'readBucketRetentions') + + def _check_capability(self, account_auth_token, capability): + try: + key = self.api.auth_token_to_key[account_auth_token] + except KeyError: + # looks like it's an upload token + # fortunately BucketSimulator makes it easy to retrieve the true account_auth_token + # from an upload url + real_auth_token = account_auth_token.split('/')[-1] + key = 
self.api.auth_token_to_key[real_auth_token] + capabilities = key.get_allowed()['capabilities'] + return capability in capabilities def bucket_dict(self, account_auth_token): default_sse = {'isClientAuthorizedToRead': False} - if self.is_allowed_to_read_bucket_encryption_setting: + if self.is_allowed_to_read_bucket_encryption_setting(account_auth_token): default_sse['isClientAuthorizedToRead'] = True default_sse['value'] = {'mode': self.default_server_side_encryption.mode.value} if self.default_server_side_encryption.algorithm is not None: @@ -504,13 +520,17 @@ def get_file_info_by_name(self, file_name): return file.as_download_headers() raise FileNotPresent(file_id_or_name=file_name, bucket_name=self.bucket_name) - def get_upload_url(self): + def get_upload_url(self, account_auth_token): upload_id = next(self.upload_url_counter) - upload_url = 'https://upload.example.com/%s/%s' % (self.bucket_id, upload_id) + upload_url = 'https://upload.example.com/%s/%d/%s' % ( + self.bucket_id, upload_id, account_auth_token + ) return dict(bucketId=self.bucket_id, uploadUrl=upload_url, authorizationToken=upload_url) - def get_upload_part_url(self, file_id): - upload_url = 'https://upload.example.com/part/%s/%d' % (file_id, random.randint(1, 10**9)) + def get_upload_part_url(self, account_auth_token, file_id): + upload_url = 'https://upload.example.com/part/%s/%d/%s' % ( + file_id, random.randint(1, 10**9), account_auth_token + ) return dict(bucketId=self.bucket_id, uploadUrl=upload_url, authorizationToken=upload_url) def hide_file(self, file_name): From e2e2675ca3660ca2e918e30614df1e53452c52f2 Mon Sep 17 00:00:00 2001 From: Pawel Polewicz Date: Thu, 13 May 2021 05:43:18 +0200 Subject: [PATCH 23/33] Add account_auth_token to context of responses for download and get_file_info_by_name operations in raw_simulator --- b2sdk/raw_simulator.py | 60 ++++++++++++++++++++++++++++++------------ 1 file changed, 43 insertions(+), 17 deletions(-) diff --git a/b2sdk/raw_simulator.py b/b2sdk/raw_simulator.py index 47591193b..5d94984c1 100644 --- a/b2sdk/raw_simulator.py +++ b/b2sdk/raw_simulator.py @@ -192,7 +192,7 @@ def sort_key(self): """ return (self.name, self.file_id) - def as_download_headers(self, range_=None): + def as_download_headers(self, account_auth_token_or_none, range_=None): if self.data_bytes is None: content_length = 0 elif range_ is not None: @@ -347,9 +347,9 @@ def _get_encryption_mode_and_secret(self, encryption: Optional[EncryptionSetting class FakeResponse(object): - def __init__(self, file_sim, url, range_=None): + def __init__(self, account_auth_token_or_none, file_sim, url, range_=None): self.data_bytes = file_sim.data_bytes - self.headers = file_sim.as_download_headers(range_) + self.headers = file_sim.as_download_headers(account_auth_token_or_none, range_) self.url = url self.range_ = range_ if range_ is not None: @@ -484,16 +484,27 @@ def delete_file_version(self, file_id, file_name): return dict(fileId=file_id, fileName=file_name, uploadTimestamp=file_sim.upload_timestamp) def download_file_by_id( - self, file_id, url, range_=None, encryption: Optional[EncryptionSetting] = None + self, + account_auth_token_or_none, + file_id, + url, + range_=None, + encryption: Optional[EncryptionSetting] = None, ): file_sim = self.file_id_to_file[file_id] file_sim.check_encryption(encryption) - return self._download_file_sim(file_sim, url, range_=range_) + return self._download_file_sim(account_auth_token_or_none, file_sim, url, range_=range_) def download_file_by_name( - self, file_name, url, 
range_=None, encryption: Optional[EncryptionSetting] = None + self, + account_auth_token_or_none, + file_name, + url, + range_=None, + encryption: Optional[EncryptionSetting] = None, ): - files = self.list_file_names(file_name, 1)['files'] + files = self.list_file_names(self.api.current_token, file_name, + 1)['files'] # token is not important here if len(files) == 0: raise FileNotPresent(file_id_or_name=file_name) file_dict = files[0] @@ -501,10 +512,17 @@ def download_file_by_name( raise FileNotPresent(file_id_or_name=file_name) file_sim = self.file_name_and_id_to_file[(file_name, file_dict['fileId'])] file_sim.check_encryption(encryption) - return self._download_file_sim(file_sim, url, range_=range_) - - def _download_file_sim(self, file_sim, url, range_=None): - return ResponseContextManager(self.RESPONSE_CLASS(file_sim, url, range_)) + return self._download_file_sim(account_auth_token_or_none, file_sim, url, range_=range_) + + def _download_file_sim(self, account_auth_token_or_none, file_sim, url, range_=None): + return ResponseContextManager( + self.RESPONSE_CLASS( + account_auth_token_or_none, + file_sim, + url, + range_, + ) + ) def finish_large_file(self, file_id, part_sha1_array): file_sim = self.file_id_to_file[file_id] @@ -514,10 +532,10 @@ def finish_large_file(self, file_id, part_sha1_array): def get_file_info_by_id(self, file_id): return self.file_id_to_file[file_id].as_upload_result() - def get_file_info_by_name(self, file_name): + def get_file_info_by_name(self, account_auth_token, file_name): for ((name, id), file) in self.file_name_and_id_to_file.items(): if file_name == name: - return file.as_download_headers() + return file.as_download_headers(account_auth_token_or_none=account_auth_token) raise FileNotPresent(file_id_or_name=file_name, bucket_name=self.bucket_name) def get_upload_url(self, account_auth_token): @@ -1091,12 +1109,20 @@ def download_file_from_url( bucket_id = self.file_id_to_bucket_id[file_id] bucket = self._get_bucket_by_id(bucket_id) return bucket.download_file_by_id( - file_id, range_=range_, url=url, encryption=encryption + account_auth_token_or_none, + file_id, + range_=range_, + url=url, + encryption=encryption, ) elif bucket_name is not None and file_name is not None: bucket = self._get_bucket_by_name(bucket_name) return bucket.download_file_by_name( - b2_url_decode(file_name), range_=range_, url=url, encryption=encryption + account_auth_token_or_none, + b2_url_decode(file_name), + range_=range_, + url=url, + encryption=encryption, ) else: assert False @@ -1137,11 +1163,11 @@ def get_download_authorization( def get_file_info_by_id(self, api_url, account_auth_token, file_id): bucket_id = self.file_id_to_bucket_id[file_id] bucket = self._get_bucket_by_id(bucket_id) - return bucket.get_file_info_by_id(file_id) + return bucket.get_file_info_by_id(account_auth_token, file_id) def get_file_info_by_name(self, api_url, account_auth_token, bucket_name, file_name): bucket = self._get_bucket_by_name(bucket_name) - info = bucket.get_file_info_by_name(file_name) + info = bucket.get_file_info_by_name(account_auth_token, file_name) return info def get_upload_url(self, api_url, account_auth_token, bucket_id): From 6645c6465e6e50f397a8cc090f52a62cd88bc697 Mon Sep 17 00:00:00 2001 From: Pawel Polewicz Date: Thu, 13 May 2021 05:48:51 +0200 Subject: [PATCH 24/33] Add account_auth_token to context of responses for upload and ls operations --- b2sdk/raw_simulator.py | 50 +++++++++++++++++++++++++----------------- 1 file changed, 30 insertions(+), 20 deletions(-) diff 
--git a/b2sdk/raw_simulator.py b/b2sdk/raw_simulator.py index 5d94984c1..82079fed4 100644 --- a/b2sdk/raw_simulator.py +++ b/b2sdk/raw_simulator.py @@ -218,7 +218,7 @@ def as_download_headers(self, account_auth_token_or_none, range_=None): headers['x-bz-info-' + key] = value return headers - def as_upload_result(self): + def as_upload_result(self, account_auth_token): result = dict( fileId=self.file_id, fileName=self.name, @@ -236,7 +236,7 @@ def as_upload_result(self): ] = self.server_side_encryption.serialize_to_json_for_request() return result - def as_list_files_dict(self): + def as_list_files_dict(self, account_auth_token): result = dict( fileId=self.file_id, fileName=self.name, @@ -258,7 +258,7 @@ def is_allowed_to_read_file_retention(self, account_auth_token): def is_allowed_to_read_file_legal_hold(self, account_auth_token): return self.bucket._check_capability(account_auth_token, 'readFileLegalHolds') - def as_start_large_file_result(self): + def as_start_large_file_result(self, account_auth_token): result = dict( fileId=self.file_id, fileName=self.name, @@ -524,13 +524,13 @@ def _download_file_sim(self, account_auth_token_or_none, file_sim, url, range_=N ) ) - def finish_large_file(self, file_id, part_sha1_array): + def finish_large_file(self, account_auth_token, file_id, part_sha1_array): file_sim = self.file_id_to_file[file_id] file_sim.finish(part_sha1_array) - return file_sim.as_upload_result() + return file_sim.as_upload_result(account_auth_token) - def get_file_info_by_id(self, file_id): - return self.file_id_to_file[file_id].as_upload_result() + def get_file_info_by_id(self, account_auth_token, file_id): + return self.file_id_to_file[file_id].as_upload_result(account_auth_token) def get_file_info_by_name(self, account_auth_token, file_name): for ((name, id), file) in self.file_name_and_id_to_file.items(): @@ -551,7 +551,7 @@ def get_upload_part_url(self, account_auth_token, file_id): ) return dict(bucketId=self.bucket_id, uploadUrl=upload_url, authorizationToken=upload_url) - def hide_file(self, file_name): + def hide_file(self, account_auth_token, file_name): file_id = self._next_file_id() file_sim = self.FILE_SIMULATOR_CLASS( self.account_id, self, file_id, 'hide', file_name, None, "none", {}, b'', @@ -559,7 +559,7 @@ def hide_file(self, file_name): ) self.file_id_to_file[file_id] = file_sim self.file_name_and_id_to_file[file_sim.sort_key()] = file_sim - return file_sim.as_list_files_dict() + return file_sim.as_list_files_dict(account_auth_token) def copy_file( self, @@ -616,6 +616,7 @@ def copy_file( def list_file_names( self, + account_auth_token, start_file_name=None, max_file_count=None, prefix=None, @@ -636,7 +637,7 @@ def list_file_names( prev_file_name = file_name file_sim = self.file_name_and_id_to_file[key] if file_sim.is_visible(): - result_files.append(file_sim.as_list_files_dict()) + result_files.append(file_sim.as_list_files_dict(account_auth_token)) if len(result_files) == max_file_count: next_file_name = file_sim.name + ' ' break @@ -644,6 +645,7 @@ def list_file_names( def list_file_versions( self, + account_auth_token, start_file_name=None, start_file_id=None, max_file_count=None, @@ -666,7 +668,7 @@ def list_file_versions( file_sim = self.file_name_and_id_to_file[key] if prefix is not None and not file_name.startswith(prefix): break - result_files.append(file_sim.as_list_files_dict()) + result_files.append(file_sim.as_list_files_dict(account_auth_token)) if len(result_files) == max_file_count: next_file_name = file_sim.name next_file_id = 
str(int(file_id) + 1) @@ -706,6 +708,7 @@ def list_unfinished_large_files(self, start_file_id=None, max_file_count=None, p def start_large_file( self, + account_auth_token, file_name, content_type, file_info, @@ -721,7 +724,7 @@ def start_large_file( ) # yapf: disable self.file_id_to_file[file_id] = file_sim self.file_name_and_id_to_file[file_sim.sort_key()] = file_sim - return file_sim.as_start_large_file_result() + return file_sim.as_start_large_file_result(account_auth_token) def _update_bucket( self, @@ -792,7 +795,7 @@ def upload_file( ) self.file_id_to_file[file_id] = file_sim self.file_name_and_id_to_file[file_sim.sort_key()] = file_sim - return file_sim.as_upload_result() + return file_sim.as_upload_result(upload_auth_token) def upload_part( self, @@ -1140,7 +1143,7 @@ def finish_large_file(self, api_url, account_auth_token, file_id, part_sha1_arra bucket_id = self.file_id_to_bucket_id[file_id] bucket = self._get_bucket_by_id(bucket_id) self._assert_account_auth(api_url, account_auth_token, bucket.account_id, 'writeFiles') - return bucket.finish_large_file(file_id, part_sha1_array) + return bucket.finish_large_file(account_auth_token, file_id, part_sha1_array) def get_download_authorization( self, api_url, account_auth_token, bucket_id, file_name_prefix, valid_duration_in_seconds @@ -1173,18 +1176,18 @@ def get_file_info_by_name(self, api_url, account_auth_token, bucket_name, file_n def get_upload_url(self, api_url, account_auth_token, bucket_id): bucket = self._get_bucket_by_id(bucket_id) self._assert_account_auth(api_url, account_auth_token, bucket.account_id, 'writeFiles') - return self._get_bucket_by_id(bucket_id).get_upload_url() + return self._get_bucket_by_id(bucket_id).get_upload_url(account_auth_token) def get_upload_part_url(self, api_url, account_auth_token, file_id): bucket_id = self.file_id_to_bucket_id[file_id] bucket = self._get_bucket_by_id(bucket_id) self._assert_account_auth(api_url, account_auth_token, bucket.account_id, 'writeFiles') - return self._get_bucket_by_id(bucket_id).get_upload_part_url(file_id) + return self._get_bucket_by_id(bucket_id).get_upload_part_url(account_auth_token, file_id) def hide_file(self, api_url, account_auth_token, bucket_id, file_name): bucket = self._get_bucket_by_id(bucket_id) self._assert_account_auth(api_url, account_auth_token, bucket.account_id, 'writeFiles') - response = bucket.hide_file(file_name) + response = bucket.hide_file(account_auth_token, file_name) self.file_id_to_bucket_id[response['fileId']] = bucket_id return response @@ -1226,7 +1229,7 @@ def copy_file( dest_bucket.file_id_to_file[copy_file_sim.file_id] = copy_file_sim dest_bucket.file_name_and_id_to_file[copy_file_sim.sort_key()] = copy_file_sim - return copy_file_sim.as_upload_result() + return copy_file_sim.as_upload_result(account_auth_token) def copy_part( self, @@ -1318,7 +1321,7 @@ def list_file_names( bucket_id=bucket_id, file_name=prefix, ) - return bucket.list_file_names(start_file_name, max_file_count, prefix) + return bucket.list_file_names(account_auth_token, start_file_name, max_file_count, prefix) def list_file_versions( self, @@ -1339,7 +1342,13 @@ def list_file_versions( bucket_id=bucket_id, file_name=prefix, ) - return bucket.list_file_versions(start_file_name, start_file_id, max_file_count, prefix) + return bucket.list_file_versions( + account_auth_token, + start_file_name, + start_file_id, + max_file_count, + prefix, + ) def list_keys( self, @@ -1389,6 +1398,7 @@ def start_large_file( bucket = self._get_bucket_by_id(bucket_id) 
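The simulator changes above all follow one pattern: the caller's account auth token is now threaded down to the point where a response is built, and the fake upload URL embeds that token (`'https://upload.example.com/%s/%d/%s'`), so code that later only sees the upload URL/token can still tell which account token it belongs to. The round trip below is only an illustrative sketch with made-up helper names, not part of this changeset:

# Sketch of the token-in-URL round trip used by the raw simulator's fake upload URLs.
def make_upload_url(bucket_id: str, upload_id: int, auth_token: str) -> str:
    # mirrors the format used by get_upload_url in the simulator
    return 'https://upload.example.com/%s/%d/%s' % (bucket_id, upload_id, auth_token)

def token_from_upload_url(upload_url: str) -> str:
    # recover the account auth token embedded at the end of the fake URL
    return upload_url.rsplit('/', 1)[-1]

url = make_upload_url('bucket_0', 1, 'auth_token_42')
assert token_from_upload_url(url) == 'auth_token_42'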
self._assert_account_auth(api_url, account_auth_token, bucket.account_id, 'writeFiles') result = bucket.start_large_file( + account_auth_token, file_name, content_type, file_info, From f2f8dc13d13fdeb83ae468c9fe6c087319a6ec7e Mon Sep 17 00:00:00 2001 From: Pawel Polewicz Date: Thu, 13 May 2021 05:50:55 +0200 Subject: [PATCH 25/33] Add update_file_retention to raw_simulator --- b2sdk/raw_simulator.py | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/b2sdk/raw_simulator.py b/b2sdk/raw_simulator.py index 82079fed4..a570cbb43 100644 --- a/b2sdk/raw_simulator.py +++ b/b2sdk/raw_simulator.py @@ -561,6 +561,20 @@ def hide_file(self, account_auth_token, file_name): self.file_name_and_id_to_file[file_sim.sort_key()] = file_sim return file_sim.as_list_files_dict(account_auth_token) + def update_file_retention( + self, + account_auth_token, + file_id, + file_name, + file_retention: FileRetentionSetting, + bypass_governance: bool = False, + ): + file_sim = self.file_id_to_file[file_id] + assert self.is_file_lock_enabled + assert file_sim.name == file_name + # TODO: check bypass etc + file_sim.file_retention = file_retention + def copy_file( self, file_id, @@ -1087,6 +1101,21 @@ def delete_file_version(self, api_url, account_auth_token, file_id, file_name): self._assert_account_auth(api_url, account_auth_token, bucket.account_id, 'deleteFiles') return bucket.delete_file_version(file_id, file_name) + def update_file_retention( + self, + api_url, + account_auth_token, + file_id, + file_name, + file_retention: FileRetentionSetting, + bypass_governance: bool = False, + ): + bucket_id = self.file_id_to_bucket_id[file_id] + bucket = self._get_bucket_by_id(bucket_id) + return bucket.update_file_retention( + account_auth_token, file_id, file_name, file_retention, bypass_governance + ) + def delete_bucket(self, api_url, account_auth_token, account_id, bucket_id): self._assert_account_auth(api_url, account_auth_token, account_id, 'deleteBuckets') bucket = self._get_bucket_by_id(bucket_id) From 22ee991a9d715736be9fdc0b90112d907323e089 Mon Sep 17 00:00:00 2001 From: Pawel Polewicz Date: Thu, 13 May 2021 05:56:06 +0200 Subject: [PATCH 26/33] Add file retention to FileSimulator --- b2sdk/raw_simulator.py | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/b2sdk/raw_simulator.py b/b2sdk/raw_simulator.py index a570cbb43..bc1d50121 100644 --- a/b2sdk/raw_simulator.py +++ b/b2sdk/raw_simulator.py @@ -153,6 +153,7 @@ def __init__( upload_timestamp, range_=None, server_side_encryption: Optional[EncryptionSetting] = None, + file_retention: Optional[FileRetentionSetting] = None, ): if action == 'hide': assert server_side_encryption is None @@ -174,6 +175,7 @@ def __init__( self.upload_timestamp = upload_timestamp self.range_ = range_ self.server_side_encryption = server_side_encryption + self.file_retention = file_retention if action == 'start': self.parts = [] @@ -210,6 +212,20 @@ def as_download_headers(self, account_auth_token_or_none, range_=None): 'x-bz-file-id': self.file_id, 'x-bz-file-name': self.name, } + + if account_auth_token_or_none is not None: + not_permitted = [] + + if not self.is_allowed_to_read_file_retention(account_auth_token_or_none): + not_permitted.append('X-Bz-File-Retention-Mode') + not_permitted.append('X-Bz-File-Retain-Until-Timestamp') + else: + if self.file_retention is not None: + self.file_retention.add_to_to_upload_headers(headers) + + if not_permitted: + headers['X-Bz-Client-Unauthorized-To-Read'] = ','.join(not_permitted) + if 
range_ is not None: headers['Content-Range'] = 'bytes %d-%d/%d' % ( range_[0], range_[0] + content_length, len(self.data_bytes) @@ -234,6 +250,7 @@ def as_upload_result(self, account_auth_token): if self.server_side_encryption is not None: result['serverSideEncryption' ] = self.server_side_encryption.serialize_to_json_for_request() + result['fileRetention'] = self._file_retention_dict(account_auth_token) return result def as_list_files_dict(self, account_auth_token): @@ -250,6 +267,7 @@ def as_list_files_dict(self, account_auth_token): if self.server_side_encryption is not None: result['serverSideEncryption' ] = self.server_side_encryption.serialize_to_json_for_request() + result['fileRetention'] = self._file_retention_dict(account_auth_token) return result def is_allowed_to_read_file_retention(self, account_auth_token): @@ -271,8 +289,22 @@ def as_start_large_file_result(self, account_auth_token): if self.server_side_encryption is not None: result['serverSideEncryption' ] = self.server_side_encryption.serialize_to_json_for_request() + result['fileRetention'] = self._file_retention_dict(account_auth_token) return result + def _file_retention_dict(self, account_auth_token): + if not self.is_allowed_to_read_file_retention(account_auth_token): + return UNKNOWN_FILE_LOCK_CONFIGURATION + + file_lock_configuration = {'isClientAuthorizedToRead': True} + if self.file_retention is None: + file_lock_configuration['value'] = {'mode': None} + else: + file_lock_configuration['value'] = {'mode': self.file_retention.mode.value} + if self.file_retention.period is not None: + file_lock_configuration['value']['period'] = self.file_retention.period + return file_lock_configuration + def add_part(self, part_number, part): while len(self.parts) < part_number + 1: self.parts.append(None) From 08eefde5e501a8c8dbb99842b4c19388c9fe6e72 Mon Sep 17 00:00:00 2001 From: Pawel Polewicz Date: Thu, 13 May 2021 05:57:03 +0200 Subject: [PATCH 27/33] Add legal hold to FileSimulator --- b2sdk/raw_simulator.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/b2sdk/raw_simulator.py b/b2sdk/raw_simulator.py index bc1d50121..08c460bed 100644 --- a/b2sdk/raw_simulator.py +++ b/b2sdk/raw_simulator.py @@ -154,6 +154,7 @@ def __init__( range_=None, server_side_encryption: Optional[EncryptionSetting] = None, file_retention: Optional[FileRetentionSetting] = None, + legal_hold: Optional[bool] = None, ): if action == 'hide': assert server_side_encryption is None @@ -176,6 +177,7 @@ def __init__( self.range_ = range_ self.server_side_encryption = server_side_encryption self.file_retention = file_retention + self.legal_hold = legal_hold if action == 'start': self.parts = [] @@ -223,6 +225,11 @@ def as_download_headers(self, account_auth_token_or_none, range_=None): if self.file_retention is not None: self.file_retention.add_to_to_upload_headers(headers) + if not self.is_allowed_to_read_file_legal_hold(account_auth_token_or_none): + not_permitted.append('X-Bz-File-Legal-Hold') + else: + headers['X-Bz-File-Legal-Hold'] = self.legal_hold and 'on' or 'off' + if not_permitted: headers['X-Bz-Client-Unauthorized-To-Read'] = ','.join(not_permitted) @@ -251,6 +258,7 @@ def as_upload_result(self, account_auth_token): result['serverSideEncryption' ] = self.server_side_encryption.serialize_to_json_for_request() result['fileRetention'] = self._file_retention_dict(account_auth_token) + result['legalHold'] = self._legal_hold_dict(account_auth_token) return result def as_list_files_dict(self, account_auth_token): @@ -268,6 +276,7 @@ 
def as_list_files_dict(self, account_auth_token): result['serverSideEncryption' ] = self.server_side_encryption.serialize_to_json_for_request() result['fileRetention'] = self._file_retention_dict(account_auth_token) + result['legalHold'] = self._legal_hold_dict(account_auth_token) return result def is_allowed_to_read_file_retention(self, account_auth_token): @@ -290,6 +299,7 @@ def as_start_large_file_result(self, account_auth_token): result['serverSideEncryption' ] = self.server_side_encryption.serialize_to_json_for_request() result['fileRetention'] = self._file_retention_dict(account_auth_token) + result['legalHold'] = self._legal_hold_dict(account_auth_token) return result def _file_retention_dict(self, account_auth_token): @@ -305,6 +315,14 @@ def _file_retention_dict(self, account_auth_token): file_lock_configuration['value']['period'] = self.file_retention.period return file_lock_configuration + def _legal_hold_dict(self, account_auth_token): + if not self.is_allowed_to_read_file_legal_hold(account_auth_token): + return UNKNOWN_FILE_LOCK_CONFIGURATION + return { + 'isClientAuthorizedToRead': True, + 'value': self.legal_hold and 'on' or 'off', + } + def add_part(self, part_number, part): while len(self.parts) < part_number + 1: self.parts.append(None) From 1dea0edd31753d9faab2298c64c2db926008d94a Mon Sep 17 00:00:00 2001 From: Pawel Polewicz Date: Thu, 13 May 2021 05:58:13 +0200 Subject: [PATCH 28/33] Add file lock enabling to BucketSimulator --- b2sdk/raw_simulator.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/b2sdk/raw_simulator.py b/b2sdk/raw_simulator.py index 08c460bed..d124537b8 100644 --- a/b2sdk/raw_simulator.py +++ b/b2sdk/raw_simulator.py @@ -447,6 +447,7 @@ def __init__( lifecycle_rules=None, options_set=None, default_server_side_encryption=None, + is_file_lock_enabled: Optional[bool] = None, ): assert bucket_type in ['allPrivate', 'allPublic'] self.api = api @@ -1074,6 +1075,7 @@ def create_bucket( cors_rules=None, lifecycle_rules=None, default_server_side_encryption: Optional[EncryptionSetting] = None, + is_file_lock_enabled: Optional[bool] = None, ): if not re.match(r'^[-a-zA-Z0-9]*$', bucket_name): raise BadJson('illegal bucket name: ' + bucket_name) @@ -1092,6 +1094,7 @@ def create_bucket( lifecycle_rules, # watch out for options! 
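The `_legal_hold_dict` helper above reduces legal hold to a tri-state: either the caller's key is not authorized to read it, or it is rendered as `'on'`/`'off'` from the boolean flag (the `self.legal_hold and 'on' or 'off'` idiom maps both `False` and `None` to `'off'`). A standalone restatement of that mapping, written as a sketch rather than a b2sdk API:

from typing import Optional

def legal_hold_response(value: Optional[bool], authorized: bool) -> dict:
    # Sketch of the response envelope the simulator produces; not library code.
    if not authorized:
        return {'isClientAuthorizedToRead': False, 'value': None}
    return {'isClientAuthorizedToRead': True, 'value': 'on' if value else 'off'}

assert legal_hold_response(True, True) == {'isClientAuthorizedToRead': True, 'value': 'on'}
assert legal_hold_response(None, True)['value'] == 'off'  # unknown collapses to 'off', like the simulator
assert legal_hold_response(True, False)['value'] is None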
             default_server_side_encryption=default_server_side_encryption,
+            is_file_lock_enabled=is_file_lock_enabled,
         )
         self.bucket_name_to_bucket[bucket_name] = bucket
         self.bucket_id_to_bucket[bucket_id] = bucket

From f4ee785bd41426e7e1a689235839e30ada462832 Mon Sep 17 00:00:00 2001
From: Pawel Polewicz
Date: Thu, 13 May 2021 05:58:59 +0200
Subject: [PATCH 29/33] Add default retention setting to BucketSimulator

---
 b2sdk/raw_simulator.py | 27 +++++++++++++++++++++++----
 1 file changed, 23 insertions(+), 4 deletions(-)

diff --git a/b2sdk/raw_simulator.py b/b2sdk/raw_simulator.py
index d124537b8..c19eaaec9 100644
--- a/b2sdk/raw_simulator.py
+++ b/b2sdk/raw_simulator.py
@@ -501,7 +501,20 @@ def bucket_dict(self, account_auth_token):
             ] = self.default_server_side_encryption.algorithm.value
         else:
             default_sse['value'] = {'mode': EncryptionMode.UNKNOWN}
-        logger.debug('default sse returned is: %s', default_sse)
+
+        if self.is_allowed_to_read_bucket_retention(account_auth_token):
+            file_lock_configuration = {
+                'isClientAuthorizedToRead': True,
+                'value': {
+                    'defaultRetention': {
+                        'mode': self.default_retention.mode.value,
+                        'period': self.default_retention.period,
+                    },
+                    'isFileLockEnabled': self.is_file_lock_enabled,
+                },
+            }  # yapf: disable
+        else:
+            file_lock_configuration = {'isClientAuthorizedToRead': False, 'value': None}
         return dict(
             accountId=self.account_id,
             bucketName=self.bucket_name,
@@ -513,6 +526,7 @@ def bucket_dict(self, account_auth_token):
             options=self.options_set,
             revision=self.revision,
             defaultServerSideEncryption=default_sse,
+            fileLockConfiguration=file_lock_configuration,
         )
 
     def cancel_large_file(self, file_id):
@@ -797,8 +811,9 @@ def _update_bucket(
         bucket_info=None,
         cors_rules=None,
         lifecycle_rules=None,
-        if_revision_is=None,
-        default_server_side_encryption=None,
+        if_revision_is: Optional[int] = None,
+        default_server_side_encryption: Optional[EncryptionSetting] = None,
+        default_retention: Optional[BucketRetentionSetting] = None,
     ):
         if if_revision_is is not None and self.revision != if_revision_is:
             raise Conflict()
@@ -813,6 +828,8 @@ def _update_bucket(
             self.lifecycle_rules = lifecycle_rules
         if default_server_side_encryption is not None:
             self.default_server_side_encryption = default_server_side_encryption
+        if default_retention:
+            self.default_retention = default_retention
         self.revision += 1
         return self.bucket_dict(self.api.current_token)
 
@@ -1500,7 +1517,8 @@ def update_bucket(
         cors_rules=None,
         lifecycle_rules=None,
         if_revision_is=None,
-        default_server_side_encryption=None,
+        default_server_side_encryption: Optional[EncryptionSetting] = None,
+        default_retention: Optional[BucketRetentionSetting] = None,
     ):
         assert bucket_type or bucket_info or cors_rules or lifecycle_rules or default_server_side_encryption
         bucket = self._get_bucket_by_id(bucket_id)
@@ -1512,6 +1530,7 @@ def update_bucket(
             lifecycle_rules=lifecycle_rules,
             if_revision_is=if_revision_is,
             default_server_side_encryption=default_server_side_encryption,
+            default_retention=default_retention,
         )
 
     def upload_file(

From bae7c3f5a1ea315dd6d67bf64d827f2e9d8b07dd Mon Sep 17 00:00:00 2001
From: Pawel Polewicz
Date: Thu, 13 May 2021 06:04:21 +0200
Subject: [PATCH 30/33] Only output retention headers in file lock enabled buckets

---
 b2sdk/raw_simulator.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/b2sdk/raw_simulator.py b/b2sdk/raw_simulator.py
index c19eaaec9..6f9f17a97 100644
--- a/b2sdk/raw_simulator.py
+++ b/b2sdk/raw_simulator.py
@@ -215,7 +215,7 @@ def as_download_headers(self,
account_auth_token_or_none, range_=None): 'x-bz-file-name': self.name, } - if account_auth_token_or_none is not None: + if account_auth_token_or_none is not None and self.bucket.is_file_lock_enabled: not_permitted = [] if not self.is_allowed_to_read_file_retention(account_auth_token_or_none): From ea0e763c5b2054fdf0abfdf79b1a79468087f200 Mon Sep 17 00:00:00 2001 From: Michal Zukowski Date: Thu, 13 May 2021 14:56:55 +0200 Subject: [PATCH 31/33] Bucket layer for File-Lock --- b2sdk/api.py | 13 ++++ b2sdk/bucket.py | 66 +++++++++++++++++++-- b2sdk/large_file/services.py | 7 +++ b2sdk/large_file/unfinished_large_file.py | 4 ++ b2sdk/raw_api.py | 19 ++++++ b2sdk/raw_simulator.py | 29 +++++---- b2sdk/session.py | 17 ++++++ b2sdk/transfer/emerge/emerger.py | 9 +++ b2sdk/transfer/emerge/executor.py | 72 ++++++++++++++++++++--- b2sdk/transfer/outbound/copy_manager.py | 9 +++ b2sdk/transfer/outbound/upload_manager.py | 13 +++- 11 files changed, 235 insertions(+), 23 deletions(-) diff --git a/b2sdk/api.py b/b2sdk/api.py index 62a2eb563..2e30f1955 100644 --- a/b2sdk/api.py +++ b/b2sdk/api.py @@ -13,6 +13,7 @@ from .bucket import Bucket, BucketFactory from .encryption.setting import EncryptionSetting from .exception import NonExistentBucket, RestrictedBucket +from .file_lock import BucketRetentionSetting from .file_version import FileIdAndName from .large_file.services import LargeFileServices from .raw_api import API_VERSION @@ -177,6 +178,9 @@ def create_bucket( cors_rules=None, lifecycle_rules=None, default_server_side_encryption: Optional[EncryptionSetting] = None, + default_retention: Optional[BucketRetentionSetting] = None, + is_file_lock_enabled: Optional[bool] = None, + ): """ Create a bucket. @@ -187,6 +191,8 @@ def create_bucket( :param dict cors_rules: bucket CORS rules to store with the bucket :param dict lifecycle_rules: bucket lifecycle rules to store with the bucket :param b2sdk.v1.EncryptionSetting default_server_side_encryption: default server side encryption settings (``None`` if unknown) + :param b2sdk.v1.BucketRetentionSetting default_retention: default retention setting + :param bool is_file_lock_enabled: boolean value specifies whether bucket is File Lock-enabled :return: a Bucket object :rtype: b2sdk.v1.Bucket """ @@ -200,6 +206,7 @@ def create_bucket( cors_rules=cors_rules, lifecycle_rules=lifecycle_rules, default_server_side_encryption=default_server_side_encryption, + is_file_lock_enabled=is_file_lock_enabled, ) bucket = self.BUCKET_FACTORY_CLASS.from_api_bucket_dict(self, response) assert name == bucket.name, 'API created a bucket with different name\ @@ -208,6 +215,12 @@ def create_bucket( than requested: %s != %s' % ( bucket_type, bucket.type_ ) + if default_retention is not None: + # server does not support setting default retention on create + # so we provide convinient helper for it + bucket = self.BUCKET_FACTORY_CLASS.from_api_bucket_dict( + self, bucket.update(default_retention=default_retention) + ) self.cache.save_bucket(bucket) return bucket diff --git a/b2sdk/bucket.py b/b2sdk/bucket.py index 0189ba64f..f15c5bbcb 100644 --- a/b2sdk/bucket.py +++ b/b2sdk/bucket.py @@ -14,7 +14,12 @@ from .encryption.setting import EncryptionSetting, EncryptionSettingFactory from .encryption.types import EncryptionMode from .exception import FileNotPresent, FileOrBucketNotFound, UnexpectedCloudBehaviour, UnrecognizedBucketType -from .file_lock import BucketRetentionSetting, UNKNOWN_BUCKET_RETENTION, FileLockConfiguration +from .file_lock import ( + BucketRetentionSetting, + 
FileLockConfiguration, + FileRetentionSetting, + UNKNOWN_BUCKET_RETENTION, +) from .file_version import FileVersionInfo, FileVersionInfoFactory from .progress import DoNothingProgressListener from .transfer.emerge.executor import AUTO_CONTENT_TYPE @@ -115,7 +120,6 @@ def update( if_revision_is=None, default_server_side_encryption: Optional[EncryptionSetting] = None, default_retention: Optional[BucketRetentionSetting] = None, - # is_file_lock_enabled = None, TODO: establish how to properly send it to B2 cloud ): """ Update various bucket parameters. @@ -127,6 +131,7 @@ def update( :param dict lifecycle_rules: lifecycle rules to store with a bucket :param int if_revision_is: revision number, update the info **only if** *revision* equals to *if_revision_is* :param b2sdk.v1.EncryptionSetting default_server_side_encryption: default server side encryption settings (``None`` if unknown) + :param b2sdk.v1.BucketRetentionSetting default_retention: bucket default retention setting """ account_id = self.api.account_info.get_account_id() return self.api.session.update_bucket( @@ -138,8 +143,7 @@ def update( lifecycle_rules=lifecycle_rules, if_revision_is=if_revision_is, default_server_side_encryption=default_server_side_encryption, - # default_retention=default_retention, TODO: implement in session and raw_api - # is_file_lock_enabled=is_file_lock_enabled, TODO: establish how to properly send it to B2 cloud and implement in session and raw_api + default_retention=default_retention, ) def cancel_large_file(self, file_id): @@ -411,6 +415,8 @@ def upload_bytes( file_infos=None, progress_listener=None, encryption: Optional[EncryptionSetting] = None, + legal_hold: Optional[bool] = None, + file_retention: Optional[FileRetentionSetting] = None, ): """ Upload bytes in memory to a B2 file. @@ -421,6 +427,8 @@ def upload_bytes( :param dict,None file_infos: a file info to store with the file or ``None`` to not store anything :param b2sdk.v1.AbstractProgressListener,None progress_listener: a progress listener object to use, or ``None`` to not track progress :param b2sdk.v1.EncryptionSetting encryption: encryption settings (``None`` if unknown) + :param bool legal_hold: legal hold setting + :param b2sdk.v1.FileRetentionSetting file_retention: file retention setting :rtype: generator[b2sdk.v1.FileVersion] """ upload_source = UploadSourceBytes(data_bytes) @@ -431,6 +439,8 @@ def upload_bytes( file_info=file_infos, progress_listener=progress_listener, encryption=encryption, + legal_hold=legal_hold, + file_retention=file_retention, ) def upload_local_file( @@ -443,6 +453,8 @@ def upload_local_file( min_part_size=None, progress_listener=None, encryption: Optional[EncryptionSetting] = None, + legal_hold: Optional[bool] = None, + file_retention: Optional[FileRetentionSetting] = None, ): """ Upload a file on local disk to a B2 file. 
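With `file_retention` and `legal_hold` threaded through the upload methods, a caller can attach lock settings at upload time. The snippet below is a usage sketch, not taken from this changeset; it assumes `bucket` was obtained from a `B2Api` instance elsewhere, and at this point in the series `legal_hold` is still a plain `Optional[bool]`:

from b2sdk.file_lock import FileRetentionSetting, RetentionMode

# bucket = b2_api.get_bucket_by_name('my-bucket')  # obtained elsewhere

# Retain the object under governance mode until the given millisecond timestamp.
retention = FileRetentionSetting(RetentionMode.GOVERNANCE, retain_until=1672531200000)

file_version = bucket.upload_bytes(
    b'hello world',
    'locked/hello.txt',
    file_infos={'purpose': 'demo'},
    file_retention=retention,
    legal_hold=True,  # boolean at this stage of the patch series
)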
@@ -459,6 +471,8 @@ def upload_local_file( :param int min_part_size: a minimum size of a part :param b2sdk.v1.AbstractProgressListener,None progress_listener: a progress listener object to use, or ``None`` to not report progress :param b2sdk.v1.EncryptionSetting encryption: encryption settings (``None`` if unknown) + :param bool legal_hold: legal hold setting + :param b2sdk.v1.FileRetentionSetting file_retention: file retention setting :rtype: b2sdk.v1.FileVersionInfo """ upload_source = UploadSourceLocalFile(local_path=local_file, content_sha1=sha1_sum) @@ -470,6 +484,8 @@ def upload_local_file( min_part_size=min_part_size, progress_listener=progress_listener, encryption=encryption, + legal_hold=legal_hold, + file_retention=file_retention, ) def upload( @@ -481,6 +497,8 @@ def upload( min_part_size=None, progress_listener=None, encryption: Optional[EncryptionSetting] = None, + legal_hold: Optional[bool] = None, + file_retention: Optional[FileRetentionSetting] = None, ): """ Upload a file to B2, retrying as needed. @@ -500,6 +518,8 @@ def upload( :param int,None min_part_size: the smallest part size to use or ``None`` to determine automatically :param b2sdk.v1.AbstractProgressListener,None progress_listener: a progress listener object to use, or ``None`` to not report progress :param b2sdk.v1.EncryptionSetting encryption: encryption settings (``None`` if unknown) + :param bool legal_hold: legal hold setting + :param b2sdk.v1.FileRetentionSetting file_retention: file retention setting :rtype: b2sdk.v1.FileVersionInfo """ return self.create_file( @@ -511,6 +531,8 @@ def upload( # FIXME: Bucket.upload documents wrong logic recommended_upload_part_size=min_part_size, encryption=encryption, + legal_hold=legal_hold, + file_retention=file_retention, ) def create_file( @@ -523,6 +545,8 @@ def create_file( recommended_upload_part_size=None, continue_large_file_id=None, encryption: Optional[EncryptionSetting] = None, + legal_hold: Optional[bool] = None, + file_retention: Optional[FileRetentionSetting] = None, ): """ Creates a new file in this bucket using an iterable (list, tuple etc) of remote or local sources. @@ -545,6 +569,8 @@ def create_file( :param str,None continue_large_file_id: large file id that should be selected to resume file creation for multipart upload/copy, ``None`` for automatic search for this id :param b2sdk.v1.EncryptionSetting encryption: encryption settings (``None`` if unknown) + :param bool legal_hold: legal hold setting + :param b2sdk.v1.FileRetentionSetting file_retention: file retention setting """ return self._create_file( self.api.services.emerger.emerge, @@ -556,6 +582,8 @@ def create_file( continue_large_file_id=continue_large_file_id, recommended_upload_part_size=recommended_upload_part_size, encryption=encryption, + legal_hold=legal_hold, + file_retention=file_retention, ) def create_file_stream( @@ -568,6 +596,8 @@ def create_file_stream( recommended_upload_part_size=None, continue_large_file_id=None, encryption: Optional[EncryptionSetting] = None, + legal_hold: Optional[bool] = None, + file_retention: Optional[FileRetentionSetting] = None, ): """ Creates a new file in this bucket using a stream of multiple remote or local sources. 
@@ -592,6 +622,8 @@ def create_file_stream( for multipart upload/copy, if ``None`` in multipart case it would always start a new large file :param b2sdk.v1.EncryptionSetting encryption: encryption settings (``None`` if unknown) + :param bool legal_hold: legal hold setting + :param b2sdk.v1.FileRetentionSetting file_retention: file retention setting """ return self._create_file( self.api.services.emerger.emerge_stream, @@ -603,6 +635,8 @@ def create_file_stream( continue_large_file_id=continue_large_file_id, recommended_upload_part_size=recommended_upload_part_size, encryption=encryption, + legal_hold=legal_hold, + file_retention=file_retention, ) def _create_file( @@ -616,6 +650,8 @@ def _create_file( recommended_upload_part_size=None, continue_large_file_id=None, encryption: Optional[EncryptionSetting] = None, + legal_hold: Optional[bool] = None, + file_retention: Optional[FileRetentionSetting] = None, ): validate_b2_file_name(file_name) progress_listener = progress_listener or DoNothingProgressListener() @@ -630,6 +666,8 @@ def _create_file( recommended_upload_part_size=recommended_upload_part_size, continue_large_file_id=continue_large_file_id, encryption=encryption, + legal_hold=legal_hold, + file_retention=file_retention, ) def concatenate( @@ -642,6 +680,8 @@ def concatenate( recommended_upload_part_size=None, continue_large_file_id=None, encryption: Optional[EncryptionSetting] = None, + legal_hold: Optional[bool] = None, + file_retention: Optional[FileRetentionSetting] = None, ): """ Creates a new file in this bucket by concatenating multiple remote or local sources. @@ -661,6 +701,8 @@ def concatenate( :param str,None continue_large_file_id: large file id that should be selected to resume file creation for multipart upload/copy, ``None`` for automatic search for this id :param b2sdk.v1.EncryptionSetting encryption: encryption settings (``None`` if unknown) + :param bool legal_hold: legal hold setting + :param b2sdk.v1.FileRetentionSetting file_retention: file retention setting """ return self.create_file( WriteIntent.wrap_sources_iterator(outbound_sources), @@ -671,6 +713,8 @@ def concatenate( recommended_upload_part_size=recommended_upload_part_size, continue_large_file_id=continue_large_file_id, encryption=encryption, + legal_hold=legal_hold, + file_retention=file_retention, ) def concatenate_stream( @@ -683,6 +727,8 @@ def concatenate_stream( recommended_upload_part_size=None, continue_large_file_id=None, encryption: Optional[EncryptionSetting] = None, + legal_hold: Optional[bool] = None, + file_retention: Optional[FileRetentionSetting] = None, ): """ Creates a new file in this bucket by concatenating stream of multiple remote or local sources. 
@@ -703,6 +749,8 @@ def concatenate_stream( for multipart upload/copy, if ``None`` in multipart case it would always start a new large file :param b2sdk.v1.EncryptionSetting encryption: encryption setting (``None`` if unknown) + :param bool legal_hold: legal hold setting + :param b2sdk.v1.FileRetentionSetting file_retention: file retention setting """ return self.create_file_stream( WriteIntent.wrap_sources_iterator(outbound_sources_iterator), @@ -713,6 +761,8 @@ def concatenate_stream( recommended_upload_part_size=recommended_upload_part_size, continue_large_file_id=continue_large_file_id, encryption=encryption, + legal_hold=legal_hold, + file_retention=file_retention, ) def get_download_url(self, filename): @@ -751,6 +801,8 @@ def copy( source_encryption: Optional[EncryptionSetting] = None, source_file_info: Optional[dict] = None, source_content_type: Optional[str] = None, + legal_hold: Optional[bool] = None, + file_retention: Optional[FileRetentionSetting] = None, ): """ Creates a new file in this bucket by (server-side) copying from an existing file. @@ -774,6 +826,8 @@ def copy( (``None`` if unknown) :param dict,None source_file_info: source file's file_info dict, useful when copying files with SSE-C :param str,None source_content_type: source file's content type, useful when copying files with SSE-C + :param bool legal_hold: legal hold setting for the new file. + :param b2sdk.v1.FileRetentionSetting file_retention: file retention setting for the new file. """ copy_source = CopySource( @@ -797,6 +851,8 @@ def copy( progress_listener=progress_listener, destination_encryption=destination_encryption, source_encryption=source_encryption, + legal_hold=legal_hold, + file_retention=file_retention, ).result() else: return self.create_file( @@ -806,6 +862,8 @@ def copy( file_info=file_info, progress_listener=progress_listener, encryption=destination_encryption, + legal_hold=legal_hold, + file_retention=file_retention, ) def delete_file_version(self, file_id, file_name): diff --git a/b2sdk/large_file/services.py b/b2sdk/large_file/services.py index f7b4180cc..6985e5d58 100644 --- a/b2sdk/large_file/services.py +++ b/b2sdk/large_file/services.py @@ -11,6 +11,7 @@ from typing import Optional from b2sdk.encryption.setting import EncryptionSetting +from b2sdk.file_lock import FileRetentionSetting from b2sdk.file_version import FileVersionInfoFactory from b2sdk.large_file.part import PartFactory from b2sdk.large_file.unfinished_large_file import UnfinishedLargeFile @@ -84,6 +85,8 @@ def start_large_file( content_type=None, file_info=None, encryption: Optional[EncryptionSetting] = None, + legal_hold: Optional[bool] = None, + file_retention: Optional[FileRetentionSetting] = None, ): """ Start a large file transfer. 
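Since `copy` only passes the new settings through to the copy manager or `create_file`, applying a retention period to a server-side copy is a single call. A sketch under the same assumptions as above (`bucket` from `B2Api`, placeholder file id), assuming the familiar `copy(file_id, new_file_name, ...)` positional arguments:

from b2sdk.file_lock import FileRetentionSetting, RetentionMode

copied = bucket.copy(
    'source_file_id',              # placeholder for a real B2 file id
    'locked/copy-of-hello.txt',
    file_retention=FileRetentionSetting(RetentionMode.COMPLIANCE, retain_until=1704067200000),
    legal_hold=False,
)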
@@ -92,6 +95,8 @@ def start_large_file( :param str,None content_type: the MIME type, or ``None`` to accept the default based on file extension of the B2 file name :param dict,None file_info: a file info to store with the file or ``None`` to not store anything :param b2sdk.v1.EncryptionSetting encryption: encryption settings (``None`` if unknown) + :param bool legal_hold: legal hold setting + :param b2sdk.v1.FileRetentionSetting file_retention: file retention setting """ return UnfinishedLargeFile( self.services.session.start_large_file( @@ -100,6 +105,8 @@ def start_large_file( content_type, file_info, server_side_encryption=encryption, + legal_hold=legal_hold, + file_retention=file_retention, ) ) diff --git a/b2sdk/large_file/unfinished_large_file.py b/b2sdk/large_file/unfinished_large_file.py index 9d2c2d6bf..953f969c7 100644 --- a/b2sdk/large_file/unfinished_large_file.py +++ b/b2sdk/large_file/unfinished_large_file.py @@ -9,6 +9,7 @@ ###################################################################### from b2sdk.encryption.setting import EncryptionSettingFactory +from b2sdk.file_lock import FileRetentionSetting, LegalHoldSerializer class UnfinishedLargeFile(object): @@ -34,6 +35,9 @@ def __init__(self, file_dict): self.content_type = file_dict['contentType'] self.file_info = file_dict['fileInfo'] self.encryption = EncryptionSettingFactory.from_file_version_dict(file_dict) + self.file_retention = FileRetentionSetting.from_file_version_dict(file_dict) + + self.legal_hold = LegalHoldSerializer.from_server(file_dict) def __repr__(self): return '<%s %s %s>' % (self.__class__.__name__, self.bucket_id, self.file_name) diff --git a/b2sdk/raw_api.py b/b2sdk/raw_api.py index b6c30aef2..14625f963 100644 --- a/b2sdk/raw_api.py +++ b/b2sdk/raw_api.py @@ -96,6 +96,8 @@ def copy_file( destination_bucket_id=None, destination_server_side_encryption: Optional[EncryptionSetting] = None, source_server_side_encryption: Optional[EncryptionSetting] = None, + legal_hold: Optional[bool] = None, + file_retention: Optional[FileRetentionSetting] = None, ): pass @@ -264,6 +266,8 @@ def start_large_file( content_type, file_info, server_side_encryption: Optional[EncryptionSetting] = None, + legal_hold: Optional[bool] = None, + file_retention: Optional[FileRetentionSetting] = None, ): pass @@ -308,6 +312,8 @@ def upload_file( file_infos, data_stream, server_side_encryption: Optional[EncryptionSetting] = None, + legal_hold: Optional[bool] = None, + file_retention: Optional[FileRetentionSetting] = None, ): pass @@ -648,6 +654,8 @@ def start_large_file( content_type, file_info, server_side_encryption: Optional[EncryptionSetting] = None, + legal_hold: Optional[bool] = None, + file_retention: Optional[FileRetentionSetting] = None, ): kwargs = {} if server_side_encryption is not None: @@ -655,6 +663,9 @@ def start_large_file( EncryptionMode.NONE, EncryptionMode.SSE_B2, EncryptionMode.SSE_C ) kwargs['serverSideEncryption'] = server_side_encryption.serialize_to_json_for_request() + + # FIXME: implement `legal_hold` and `file_retention` + return self._post_json( api_url, 'b2_start_large_file', @@ -789,6 +800,8 @@ def upload_file( file_infos, data_stream, server_side_encryption: Optional[EncryptionSetting] = None, + legal_hold: Optional[bool] = None, + file_retention: Optional[FileRetentionSetting] = None, ): """ Upload one, small file to b2. 
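The `# FIXME: implement legal_hold and file_retention` markers show that at this point the raw API accepts the new parameters but does not yet put them on the wire. When implemented, upload requests would presumably carry them as the same `X-Bz-File-...` headers the simulator already emits on the download path; the helper below is only a sketch of that idea, not the final raw_api code:

from typing import Optional
from b2sdk.file_lock import FileRetentionSetting

def add_file_lock_headers(headers: dict,
                          file_retention: Optional[FileRetentionSetting],
                          legal_hold: Optional[bool]) -> None:
    # Sketch only: header names are taken from the simulator's download headers.
    if file_retention is not None:
        # sets X-Bz-File-Retention-Mode and the retain-until timestamp header
        file_retention.add_to_to_upload_headers(headers)
    if legal_hold is not None:
        headers['X-Bz-File-Legal-Hold'] = 'on' if legal_hold else 'off'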
@@ -820,6 +833,8 @@ def upload_file( ) server_side_encryption.add_to_upload_headers(headers) + # FIXME: implement `legal_hold` and `file_retention` + return self.b2_http.post_content_return_json(upload_url, headers, data_stream) def upload_part( @@ -859,6 +874,8 @@ def copy_file( destination_bucket_id=None, destination_server_side_encryption: Optional[EncryptionSetting] = None, source_server_side_encryption: Optional[EncryptionSetting] = None, + legal_hold: Optional[bool] = None, + file_retention: Optional[FileRetentionSetting] = None, ): kwargs = {} if bytes_range is not None: @@ -897,6 +914,8 @@ def copy_file( kwargs['sourceServerSideEncryption' ] = source_server_side_encryption.serialize_to_json_for_request() + # FIXME: implement `legal_hold` and `file_retention` + return self._post_json( api_url, 'b2_copy_file', diff --git a/b2sdk/raw_simulator.py b/b2sdk/raw_simulator.py index 6f9f17a97..8cf44508f 100644 --- a/b2sdk/raw_simulator.py +++ b/b2sdk/raw_simulator.py @@ -758,7 +758,7 @@ def list_parts(self, file_id, start_part_number, max_part_count): file_sim = self.file_id_to_file[file_id] return file_sim.list_parts(start_part_number, max_part_count) - def list_unfinished_large_files(self, start_file_id=None, max_file_count=None, prefix=None): + def list_unfinished_large_files(self, account_auth_token, start_file_id=None, max_file_count=None, prefix=None): start_file_id = start_file_id or self.FIRST_FILE_ID max_file_count = max_file_count or 100 all_unfinished_ids = set( @@ -767,15 +767,9 @@ def list_unfinished_large_files(self, start_file_id=None, max_file_count=None, p (prefix is None or v.name.startswith(prefix)) ) ids_in_order = sorted(all_unfinished_ids, reverse=True) + file_dict_list = [ - dict( - fileId=file_sim.file_id, - fileName=file_sim.name, - accountId=file_sim.account_id, - bucketId=file_sim.bucket.bucket_id, - contentType=file_sim.content_type, - fileInfo=file_sim.file_info - ) + file_sim.as_start_large_file_result(account_auth_token) for file_sim in ( self.file_id_to_file[file_id] for file_id in ids_in_order[:max_file_count] ) @@ -1303,6 +1297,8 @@ def copy_file( destination_bucket_id=None, destination_server_side_encryption=None, source_server_side_encryption=None, + legal_hold: Optional[bool] = None, + file_retention: Optional[FileRetentionSetting] = None, ): bucket_id = self.file_id_to_bucket_id[source_file_id] bucket = self._get_bucket_by_id(bucket_id) @@ -1328,6 +1324,9 @@ def copy_file( dest_bucket.file_id_to_file[copy_file_sim.file_id] = copy_file_sim dest_bucket.file_name_and_id_to_file[copy_file_sim.sort_key()] = copy_file_sim + + # FIXME: implement `legal_hold` and `file_retention` + return copy_file_sim.as_upload_result(account_auth_token) def copy_part( @@ -1482,7 +1481,7 @@ def list_unfinished_large_files( ) start_file_id = start_file_id or '' max_file_count = max_file_count or 100 - return bucket.list_unfinished_large_files(start_file_id, max_file_count, prefix) + return bucket.list_unfinished_large_files(account_auth_token, start_file_id, max_file_count, prefix) def start_large_file( self, @@ -1493,6 +1492,8 @@ def start_large_file( content_type, file_info, server_side_encryption: Optional[EncryptionSetting] = None, + legal_hold: Optional[bool] = None, + file_retention: Optional[FileRetentionSetting] = None, ): bucket = self._get_bucket_by_id(bucket_id) self._assert_account_auth(api_url, account_auth_token, bucket.account_id, 'writeFiles') @@ -1504,6 +1505,9 @@ def start_large_file( server_side_encryption, ) self.file_id_to_bucket_id[result['fileId']] = 
bucket_id + + # FIXME: implement `legal_hold` and `file_retention` + return result def update_bucket( @@ -1544,6 +1548,8 @@ def upload_file( file_infos, data_stream, server_side_encryption: Optional[EncryptionSetting] = None, + legal_hold: Optional[bool] = None, + file_retention: Optional[FileRetentionSetting] = None, ): with ConcurrentUsedAuthTokenGuard( self.currently_used_auth_tokens[upload_auth_token], upload_auth_token @@ -1574,6 +1580,9 @@ def upload_file( ) file_id = response['fileId'] self.file_id_to_bucket_id[file_id] = bucket_id + + # FIXME: implement `legal_hold` and `file_retention` + return response def upload_part( diff --git a/b2sdk/session.py b/b2sdk/session.py index 7f57a93d1..ba225f470 100644 --- a/b2sdk/session.py +++ b/b2sdk/session.py @@ -19,6 +19,7 @@ from b2sdk.cache import AuthInfoCache, DummyCache from b2sdk.encryption.setting import EncryptionSetting from b2sdk.exception import (InvalidAuthToken, Unauthorized) +from b2sdk.file_lock import BucketRetentionSetting, FileRetentionSetting from b2sdk.raw_api import ALL_CAPABILITIES, B2RawApi logger = logging.getLogger(__name__) @@ -141,6 +142,7 @@ def create_bucket( cors_rules=None, lifecycle_rules=None, default_server_side_encryption=None, + is_file_lock_enabled: Optional[bool] = None, ): return self._wrap_default_token( self.raw_api.create_bucket, @@ -151,6 +153,7 @@ def create_bucket( cors_rules=cors_rules, lifecycle_rules=lifecycle_rules, default_server_side_encryption=default_server_side_encryption, + is_file_lock_enabled=is_file_lock_enabled, ) def create_key( @@ -285,6 +288,8 @@ def start_large_file( content_type, file_info, server_side_encryption: Optional[EncryptionSetting] = None, + legal_hold: Optional[bool] = None, + file_retention: Optional[FileRetentionSetting] = None, ): return self._wrap_default_token( self.raw_api.start_large_file, @@ -293,6 +298,8 @@ def start_large_file( content_type, file_info, server_side_encryption, + legal_hold=legal_hold, + file_retention=file_retention, ) def update_bucket( @@ -305,6 +312,7 @@ def update_bucket( lifecycle_rules=None, if_revision_is=None, default_server_side_encryption: Optional[EncryptionSetting] = None, + default_retention: Optional[BucketRetentionSetting] = None, ): return self._wrap_default_token( self.raw_api.update_bucket, @@ -316,6 +324,7 @@ def update_bucket( lifecycle_rules=lifecycle_rules, if_revision_is=if_revision_is, default_server_side_encryption=default_server_side_encryption, + default_retention=default_retention, ) def upload_file( @@ -328,6 +337,8 @@ def upload_file( file_infos, data_stream, server_side_encryption: Optional[EncryptionSetting] = None, + legal_hold: Optional[bool] = None, + file_retention: Optional[FileRetentionSetting] = None, ): return self._wrap_token( self.raw_api.upload_file, @@ -340,6 +351,8 @@ def upload_file( file_infos, data_stream, server_side_encryption, + legal_hold=legal_hold, + file_retention=file_retention, ) def upload_part( @@ -381,6 +394,8 @@ def copy_file( destination_bucket_id=None, destination_server_side_encryption: Optional[EncryptionSetting] = None, source_server_side_encryption: Optional[EncryptionSetting] = None, + legal_hold: Optional[bool] = None, + file_retention: Optional[FileRetentionSetting] = None, ): return self._wrap_default_token( self.raw_api.copy_file, @@ -393,6 +408,8 @@ def copy_file( destination_bucket_id=destination_bucket_id, destination_server_side_encryption=destination_server_side_encryption, source_server_side_encryption=source_server_side_encryption, + legal_hold=legal_hold, + 
file_retention=file_retention, ) def copy_part( diff --git a/b2sdk/transfer/emerge/emerger.py b/b2sdk/transfer/emerge/emerger.py index 93af5b3c1..89b3a064f 100644 --- a/b2sdk/transfer/emerge/emerger.py +++ b/b2sdk/transfer/emerge/emerger.py @@ -12,6 +12,7 @@ from typing import Optional from b2sdk.encryption.setting import EncryptionSetting +from b2sdk.file_lock import FileRetentionSetting from b2sdk.utils import B2TraceMetaAbstract from b2sdk.transfer.emerge.executor import EmergeExecutor from b2sdk.transfer.emerge.planner.planner import EmergePlanner @@ -50,6 +51,8 @@ def emerge( recommended_upload_part_size=None, continue_large_file_id=None, encryption: Optional[EncryptionSetting] = None, + legal_hold: Optional[bool] = None, + file_retention: Optional[FileRetentionSetting] = None, ): """ Create a new file (object in the cloud, really) from an iterable (list, tuple etc) of write intents. @@ -74,6 +77,8 @@ def emerge( progress_listener, continue_large_file_id=continue_large_file_id, encryption=encryption, + legal_hold=legal_hold, + file_retention=file_retention, ) def emerge_stream( @@ -88,6 +93,8 @@ def emerge_stream( continue_large_file_id=None, max_queue_size=DEFAULT_STREAMING_MAX_QUEUE_SIZE, encryption: Optional[EncryptionSetting] = None, + legal_hold: Optional[bool] = None, + file_retention: Optional[FileRetentionSetting] = None, ): """ Create a new file (object in the cloud, really) from a stream of write intents. @@ -112,6 +119,8 @@ def emerge_stream( continue_large_file_id=continue_large_file_id, max_queue_size=max_queue_size, encryption=encryption, + legal_hold=legal_hold, + file_retention=file_retention, ) def get_emerge_planner(self, recommended_upload_part_size=None): diff --git a/b2sdk/transfer/emerge/executor.py b/b2sdk/transfer/emerge/executor.py index edec968a0..75c3766f6 100644 --- a/b2sdk/transfer/emerge/executor.py +++ b/b2sdk/transfer/emerge/executor.py @@ -15,6 +15,7 @@ from b2sdk.encryption.setting import EncryptionSetting from b2sdk.exception import MaxFileSizeExceeded +from b2sdk.file_lock import FileRetentionSetting, NO_RETENTION_FILE_SETTING from b2sdk.file_version import FileVersionInfoFactory from b2sdk.transfer.outbound.large_file_upload_state import LargeFileUploadState from b2sdk.transfer.outbound.upload_source import UploadSourceStream @@ -38,6 +39,8 @@ def execute_emerge_plan( continue_large_file_id=None, max_queue_size=None, encryption: Optional[EncryptionSetting] = None, + legal_hold: Optional[bool] = None, + file_retention: Optional[FileRetentionSetting] = None, ): if emerge_plan.is_large_file(): execution = LargeFileEmergeExecution( @@ -47,7 +50,9 @@ def execute_emerge_plan( content_type, file_info, progress_listener, - encryption, + encryption=encryption, + file_retention=file_retention, + legal_hold=legal_hold, continue_large_file_id=continue_large_file_id, max_queue_size=max_queue_size, ) @@ -61,7 +66,9 @@ def execute_emerge_plan( content_type, file_info, progress_listener, - encryption, + encryption=encryption, + file_retention=file_retention, + legal_hold=legal_hold, ) return execution.execute_plan(emerge_plan) @@ -78,6 +85,8 @@ def __init__( file_info, progress_listener, encryption: Optional[EncryptionSetting] = None, + legal_hold: Optional[bool] = None, + file_retention: Optional[FileRetentionSetting] = None, ): self.services = services self.bucket_id = bucket_id @@ -86,6 +95,8 @@ def __init__( self.file_info = file_info self.progress_listener = progress_listener self.encryption = encryption + self.legal_hold = legal_hold + self.file_retention = 
file_retention @abstractmethod def execute_plan(self, emerge_plan): @@ -115,6 +126,8 @@ def __init__( file_info, progress_listener, encryption: Optional[EncryptionSetting] = None, + legal_hold: Optional[bool] = None, + file_retention: Optional[FileRetentionSetting] = None, continue_large_file_id=None, max_queue_size=None, ): @@ -125,7 +138,9 @@ def __init__( content_type, file_info, progress_listener, - encryption, + encryption=encryption, + legal_hold=legal_hold, + file_retention=file_retention, ) self.continue_large_file_id = continue_large_file_id self.max_queue_size = max_queue_size @@ -136,6 +151,7 @@ def __init__( def execute_plan(self, emerge_plan): total_length = emerge_plan.get_total_length() encryption = self.encryption + encryption = self.encryption if total_length is not None and total_length > self.MAX_LARGE_FILE_SIZE: raise MaxFileSizeExceeded(total_length, self.MAX_LARGE_FILE_SIZE) @@ -158,6 +174,8 @@ def execute_plan(self, emerge_plan): file_info, self.continue_large_file_id, encryption=encryption, + legal_hold=self.legal_hold, + file_retention=self.file_retention, emerge_parts_dict=emerge_parts_dict, ) @@ -171,7 +189,9 @@ def execute_plan(self, emerge_plan): self.file_name, content_type, file_info, - encryption, + encryption=encryption, + legal_hold=self.legal_hold, + file_retention=self.file_retention, ) file_id = unfinished_file.file_id @@ -224,6 +244,8 @@ def _get_unfinished_file_and_parts( file_info, continue_large_file_id, encryption: EncryptionSetting, + legal_hold, + file_retention: FileRetentionSetting, emerge_parts_dict=None, ): if 'listFiles' not in self.services.session.account_info.get_allowed()['capabilities']: @@ -237,7 +259,6 @@ def _get_unfinished_file_and_parts( bucket_id, continue_large_file_id, prefix=file_name, - encryption=encryption, ) if unfinished_file.file_info != file_info: raise ValueError( @@ -255,6 +276,8 @@ def _get_unfinished_file_and_parts( file_info, emerge_parts_dict, encryption, + legal_hold, + file_retention, ) elif emerge_parts_dict is not None: unfinished_file, finished_parts = self._match_unfinished_file_if_possible( @@ -263,12 +286,22 @@ def _get_unfinished_file_and_parts( file_info, emerge_parts_dict, encryption, + legal_hold, + file_retention, ) return unfinished_file, finished_parts def _find_unfinished_file_by_plan_id( - self, bucket_id, file_name, file_info, emerge_parts_dict, encryption: EncryptionSetting + self, + bucket_id, + file_name, + file_info, + emerge_parts_dict, + encryption: EncryptionSetting, + legal_hold, + file_retention: FileRetentionSetting, ): + file_retention = file_retention or NO_RETENTION_FILE_SETTING assert 'plan_id' in file_info best_match_file = None best_match_parts = {} @@ -278,8 +311,17 @@ def _find_unfinished_file_by_plan_id( ): if file_.file_info != file_info: continue + # FIXME: encryption is None ??? 
if encryption is None or file_.encryption != encryption: continue + if bool(legal_hold) != file_.legal_hold: + # when `file_.legal_hold is None` it means that `legal_hold` is unknown and we skip + continue + if file_retention != file_.file_retention: + # if `file_.file_retention` is UNKNOWN then we skip - lib user can still + # pass UKNOWN file_retention here - but raw_api/server won't allow it + # and we don't check it here + continue finished_parts = {} for part in self.services.large_file.list_parts(file_.file_id): emerge_part = emerge_parts_dict.get(part.part_number) @@ -307,6 +349,8 @@ def _match_unfinished_file_if_possible( file_info, emerge_parts_dict, encryption: EncryptionSetting, + legal_hold, + file_retention: FileRetentionSetting, ): """ Find an unfinished file that may be used to resume a large file upload. The @@ -315,6 +359,7 @@ def _match_unfinished_file_if_possible( This is only possible if the application key being used allows ``listFiles`` access. """ + file_retention = file_retention or NO_RETENTION_FILE_SETTING for file_ in self.services.large_file.list_unfinished_large_files( bucket_id, prefix=file_name ): @@ -322,8 +367,17 @@ def _match_unfinished_file_if_possible( continue if file_.file_info != file_info: continue + # FIXME: what if `encryption is None` - match ANY encryption? :) if encryption is not None and encryption != file_.encryption: continue + if bool(legal_hold) != file_.legal_hold: + # when `file_.legal_hold is None` it means that `legal_hold` is unknown and we skip + continue + if file_retention != file_.file_retention: + # if `file_.file_retention` is UNKNOWN then we skip - lib user can still + # pass UKNOWN file_retention here - but raw_api/server won't allow it + # and we don't check it here + continue files_match = True finished_parts = {} for part in self.services.large_file.list_parts(file_.file_id): @@ -456,6 +510,8 @@ def execute(self): progress_listener=execution.progress_listener, destination_encryption=execution.encryption, source_encryption=self.copy_source_range.encryption, + legal_hold=execution.legal_hold, + file_retention=execution.file_retention, ) @@ -509,7 +565,9 @@ def execute(self): execution.content_type or execution.DEFAULT_CONTENT_TYPE, execution.file_info or {}, execution.progress_listener, - execution.encryption, + encryption=execution.encryption, + legal_hold=execution.legal_hold, + file_retention=execution.file_retention, ) diff --git a/b2sdk/transfer/outbound/copy_manager.py b/b2sdk/transfer/outbound/copy_manager.py index e797aea5b..0b3ebd263 100644 --- a/b2sdk/transfer/outbound/copy_manager.py +++ b/b2sdk/transfer/outbound/copy_manager.py @@ -14,6 +14,7 @@ from b2sdk.encryption.setting import EncryptionMode, EncryptionSetting, SSE_C_KEY_ID_FILE_INFO_KEY_NAME from b2sdk.exception import AlreadyFailed, SSECKeyIdMismatchInCopy +from b2sdk.file_lock import FileRetentionSetting from b2sdk.file_version import FileVersionInfoFactory from b2sdk.raw_api import MetadataDirectiveMode from b2sdk.utils import B2TraceMetaAbstract @@ -73,6 +74,8 @@ def copy_file( progress_listener, destination_encryption: Optional[EncryptionSetting] = None, source_encryption: Optional[EncryptionSetting] = None, + legal_hold: Optional[bool] = None, + file_retention: Optional[FileRetentionSetting] = None, ): # Run small copies in the same thread pool as large file copies, # so that they share resources during a sync. 
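The two matchers above now also require the unfinished file's legal hold and retention to agree with what the caller asked for, skipping candidates whose settings are unknown. The predicate below restates just those rules in isolation; it is a simplification (the real code also compares file info, encryption and uploaded parts), with `file_` standing in for an unfinished-file object:

from b2sdk.file_lock import NO_RETENTION_FILE_SETTING

def lock_settings_match(file_, requested_legal_hold, requested_retention):
    # Simplified restatement of the checks added to the emerge executor.
    requested_retention = requested_retention or NO_RETENTION_FILE_SETTING
    if bool(requested_legal_hold) != file_.legal_hold:
        # file_.legal_hold may be None (unknown), which never matches a definite request
        return False
    if requested_retention != file_.file_retention:
        # unknown retention on the unfinished file likewise fails the comparison
        return False
    return True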
@@ -86,6 +89,8 @@ def copy_file( progress_listener=progress_listener, destination_encryption=destination_encryption, source_encryption=source_encryption, + legal_hold=legal_hold, + file_retention=file_retention, ) def copy_part( @@ -173,6 +178,8 @@ def _copy_small_file( progress_listener, destination_encryption: Optional[EncryptionSetting], source_encryption: Optional[EncryptionSetting], + legal_hold: Optional[bool] = None, + file_retention: Optional[FileRetentionSetting] = None, ): with progress_listener: progress_listener.set_total_bytes(copy_source.get_content_length() or 0) @@ -206,6 +213,8 @@ def _copy_small_file( destination_bucket_id=destination_bucket_id, destination_server_side_encryption=destination_encryption, source_server_side_encryption=source_encryption, + legal_hold=legal_hold, + file_retention=file_retention, ) file_info = FileVersionInfoFactory.from_api_response(response) if progress_listener is not None: diff --git a/b2sdk/transfer/outbound/upload_manager.py b/b2sdk/transfer/outbound/upload_manager.py index fab793497..a65ae2811 100644 --- a/b2sdk/transfer/outbound/upload_manager.py +++ b/b2sdk/transfer/outbound/upload_manager.py @@ -11,12 +11,15 @@ import logging import concurrent.futures as futures +from typing import Optional + from b2sdk.encryption.setting import EncryptionMode, EncryptionSetting from b2sdk.exception import ( AlreadyFailed, B2Error, MaxRetriesExceeded, ) +from b2sdk.file_lock import FileRetentionSetting from b2sdk.file_version import FileVersionInfoFactory from b2sdk.stream.progress import ReadingStreamWithProgress from b2sdk.stream.hashing import StreamWithHash @@ -78,7 +81,9 @@ def upload_file( content_type, file_info, progress_listener, - encryption: EncryptionSetting = None, + encryption: Optional[EncryptionSetting] = None, + legal_hold: Optional[bool] = None, + file_retention: Optional[FileRetentionSetting] = None, ): f = self.get_thread_pool().submit( self._upload_small_file, @@ -203,7 +208,9 @@ def _upload_small_file( content_type, file_info, progress_listener, - encryption: EncryptionSetting, + encryption: Optional[EncryptionSetting] = None, + legal_hold: Optional[bool] = None, + file_retention: Optional[FileRetentionSetting] = None, ): content_length = upload_source.get_content_length() exception_info_list = [] @@ -232,6 +239,8 @@ def _upload_small_file( file_info, input_stream, server_side_encryption=encryption, # todo: client side encryption + legal_hold=legal_hold, + file_retention=file_retention, ) if content_sha1 == HEX_DIGITS_AT_END: content_sha1 = input_stream.hash From 749dd0bc66acbb0a26d6904695301fb54dbe1f2c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Polewicz?= Date: Thu, 13 May 2021 22:17:27 +0200 Subject: [PATCH 32/33] Update spacing --- b2sdk/large_file/unfinished_large_file.py | 1 - 1 file changed, 1 deletion(-) diff --git a/b2sdk/large_file/unfinished_large_file.py b/b2sdk/large_file/unfinished_large_file.py index 953f969c7..0ecdef235 100644 --- a/b2sdk/large_file/unfinished_large_file.py +++ b/b2sdk/large_file/unfinished_large_file.py @@ -36,7 +36,6 @@ def __init__(self, file_dict): self.file_info = file_dict['fileInfo'] self.encryption = EncryptionSettingFactory.from_file_version_dict(file_dict) self.file_retention = FileRetentionSetting.from_file_version_dict(file_dict) - self.legal_hold = LegalHoldSerializer.from_server(file_dict) def __repr__(self): From 982a39078928822581dd9ee58865f6fad4c27a88 Mon Sep 17 00:00:00 2001 From: Pawel Polewicz Date: Thu, 13 May 2021 22:57:49 +0200 Subject: [PATCH 33/33] Consistently 
place legal_hold after file_retention in all methods --- b2sdk/api.py | 1 - b2sdk/bucket.py | 54 +++++++++++++++---------------- b2sdk/raw_api.py | 6 ++-- b2sdk/raw_simulator.py | 6 ++-- b2sdk/session.py | 12 +++---- b2sdk/transfer/emerge/emerger.py | 8 ++--- b2sdk/transfer/emerge/executor.py | 34 +++++++++---------- 7 files changed, 60 insertions(+), 61 deletions(-) diff --git a/b2sdk/api.py b/b2sdk/api.py index 2e30f1955..b97989206 100644 --- a/b2sdk/api.py +++ b/b2sdk/api.py @@ -180,7 +180,6 @@ def create_bucket( default_server_side_encryption: Optional[EncryptionSetting] = None, default_retention: Optional[BucketRetentionSetting] = None, is_file_lock_enabled: Optional[bool] = None, - ): """ Create a bucket. diff --git a/b2sdk/bucket.py b/b2sdk/bucket.py index f15c5bbcb..9584737bc 100644 --- a/b2sdk/bucket.py +++ b/b2sdk/bucket.py @@ -415,8 +415,8 @@ def upload_bytes( file_infos=None, progress_listener=None, encryption: Optional[EncryptionSetting] = None, - legal_hold: Optional[bool] = None, file_retention: Optional[FileRetentionSetting] = None, + legal_hold: Optional[bool] = None, ): """ Upload bytes in memory to a B2 file. @@ -427,8 +427,8 @@ def upload_bytes( :param dict,None file_infos: a file info to store with the file or ``None`` to not store anything :param b2sdk.v1.AbstractProgressListener,None progress_listener: a progress listener object to use, or ``None`` to not track progress :param b2sdk.v1.EncryptionSetting encryption: encryption settings (``None`` if unknown) - :param bool legal_hold: legal hold setting :param b2sdk.v1.FileRetentionSetting file_retention: file retention setting + :param bool legal_hold: legal hold setting :rtype: generator[b2sdk.v1.FileVersion] """ upload_source = UploadSourceBytes(data_bytes) @@ -439,8 +439,8 @@ def upload_bytes( file_info=file_infos, progress_listener=progress_listener, encryption=encryption, - legal_hold=legal_hold, file_retention=file_retention, + legal_hold=legal_hold, ) def upload_local_file( @@ -453,8 +453,8 @@ def upload_local_file( min_part_size=None, progress_listener=None, encryption: Optional[EncryptionSetting] = None, - legal_hold: Optional[bool] = None, file_retention: Optional[FileRetentionSetting] = None, + legal_hold: Optional[bool] = None, ): """ Upload a file on local disk to a B2 file. @@ -471,8 +471,8 @@ def upload_local_file( :param int min_part_size: a minimum size of a part :param b2sdk.v1.AbstractProgressListener,None progress_listener: a progress listener object to use, or ``None`` to not report progress :param b2sdk.v1.EncryptionSetting encryption: encryption settings (``None`` if unknown) - :param bool legal_hold: legal hold setting :param b2sdk.v1.FileRetentionSetting file_retention: file retention setting + :param bool legal_hold: legal hold setting :rtype: b2sdk.v1.FileVersionInfo """ upload_source = UploadSourceLocalFile(local_path=local_file, content_sha1=sha1_sum) @@ -484,8 +484,8 @@ def upload_local_file( min_part_size=min_part_size, progress_listener=progress_listener, encryption=encryption, - legal_hold=legal_hold, file_retention=file_retention, + legal_hold=legal_hold, ) def upload( @@ -497,8 +497,8 @@ def upload( min_part_size=None, progress_listener=None, encryption: Optional[EncryptionSetting] = None, - legal_hold: Optional[bool] = None, file_retention: Optional[FileRetentionSetting] = None, + legal_hold: Optional[bool] = None, ): """ Upload a file to B2, retrying as needed. 
@@ -518,8 +518,8 @@ def upload( :param int,None min_part_size: the smallest part size to use or ``None`` to determine automatically :param b2sdk.v1.AbstractProgressListener,None progress_listener: a progress listener object to use, or ``None`` to not report progress :param b2sdk.v1.EncryptionSetting encryption: encryption settings (``None`` if unknown) - :param bool legal_hold: legal hold setting :param b2sdk.v1.FileRetentionSetting file_retention: file retention setting + :param bool legal_hold: legal hold setting :rtype: b2sdk.v1.FileVersionInfo """ return self.create_file( @@ -531,8 +531,8 @@ def upload( # FIXME: Bucket.upload documents wrong logic recommended_upload_part_size=min_part_size, encryption=encryption, - legal_hold=legal_hold, file_retention=file_retention, + legal_hold=legal_hold, ) def create_file( @@ -545,8 +545,8 @@ def create_file( recommended_upload_part_size=None, continue_large_file_id=None, encryption: Optional[EncryptionSetting] = None, - legal_hold: Optional[bool] = None, file_retention: Optional[FileRetentionSetting] = None, + legal_hold: Optional[bool] = None, ): """ Creates a new file in this bucket using an iterable (list, tuple etc) of remote or local sources. @@ -569,8 +569,8 @@ def create_file( :param str,None continue_large_file_id: large file id that should be selected to resume file creation for multipart upload/copy, ``None`` for automatic search for this id :param b2sdk.v1.EncryptionSetting encryption: encryption settings (``None`` if unknown) - :param bool legal_hold: legal hold setting :param b2sdk.v1.FileRetentionSetting file_retention: file retention setting + :param bool legal_hold: legal hold setting """ return self._create_file( self.api.services.emerger.emerge, @@ -582,8 +582,8 @@ def create_file( continue_large_file_id=continue_large_file_id, recommended_upload_part_size=recommended_upload_part_size, encryption=encryption, - legal_hold=legal_hold, file_retention=file_retention, + legal_hold=legal_hold, ) def create_file_stream( @@ -596,8 +596,8 @@ def create_file_stream( recommended_upload_part_size=None, continue_large_file_id=None, encryption: Optional[EncryptionSetting] = None, - legal_hold: Optional[bool] = None, file_retention: Optional[FileRetentionSetting] = None, + legal_hold: Optional[bool] = None, ): """ Creates a new file in this bucket using a stream of multiple remote or local sources. 
@@ -622,8 +622,8 @@ def create_file_stream( for multipart upload/copy, if ``None`` in multipart case it would always start a new large file :param b2sdk.v1.EncryptionSetting encryption: encryption settings (``None`` if unknown) - :param bool legal_hold: legal hold setting :param b2sdk.v1.FileRetentionSetting file_retention: file retention setting + :param bool legal_hold: legal hold setting """ return self._create_file( self.api.services.emerger.emerge_stream, @@ -635,8 +635,8 @@ def create_file_stream( continue_large_file_id=continue_large_file_id, recommended_upload_part_size=recommended_upload_part_size, encryption=encryption, - legal_hold=legal_hold, file_retention=file_retention, + legal_hold=legal_hold, ) def _create_file( @@ -650,8 +650,8 @@ def _create_file( recommended_upload_part_size=None, continue_large_file_id=None, encryption: Optional[EncryptionSetting] = None, - legal_hold: Optional[bool] = None, file_retention: Optional[FileRetentionSetting] = None, + legal_hold: Optional[bool] = None, ): validate_b2_file_name(file_name) progress_listener = progress_listener or DoNothingProgressListener() @@ -666,8 +666,8 @@ def _create_file( recommended_upload_part_size=recommended_upload_part_size, continue_large_file_id=continue_large_file_id, encryption=encryption, - legal_hold=legal_hold, file_retention=file_retention, + legal_hold=legal_hold, ) def concatenate( @@ -680,8 +680,8 @@ def concatenate( recommended_upload_part_size=None, continue_large_file_id=None, encryption: Optional[EncryptionSetting] = None, - legal_hold: Optional[bool] = None, file_retention: Optional[FileRetentionSetting] = None, + legal_hold: Optional[bool] = None, ): """ Creates a new file in this bucket by concatenating multiple remote or local sources. @@ -701,8 +701,8 @@ def concatenate( :param str,None continue_large_file_id: large file id that should be selected to resume file creation for multipart upload/copy, ``None`` for automatic search for this id :param b2sdk.v1.EncryptionSetting encryption: encryption settings (``None`` if unknown) - :param bool legal_hold: legal hold setting :param b2sdk.v1.FileRetentionSetting file_retention: file retention setting + :param bool legal_hold: legal hold setting """ return self.create_file( WriteIntent.wrap_sources_iterator(outbound_sources), @@ -713,8 +713,8 @@ def concatenate( recommended_upload_part_size=recommended_upload_part_size, continue_large_file_id=continue_large_file_id, encryption=encryption, - legal_hold=legal_hold, file_retention=file_retention, + legal_hold=legal_hold, ) def concatenate_stream( @@ -727,8 +727,8 @@ def concatenate_stream( recommended_upload_part_size=None, continue_large_file_id=None, encryption: Optional[EncryptionSetting] = None, - legal_hold: Optional[bool] = None, file_retention: Optional[FileRetentionSetting] = None, + legal_hold: Optional[bool] = None, ): """ Creates a new file in this bucket by concatenating stream of multiple remote or local sources. 
@@ -749,8 +749,8 @@ def concatenate_stream( for multipart upload/copy, if ``None`` in multipart case it would always start a new large file :param b2sdk.v1.EncryptionSetting encryption: encryption setting (``None`` if unknown) - :param bool legal_hold: legal hold setting :param b2sdk.v1.FileRetentionSetting file_retention: file retention setting + :param bool legal_hold: legal hold setting """ return self.create_file_stream( WriteIntent.wrap_sources_iterator(outbound_sources_iterator), @@ -761,8 +761,8 @@ def concatenate_stream( recommended_upload_part_size=recommended_upload_part_size, continue_large_file_id=continue_large_file_id, encryption=encryption, - legal_hold=legal_hold, file_retention=file_retention, + legal_hold=legal_hold, ) def get_download_url(self, filename): @@ -801,8 +801,8 @@ def copy( source_encryption: Optional[EncryptionSetting] = None, source_file_info: Optional[dict] = None, source_content_type: Optional[str] = None, - legal_hold: Optional[bool] = None, file_retention: Optional[FileRetentionSetting] = None, + legal_hold: Optional[bool] = None, ): """ Creates a new file in this bucket by (server-side) copying from an existing file. @@ -826,8 +826,8 @@ def copy( (``None`` if unknown) :param dict,None source_file_info: source file's file_info dict, useful when copying files with SSE-C :param str,None source_content_type: source file's content type, useful when copying files with SSE-C - :param bool legal_hold: legal hold setting for the new file. :param b2sdk.v1.FileRetentionSetting file_retention: file retention setting for the new file. + :param bool legal_hold: legal hold setting for the new file. """ copy_source = CopySource( @@ -851,8 +851,8 @@ def copy( progress_listener=progress_listener, destination_encryption=destination_encryption, source_encryption=source_encryption, - legal_hold=legal_hold, file_retention=file_retention, + legal_hold=legal_hold, ).result() else: return self.create_file( @@ -862,8 +862,8 @@ def copy( file_info=file_info, progress_listener=progress_listener, encryption=destination_encryption, - legal_hold=legal_hold, file_retention=file_retention, + legal_hold=legal_hold, ) def delete_file_version(self, file_id, file_name): diff --git a/b2sdk/raw_api.py b/b2sdk/raw_api.py index 14625f963..c1b8ba55c 100644 --- a/b2sdk/raw_api.py +++ b/b2sdk/raw_api.py @@ -654,8 +654,8 @@ def start_large_file( content_type, file_info, server_side_encryption: Optional[EncryptionSetting] = None, - legal_hold: Optional[bool] = None, file_retention: Optional[FileRetentionSetting] = None, + legal_hold: Optional[bool] = None, ): kwargs = {} if server_side_encryption is not None: @@ -800,8 +800,8 @@ def upload_file( file_infos, data_stream, server_side_encryption: Optional[EncryptionSetting] = None, - legal_hold: Optional[bool] = None, file_retention: Optional[FileRetentionSetting] = None, + legal_hold: Optional[bool] = None, ): """ Upload one, small file to b2. 
@@ -874,8 +874,8 @@ def copy_file( destination_bucket_id=None, destination_server_side_encryption: Optional[EncryptionSetting] = None, source_server_side_encryption: Optional[EncryptionSetting] = None, - legal_hold: Optional[bool] = None, file_retention: Optional[FileRetentionSetting] = None, + legal_hold: Optional[bool] = None, ): kwargs = {} if bytes_range is not None: diff --git a/b2sdk/raw_simulator.py b/b2sdk/raw_simulator.py index 8cf44508f..3377501bc 100644 --- a/b2sdk/raw_simulator.py +++ b/b2sdk/raw_simulator.py @@ -1297,8 +1297,8 @@ def copy_file( destination_bucket_id=None, destination_server_side_encryption=None, source_server_side_encryption=None, - legal_hold: Optional[bool] = None, file_retention: Optional[FileRetentionSetting] = None, + legal_hold: Optional[bool] = None, ): bucket_id = self.file_id_to_bucket_id[source_file_id] bucket = self._get_bucket_by_id(bucket_id) @@ -1492,8 +1492,8 @@ def start_large_file( content_type, file_info, server_side_encryption: Optional[EncryptionSetting] = None, - legal_hold: Optional[bool] = None, file_retention: Optional[FileRetentionSetting] = None, + legal_hold: Optional[bool] = None, ): bucket = self._get_bucket_by_id(bucket_id) self._assert_account_auth(api_url, account_auth_token, bucket.account_id, 'writeFiles') @@ -1548,8 +1548,8 @@ def upload_file( file_infos, data_stream, server_side_encryption: Optional[EncryptionSetting] = None, - legal_hold: Optional[bool] = None, file_retention: Optional[FileRetentionSetting] = None, + legal_hold: Optional[bool] = None, ): with ConcurrentUsedAuthTokenGuard( self.currently_used_auth_tokens[upload_auth_token], upload_auth_token diff --git a/b2sdk/session.py b/b2sdk/session.py index ba225f470..b7375d42a 100644 --- a/b2sdk/session.py +++ b/b2sdk/session.py @@ -288,8 +288,8 @@ def start_large_file( content_type, file_info, server_side_encryption: Optional[EncryptionSetting] = None, - legal_hold: Optional[bool] = None, file_retention: Optional[FileRetentionSetting] = None, + legal_hold: Optional[bool] = None, ): return self._wrap_default_token( self.raw_api.start_large_file, @@ -298,8 +298,8 @@ def start_large_file( content_type, file_info, server_side_encryption, - legal_hold=legal_hold, file_retention=file_retention, + legal_hold=legal_hold, ) def update_bucket( @@ -337,8 +337,8 @@ def upload_file( file_infos, data_stream, server_side_encryption: Optional[EncryptionSetting] = None, - legal_hold: Optional[bool] = None, file_retention: Optional[FileRetentionSetting] = None, + legal_hold: Optional[bool] = None, ): return self._wrap_token( self.raw_api.upload_file, @@ -351,8 +351,8 @@ def upload_file( file_infos, data_stream, server_side_encryption, - legal_hold=legal_hold, file_retention=file_retention, + legal_hold=legal_hold, ) def upload_part( @@ -394,8 +394,8 @@ def copy_file( destination_bucket_id=None, destination_server_side_encryption: Optional[EncryptionSetting] = None, source_server_side_encryption: Optional[EncryptionSetting] = None, - legal_hold: Optional[bool] = None, file_retention: Optional[FileRetentionSetting] = None, + legal_hold: Optional[bool] = None, ): return self._wrap_default_token( self.raw_api.copy_file, @@ -408,8 +408,8 @@ def copy_file( destination_bucket_id=destination_bucket_id, destination_server_side_encryption=destination_server_side_encryption, source_server_side_encryption=source_server_side_encryption, - legal_hold=legal_hold, file_retention=file_retention, + legal_hold=legal_hold, ) def copy_part( diff --git a/b2sdk/transfer/emerge/emerger.py 
b/b2sdk/transfer/emerge/emerger.py index 89b3a064f..e598ccc67 100644 --- a/b2sdk/transfer/emerge/emerger.py +++ b/b2sdk/transfer/emerge/emerger.py @@ -51,8 +51,8 @@ def emerge( recommended_upload_part_size=None, continue_large_file_id=None, encryption: Optional[EncryptionSetting] = None, - legal_hold: Optional[bool] = None, file_retention: Optional[FileRetentionSetting] = None, + legal_hold: Optional[bool] = None, ): """ Create a new file (object in the cloud, really) from an iterable (list, tuple etc) of write intents. @@ -77,8 +77,8 @@ def emerge( progress_listener, continue_large_file_id=continue_large_file_id, encryption=encryption, - legal_hold=legal_hold, file_retention=file_retention, + legal_hold=legal_hold, ) def emerge_stream( @@ -93,8 +93,8 @@ def emerge_stream( continue_large_file_id=None, max_queue_size=DEFAULT_STREAMING_MAX_QUEUE_SIZE, encryption: Optional[EncryptionSetting] = None, - legal_hold: Optional[bool] = None, file_retention: Optional[FileRetentionSetting] = None, + legal_hold: Optional[bool] = None, ): """ Create a new file (object in the cloud, really) from a stream of write intents. @@ -119,8 +119,8 @@ def emerge_stream( continue_large_file_id=continue_large_file_id, max_queue_size=max_queue_size, encryption=encryption, - legal_hold=legal_hold, file_retention=file_retention, + legal_hold=legal_hold, ) def get_emerge_planner(self, recommended_upload_part_size=None): diff --git a/b2sdk/transfer/emerge/executor.py b/b2sdk/transfer/emerge/executor.py index 75c3766f6..b613d62f2 100644 --- a/b2sdk/transfer/emerge/executor.py +++ b/b2sdk/transfer/emerge/executor.py @@ -39,8 +39,8 @@ def execute_emerge_plan( continue_large_file_id=None, max_queue_size=None, encryption: Optional[EncryptionSetting] = None, - legal_hold: Optional[bool] = None, file_retention: Optional[FileRetentionSetting] = None, + legal_hold: Optional[bool] = None, ): if emerge_plan.is_large_file(): execution = LargeFileEmergeExecution( @@ -85,8 +85,8 @@ def __init__( file_info, progress_listener, encryption: Optional[EncryptionSetting] = None, - legal_hold: Optional[bool] = None, file_retention: Optional[FileRetentionSetting] = None, + legal_hold: Optional[bool] = None, ): self.services = services self.bucket_id = bucket_id @@ -95,8 +95,8 @@ def __init__( self.file_info = file_info self.progress_listener = progress_listener self.encryption = encryption - self.legal_hold = legal_hold self.file_retention = file_retention + self.legal_hold = legal_hold @abstractmethod def execute_plan(self, emerge_plan): @@ -126,8 +126,8 @@ def __init__( file_info, progress_listener, encryption: Optional[EncryptionSetting] = None, - legal_hold: Optional[bool] = None, file_retention: Optional[FileRetentionSetting] = None, + legal_hold: Optional[bool] = None, continue_large_file_id=None, max_queue_size=None, ): @@ -139,8 +139,8 @@ def __init__( file_info, progress_listener, encryption=encryption, - legal_hold=legal_hold, file_retention=file_retention, + legal_hold=legal_hold, ) self.continue_large_file_id = continue_large_file_id self.max_queue_size = max_queue_size @@ -174,8 +174,8 @@ def execute_plan(self, emerge_plan): file_info, self.continue_large_file_id, encryption=encryption, - legal_hold=self.legal_hold, file_retention=self.file_retention, + legal_hold=self.legal_hold, emerge_parts_dict=emerge_parts_dict, ) @@ -190,8 +190,8 @@ def execute_plan(self, emerge_plan): content_type, file_info, encryption=encryption, - legal_hold=self.legal_hold, file_retention=self.file_retention, + legal_hold=self.legal_hold, ) file_id = 
unfinished_file.file_id @@ -244,8 +244,8 @@ def _get_unfinished_file_and_parts( file_info, continue_large_file_id, encryption: EncryptionSetting, - legal_hold, - file_retention: FileRetentionSetting, + file_retention: Optional[FileRetentionSetting] = None, + legal_hold: Optional[bool] = None, emerge_parts_dict=None, ): if 'listFiles' not in self.services.session.account_info.get_allowed()['capabilities']: @@ -276,8 +276,8 @@ def _get_unfinished_file_and_parts( file_info, emerge_parts_dict, encryption, - legal_hold, file_retention, + legal_hold, ) elif emerge_parts_dict is not None: unfinished_file, finished_parts = self._match_unfinished_file_if_possible( @@ -286,8 +286,8 @@ def _get_unfinished_file_and_parts( file_info, emerge_parts_dict, encryption, - legal_hold, file_retention, + legal_hold, ) return unfinished_file, finished_parts @@ -298,8 +298,8 @@ def _find_unfinished_file_by_plan_id( file_info, emerge_parts_dict, encryption: EncryptionSetting, - legal_hold, - file_retention: FileRetentionSetting, + file_retention: Optional[FileRetentionSetting] = None, + legal_hold: Optional[bool] = None, ): file_retention = file_retention or NO_RETENTION_FILE_SETTING assert 'plan_id' in file_info @@ -349,8 +349,8 @@ def _match_unfinished_file_if_possible( file_info, emerge_parts_dict, encryption: EncryptionSetting, - legal_hold, - file_retention: FileRetentionSetting, + file_retention: Optional[FileRetentionSetting] = None, + legal_hold: Optional[bool] = None, ): """ Find an unfinished file that may be used to resume a large file upload. The @@ -510,8 +510,8 @@ def execute(self): progress_listener=execution.progress_listener, destination_encryption=execution.encryption, source_encryption=self.copy_source_range.encryption, - legal_hold=execution.legal_hold, file_retention=execution.file_retention, + legal_hold=execution.legal_hold, ) @@ -566,8 +566,8 @@ def execute(self): execution.file_info or {}, execution.progress_listener, encryption=execution.encryption, - legal_hold=execution.legal_hold, file_retention=execution.file_retention, + legal_hold=execution.legal_hold, )
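
A minimal usage sketch (not part of the patch itself) of the keyword order the final commit settles on: file_retention before legal_hold. It assumes an already-authorized Bucket instance and the FileRetentionSetting / RetentionMode types introduced earlier in this series; the file name, payload and retain-until timestamp below are illustrative only.

    from b2sdk.bucket import Bucket
    from b2sdk.file_lock import FileRetentionSetting, RetentionMode

    def upload_with_lock(bucket: Bucket) -> None:
        # Governance retention until an illustrative timestamp (milliseconds).
        retention = FileRetentionSetting(
            RetentionMode.GOVERNANCE,
            retain_until=1628942493000,
        )
        # file_retention is passed before legal_hold, matching every signature
        # touched by this patch; legal_hold is a plain Optional[bool] here.
        bucket.upload_bytes(
            b'example data',
            'example.txt',
            file_retention=retention,
            legal_hold=True,
        )

The same ordering applies to upload_local_file, create_file, concatenate, copy and the lower-level session/raw_api calls shown above, so callers using keyword arguments are unaffected while positional callers get a consistent order throughout the SDK.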