From f28346af40ad69f9b69fdd82b325ddf27ab28e90 Mon Sep 17 00:00:00 2001 From: ikkamens Date: Mon, 28 Sep 2020 22:46:22 +0200 Subject: [PATCH] use f-strings --- examples/attributes.py | 2 +- examples/indexes.py | 4 +- examples/model.py | 26 +++---- pynamodb/attributes.py | 38 +++++----- pynamodb/connection/base.py | 64 ++++++++-------- pynamodb/exceptions.py | 4 +- pynamodb/expressions/condition.py | 4 +- pynamodb/expressions/operand.py | 19 +++-- pynamodb/expressions/update.py | 2 +- pynamodb/expressions/util.py | 2 +- pynamodb/models.py | 10 +-- pynamodb/settings.py | 4 +- tests/deep_eq.py | 3 +- tests/integration/base_integration_test.py | 2 +- tests/integration/model_integration_test.py | 14 ++-- tests/integration/table_integration_test.py | 2 +- tests/mypy_helpers.py | 2 +- tests/test_base_connection.py | 6 +- tests/test_model.py | 84 ++++++++++----------- tests/test_table_connection.py | 4 +- 20 files changed, 147 insertions(+), 149 deletions(-) diff --git a/examples/attributes.py b/examples/attributes.py index 761eaf359..20c60ad95 100644 --- a/examples/attributes.py +++ b/examples/attributes.py @@ -14,7 +14,7 @@ def __init__(self, name): self.name = name def __str__(self): - return "".format(self.name) + return f"" class PickleAttribute(BinaryAttribute): diff --git a/examples/indexes.py b/examples/indexes.py index ae6e5aa82..82ce2ee3f 100644 --- a/examples/indexes.py +++ b/examples/indexes.py @@ -47,7 +47,7 @@ class Meta: # Indexes can be queried easily using the index's hash key for item in TestModel.view_index.query(1): - print("Item queried from index: {0}".format(item)) + print(f"Item queried from index: {item}") class GamePlayerOpponentIndex(LocalSecondaryIndex): @@ -96,7 +96,7 @@ class Meta: # Indexes can be queried easily using the index's hash key for item in GameModel.player_opponent_index.query('1234'): - print("Item queried from index: {0}".format(item)) + print(f"Item queried from index: {item}") # Count on an index print(GameModel.player_opponent_index.count('1234')) diff --git a/examples/model.py b/examples/model.py index 0f10025d9..b1f3b789c 100644 --- a/examples/model.py +++ b/examples/model.py @@ -59,7 +59,7 @@ class Meta: with Thread.batch_write() as batch: threads = [] for x in range(100): - thread = Thread('forum-{0}'.format(x), 'subject-{0}'.format(x)) + thread = Thread(f'forum-{x}', f'subject-{x}') thread.tags = ['tag1', 'tag2'] thread.last_post_datetime = datetime.now() threads.append(thread) @@ -74,7 +74,7 @@ class Meta: print(Thread.count('forum-1')) # Batch get -item_keys = [('forum-{0}'.format(x), 'subject-{0}'.format(x)) for x in range(100)] +item_keys = [(f'forum-{x}', f'subject-{x}') for x in range(100)] for item in Thread.batch_get(item_keys): print(item) @@ -121,7 +121,7 @@ class Meta: with AliasedModel.batch_write() as batch: threads = [] for x in range(100): - thread = AliasedModel('forum-{0}'.format(x), 'subject-{0}'.format(x)) + thread = AliasedModel(f'forum-{x}', f'subject-{x}') thread.tags = ['tag1', 'tag2'] thread.last_post_datetime = datetime.now() threads.append(thread) @@ -130,30 +130,30 @@ class Meta: batch.save(thread) # Batch get -item_keys = [('forum-{0}'.format(x), 'subject-{0}'.format(x)) for x in range(100)] +item_keys = [(f'forum-{x}', f'subject-{x}') for x in range(100)] for item in AliasedModel.batch_get(item_keys): - print("Batch get item: {0}".format(item)) + print(f"Batch get item: {item}") # Scan for item in AliasedModel.scan(): - print("Scanned item: {0}".format(item)) + print(f"Scanned item: {item}") # Query for item in 
AliasedModel.query('forum-1', AliasedModel.subject.startswith('subject')): - print("Query using aliased attribute: {0}".format(item)) + print(f"Query using aliased attribute: {item}") # Query with filters for item in Thread.query('forum-1', (Thread.views == 0) | (Thread.replies == 0)): - print("Query result: {0}".format(item)) + print(f"Query result: {item}") # Scan with filters for item in Thread.scan(Thread.subject.startswith('subject') & (Thread.views == 0)): - print("Scanned item: {0} {1}".format(item.subject, item.views)) + print(f"Scanned item: {item.subject} {item.views}") # Scan with null filter for item in Thread.scan(Thread.subject.startswith('subject') & Thread.last_post_datetime.does_not_exist()): - print("Scanned item: {0} {1}".format(item.subject, item.views)) + print(f"Scanned item: {item.subject} {item.views}") # Conditionally save an item thread_item = Thread( @@ -204,7 +204,7 @@ class Meta: # Backup/restore example # Print the size of the table -print("Table size: {}".format(Thread.describe_table().get('ItemCount'))) +print(f"Table size: {Thread.describe_table().get('ItemCount')}") # Dump the entire table to a file Thread.dump('thread.json') @@ -213,11 +213,11 @@ class Meta: # Commented out for safety # for item in Thread.scan(): # item.delete() -print("Table size: {}".format(Thread.describe_table().get('ItemCount'))) +print(f"Table size: {Thread.describe_table().get('ItemCount')}") # Restore table from a file Thread.load('thread.json') -print("Table size: {}".format(Thread.describe_table().get('ItemCount'))) +print(f"Table size: {Thread.describe_table().get('ItemCount')}") # Dump the entire table to a string serialized = Thread.dumps() diff --git a/pynamodb/attributes.py b/pynamodb/attributes.py index 5ea496d03..75ed5eb7e 100644 --- a/pynamodb/attributes.py +++ b/pynamodb/attributes.py @@ -127,7 +127,7 @@ def get_value(self, value: Dict[str, Any]) -> Any: def __iter__(self): # Because we define __getitem__ below for condition expression support - raise TypeError("'{}' object is not iterable".format(self.__class__.__name__)) + raise TypeError(f"'{self.__class__.__name__}' object is not iterable") # Condition Expression Support def __eq__(self, other: Any) -> 'Comparison': # type: ignore @@ -264,7 +264,7 @@ def _initialize_attributes(cls, discriminator_value): raise NotImplementedError("Discriminators are not yet supported in model classes.") if discriminator_value is not None: if not cls._discriminator: - raise ValueError("{} does not have a discriminator attribute".format(cls.__name__)) + raise ValueError(f"{cls.__name__} does not have a discriminator attribute") cls._attributes[cls._discriminator].register_class(cls, discriminator_value) @@ -338,7 +338,7 @@ def _set_attributes(self, **attributes: Attribute) -> None: """ for attr_name, attr_value in attributes.items(): if attr_name not in self.get_attributes(): - raise ValueError("Attribute {} specified does not exist".format(attr_name)) + raise ValueError(f"Attribute {attr_name} specified does not exist") setattr(self, attr_name, attr_value) def _serialize(self, null_check=True) -> Dict[str, Dict[str, Any]]: @@ -349,11 +349,11 @@ def _serialize(self, null_check=True) -> Dict[str, Dict[str, Any]]: for name, attr in self.get_attributes().items(): value = getattr(self, name) if isinstance(value, MapAttribute) and not value.validate(): - raise ValueError("Attribute '{}' is not correctly typed".format(name)) + raise ValueError(f"Attribute '{name}' is not correctly typed") attr_value = attr.serialize(value) if value is not 
None else None if null_check and attr_value is None and not attr.null: - raise ValueError("Attribute '{}' cannot be None".format(name)) + raise ValueError(f"Attribute '{name}' cannot be None") if attr_value is not None: attribute_values[attr.attr_name] = {attr.attr_type: attr_value} @@ -405,7 +405,7 @@ def get_discriminator(self, cls: type) -> Optional[Any]: return self._class_map.get(cls) def __set__(self, instance: Any, value: Optional[type]) -> None: - raise TypeError("'{}' object does not support item assignment".format(self.__class__.__name__)) + raise TypeError(f"'{self.__class__.__name__}' object does not support item assignment") def serialize(self, value): """ @@ -418,7 +418,7 @@ def deserialize(self, value): Returns the class corresponding to the given discriminator value. """ if value not in self._discriminator_map: - raise ValueError("Unknown discriminator value: {}".format(value)) + raise ValueError(f"Unknown discriminator value: {value}") return self._discriminator_map[value] @@ -678,14 +678,14 @@ def _fast_parse_utc_date_string(date_string: str) -> datetime: if (len(date_string) != 31 or date_string[4] != '-' or date_string[7] != '-' or date_string[10] != 'T' or date_string[13] != ':' or date_string[16] != ':' or date_string[19] != '.' or date_string[26:31] != '+0000'): - raise ValueError("Datetime string '{}' does not match format '{}'".format(date_string, DATETIME_FORMAT)) + raise ValueError(f"Datetime string '{date_string}' does not match format '{DATETIME_FORMAT}'") return datetime( _int(date_string[0:4]), _int(date_string[5:7]), _int(date_string[8:10]), _int(date_string[11:13]), _int(date_string[14:16]), _int(date_string[17:19]), _int(date_string[20:26]), timezone.utc ) except (TypeError, ValueError): - raise ValueError("Datetime string '{}' does not match format '{}'".format(date_string, DATETIME_FORMAT)) + raise ValueError(f"Datetime string '{date_string}' does not match format '{DATETIME_FORMAT}'") class NullAttribute(Attribute[None]): @@ -838,17 +838,17 @@ def __getitem__(self, item: _KT) -> _VT: # type: ignore elif item in self._attributes: # type: ignore return getattr(self, item) else: - raise AttributeError("'{}' has no attribute '{}'".format(self.__class__.__name__, item)) + raise AttributeError(f"'{self.__class__.__name__}' has no attribute '{item}'") def __setitem__(self, item, value): if not self._is_attribute_container(): - raise TypeError("'{}' object does not support item assignment".format(self.__class__.__name__)) + raise TypeError(f"'{self.__class__.__name__}' object does not support item assignment") if self.is_raw(): self.attribute_values[item] = value elif item in self._attributes: # type: ignore setattr(self, item, value) else: - raise AttributeError("'{}' has no attribute '{}'".format(self.__class__.__name__, item)) + raise AttributeError(f"'{self.__class__.__name__}' has no attribute '{item}'") def __getattr__(self, attr: str) -> _VT: # This should only be called for "raw" (i.e. non-subclassed) MapAttribute instances. @@ -858,7 +858,7 @@ def __getattr__(self, attr: str) -> _VT: return self.attribute_values[attr] except KeyError: pass - raise AttributeError("'{}' has no attribute '{}'".format(self.__class__.__name__, attr)) + raise AttributeError(f"'{self.__class__.__name__}' has no attribute '{attr}'") @overload # type: ignore def __get__(self: _A, instance: None, owner: Any) -> _A: ... 
@@ -897,7 +897,7 @@ def is_correctly_typed(self, key, attr): if can_be_null and value is None: return True if getattr(self, key) is None: - raise ValueError("Attribute '{}' cannot be None".format(key)) + raise ValueError(f"Attribute '{key}' cannot be None") return True # TODO: check that the actual type of `value` meets requirements of `attr` def validate(self): @@ -974,7 +974,7 @@ def _get_class_for_serialize(value): return value value_type = type(value) if value_type not in SERIALIZE_CLASS_MAP: - raise ValueError('Unknown value: {}'.format(value_type)) + raise ValueError(f'Unknown value: {value_type}') return SERIALIZE_CLASS_MAP[value_type] @@ -1011,7 +1011,7 @@ def serialize(self, values): for v in values: attr_class = self._get_serialize_class(v) if self.element_type and v is not None and not isinstance(attr_class, self.element_type): - raise ValueError("List elements must be of type: {}".format(self.element_type.__name__)) + raise ValueError(f"List elements must be of type: {self.element_type.__name__}") attr_type = attr_class.attr_type attr_value = attr_class.serialize(v) if attr_value is None: @@ -1034,7 +1034,7 @@ def deserialize(self, values): value = None if NULL not in attribute_value: # set attr_name in case `get_value` raises an exception - element_attr.attr_name = '{}[{}]'.format(self.attr_name, idx) + element_attr.attr_name = f'{self.attr_name}[{idx}]' value = element_attr.deserialize(element_attr.get_value(attribute_value)) deserialized_lst.append(value) return deserialized_lst @@ -1046,7 +1046,7 @@ def deserialize(self, values): def __getitem__(self, idx: int) -> Path: # type: ignore if not isinstance(idx, int): - raise TypeError("list indices must be integers, not {}".format(type(idx).__name__)) + raise TypeError(f"list indices must be integers, not {type(idx).__name__}") if self.element_type: # If this instance is typed, return a properly configured attribute on list element access. 
@@ -1054,7 +1054,7 @@ def __getitem__(self, idx: int) -> Path: # type: ignore if isinstance(element_attr, MapAttribute): element_attr._make_attribute() element_attr.attr_path = list(self.attr_path) # copy the document path before indexing last element - element_attr.attr_name = '{}[{}]'.format(element_attr.attr_name, idx) + element_attr.attr_name = f'{element_attr.attr_name}[{idx}]' if isinstance(element_attr, MapAttribute): for path_segment in reversed(element_attr.attr_path): element_attr._update_attribute_paths(path_segment) diff --git a/pynamodb/connection/base.py b/pynamodb/connection/base.py index c0105be08..45e73a0d6 100644 --- a/pynamodb/connection/base.py +++ b/pynamodb/connection/base.py @@ -75,7 +75,7 @@ def __init__(self, data: Dict) -> None: def __repr__(self) -> str: if self.data: - return "MetaTable<{}>".format(self.data.get(TABLE_NAME)) + return f"MetaTable<{self.data.get(TABLE_NAME)}>" return "" @property @@ -144,7 +144,7 @@ def get_index_hash_keyname(self, index_name: str) -> str: for schema_key in index.get(KEY_SCHEMA): if schema_key.get(KEY_TYPE) == HASH: return schema_key.get(ATTR_NAME) - raise ValueError("No hash key attribute for index: {}".format(index_name)) + raise ValueError(f"No hash key attribute for index: {index_name}") def get_index_range_keyname(self, index_name): """ @@ -196,7 +196,7 @@ def get_attribute_type(self, attribute_name: str, value: Optional[Any] = None) - if key in value: return key attr_names = [attr.get(ATTR_NAME) for attr in self.data.get(ATTR_DEFINITIONS, [])] - raise ValueError("No attribute {} in {}".format(attribute_name, attr_names)) + raise ValueError(f"No attribute {attribute_name} in {attr_names}") def get_identifier_map(self, hash_key: str, range_key: Optional[str] = None, key: str = KEY): """ @@ -289,7 +289,7 @@ def __init__(self, self._extra_headers = get_settings_value('extra_headers') def __repr__(self) -> str: - return "Connection<{}>".format(self.client.meta.endpoint_url) + return f"Connection<{self.client.meta.endpoint_url}>" def _log_debug(self, operation: str, kwargs: str): """ @@ -383,7 +383,7 @@ def _make_api_call(self, operation_name, operation_kwargs): prepared_request = self._create_prepared_request(request_dict, operation_model) # Implement the before-send event from botocore - event_name = 'before-send.dynamodb.{}'.format(operation_model.name) + event_name = f'before-send.dynamodb.{operation_model.name}' event_responses = self.client._endpoint._event_emitter.emit(event_name, request=prepared_request) event_response = first_non_none_response(event_responses) @@ -551,7 +551,7 @@ def get_meta_table(self, table_name: str, refresh: bool = False): data = self.dispatch(DESCRIBE_TABLE, operation_kwargs) self._tables[table_name] = MetaTable(data.get(TABLE_KEY)) except BotoCoreError as e: - raise TableError("Unable to describe table: {}".format(e), e) + raise TableError(f"Unable to describe table: {e}", e) except ClientError as e: if 'ResourceNotFound' in e.response['Error']['Code']: raise TableDoesNotExist(e.response['Error']['Message']) @@ -593,7 +593,7 @@ def create_table( operation_kwargs[ATTR_DEFINITIONS] = attrs_list if billing_mode not in AVAILABLE_BILLING_MODES: - raise ValueError("incorrect value for billing_mode, available modes: {}".format(AVAILABLE_BILLING_MODES)) + raise ValueError(f"incorrect value for billing_mode, available modes: {AVAILABLE_BILLING_MODES}") if billing_mode == PAY_PER_REQUEST_BILLING_MODE: del operation_kwargs[PROVISIONED_THROUGHPUT] elif billing_mode == PROVISIONED_BILLING_MODE: @@ -642,7 
+642,7 @@ def create_table( try: data = self.dispatch(CREATE_TABLE, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: - raise TableError("Failed to create table: {}".format(e), e) + raise TableError(f"Failed to create table: {e}", e) return data def update_time_to_live(self, table_name: str, ttl_attribute_name: str) -> Dict: @@ -659,7 +659,7 @@ def update_time_to_live(self, table_name: str, ttl_attribute_name: str) -> Dict: try: return self.dispatch(UPDATE_TIME_TO_LIVE, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: - raise TableError("Failed to update TTL on table: {}".format(e), e) + raise TableError(f"Failed to update TTL on table: {e}", e) def delete_table(self, table_name: str) -> Dict: """ @@ -671,7 +671,7 @@ def delete_table(self, table_name: str) -> Dict: try: data = self.dispatch(DELETE_TABLE, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: - raise TableError("Failed to delete table: {}".format(e), e) + raise TableError(f"Failed to delete table: {e}", e) return data def update_table( @@ -710,7 +710,7 @@ def update_table( try: return self.dispatch(UPDATE_TABLE, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: - raise TableError("Failed to update table: {}".format(e), e) + raise TableError(f"Failed to update table: {e}", e) def list_tables( self, @@ -732,7 +732,7 @@ def list_tables( try: return self.dispatch(LIST_TABLES, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: - raise TableError("Unable to list tables: {}".format(e), e) + raise TableError(f"Unable to list tables: {e}", e) def describe_table(self, table_name: str) -> Dict: """ @@ -758,7 +758,7 @@ def get_item_attribute_map( """ tbl = self.get_meta_table(table_name) if tbl is None: - raise TableError("No such table {}".format(table_name)) + raise TableError(f"No such table {table_name}") return tbl.get_item_attribute_map( attributes, item_key=item_key, @@ -780,7 +780,7 @@ def parse_attribute( if return_type: return key, attribute.get(key) return attribute.get(key) - raise ValueError("Invalid attribute supplied: {}".format(attribute)) + raise ValueError(f"Invalid attribute supplied: {attribute}") else: if return_type: return None, attribute @@ -798,7 +798,7 @@ def get_attribute_type( """ tbl = self.get_meta_table(table_name) if tbl is None: - raise TableError("No such table {}".format(table_name)) + raise TableError(f"No such table {table_name}") return tbl.get_attribute_type(attribute_name, value=value) def get_identifier_map( @@ -813,7 +813,7 @@ def get_identifier_map( """ tbl = self.get_meta_table(table_name) if tbl is None: - raise TableError("No such table {}".format(table_name)) + raise TableError(f"No such table {table_name}") return tbl.get_identifier_map(hash_key, range_key=range_key, key=key) def get_consumed_capacity_map(self, return_consumed_capacity: str) -> Dict: @@ -821,7 +821,7 @@ def get_consumed_capacity_map(self, return_consumed_capacity: str) -> Dict: Builds the consumed capacity map that is common to several operations """ if return_consumed_capacity.upper() not in RETURN_CONSUMED_CAPACITY_VALUES: - raise ValueError("{} must be one of {}".format(RETURN_ITEM_COLL_METRICS, RETURN_CONSUMED_CAPACITY_VALUES)) + raise ValueError(f"{RETURN_ITEM_COLL_METRICS} must be one of {RETURN_CONSUMED_CAPACITY_VALUES}") return { RETURN_CONSUMED_CAPACITY: str(return_consumed_capacity).upper() } @@ -831,7 +831,7 @@ def get_return_values_map(self, return_values: str) -> Dict: Builds the return values map that is common to several operations """ if return_values.upper() not in RETURN_VALUES_VALUES: - raise 
ValueError("{} must be one of {}".format(RETURN_VALUES, RETURN_VALUES_VALUES)) + raise ValueError(f"{RETURN_VALUES} must be one of {RETURN_VALUES_VALUES}") return { RETURN_VALUES: str(return_values).upper() } @@ -857,7 +857,7 @@ def get_item_collection_map(self, return_item_collection_metrics: str) -> Dict: Builds the item collection map """ if return_item_collection_metrics.upper() not in RETURN_ITEM_COLL_METRICS_VALUES: - raise ValueError("{} must be one of {}".format(RETURN_ITEM_COLL_METRICS, RETURN_ITEM_COLL_METRICS_VALUES)) + raise ValueError(f"{RETURN_ITEM_COLL_METRICS} must be one of {RETURN_ITEM_COLL_METRICS_VALUES}") return { RETURN_ITEM_COLL_METRICS: str(return_item_collection_metrics).upper() } @@ -868,7 +868,7 @@ def get_exclusive_start_key_map(self, table_name: str, exclusive_start_key: str) """ tbl = self.get_meta_table(table_name) if tbl is None: - raise TableError("No such table {}".format(table_name)) + raise TableError(f"No such table {table_name}") return tbl.get_exclusive_start_key_map(exclusive_start_key) def get_operation_kwargs( @@ -951,7 +951,7 @@ def delete_item( try: return self.dispatch(DELETE_ITEM, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: - raise DeleteError("Failed to delete item: {}".format(e), e) + raise DeleteError(f"Failed to delete item: {e}", e) def update_item( self, @@ -983,7 +983,7 @@ def update_item( try: return self.dispatch(UPDATE_ITEM, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: - raise UpdateError("Failed to update item: {}".format(e), e) + raise UpdateError(f"Failed to update item: {e}", e) def put_item( self, @@ -1013,7 +1013,7 @@ def put_item( try: return self.dispatch(PUT_ITEM, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: - raise PutError("Failed to put item: {}".format(e), e) + raise PutError(f"Failed to put item: {e}", e) def _get_transact_operation_kwargs( self, @@ -1126,7 +1126,7 @@ def batch_write_item( try: return self.dispatch(BATCH_WRITE_ITEM, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: - raise PutError("Failed to batch write items: {}".format(e), e) + raise PutError(f"Failed to batch write items: {e}", e) def batch_get_item( self, @@ -1167,7 +1167,7 @@ def batch_get_item( try: return self.dispatch(BATCH_GET_ITEM, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: - raise GetError("Failed to batch get items: {}".format(e), e) + raise GetError(f"Failed to batch get items: {e}", e) def get_item( self, @@ -1190,7 +1190,7 @@ def get_item( try: return self.dispatch(GET_ITEM, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: - raise GetError("Failed to get item: {}".format(e), e) + raise GetError(f"Failed to get item: {e}", e) def scan( self, @@ -1242,7 +1242,7 @@ def scan( try: return self.dispatch(SCAN, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: - raise ScanError("Failed to scan table: {}".format(e), e) + raise ScanError(f"Failed to scan table: {e}", e) def query( self, @@ -1271,10 +1271,10 @@ def query( tbl = self.get_meta_table(table_name) if tbl is None: - raise TableError("No such table: {}".format(table_name)) + raise TableError(f"No such table: {table_name}") if index_name: if not tbl.has_index_name(index_name): - raise ValueError("Table {} has no index: {}".format(table_name, index_name)) + raise ValueError(f"Table {table_name} has no index: {index_name}") hash_keyname = tbl.get_index_hash_keyname(index_name) else: hash_keyname = tbl.hash_keyname @@ -1304,7 +1304,7 @@ def query( operation_kwargs.update(self.get_consumed_capacity_map(return_consumed_capacity)) if select: if select.upper() not in 
SELECT_VALUES: - raise ValueError("{} must be one of {}".format(SELECT, SELECT_VALUES)) + raise ValueError(f"{SELECT} must be one of {SELECT_VALUES}") operation_kwargs[SELECT] = str(select).upper() if scan_index_forward is not None: operation_kwargs[SCAN_INDEX_FORWARD] = scan_index_forward @@ -1316,12 +1316,12 @@ def query( try: return self.dispatch(QUERY, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: - raise QueryError("Failed to query items: {}".format(e), e) + raise QueryError(f"Failed to query items: {e}", e) def _check_condition(self, name, condition): if condition is not None: if not isinstance(condition, Condition): - raise ValueError("'{}' must be an instance of Condition".format(name)) + raise ValueError(f"'{name}' must be an instance of Condition") @staticmethod def _reverse_dict(d): diff --git a/pynamodb/exceptions.py b/pynamodb/exceptions.py index 971ddaaa8..79c24ce97 100644 --- a/pynamodb/exceptions.py +++ b/pynamodb/exceptions.py @@ -93,7 +93,7 @@ class TableDoesNotExist(PynamoDBException): Raised when an operation is attempted on a table that doesn't exist """ def __init__(self, table_name: str) -> None: - msg = "Table does not exist: `{}`".format(table_name) + msg = f"Table does not exist: `{table_name}`" super(TableDoesNotExist, self).__init__(msg) @@ -123,7 +123,7 @@ class AttributeDeserializationError(TypeError): Raised when attribute type is invalid """ def __init__(self, attr_name: str, attr_type: str): - msg = "Cannot deserialize '{}' attribute from type: {}".format(attr_name, attr_type) + msg = f"Cannot deserialize '{attr_name}' attribute from type: {attr_type}" super(AttributeDeserializationError, self).__init__(msg) diff --git a/pynamodb/expressions/condition.py b/pynamodb/expressions/condition.py index b624e56ae..ed35ff528 100644 --- a/pynamodb/expressions/condition.py +++ b/pynamodb/expressions/condition.py @@ -46,7 +46,7 @@ def __repr__(self) -> str: def __bool__(self): # Prevent users from accidentally comparing the condition object instead of the attribute instance - raise TypeError("unsupported operand type(s) for bool: {}".format(self.__class__.__name__)) + raise TypeError(f"unsupported operand type(s) for bool: {self.__class__.__name__}") class Comparison(Condition): @@ -54,7 +54,7 @@ class Comparison(Condition): def __init__(self, operator, lhs, rhs): if operator not in ['=', '<>', '<', '<=', '>', '>=']: - raise ValueError("{0} is not a valid comparison operator: {0}".format(operator)) + raise ValueError(f"{operator} is not a valid comparison operator.") super().__init__(operator, lhs, rhs) diff --git a/pynamodb/expressions/operand.py b/pynamodb/expressions/operand.py index 3c88a9c59..047a67f98 100644 --- a/pynamodb/expressions/operand.py +++ b/pynamodb/expressions/operand.py @@ -49,7 +49,7 @@ def _to_value(self, value): def _type_check(self, *types): if self.attr_type and self.attr_type not in types: - raise ValueError("The data type of '{}' must be one of {}".format(self, list(types))) + raise ValueError(f"The data type of '{self}' must be one of {list(types)}") class _ConditionOperand(_Operand): @@ -254,26 +254,26 @@ def path(self) -> List[str]: def __iter__(self): # Because we define __getitem__ Path is considered an iterable - raise TypeError("'{}' object is not iterable".format(self.__class__.__name__)) + raise TypeError(f"'{self.__class__.__name__}' object is not iterable") def __getitem__(self, item: Union[int, str]) -> 'Path': # The __getitem__ call returns a new Path instance without any attribute set. 
# This is intended since the nested element is not the same attribute as ``self``. if self.attribute and self.attribute.attr_type not in [LIST, MAP]: - raise TypeError("'{}' object has no attribute __getitem__".format(self.attribute.__class__.__name__)) + raise TypeError(f"'{self.attribute.__class__.__name__}' object has no attribute __getitem__") if self.attr_type == LIST and not isinstance(item, int): - raise TypeError("list indices must be integers, not {}".format(type(item).__name__)) + raise TypeError(f"list indices must be integers, not {type(item).__name__}") if self.attr_type == MAP and not isinstance(item, str): - raise TypeError("map attributes must be strings, not {}".format(type(item).__name__)) + raise TypeError(f"map attributes must be strings, not {type(item).__name__}") if isinstance(item, int): # list dereference operator element_path = Path(self.path) # copy the document path before indexing last element - element_path.path[-1] = '{}[{}]'.format(self.path[-1], item) + element_path.path[-1] = f'{self.path[-1]}[{item}]' return element_path if isinstance(item, str): # map dereference operator return Path(self.path + [item]) - raise TypeError("item must be an integer or string, not {}".format(type(item).__name__)) + raise TypeError(f"item must be an integer or string, not {type(item).__name__}") def __or__(self, other): return _IfNotExists(self, self._to_operand(other)) @@ -304,8 +304,7 @@ def does_not_exist(self) -> NotExists: def is_type(self, attr_type: str) -> IsType: if attr_type not in ATTRIBUTE_TYPES: - raise ValueError("{} is not a valid attribute type. Must be one of {}".format( - attr_type, ATTRIBUTE_TYPES)) + raise ValueError(f"{attr_type} is not a valid attribute type. Must be one of {ATTRIBUTE_TYPES}") return IsType(self, Value(attr_type)) def startswith(self, prefix: str) -> BeginsWith: @@ -333,7 +332,7 @@ def __str__(self) -> str: return '.'.join(quoted_path) def __repr__(self) -> str: - return "Path({})".format(self.path) + return f"Path({self.path})" @staticmethod def _quote_path(path: str) -> str: diff --git a/pynamodb/expressions/update.py b/pynamodb/expressions/update.py index 4fd840a63..96248a182 100644 --- a/pynamodb/expressions/update.py +++ b/pynamodb/expressions/update.py @@ -94,7 +94,7 @@ def add_action(self, action: Action) -> None: elif isinstance(action, DeleteAction): self.delete_actions.append(action) else: - raise ValueError("unsupported action type: '{}'".format(action.__class__.__name__)) + raise ValueError(f"unsupported action type: '{action.__class__.__name__}'") def serialize(self, placeholder_names: Dict[str, str], expression_attribute_values: Dict[str, str]) -> Optional[str]: clauses = [ diff --git a/pynamodb/expressions/util.py b/pynamodb/expressions/util.py index 73351c9c4..7f08b07fc 100644 --- a/pynamodb/expressions/util.py +++ b/pynamodb/expressions/util.py @@ -37,7 +37,7 @@ def substitute_names(document_path: Union[str, List[str]], placeholders: Dict[st for idx, segment in enumerate(path_segments): match = PATH_SEGMENT_REGEX.match(segment) if not match: - raise ValueError('{} is not a valid document path'.format('.'.join(document_path))) + raise ValueError(f"{'.'.join(document_path)} is not a valid document path") name, indexes = match.groups() if name in placeholders: placeholder = placeholders[name] diff --git a/pynamodb/models.py b/pynamodb/models.py index 96e23b38f..f5a04293c 100644 --- a/pynamodb/models.py +++ b/pynamodb/models.py @@ -252,7 +252,7 @@ def __init__(self, name: str, bases: Any, attrs: Dict[str, Any]) -> None: 
ttl_attr_names = [name for name, attr_obj in attrs.items() if isinstance(attr_obj, TTLAttribute)] if len(ttl_attr_names) > 1: - raise ValueError("The model has more than one TTL attribute: {}".format(", ".join(ttl_attr_names))) + raise ValueError(f"The model has more than one TTL attribute: {', '.join(ttl_attr_names)}") if META_CLASS_NAME not in attrs: setattr(cls, META_CLASS_NAME, DefaultMeta) @@ -383,9 +383,9 @@ def __repr__(self) -> str: table_name = self.Meta.table_name if self.Meta.table_name else 'unknown' serialized = self._serialize(null_check=False) if self._range_keyname: - msg = "{}<{}, {}>".format(self.Meta.table_name, serialized.get(HASH), serialized.get(RANGE)) + msg = f"{self.Meta.table_name}<{serialized.get(HASH)}, {serialized.get(RANGE)}>" else: - msg = "{}<{}>".format(self.Meta.table_name, serialized.get(HASH)) + msg = f"{self.Meta.table_name}<{serialized.get(HASH)}>" return msg def delete(self, condition: Optional[Condition] = None) -> Any: @@ -820,7 +820,7 @@ def update_ttl(cls, ignore_update_ttl_errors: bool) -> None: cls._get_connection().update_time_to_live(ttl_attribute.attr_name) except Exception: if ignore_update_ttl_errors: - log.info("Unable to update the TTL for {}".format(cls.Meta.table_name)) + log.info(f"Unable to update the TTL for {cls.Meta.table_name}") else: raise @@ -1140,7 +1140,7 @@ def _serialize_value(cls, attr, value): if serialized is None: if not attr.null: - raise ValueError("Attribute '{}' cannot be None".format(attr.attr_name)) + raise ValueError(f"Attribute '{attr.attr_name}' cannot be None") return {NULL: True} return {attr.attr_type: serialized} diff --git a/pynamodb/settings.py b/pynamodb/settings.py index e1ccd6549..2741ac14a 100644 --- a/pynamodb/settings.py +++ b/pynamodb/settings.py @@ -34,9 +34,9 @@ def _load_module(name, path): override_settings = _load_module('__pynamodb_override_settings__', OVERRIDE_SETTINGS_PATH) if hasattr(override_settings, 'session_cls') or hasattr(override_settings, 'request_timeout_seconds'): warnings.warn("The `session_cls` and `request_timeout_second` options are no longer supported") - log.info('Override settings for pynamo available {}'.format(OVERRIDE_SETTINGS_PATH)) + log.info(f'Override settings for pynamo available {OVERRIDE_SETTINGS_PATH}') else: - log.info('Override settings for pynamo not available {}'.format(OVERRIDE_SETTINGS_PATH)) + log.info(f'Override settings for pynamo not available {OVERRIDE_SETTINGS_PATH}') log.info('Using Default settings value') diff --git a/tests/deep_eq.py b/tests/deep_eq.py index f72bf76df..ffc032048 100644 --- a/tests/deep_eq.py +++ b/tests/deep_eq.py @@ -92,8 +92,7 @@ def deep_eq(_v1, _v2, datetime_fudge=default_fudge, _assert=False): def _check_assert(R, a, b, reason=''): if _assert and not R: - assert 0, "an assertion has failed in deep_eq ({}) {} != {}".format( - reason, str(a), str(b)) + assert 0, f"an assertion has failed in deep_eq ({reason}) {str(a)} != {str(b)}" return R def _deep_dict_eq(d1, d2): diff --git a/tests/integration/base_integration_test.py b/tests/integration/base_integration_test.py index 87c386361..b1ae6747a 100644 --- a/tests/integration/base_integration_test.py +++ b/tests/integration/base_integration_test.py @@ -143,7 +143,7 @@ def test_connection_integration(ddb_url): items = [] for i in range(10): items.append( - {"Forum": "FooForum", "Thread": "thread-{}".format(i)} + {"Forum": "FooForum", "Thread": f"thread-{i}"} ) print("conn.batch_write_items...") conn.batch_write_item( diff --git a/tests/integration/model_integration_test.py 
b/tests/integration/model_integration_test.py index 92649e295..ab8804ece 100644 --- a/tests/integration/model_integration_test.py +++ b/tests/integration/model_integration_test.py @@ -68,26 +68,26 @@ class Meta: obj3.refresh() with TestModel.batch_write() as batch: - items = [TestModel('hash-{}'.format(x), '{}'.format(x)) for x in range(10)] + items = [TestModel(f'hash-{x}', f'{x}') for x in range(10)] for item in items: batch.save(item) - item_keys = [('hash-{}'.format(x), 'thread-{}'.format(x)) for x in range(10)] + item_keys = [(f'hash-{x}', f'thread-{x}') for x in range(10)] for item in TestModel.batch_get(item_keys): print(item) for item in TestModel.query('setitem', TestModel.thread.startswith('set')): - print("Query Item {}".format(item)) + print(f"Query Item {item}") with TestModel.batch_write() as batch: - items = [TestModel('hash-{}'.format(x), '{}'.format(x)) for x in range(10)] + items = [TestModel(f'hash-{x}', f'{x}') for x in range(10)] for item in items: print("Batch delete") batch.delete(item) for item in TestModel.scan(): - print("Scanned item: {}".format(item)) + print(f"Scanned item: {item}") tstamp = datetime.now() query_obj = TestModel('query_forum', 'query_thread') @@ -95,10 +95,10 @@ class Meta: query_obj.save() query_obj.update([TestModel.view.add(1)]) for item in TestModel.epoch_index.query(tstamp): - print("Item queried from index: {}".format(item)) + print(f"Item queried from index: {item}") for item in TestModel.view_index.query('foo', TestModel.view > 0): - print("Item queried from index: {}".format(item.view)) + print(f"Item queried from index: {item.view}") print(query_obj.update([TestModel.view.add(1)], condition=TestModel.forum.exists())) TestModel.delete_table() diff --git a/tests/integration/table_integration_test.py b/tests/integration/table_integration_test.py index f05b5e711..71b291293 100644 --- a/tests/integration/table_integration_test.py +++ b/tests/integration/table_integration_test.py @@ -135,7 +135,7 @@ def test_table_integration(ddb_url): items = [] for i in range(10): items.append( - {"Forum": "FooForum", "Thread": "thread-{}".format(i)} + {"Forum": "FooForum", "Thread": f"thread-{i}"} ) print("conn.batch_write_items...") conn.batch_write_item( diff --git a/tests/mypy_helpers.py b/tests/mypy_helpers.py index b7cb4c7e1..4223a7b98 100644 --- a/tests/mypy_helpers.py +++ b/tests/mypy_helpers.py @@ -15,7 +15,7 @@ def _run_mypy(program: str, *, use_pdb: bool) -> Iterable[str]: import mypy.api with TemporaryDirectory() as tempdirname: - with open('{}/__main__.py'.format(tempdirname), 'w') as f: + with open(f'{tempdirname}/__main__.py', 'w') as f: f.write(program) error_pattern = re.compile(fr'^{re.escape(f.name)}:' r'(?P<line>\d+): (?P<level>note|warning|error): (?P<message>.*)$') diff --git a/tests/test_base_connection.py b/tests/test_base_connection.py index 48c8310be..372c284a7 100644 --- a/tests/test_base_connection.py +++ b/tests/test_base_connection.py @@ -72,7 +72,7 @@ def test_create_connection(self): conn = Connection(host='http://foohost') self.assertIsNotNone(conn.client) self.assertIsNotNone(conn) - self.assertEqual(repr(conn), "Connection<{}>".format(conn.host)) + self.assertEqual(repr(conn), f"Connection<{conn.host}>") def test_subsequent_client_is_not_cached_when_credentials_none(self): with patch('pynamodb.connection.Connection.session') as session_mock: @@ -932,7 +932,7 @@ def test_batch_write_item(self): table_name = 'Thread' for i in range(10): items.append( - {"ForumName": "FooForum", "Subject": "thread-{}".format(i)} + {"ForumName": "FooForum", 
"Subject": f"thread-{i}"} ) self.assertRaises( ValueError, @@ -1066,7 +1066,7 @@ def test_batch_get_item(self): table_name = 'Thread' for i in range(10): items.append( - {"ForumName": "FooForum", "Subject": "thread-{}".format(i)} + {"ForumName": "FooForum", "Subject": f"thread-{i}"} ) with patch(PATCH_METHOD) as req: req.return_value = DESCRIBE_TABLE_DATA diff --git a/tests/test_model.py b/tests/test_model.py index 174067fd9..587341395 100644 --- a/tests/test_model.py +++ b/tests/test_model.py @@ -477,14 +477,14 @@ def assert_dict_lists_equal(self, list1, list2): This function allows both the lists and dictionaries to have any order """ if len(list1) != len(list2): - raise AssertionError("Values not equal: {} {}".format(list1, list2)) + raise AssertionError(f"Values not equal: {list1} {list2}") for d1_item in list1: found = False for d2_item in list2: if d2_item == d1_item: found = True if not found: - raise AssertionError("Values not equal: {} {}".format(list1, list2)) + raise AssertionError(f"Values not equal: {list1} {list2}") def test_create_model(self): """ @@ -658,12 +658,12 @@ def test_model_attrs(self): self.assertEqual(item.email, 'needs_email') self.assertEqual(item.callable_field, 42) self.assertEqual( - repr(item), '{}<{}, {}>'.format(UserModel.Meta.table_name, item.custom_user_name, item.user_id) + repr(item), f'{UserModel.Meta.table_name}<{item.custom_user_name}, {item.user_id}>' ) self.init_table_meta(SimpleUserModel, SIMPLE_MODEL_TABLE_DATA) item = SimpleUserModel('foo') - self.assertEqual(repr(item), '{}<{}>'.format(SimpleUserModel.Meta.table_name, item.user_name)) + self.assertEqual(repr(item), f'{SimpleUserModel.Meta.table_name}<{item.user_name}>') self.assertRaises(ValueError, item.save) self.assertRaises(ValueError, UserModel.from_raw_data, None) @@ -1211,7 +1211,7 @@ def test_query_limit_greater_than_available_items_single_page(self): items = [] for idx in range(5): item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM)) - item['user_id'] = {STRING: 'id-{}'.format(idx)} + item['user_id'] = {STRING: f'id-{idx}'} items.append(item) req.return_value = {'Count': len(items), 'ScannedCount': len(items), 'Items': items} @@ -1228,7 +1228,7 @@ def test_query_limit_identical_to_available_items_single_page(self): items = [] for idx in range(5): item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM)) - item['user_id'] = {STRING: 'id-{}'.format(idx)} + item['user_id'] = {STRING: f'id-{idx}'} items.append(item) req.return_value = {'Count': len(items), 'ScannedCount': len(items), 'Items': items} @@ -1245,7 +1245,7 @@ def test_query_limit_less_than_available_items_multiple_page(self): items = [] for idx in range(30): item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM)) - item['user_id'] = {STRING: 'id-{}'.format(idx)} + item['user_id'] = {STRING: f'id-{idx}'} items.append(item) req.side_effect = [ @@ -1273,7 +1273,7 @@ def test_query_limit_less_than_available_and_page_size(self): items = [] for idx in range(30): item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM)) - item['user_id'] = {STRING: 'id-{}'.format(idx)} + item['user_id'] = {STRING: f'id-{idx}'} items.append(item) req.side_effect = [ @@ -1301,7 +1301,7 @@ def test_query_limit_greater_than_available_items_multiple_page(self): items = [] for idx in range(30): item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM)) - item['user_id'] = {STRING: 'id-{}'.format(idx)} + item['user_id'] = {STRING: f'id-{idx}'} items.append(item) req.side_effect = [ @@ -1328,7 +1328,7 @@ def test_query_limit_greater_than_available_items_and_page_size(self): items = [] for idx in 
range(30): item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM)) - item['user_id'] = {STRING: 'id-{}'.format(idx)} + item['user_id'] = {STRING: f'id-{idx}'} items.append(item) req.side_effect = [ @@ -1356,7 +1356,7 @@ def test_query_with_exclusive_start_key(self): items = [] for idx in range(30): item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM)) - item['user_id'] = {STRING: 'id-{}'.format(idx)} + item['user_id'] = {STRING: f'id-{idx}'} items.append(item) req.side_effect = [ @@ -1385,7 +1385,7 @@ def test_query(self): items = [] for idx in range(10): item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM)) - item['user_id'] = {STRING: 'id-{}'.format(idx)} + item['user_id'] = {STRING: f'id-{idx}'} items.append(item) req.return_value = {'Count': len(items), 'ScannedCount': len(items), 'Items': items} queried = [] @@ -1400,7 +1400,7 @@ def test_query(self): items = [] for idx in range(10): item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM)) - item['user_id'] = {STRING: 'id-{}'.format(idx)} + item['user_id'] = {STRING: f'id-{idx}'} items.append(item) req.return_value = {'Count': len(items), 'ScannedCount': len(items), 'Items': items} queried = [] @@ -1412,7 +1412,7 @@ def test_query(self): items = [] for idx in range(10): item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM)) - item['user_id'] = {STRING: 'id-{}'.format(idx)} + item['user_id'] = {STRING: f'id-{idx}'} items.append(item) req.return_value = {'Count': len(items), 'ScannedCount': len(items), 'Items': items} queried = [] @@ -1424,7 +1424,7 @@ def test_query(self): items = [] for idx in range(10): item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM)) - item['user_id'] = {STRING: 'id-{}'.format(idx)} + item['user_id'] = {STRING: f'id-{idx}'} items.append(item) req.return_value = {'Count': len(items), 'ScannedCount': len(items), 'Items': items} queried = [] @@ -1436,7 +1436,7 @@ def test_query(self): items = [] for idx in range(10): item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM)) - item['user_id'] = {STRING: 'id-{}'.format(idx)} + item['user_id'] = {STRING: f'id-{idx}'} items.append(item) req.return_value = {'Count': len(items), 'ScannedCount': len(items), 'Items': items} queried = [] @@ -1448,7 +1448,7 @@ def test_query(self): items = [] for idx in range(10): item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM)) - item['user_id'] = {STRING: 'id-{}'.format(idx)} + item['user_id'] = {STRING: f'id-{idx}'} items.append(item) req.return_value = {'Count': len(items), 'ScannedCount': len(items), 'Items': items} queried = [] @@ -1460,7 +1460,7 @@ def test_query(self): items = [] for idx in range(10): item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM)) - item['user_id'] = {STRING: 'id-{}'.format(idx)} + item['user_id'] = {STRING: f'id-{idx}'} items.append(item) req.return_value = {'Count': len(items), 'ScannedCount': len(items), 'Items': items} queried = [] @@ -1503,7 +1503,7 @@ def fake_query(*args): items = [] for idx in range(10): item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM)) - item['user_id'] = {STRING: 'id-{}'.format(idx)} + item['user_id'] = {STRING: f'id-{idx}'} items.append(item) req.return_value = {'Count': len(items), 'ScannedCount': len(items), 'Items': items} queried = [] @@ -1550,7 +1550,7 @@ def test_scan_limit_with_page_size(self): items = [] for idx in range(30): item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM)) - item['user_id'] = {STRING: 'id-{}'.format(idx)} + item['user_id'] = {STRING: f'id-{idx}'} items.append(item) req.side_effect = [ @@ -1613,7 +1613,7 @@ def test_scan(self): items = [] for idx in range(10): item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM)) - 
item['user_id'] = {STRING: 'id-{}'.format(idx)} + item['user_id'] = {STRING: f'id-{idx}'} items.append(item) req.return_value = {'Count': len(items), 'ScannedCount': len(items), 'Items': items} scanned_items = [] @@ -1655,7 +1655,7 @@ def fake_scan(*args): items = [] for idx in range(10): item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM)) - item['user_id'] = {STRING: 'id-{0}'.format(idx)} + item['user_id'] = {STRING: f'id-{idx}'} items.append(item) req.return_value = {'Count': len(items), 'ScannedCount': len(items), 'Items': items} for item in UserModel.scan( @@ -1767,7 +1767,7 @@ def test_batch_get(self): with patch(PATCH_METHOD) as req: req.return_value = SIMPLE_BATCH_GET_ITEMS - item_keys = ['hash-{}'.format(x) for x in range(10)] + item_keys = [f'hash-{x}' for x in range(10)] for item in SimpleUserModel.batch_get(item_keys): self.assertIsNotNone(item) params = { @@ -1793,7 +1793,7 @@ def test_batch_get(self): with patch(PATCH_METHOD) as req: req.return_value = SIMPLE_BATCH_GET_ITEMS - item_keys = ['hash-{}'.format(x) for x in range(10)] + item_keys = [f'hash-{x}' for x in range(10)] for item in SimpleUserModel.batch_get(item_keys, attributes_to_get=['numbers']): self.assertIsNotNone(item) params = { @@ -1823,7 +1823,7 @@ def test_batch_get(self): with patch(PATCH_METHOD) as req: req.return_value = SIMPLE_BATCH_GET_ITEMS - item_keys = ['hash-{}'.format(x) for x in range(10)] + item_keys = [f'hash-{x}' for x in range(10)] for item in SimpleUserModel.batch_get(item_keys, consistent_read=True): self.assertIsNotNone(item) params = { @@ -1851,7 +1851,7 @@ def test_batch_get(self): self.init_table_meta(UserModel, MODEL_TABLE_DATA) with patch(PATCH_METHOD) as req: - item_keys = [('hash-{}'.format(x), '{}'.format(x)) for x in range(10)] + item_keys = [(f'hash-{x}', f'{x}') for x in range(10)] item_keys_copy = list(item_keys) req.return_value = BATCH_GET_ITEMS for item in UserModel.batch_get(item_keys): @@ -1906,7 +1906,7 @@ def fake_batch_get(*batch_args): batch_get_mock.side_effect = fake_batch_get with patch(PATCH_METHOD, new=batch_get_mock) as req: - item_keys = [('hash-{}'.format(x), '{}'.format(x)) for x in range(200)] + item_keys = [(f'hash-{x}', f'{x}') for x in range(200)] for item in UserModel.batch_get(item_keys): self.assertIsNotNone(item) @@ -1926,30 +1926,30 @@ def test_batch_write(self): with self.assertRaises(ValueError): with UserModel.batch_write(auto_commit=False) as batch: - items = [UserModel('hash-{}'.format(x), '{}'.format(x)) for x in range(26)] + items = [UserModel(f'hash-{x}', f'{x}') for x in range(26)] for item in items: batch.delete(item) self.assertRaises(ValueError, batch.save, UserModel('asdf', '1234')) with UserModel.batch_write(auto_commit=False) as batch: - items = [UserModel('hash-{}'.format(x), '{}'.format(x)) for x in range(25)] + items = [UserModel(f'hash-{x}', f'{x}') for x in range(25)] for item in items: batch.delete(item) self.assertRaises(ValueError, batch.save, UserModel('asdf', '1234')) with UserModel.batch_write(auto_commit=False) as batch: - items = [UserModel('hash-{}'.format(x), '{}'.format(x)) for x in range(25)] + items = [UserModel(f'hash-{x}', f'{x}') for x in range(25)] for item in items: batch.save(item) self.assertRaises(ValueError, batch.save, UserModel('asdf', '1234')) with UserModel.batch_write() as batch: - items = [UserModel('hash-{}'.format(x), '{}'.format(x)) for x in range(30)] + items = [UserModel(f'hash-{x}', f'{x}') for x in range(30)] for item in items: batch.delete(item) with UserModel.batch_write() as batch: - items = 
[UserModel('hash-{}'.format(x), '{}'.format(x)) for x in range(30)] + items = [UserModel(f'hash-{x}', f'{x}') for x in range(30)] for item in items: batch.save(item) @@ -1961,7 +1961,7 @@ def test_batch_write_with_unprocessed(self): for idx in range(10): items.append(UserModel( 'daniel', - '{}'.format(idx), + f'{idx}', picture=picture_blob, )) @@ -1971,7 +1971,7 @@ def test_batch_write_with_unprocessed(self): 'PutRequest': { 'Item': { 'custom_username': {STRING: 'daniel'}, - 'user_id': {STRING: '{}'.format(idx)}, + 'user_id': {STRING: f'{idx}'}, 'picture': {BINARY: base64.b64encode(picture_blob).decode(DEFAULT_ENCODING)} } } @@ -2002,7 +2002,7 @@ def test_batch_write_raises_put_error(self): items = [] for idx in range(10): items.append(BatchModel( - '{}'.format(idx) + f'{idx}' )) unprocessed_items = [] @@ -2057,8 +2057,8 @@ def test_index_queries(self): items = [] for idx in range(10): item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM)) - item['user_name'] = {STRING: 'id-{}'.format(idx)} - item['email'] = {STRING: 'id-{}'.format(idx)} + item['user_name'] = {STRING: f'id-{idx}'} + item['email'] = {STRING: f'id-{idx}'} items.append(item) req.return_value = {'Count': len(items), 'ScannedCount': len(items), 'Items': items} queried = [] @@ -2092,8 +2092,8 @@ def test_index_queries(self): items = [] for idx in range(10): item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM)) - item['user_name'] = {STRING: 'id-{}'.format(idx)} - item['email'] = {STRING: 'id-{}'.format(idx)} + item['user_name'] = {STRING: f'id-{idx}'} + item['email'] = {STRING: f'id-{idx}'} items.append(item) req.return_value = {'Count': len(items), 'ScannedCount': len(items), 'Items': items} queried = [] @@ -2134,7 +2134,7 @@ def test_index_queries(self): items = [] for idx in range(10): item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM)) - item['user_name'] = {STRING: 'id-{}'.format(idx)} + item['user_name'] = {STRING: f'id-{idx}'} items.append(item) req.return_value = {'Count': len(items), 'ScannedCount': len(items), 'Items': items} queried = [] @@ -2430,8 +2430,8 @@ def test_dumps(self): items = [] for idx in range(10): item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM)) - item['user_id'] = {STRING: 'id-{}'.format(idx)} - item['email'] = {STRING: 'email-{}'.format(random.randint(0, 65536))} + item['user_id'] = {STRING: f'id-{idx}'} + item['email'] = {STRING: f'email-{random.randint(0, 65536)}'} item['picture'] = {BINARY: BINARY_ATTR_DATA} items.append(item) req.return_value = {'Count': len(items), 'ScannedCount': len(items), 'Items': items} diff --git a/tests/test_table_connection.py b/tests/test_table_connection.py index e24f9fd97..10e2a392b 100644 --- a/tests/test_table_connection.py +++ b/tests/test_table_connection.py @@ -384,7 +384,7 @@ def test_batch_write_item(self): conn = TableConnection(self.test_table_name) for i in range(10): items.append( - {"ForumName": "FooForum", "Subject": "thread-{}".format(i)} + {"ForumName": "FooForum", "Subject": f"thread-{i}"} ) with patch(PATCH_METHOD) as req: req.return_value = DESCRIBE_TABLE_DATA @@ -421,7 +421,7 @@ def test_batch_get_item(self): conn = TableConnection(self.test_table_name) for i in range(10): items.append( - {"ForumName": "FooForum", "Subject": "thread-{}".format(i)} + {"ForumName": "FooForum", "Subject": f"thread-{i}"} ) with patch(PATCH_METHOD) as req: req.return_value = DESCRIBE_TABLE_DATA