diff --git a/README.rst b/README.rst
index 5ee67c77d..893cc2e51 100644
--- a/README.rst
+++ b/README.rst
@@ -214,16 +214,6 @@ the type of data you'd like to stream.
         name = UnicodeAttribute(range_key=True)
         id = UnicodeAttribute(hash_key=True)
 
-Want to backup and restore a table? No problem.
-
-.. code-block:: python
-
-    # Backup the table
-    UserModel.dump("usermodel_backup.json")
-
-    # Restore the table
-    UserModel.load("usermodel_backup.json")
-
 Features
 ========
 
@@ -232,7 +222,6 @@ Features
 * An ORM-like interface with query and scan filters
 * Compatible with DynamoDB Local
 * Supports the entire DynamoDB API
-* Full table backup/restore
 * Support for Unicode, Binary, JSON, Number, Set, and UTC Datetime attributes
 * Support for Global and Local Secondary Indexes
 * Provides iterators for working with queries, scans, that are automatically paginated
diff --git a/docs/backup_restore.rst b/docs/backup_restore.rst
deleted file mode 100644
index bcc3f6b99..000000000
--- a/docs/backup_restore.rst
+++ /dev/null
@@ -1,66 +0,0 @@
-Table Backups
-=============
-
-PynamoDB provides methods for backing up and restoring the items in your table. Items are serialized to and from JSON
-encoded strings and files. Only serialized item data are stored in a backup, not any table metadata.
-
-Backing up a table
-------------------
-
-To back up a table, you can simply use the provided `dump` method and write the contents to a file.
-
-.. code-block:: python
-
-    from pynamodb.models import Model
-    from pynamodb.attributes import (
-        UnicodeAttribute, NumberAttribute
-    )
-
-    class Thread(Model):
-        class Meta:
-            table_name = 'Thread'
-
-        forum_name = UnicodeAttribute(hash_key=True)
-        subject = UnicodeAttribute(range_key=True)
-        views = NumberAttribute(default=0)
-
-    Thread.dump("thread_backup.json")
-
-Alternatively, you can write the contents to a string.
-
-.. code-block:: python
-
-    content = Thread.dumps()
-
-
-Restoring from a backup
------------------------
-
-To restore items from a backup file, simply use the provided `load` method.
-
-.. warning::
-
-    Items contained in a backup *will* overwrite any existing items in your table!
-
-.. code-block:: python
-
-    from pynamodb.models import Model
-    from pynamodb.attributes import (
-        UnicodeAttribute, NumberAttribute
-    )
-
-    class Thread(Model):
-        class Meta:
-            table_name = 'Thread'
-
-        forum_name = UnicodeAttribute(hash_key=True)
-        subject = UnicodeAttribute(range_key=True)
-        views = NumberAttribute(default=0)
-
-    Thread.load("thread_backup.json")
-
-Alternatively, you can also load the contents from a string.
-
-.. code-block:: python
-
-    Thread.loads(content)
diff --git a/docs/index.rst b/docs/index.rst
index e52f36db4..d287254f7 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -37,7 +37,6 @@ Topics
    optimistic_locking
    rate_limited_operations
    local
-   backup_restore
    signals
    examples
    settings
diff --git a/docs/release_notes.rst b/docs/release_notes.rst
index fa353ddae..44115515e 100644
--- a/docs/release_notes.rst
+++ b/docs/release_notes.rst
@@ -8,6 +8,9 @@ v5.0
   where what's intended to be a cheap and fast condition ends up being expensive and slow. Since filter conditions
   cannot contain range keys, this had limited utility to begin with, and would sometimes cause confusing
   "'filter_condition' cannot contain key attributes" errors.
+* ``Model.dump(s)`` and ``Model.load(s)`` were removed since they are woefully inadequate for any table at scale
+  and didn't belong to the base Model class. Additionally, they were static methods applying to the entire table
+  but named confusingly like methods that would (de)serialize a model instance.
 
 v4.3.3
 ----------
diff --git a/examples/model.py b/examples/model.py
index 51387f113..4da6e6228 100644
--- a/examples/model.py
+++ b/examples/model.py
@@ -192,26 +192,3 @@ class Meta:
 print(thread_item.update(actions=[
     Thread.tags.remove()
 ]))
-
-# Backup/restore example
-# Print the size of the table
-print("Table size: {}".format(Thread.describe_table().get('ItemCount')))
-
-# Dump the entire table to a file
-Thread.dump('thread.json')
-
-# Optionally Delete all table items
-# Commented out for safety
-# for item in Thread.scan():
-#     item.delete()
-print("Table size: {}".format(Thread.describe_table().get('ItemCount')))
-
-# Restore table from a file
-Thread.load('thread.json')
-print("Table size: {}".format(Thread.describe_table().get('ItemCount')))
-
-# Dump the entire table to a string
-serialized = Thread.dumps()
-
-# Load the entire table from a string
-Thread.loads(serialized)
diff --git a/pynamodb/models.py b/pynamodb/models.py
index 3faa0d2e3..145eb255d 100644
--- a/pynamodb/models.py
+++ b/pynamodb/models.py
@@ -828,34 +828,6 @@ def update_ttl(cls, ignore_update_ttl_errors: bool) -> None:
             else:
                 raise
 
-    @classmethod
-    def dumps(cls) -> Any:
-        """
-        Returns a JSON representation of this model's table
-        """
-        return json.dumps([item._get_json() for item in cls.scan()])
-
-    @classmethod
-    def dump(cls, filename: str) -> None:
-        """
-        Writes the contents of this model's table as JSON to the given filename
-        """
-        with open(filename, 'w') as out:
-            out.write(cls.dumps())
-
-    @classmethod
-    def loads(cls, data: str) -> None:
-        content = json.loads(data)
-        with cls.batch_write() as batch:
-            for item_data in content:
-                item = cls._from_data(item_data)
-                batch.save(item)
-
-    @classmethod
-    def load(cls, filename: str) -> None:
-        with open(filename, 'r') as inf:
-            cls.loads(inf.read())
-
     # Private API below
     @classmethod
     def _from_data(cls, data):
diff --git a/tests/test_model.py b/tests/test_model.py
index 5eefb2ea3..12f6aa4d9 100644
--- a/tests/test_model.py
+++ b/tests/test_model.py
@@ -2421,83 +2421,6 @@ def test_old_style_model_exception(self):
         with self.assertRaises(AttributeError):
             OldStyleModel.exists()
 
-    def test_dumps(self):
-        """
-        Model.dumps
-        """
-        with patch(PATCH_METHOD) as req:
-            items = []
-            for idx in range(10):
-                item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM))
-                item['user_id'] = {STRING_SHORT: 'id-{}'.format(idx)}
-                item['email'] = {STRING_SHORT: 'email-{}'.format(random.randint(0, 65536))}
-                item['picture'] = {BINARY_SHORT: BINARY_ATTR_DATA}
-                items.append(item)
-            req.return_value = {'Count': len(items), 'ScannedCount': len(items), 'Items': items}
-            content = UserModel.dumps()
-            serialized_items = json.loads(content)
-            for original, new_item in zip(items, serialized_items):
-                self.assertEqual(new_item[0], original['user_name'][STRING_SHORT])
-                self.assertEqual(new_item[1][snake_to_camel_case(ATTRIBUTES)]['zip_code']['N'], original['zip_code']['N'])
-                self.assertEqual(new_item[1][snake_to_camel_case(ATTRIBUTES)]['email']['S'], original['email']['S'])
-                self.assertEqual(new_item[1][snake_to_camel_case(ATTRIBUTES)]['picture']['B'], original['picture']['B'])
-
-    def test_loads(self):
-        """
-        Model.loads
-        """
-        with patch(PATCH_METHOD) as req:
-            req.return_value = {}
-            UserModel.loads(json.dumps(SERIALIZED_TABLE_DATA))
-
-            args = {
-                'UserModel': [
-                    {
-                        'PutRequest': {
-                            'Item': {
-                                'user_id': {'S': u'id-0'},
-                                'callable_field': {'N': '42'},
-                                'user_name': {'S': u'foo'},
-                                'email': {'S': u'email-7980'},
-                                'picture': {
-                                    "B": "aGVsbG8sIHdvcmxk"
-                                },
-                                'zip_code': {'N': '88030'}
-                            }
-                        }
-                    },
-                    {
-                        'PutRequest': {
-                            'Item': {
-                                'user_id': {'S': u'id-1'},
-                                'callable_field': {'N': '42'},
-                                'user_name': {'S': u'foo'},
-                                'email': {'S': u'email-19770'},
-                                'picture': {
-                                    "B": "aGVsbG8sIHdvcmxk"
-                                },
-                                'zip_code': {'N': '88030'}
-                            }
-                        }
-                    }
-                ]
-            }
-            self.assert_dict_lists_equal(req.call_args[0][1]['RequestItems']['UserModel'], args['UserModel'])
-
-    def test_loads_complex_model(self):
-        with patch(PATCH_METHOD) as req:
-            req.return_value = {}
-            ComplexModel.loads(json.dumps(COMPLEX_MODEL_SERIALIZED_TABLE_DATA))
-
-            args = {
-                'ComplexModel': [
-                    {
-                        'PutRequest': COMPLEX_MODEL_ITEM_DATA
-                    }
-                ]
-            }
-            self.assert_dict_lists_equal(req.call_args[0][1]['RequestItems']['ComplexModel'], args['ComplexModel'])
-
     def _get_office_employee(self):
         justin = Person(
             fname='Justin',