diff --git a/examples/pg_vectorstore_how_to.ipynb b/examples/pg_vectorstore_how_to.ipynb index fb38bfa..ec91239 100644 --- a/examples/pg_vectorstore_how_to.ipynb +++ b/examples/pg_vectorstore_how_to.ipynb @@ -73,7 +73,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "id": "irl7eMFnSPZr", "metadata": { "id": "irl7eMFnSPZr" @@ -117,7 +117,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -131,7 +131,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -151,7 +151,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -178,7 +178,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "metadata": { "id": "avlyHEMn6gzU" }, @@ -219,7 +219,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" @@ -247,7 +247,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": null, "metadata": { "id": "z-AZyzAQ7bsf" }, @@ -333,7 +333,9 @@ "source": [ "### Delete documents\n", "\n", - "Documents can be deleted using ids." + "Documents can be deleted using IDs or metadata filters.\n", + "\n", + "#### Delete by IDs" ] }, { @@ -345,6 +347,47 @@ "await store.adelete([ids[1]])" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Delete by metadata filter\n", + "\n", + "You can delete documents based on metadata filters. This is useful for bulk deletion operations." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Delete all documents with a specific metadata value\n", + "await store.adelete(filter={\"source\": \"documentation\"})" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Delete documents matching complex filter criteria\n", + "await store.adelete(\n", + " filter={\"$and\": [{\"category\": \"obsolete\"}, {\"year\": {\"$lt\": 2020}}]}\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Delete by both IDs and filter (must match both criteria)\n", + "await store.adelete(ids=[\"id1\", \"id2\"], filter={\"status\": \"archived\"})" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -496,7 +539,7 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -959,6 +1002,13 @@ "source": [ "await pg_engine.adrop_table(TABLE_NAME)" ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { @@ -967,7 +1017,8 @@ "toc_visible": true }, "kernelspec": { - "display_name": "Python 3", + "display_name": ".venv", + "language": "python", "name": "python3" }, "language_info": { @@ -980,7 +1031,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.8" + "version": "3.12.11" } }, "nbformat": 4, diff --git a/langchain_postgres/v2/async_vectorstore.py b/langchain_postgres/v2/async_vectorstore.py index a5020c0..89cf83c 100644 --- a/langchain_postgres/v2/async_vectorstore.py +++ b/langchain_postgres/v2/async_vectorstore.py @@ -415,19 +415,60 @@ async def aadd_documents( async def adelete( self, ids: Optional[list] = None, + 
+        filter: Optional[dict] = None,
         **kwargs: Any,
     ) -> Optional[bool]:
         """Delete records from the table.
 
+        Args:
+            ids: List of document IDs to delete.
+            filter: Metadata filter dictionary for bulk deletion.
+                Supports the same filter syntax as similarity_search.
+                Note: Filters only work on fields defined in metadata_columns,
+                not on fields stored in the metadata_json_column.
+
+        Returns:
+            True if deletion was successful, False if no criteria provided.
+
         Raises:
             :class:`InvalidTextRepresentationError `: if the `ids` data type does not match that of the `id_column`.
+
+        Examples:
+            Delete by IDs:
+                await vectorstore.adelete(ids=["id1", "id2"])
+
+            Delete by metadata filter (requires metadata_columns):
+                await vectorstore.adelete(filter={"source": "documentation"})
+                await vectorstore.adelete(filter={"$and": [{"category": "obsolete"}, {"year": {"$lt": 2020}}]})
+
+            Delete by both IDs and filter (must match both criteria):
+                await vectorstore.adelete(ids=["id1", "id2"], filter={"status": "archived"})
         """
-        if not ids:
+        if not ids and not filter:
             return False
-        placeholders = ", ".join(f":id_{i}" for i in range(len(ids)))
-        param_dict = {f"id_{i}": id for i, id in enumerate(ids)}
-        query = f'DELETE FROM "{self.schema_name}"."{self.table_name}" WHERE {self.id_column} in ({placeholders})'
+        where_clauses = []
+        param_dict = {}
+
+        # Handle ID-based deletion
+        if ids:
+            placeholders = ", ".join(f":id_{i}" for i in range(len(ids)))
+            id_params = {f"id_{i}": id for i, id in enumerate(ids)}
+            param_dict.update(id_params)
+            where_clauses.append(f"{self.id_column} in ({placeholders})")
+
+        # Handle filter-based deletion
+        if filter:
+            filter_clause, filter_params = self._create_filter_clause(filter)
+            param_dict.update(filter_params)
+            where_clauses.append(filter_clause)
+
+        # Combine WHERE clauses with AND if both are present
+        where_clause = " AND ".join(where_clauses)
+        query = (
+            f'DELETE FROM "{self.schema_name}"."{self.table_name}" WHERE {where_clause}'
+        )
+
         async with self.engine.connect() as conn:
             await conn.execute(text(query), param_dict)
             await conn.commit()
@@ -1337,8 +1378,35 @@ def add_documents(
     def delete(
         self,
         ids: Optional[list] = None,
+        filter: Optional[dict] = None,
         **kwargs: Any,
     ) -> Optional[bool]:
+        """Delete records from the table.
+
+        Args:
+            ids: List of document IDs to delete.
+            filter: Metadata filter dictionary for bulk deletion.
+                Supports the same filter syntax as similarity_search.
+                Note: Filters only work on fields defined in metadata_columns,
+                not on fields stored in the metadata_json_column.
+
+        Returns:
+            True if deletion was successful, False if no criteria provided.
+
+        Raises:
+            :class:`InvalidTextRepresentationError `: if the `ids` data type does not match that of the `id_column`.
+
+        Examples:
+            Delete by IDs:
+                vectorstore.delete(ids=["id1", "id2"])
+
+            Delete by metadata filter (requires metadata_columns):
+                vectorstore.delete(filter={"source": "documentation"})
+                vectorstore.delete(filter={"$and": [{"category": "obsolete"}, {"year": {"$lt": 2020}}]})
+
+            Delete by both IDs and filter (must match both criteria):
+                vectorstore.delete(ids=["id1", "id2"], filter={"status": "archived"})
+        """
         raise NotImplementedError(
             "Sync methods are not implemented for AsyncPGVectorStore. Use PGVectorStore interface instead."
         )
diff --git a/langchain_postgres/v2/vectorstores.py b/langchain_postgres/v2/vectorstores.py
index edbfb57..ad4e7f4 100644
--- a/langchain_postgres/v2/vectorstores.py
+++ b/langchain_postgres/v2/vectorstores.py
@@ -266,26 +266,74 @@ def add_documents(
     async def adelete(
         self,
         ids: Optional[list] = None,
+        filter: Optional[dict] = None,
         **kwargs: Any,
     ) -> Optional[bool]:
         """Delete records from the table.
 
+        Args:
+            ids: List of document IDs to delete.
+            filter: Metadata filter dictionary for bulk deletion.
+                Supports the same filter syntax as similarity_search.
+                Note: Filters only work on fields defined in metadata_columns,
+                not on fields stored in the metadata_json_column.
+
+        Returns:
+            True if deletion was successful, False if no criteria provided.
+
         Raises:
             :class:`InvalidTextRepresentationError `: if the `ids` data type does not match that of the `id_column`.
+
+        Examples:
+            Delete by IDs:
+                await vectorstore.adelete(ids=["id1", "id2"])
+
+            Delete by metadata filter (requires metadata_columns):
+                await vectorstore.adelete(filter={"source": "documentation"})
+                await vectorstore.adelete(filter={"$and": [{"category": "obsolete"}, {"year": {"$lt": 2020}}]})
+
+            Delete by both IDs and filter (must match both criteria):
+                await vectorstore.adelete(ids=["id1", "id2"], filter={"status": "archived"})
         """
-        return await self._engine._run_as_async(self.__vs.adelete(ids, **kwargs))
+        return await self._engine._run_as_async(
+            self.__vs.adelete(ids, filter=filter, **kwargs)
+        )
 
     def delete(
         self,
         ids: Optional[list] = None,
+        filter: Optional[dict] = None,
         **kwargs: Any,
     ) -> Optional[bool]:
         """Delete records from the table.
 
+        Args:
+            ids: List of document IDs to delete.
+            filter: Metadata filter dictionary for bulk deletion.
+                Supports the same filter syntax as similarity_search.
+                Note: Filters only work on fields defined in metadata_columns,
+                not on fields stored in the metadata_json_column.
+
+        Returns:
+            True if deletion was successful, False if no criteria provided.
+
         Raises:
             :class:`InvalidTextRepresentationError `: if the `ids` data type does not match that of the `id_column`.
+ + Examples: + Delete by IDs: + vectorstore.delete(ids=["id1", "id2"]) + + Delete by metadata filter (requires metadata_columns): + vectorstore.delete(filter={"source": "documentation"}) + vectorstore.delete(filter={"$and": [{"category": "obsolete"}, {"year": {"$lt": 2020}}]}) + + Delete by both IDs and filter (must match both criteria): + vectorstore.delete(ids=["id1", "id2"], filter={"status": "archived"}) """ - return self._engine._run_as_sync(self.__vs.adelete(ids, **kwargs)) + return self._engine._run_as_sync( + self.__vs.adelete(ids, filter=filter, **kwargs) + ) @classmethod async def afrom_texts( # type: ignore[override] diff --git a/tests/unit_tests/v2/test_async_pg_vectorstore.py b/tests/unit_tests/v2/test_async_pg_vectorstore.py index 7159d4d..26616fa 100644 --- a/tests/unit_tests/v2/test_async_pg_vectorstore.py +++ b/tests/unit_tests/v2/test_async_pg_vectorstore.py @@ -165,6 +165,210 @@ async def test_adelete(self, engine: PGEngine, vs: AsyncPGVectorStore) -> None: assert not result await aexecute(engine, f'TRUNCATE TABLE "{DEFAULT_TABLE}"') + async def test_adelete_with_filter(self, engine: PGEngine) -> None: + """Test deletion by metadata filter.""" + # Create a vectorstore with metadata columns for filtering + test_table = "test_delete_filter" + str(uuid.uuid4()) + await engine._ainit_vectorstore_table( + test_table, + VECTOR_SIZE, + metadata_columns=[ + Column("source", "TEXT"), + Column("category", "TEXT"), + ], + store_metadata=False, + ) + vs_filter = await AsyncPGVectorStore.create( + engine, + embedding_service=embeddings_service, + table_name=test_table, + metadata_columns=["source", "category"], + ) + + # Add texts with different metadata + test_metadatas = [ + {"source": "postgres", "category": "docs"}, + {"source": "web", "category": "docs"}, + {"source": "postgres", "category": "blog"}, + ] + ids = [str(uuid.uuid4()) for i in range(len(texts))] + await vs_filter.aadd_texts(texts, metadatas=test_metadatas, ids=ids) + results = await afetch(engine, f'SELECT * FROM "{test_table}"') + assert len(results) == 3 + + # Delete all documents with source="postgres" + await vs_filter.adelete(filter={"source": "postgres"}) + results = await afetch(engine, f'SELECT * FROM "{test_table}"') + assert len(results) == 1 + # The remaining document should have source="web" + assert results[0]["source"] == "web" + await aexecute(engine, f'DROP TABLE "{test_table}"') + + async def test_adelete_with_filter_and_operator(self, engine: PGEngine) -> None: + """Test deletion with filter using operators.""" + # Create a vectorstore with metadata columns for filtering + test_table = "test_delete_operator" + str(uuid.uuid4()) + await engine._ainit_vectorstore_table( + test_table, + VECTOR_SIZE, + metadata_columns=[ + Column("source", "TEXT"), + Column("year", "INTEGER"), + ], + store_metadata=False, + ) + vs_filter = await AsyncPGVectorStore.create( + engine, + embedding_service=embeddings_service, + table_name=test_table, + metadata_columns=["source", "year"], + ) + + # Add texts with different metadata including numeric values + test_metadatas = [ + {"source": "postgres", "year": 2020}, + {"source": "web", "year": 2021}, + {"source": "postgres", "year": 2022}, + ] + ids = [str(uuid.uuid4()) for i in range(len(texts))] + await vs_filter.aadd_texts(texts, metadatas=test_metadatas, ids=ids) + results = await afetch(engine, f'SELECT * FROM "{test_table}"') + assert len(results) == 3 + + # Delete all documents with year < 2022 + await vs_filter.adelete(filter={"year": {"$lt": 2022}}) + results = 
await afetch(engine, f'SELECT * FROM "{test_table}"') + assert len(results) == 1 + # The remaining document should have year=2022 + assert results[0]["year"] == 2022 + await aexecute(engine, f'DROP TABLE "{test_table}"') + + async def test_adelete_with_complex_filter(self, engine: PGEngine) -> None: + """Test deletion with complex filter using $and.""" + # Create a vectorstore with metadata columns for filtering + test_table = "test_delete_complex" + str(uuid.uuid4()) + await engine._ainit_vectorstore_table( + test_table, + VECTOR_SIZE, + metadata_columns=[ + Column("source", "TEXT"), + Column("category", "TEXT"), + ], + store_metadata=False, + ) + vs_filter = await AsyncPGVectorStore.create( + engine, + embedding_service=embeddings_service, + table_name=test_table, + metadata_columns=["source", "category"], + ) + + # Add texts with different metadata + test_metadatas = [ + {"source": "postgres", "category": "obsolete"}, + {"source": "web", "category": "obsolete"}, + {"source": "postgres", "category": "current"}, + ] + ids = [str(uuid.uuid4()) for i in range(len(texts))] + await vs_filter.aadd_texts(texts, metadatas=test_metadatas, ids=ids) + results = await afetch(engine, f'SELECT * FROM "{test_table}"') + assert len(results) == 3 + + # Delete documents with source="postgres" AND category="obsolete" + await vs_filter.adelete( + filter={"$and": [{"source": "postgres"}, {"category": "obsolete"}]} + ) + results = await afetch(engine, f'SELECT * FROM "{test_table}"') + assert len(results) == 2 + # Should have removed only the first document + remaining_categories = [result["category"] for result in results] + assert "obsolete" in remaining_categories # web/obsolete still exists + assert "current" in remaining_categories # postgres/current still exists + await aexecute(engine, f'DROP TABLE "{test_table}"') + + async def test_adelete_with_filter_and_ids(self, engine: PGEngine) -> None: + """Test deletion with both IDs and filter (must match both).""" + # Create a vectorstore with metadata columns for filtering + test_table = "test_delete_ids_filter" + str(uuid.uuid4()) + await engine._ainit_vectorstore_table( + test_table, + VECTOR_SIZE, + metadata_columns=[ + Column("source", "TEXT"), + ], + store_metadata=False, + ) + vs_filter = await AsyncPGVectorStore.create( + engine, + embedding_service=embeddings_service, + table_name=test_table, + metadata_columns=["source"], + ) + + # Add texts with different metadata + test_metadatas = [ + {"source": "postgres"}, + {"source": "web"}, + {"source": "postgres"}, + ] + ids = [str(uuid.uuid4()) for i in range(len(texts))] + await vs_filter.aadd_texts(texts, metadatas=test_metadatas, ids=ids) + results = await afetch(engine, f'SELECT * FROM "{test_table}"') + assert len(results) == 3 + + # Try to delete ids[0] and ids[2] but only where source="web" + # This should delete nothing since ids[0] and ids[2] have source="postgres" + # With AND logic, it means id IN (ids) AND source="web" + # So this should only delete if the id is in the list AND source is web + # Since ids[0] and ids[2] are postgres, and ids[1] is web but not in the list, + # nothing should be deleted + await vs_filter.adelete(ids=[ids[0], ids[2]], filter={"source": "web"}) + results = await afetch(engine, f'SELECT * FROM "{test_table}"') + assert len(results) == 3 # Nothing deleted + + # Now delete ids[0] and ids[1] where source="web" + # This should delete only ids[1] (which has source="web") + await vs_filter.adelete(ids=[ids[0], ids[1]], filter={"source": "web"}) + results = await 
afetch(engine, f'SELECT * FROM "{test_table}"') + assert len(results) == 2 + remaining_ids = [str(result["langchain_id"]) for result in results] + assert ids[1] not in remaining_ids # ids[1] was deleted + assert ids[0] in remaining_ids # ids[0] not deleted (wrong source) + assert ids[2] in remaining_ids # ids[2] not deleted (not in id list) + await aexecute(engine, f'DROP TABLE "{test_table}"') + + async def test_adelete_with_filter_no_matches(self, engine: PGEngine) -> None: + """Test deletion with filter that matches no documents.""" + # Create a vectorstore with metadata columns for filtering + test_table = "test_delete_nomatch" + str(uuid.uuid4()) + await engine._ainit_vectorstore_table( + test_table, + VECTOR_SIZE, + metadata_columns=[ + Column("source", "TEXT"), + ], + store_metadata=False, + ) + vs_filter = await AsyncPGVectorStore.create( + engine, + embedding_service=embeddings_service, + table_name=test_table, + metadata_columns=["source"], + ) + + # Add texts + test_metadatas = [{"source": "postgres"} for _ in range(len(texts))] + ids = [str(uuid.uuid4()) for i in range(len(texts))] + await vs_filter.aadd_texts(texts, metadatas=test_metadatas, ids=ids) + results = await afetch(engine, f'SELECT * FROM "{test_table}"') + assert len(results) == 3 + + # Try to delete with a filter that matches nothing + await vs_filter.adelete(filter={"source": "nonexistent"}) + results = await afetch(engine, f'SELECT * FROM "{test_table}"') + assert len(results) == 3 # Nothing deleted + await aexecute(engine, f'DROP TABLE "{test_table}"') + ##### Custom Vector Store ##### async def test_aadd_embeddings( self, engine: PGEngine, vs_custom: AsyncPGVectorStore diff --git a/tests/unit_tests/v2/test_pg_vectorstore.py b/tests/unit_tests/v2/test_pg_vectorstore.py index d426b4e..5d76d22 100644 --- a/tests/unit_tests/v2/test_pg_vectorstore.py +++ b/tests/unit_tests/v2/test_pg_vectorstore.py @@ -215,6 +215,45 @@ async def test_adelete(self, engine: PGEngine, vs: PGVectorStore) -> None: assert len(results) == 2 await aexecute(engine, f'TRUNCATE TABLE "{DEFAULT_TABLE}"') + async def test_adelete_with_filter(self, engine: PGEngine) -> None: + """Test async deletion by metadata filter in sync wrapper.""" + # Create a vectorstore with metadata columns for filtering + test_table = "test_delete_filter_sync" + str(uuid.uuid4()) + await engine._ainit_vectorstore_table( + test_table, + VECTOR_SIZE, + metadata_columns=[ + Column("source", "TEXT"), + Column("category", "TEXT"), + ], + store_metadata=False, + ) + vs_filter = await PGVectorStore.create( + engine, + embedding_service=embeddings_service, + table_name=test_table, + metadata_columns=["source", "category"], + ) + + # Add texts with different metadata + test_metadatas = [ + {"source": "postgres", "category": "docs"}, + {"source": "web", "category": "docs"}, + {"source": "postgres", "category": "blog"}, + ] + ids = [str(uuid.uuid4()) for i in range(len(texts))] + await vs_filter.aadd_texts(texts, metadatas=test_metadatas, ids=ids) + results = await afetch(engine, f'SELECT * FROM "{test_table}"') + assert len(results) == 3 + + # Delete all documents with source="postgres" + await vs_filter.adelete(filter={"source": "postgres"}) + results = await afetch(engine, f'SELECT * FROM "{test_table}"') + assert len(results) == 1 + # The remaining document should have source="web" + assert results[0]["source"] == "web" + await aexecute(engine, f'DROP TABLE "{test_table}"') + async def test_aadd_texts_custom( self, engine: PGEngine, vs_custom: PGVectorStore ) -> None: @@ 
-291,6 +330,45 @@ async def test_add_texts( await vs_sync.adelete(ids) await aexecute(engine_sync, f'TRUNCATE TABLE "{DEFAULT_TABLE_SYNC}"') + async def test_delete_with_filter(self, engine_sync: PGEngine) -> None: + """Test sync deletion by metadata filter.""" + # Create a vectorstore with metadata columns for filtering + test_table = "test_delete_filter_sync2" + str(uuid.uuid4()) + await engine_sync._ainit_vectorstore_table( + test_table, + VECTOR_SIZE, + metadata_columns=[ + Column("source", "TEXT"), + Column("category", "TEXT"), + ], + store_metadata=False, + ) + vs_filter = await PGVectorStore.create( + engine_sync, + embedding_service=embeddings_service, + table_name=test_table, + metadata_columns=["source", "category"], + ) + + # Add texts with different metadata + test_metadatas = [ + {"source": "postgres", "category": "docs"}, + {"source": "web", "category": "docs"}, + {"source": "postgres", "category": "blog"}, + ] + ids = [str(uuid.uuid4()) for i in range(len(texts))] + vs_filter.add_texts(texts, metadatas=test_metadatas, ids=ids) + results = await afetch(engine_sync, f'SELECT * FROM "{test_table}"') + assert len(results) == 3 + + # Delete all documents with source="postgres" using sync method + vs_filter.delete(filter={"source": "postgres"}) + results = await afetch(engine_sync, f'SELECT * FROM "{test_table}"') + assert len(results) == 1 + # The remaining document should have source="web" + assert results[0]["source"] == "web" + await aexecute(engine_sync, f'DROP TABLE "{test_table}"') + async def test_cross_env( self, engine_sync: PGEngine, vs_sync: PGVectorStore ) -> None: diff --git a/uv.lock b/uv.lock index 933849d..ac9293f 100644 --- a/uv.lock +++ b/uv.lock @@ -478,6 +478,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7f/91/ae2eb6b7979e2f9b035a9f612cf70f1bf54aad4e1d125129bef1eae96f19/greenlet-3.2.4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c2ca18a03a8cfb5b25bc1cbe20f3d9a4c80d8c3b13ba3df49ac3961af0b1018d", size = 584358, upload-time = "2025-08-07T13:18:23.708Z" }, { url = "https://files.pythonhosted.org/packages/f7/85/433de0c9c0252b22b16d413c9407e6cb3b41df7389afc366ca204dbc1393/greenlet-3.2.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9fe0a28a7b952a21e2c062cd5756d34354117796c6d9215a87f55e38d15402c5", size = 1113550, upload-time = "2025-08-07T13:42:37.467Z" }, { url = "https://files.pythonhosted.org/packages/a1/8d/88f3ebd2bc96bf7747093696f4335a0a8a4c5acfcf1b757717c0d2474ba3/greenlet-3.2.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8854167e06950ca75b898b104b63cc646573aa5fef1353d4508ecdd1ee76254f", size = 1137126, upload-time = "2025-08-07T13:18:20.239Z" }, + { url = "https://files.pythonhosted.org/packages/f1/29/74242b7d72385e29bcc5563fba67dad94943d7cd03552bac320d597f29b2/greenlet-3.2.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f47617f698838ba98f4ff4189aef02e7343952df3a615f847bb575c3feb177a7", size = 1544904, upload-time = "2025-11-04T12:42:04.763Z" }, + { url = "https://files.pythonhosted.org/packages/c8/e2/1572b8eeab0f77df5f6729d6ab6b141e4a84ee8eb9bc8c1e7918f94eda6d/greenlet-3.2.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:af41be48a4f60429d5cad9d22175217805098a9ef7c40bfef44f7669fb9d74d8", size = 1611228, upload-time = "2025-11-04T12:42:08.423Z" }, { url = "https://files.pythonhosted.org/packages/d6/6f/b60b0291d9623c496638c582297ead61f43c4b72eef5e9c926ef4565ec13/greenlet-3.2.4-cp310-cp310-win_amd64.whl", hash = 
"sha256:73f49b5368b5359d04e18d15828eecc1806033db5233397748f4ca813ff1056c", size = 298654, upload-time = "2025-08-07T13:50:00.469Z" }, { url = "https://files.pythonhosted.org/packages/a4/de/f28ced0a67749cac23fecb02b694f6473f47686dff6afaa211d186e2ef9c/greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2", size = 272305, upload-time = "2025-08-07T13:15:41.288Z" }, { url = "https://files.pythonhosted.org/packages/09/16/2c3792cba130000bf2a31c5272999113f4764fd9d874fb257ff588ac779a/greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246", size = 632472, upload-time = "2025-08-07T13:42:55.044Z" }, @@ -487,6 +489,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1f/8e/abdd3f14d735b2929290a018ecf133c901be4874b858dd1c604b9319f064/greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8", size = 587684, upload-time = "2025-08-07T13:18:25.164Z" }, { url = "https://files.pythonhosted.org/packages/5d/65/deb2a69c3e5996439b0176f6651e0052542bb6c8f8ec2e3fba97c9768805/greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52", size = 1116647, upload-time = "2025-08-07T13:42:38.655Z" }, { url = "https://files.pythonhosted.org/packages/3f/cc/b07000438a29ac5cfb2194bfc128151d52f333cee74dd7dfe3fb733fc16c/greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa", size = 1142073, upload-time = "2025-08-07T13:18:21.737Z" }, + { url = "https://files.pythonhosted.org/packages/67/24/28a5b2fa42d12b3d7e5614145f0bd89714c34c08be6aabe39c14dd52db34/greenlet-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c9c6de1940a7d828635fbd254d69db79e54619f165ee7ce32fda763a9cb6a58c", size = 1548385, upload-time = "2025-11-04T12:42:11.067Z" }, + { url = "https://files.pythonhosted.org/packages/6a/05/03f2f0bdd0b0ff9a4f7b99333d57b53a7709c27723ec8123056b084e69cd/greenlet-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03c5136e7be905045160b1b9fdca93dd6727b180feeafda6818e6496434ed8c5", size = 1613329, upload-time = "2025-11-04T12:42:12.928Z" }, { url = "https://files.pythonhosted.org/packages/d8/0f/30aef242fcab550b0b3520b8e3561156857c94288f0332a79928c31a52cf/greenlet-3.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9", size = 299100, upload-time = "2025-08-07T13:44:12.287Z" }, { url = "https://files.pythonhosted.org/packages/44/69/9b804adb5fd0671f367781560eb5eb586c4d495277c93bde4307b9e28068/greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd", size = 274079, upload-time = "2025-08-07T13:15:45.033Z" }, { url = "https://files.pythonhosted.org/packages/46/e9/d2a80c99f19a153eff70bc451ab78615583b8dac0754cfb942223d2c1a0d/greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb", size = 640997, upload-time = "2025-08-07T13:42:56.234Z" }, @@ -496,6 +500,8 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/19/0d/6660d55f7373b2ff8152401a83e02084956da23ae58cddbfb0b330978fe9/greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0", size = 607586, upload-time = "2025-08-07T13:18:28.544Z" }, { url = "https://files.pythonhosted.org/packages/8e/1a/c953fdedd22d81ee4629afbb38d2f9d71e37d23caace44775a3a969147d4/greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0", size = 1123281, upload-time = "2025-08-07T13:42:39.858Z" }, { url = "https://files.pythonhosted.org/packages/3f/c7/12381b18e21aef2c6bd3a636da1088b888b97b7a0362fac2e4de92405f97/greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f", size = 1151142, upload-time = "2025-08-07T13:18:22.981Z" }, + { url = "https://files.pythonhosted.org/packages/27/45/80935968b53cfd3f33cf99ea5f08227f2646e044568c9b1555b58ffd61c2/greenlet-3.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ee7a6ec486883397d70eec05059353b8e83eca9168b9f3f9a361971e77e0bcd0", size = 1564846, upload-time = "2025-11-04T12:42:15.191Z" }, + { url = "https://files.pythonhosted.org/packages/69/02/b7c30e5e04752cb4db6202a3858b149c0710e5453b71a3b2aec5d78a1aab/greenlet-3.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:326d234cbf337c9c3def0676412eb7040a35a768efc92504b947b3e9cfc7543d", size = 1633814, upload-time = "2025-11-04T12:42:17.175Z" }, { url = "https://files.pythonhosted.org/packages/e9/08/b0814846b79399e585f974bbeebf5580fbe59e258ea7be64d9dfb253c84f/greenlet-3.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02", size = 299899, upload-time = "2025-08-07T13:38:53.448Z" }, { url = "https://files.pythonhosted.org/packages/49/e8/58c7f85958bda41dafea50497cbd59738c5c43dbbea5ee83d651234398f4/greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31", size = 272814, upload-time = "2025-08-07T13:15:50.011Z" }, { url = "https://files.pythonhosted.org/packages/62/dd/b9f59862e9e257a16e4e610480cfffd29e3fae018a68c2332090b53aac3d/greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945", size = 641073, upload-time = "2025-08-07T13:42:57.23Z" }, @@ -505,6 +511,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ee/43/3cecdc0349359e1a527cbf2e3e28e5f8f06d3343aaf82ca13437a9aa290f/greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671", size = 610497, upload-time = "2025-08-07T13:18:31.636Z" }, { url = "https://files.pythonhosted.org/packages/b8/19/06b6cf5d604e2c382a6f31cafafd6f33d5dea706f4db7bdab184bad2b21d/greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b", size = 1121662, upload-time = "2025-08-07T13:42:41.117Z" }, { url = "https://files.pythonhosted.org/packages/a2/15/0d5e4e1a66fab130d98168fe984c509249c833c1a3c16806b90f253ce7b9/greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae", size = 1149210, upload-time = "2025-08-07T13:18:24.072Z" }, + { url = 
"https://files.pythonhosted.org/packages/1c/53/f9c440463b3057485b8594d7a638bed53ba531165ef0ca0e6c364b5cc807/greenlet-3.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e343822feb58ac4d0a1211bd9399de2b3a04963ddeec21530fc426cc121f19b", size = 1564759, upload-time = "2025-11-04T12:42:19.395Z" }, + { url = "https://files.pythonhosted.org/packages/47/e4/3bb4240abdd0a8d23f4f88adec746a3099f0d86bfedb623f063b2e3b4df0/greenlet-3.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ca7f6f1f2649b89ce02f6f229d7c19f680a6238af656f61e0115b24857917929", size = 1634288, upload-time = "2025-11-04T12:42:21.174Z" }, { url = "https://files.pythonhosted.org/packages/0b/55/2321e43595e6801e105fcfdee02b34c0f996eb71e6ddffca6b10b7e1d771/greenlet-3.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b", size = 299685, upload-time = "2025-08-07T13:24:38.824Z" }, { url = "https://files.pythonhosted.org/packages/22/5c/85273fd7cc388285632b0498dbbab97596e04b154933dfe0f3e68156c68c/greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0", size = 273586, upload-time = "2025-08-07T13:16:08.004Z" }, { url = "https://files.pythonhosted.org/packages/d1/75/10aeeaa3da9332c2e761e4c50d4c3556c21113ee3f0afa2cf5769946f7a3/greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f", size = 686346, upload-time = "2025-08-07T13:42:59.944Z" }, @@ -512,6 +520,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/dc/8b/29aae55436521f1d6f8ff4e12fb676f3400de7fcf27fccd1d4d17fd8fecd/greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1", size = 694659, upload-time = "2025-08-07T13:53:17.759Z" }, { url = "https://files.pythonhosted.org/packages/92/2e/ea25914b1ebfde93b6fc4ff46d6864564fba59024e928bdc7de475affc25/greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735", size = 695355, upload-time = "2025-08-07T13:18:34.517Z" }, { url = "https://files.pythonhosted.org/packages/72/60/fc56c62046ec17f6b0d3060564562c64c862948c9d4bc8aa807cf5bd74f4/greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337", size = 657512, upload-time = "2025-08-07T13:18:33.969Z" }, + { url = "https://files.pythonhosted.org/packages/23/6e/74407aed965a4ab6ddd93a7ded3180b730d281c77b765788419484cdfeef/greenlet-3.2.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2917bdf657f5859fbf3386b12d68ede4cf1f04c90c3a6bc1f013dd68a22e2269", size = 1612508, upload-time = "2025-11-04T12:42:23.427Z" }, + { url = "https://files.pythonhosted.org/packages/0d/da/343cd760ab2f92bac1845ca07ee3faea9fe52bee65f7bcb19f16ad7de08b/greenlet-3.2.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:015d48959d4add5d6c9f6c5210ee3803a830dce46356e3bc326d6776bde54681", size = 1680760, upload-time = "2025-11-04T12:42:25.341Z" }, { url = "https://files.pythonhosted.org/packages/e3/a5/6ddab2b4c112be95601c13428db1d8b6608a8b6039816f2ba09c346c08fc/greenlet-3.2.4-cp314-cp314-win_amd64.whl", hash = "sha256:e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01", size = 303425, upload-time = "2025-08-07T13:32:27.59Z" }, { url = 
"https://files.pythonhosted.org/packages/f7/c0/93885c4106d2626bf51fdec377d6aef740dfa5c4877461889a7cf8e565cc/greenlet-3.2.4-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:b6a7c19cf0d2742d0809a4c05975db036fdff50cd294a93632d6a310bf9ac02c", size = 269859, upload-time = "2025-08-07T13:16:16.003Z" }, { url = "https://files.pythonhosted.org/packages/4d/f5/33f05dc3ba10a02dedb1485870cf81c109227d3d3aa280f0e48486cac248/greenlet-3.2.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:27890167f55d2387576d1f41d9487ef171849ea0359ce1510ca6e06c8bece11d", size = 627610, upload-time = "2025-08-07T13:43:01.345Z" }, @@ -521,6 +531,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6b/4c/f3de2a8de0e840ecb0253ad0dc7e2bb3747348e798ec7e397d783a3cb380/greenlet-3.2.4-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9913f1a30e4526f432991f89ae263459b1c64d1608c0d22a5c79c287b3c70df", size = 582817, upload-time = "2025-08-07T13:18:35.48Z" }, { url = "https://files.pythonhosted.org/packages/89/80/7332915adc766035c8980b161c2e5d50b2f941f453af232c164cff5e0aeb/greenlet-3.2.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b90654e092f928f110e0007f572007c9727b5265f7632c2fa7415b4689351594", size = 1111985, upload-time = "2025-08-07T13:42:42.425Z" }, { url = "https://files.pythonhosted.org/packages/66/71/1928e2c80197353bcb9b50aa19c4d8e26ee6d7a900c564907665cf4b9a41/greenlet-3.2.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:81701fd84f26330f0d5f4944d4e92e61afe6319dcd9775e39396e39d7c3e5f98", size = 1136137, upload-time = "2025-08-07T13:18:26.168Z" }, + { url = "https://files.pythonhosted.org/packages/4b/bf/7bd33643e48ed45dcc0e22572f650767832bd4e1287f97434943cc402148/greenlet-3.2.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:28a3c6b7cd72a96f61b0e4b2a36f681025b60ae4779cc73c1535eb5f29560b10", size = 1542941, upload-time = "2025-11-04T12:42:27.427Z" }, + { url = "https://files.pythonhosted.org/packages/9b/74/4bc433f91d0d09a1c22954a371f9df928cb85e72640870158853a83415e5/greenlet-3.2.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:52206cd642670b0b320a1fd1cbfd95bca0e043179c1d8a045f2c6109dfe973be", size = 1609685, upload-time = "2025-11-04T12:42:29.242Z" }, { url = "https://files.pythonhosted.org/packages/89/48/a5dc74dde38aeb2b15d418cec76ed50e1dd3d620ccda84d8199703248968/greenlet-3.2.4-cp39-cp39-win32.whl", hash = "sha256:65458b409c1ed459ea899e939f0e1cdb14f58dbc803f2f93c5eab5694d32671b", size = 281400, upload-time = "2025-08-07T14:02:20.263Z" }, { url = "https://files.pythonhosted.org/packages/e5/44/342c4591db50db1076b8bda86ed0ad59240e3e1da17806a4cf10a6d0e447/greenlet-3.2.4-cp39-cp39-win_amd64.whl", hash = "sha256:d2e685ade4dafd447ede19c31277a224a239a0a1a4eca4e6390efedf20260cfb", size = 298533, upload-time = "2025-08-07T13:56:34.168Z" }, ] @@ -621,7 +633,7 @@ wheels = [ [[package]] name = "langchain-postgres" -version = "0.0.15" +version = "0.0.16" source = { editable = "." } dependencies = [ { name = "asyncpg" },