8 changes: 8 additions & 0 deletions .coveragerc
@@ -0,0 +1,8 @@
[run]
branch = true
omit =
*/__init__.py

[report]
show_missing = true
fail_under = 90
2 changes: 1 addition & 1 deletion integration.cloudbuild.yaml
@@ -46,7 +46,7 @@ steps:
- "-c"
- |
/workspace/cloud-sql-proxy ${_INSTANCE_CONNECTION_NAME} --port $_DATABASE_PORT & sleep 2;
python -m pytest tests/
python -m pytest --cov=llama_index_cloud_sql_pg --cov-config=.coveragerc tests/

availableSecrets:
secretManager:
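For reference, the same coverage gate can be exercised locally with a command along these lines (a sketch: it assumes pytest-cov is installed and the test database environment variables are configured the way the CI step expects):

    python -m pytest --cov=llama_index_cloud_sql_pg --cov-config=.coveragerc tests/

With fail_under = 90 in the [report] section of .coveragerc, the run exits non-zero when total coverage drops below 90%, which is what makes the Cloud Build step fail on insufficient coverage.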
82 changes: 79 additions & 3 deletions tests/test_async_document_store.py
@@ -28,6 +28,7 @@

default_table_name_async = "document_store_" + str(uuid.uuid4())
custom_table_name_async = "document_store_" + str(uuid.uuid4())
sync_method_exception_str = "Sync methods are not implemented for AsyncPostgresDocumentStore. Use PostgresDocumentStore interface instead."


async def aexecute(engine: PostgresEngine, query: str) -> None:
@@ -116,9 +117,16 @@ async def custom_doc_store(self, async_engine):
await aexecute(async_engine, query)

async def test_init_with_constructor(self, async_engine):
key = object()
with pytest.raises(Exception):
AsyncPostgresDocumentStore(
engine=async_engine, table_name=default_table_name_async
key, engine=async_engine, table_name=default_table_name_async
)

async def test_create_without_table(self, async_engine):
with pytest.raises(ValueError):
await AsyncPostgresDocumentStore.create(
engine=async_engine, table_name="non-existent-table"
)

async def test_warning(self, custom_doc_store):
@@ -178,7 +186,7 @@ async def test_add_hash_before_data(self, async_engine, doc_store):
result = results[0]
assert result["node_data"][DATA_KEY]["text_resource"]["text"] == document_text

async def test_ref_doc_exists(self, doc_store):
async def test_aref_doc_exists(self, doc_store):
# Create a ref_doc & a doc and add them to the store.
ref_doc = Document(
text="first doc", id_="doc_exists_doc_1", metadata={"doc": "info"}
@@ -235,6 +243,8 @@ async def test_adelete_ref_doc(self, doc_store):
assert (
await doc_store.aget_document(doc_id=doc.doc_id, raise_error=False) is None
)
# Confirm deleting a non-existent reference doc returns None.
assert await doc_store.adelete_ref_doc(ref_doc_id=ref_doc.doc_id) is None

async def test_set_and_get_document_hash(self, doc_store):
# Set a doc hash for a document
@@ -245,6 +255,9 @@
# Assert with get that the hash is same as the one set.
assert await doc_store.aget_document_hash(doc_id=doc_id) == doc_hash

async def test_aget_document_hash(self, doc_store):
assert await doc_store.aget_document_hash(doc_id="non-existent-doc") is None

async def test_set_and_get_document_hashes(self, doc_store):
# Create a dictionary of doc_id -> doc_hash mappings and add it to the table.
document_dict = {
@@ -279,7 +292,7 @@ async def test_doc_store_basic(self, doc_store):
retrieved_node = await doc_store.aget_document(doc_id=node.node_id)
assert retrieved_node == node

async def test_delete_document(self, async_engine, doc_store):
async def test_adelete_document(self, async_engine, doc_store):
# Create a doc and add it to the store.
doc = Document(text="document_2", id_="doc_id_2", metadata={"doc": "info"})
await doc_store.async_add_documents([doc])
@@ -292,6 +305,11 @@ async def test_delete_document(self, async_engine, doc_store):
result = await afetch(async_engine, query)
assert len(result) == 0

async def test_delete_non_existent_document(self, doc_store):
await doc_store.adelete_document(doc_id="non-existent-doc", raise_error=False)
with pytest.raises(ValueError):
await doc_store.adelete_document(doc_id="non-existent-doc")

async def test_doc_store_ref_doc_not_added(self, async_engine, doc_store):
# Create a ref_doc & doc.
ref_doc = Document(
@@ -367,3 +385,61 @@ async def test_doc_store_delete_all_ref_doc_nodes(self, async_engine, doc_store)
query = f"""select * from "public"."{default_table_name_async}" where id = '{ref_doc.doc_id}';"""
result = await afetch(async_engine, query)
assert len(result) == 0

async def test_docs(self, doc_store):
with pytest.raises(Exception, match=sync_method_exception_str):
doc_store.docs()

async def test_add_documents(self, doc_store):
with pytest.raises(Exception, match=sync_method_exception_str):
doc_store.add_documents([])

async def test_get_document(self, doc_store):
with pytest.raises(Exception, match=sync_method_exception_str):
doc_store.get_document("test_doc_id", raise_error=True)
with pytest.raises(Exception, match=sync_method_exception_str):
doc_store.get_document("test_doc_id", raise_error=False)

async def test_delete_document(self, doc_store):
with pytest.raises(Exception, match=sync_method_exception_str):
doc_store.delete_document("test_doc_id", raise_error=True)
with pytest.raises(Exception, match=sync_method_exception_str):
doc_store.delete_document("test_doc_id", raise_error=False)

async def test_document_exists(self, doc_store):
with pytest.raises(Exception, match=sync_method_exception_str):
doc_store.document_exists("test_doc_id")

async def test_ref_doc_exists(self, doc_store):
with pytest.raises(Exception, match=sync_method_exception_str):
doc_store.ref_doc_exists(ref_doc_id="test_ref_doc_id")

async def test_set_document_hash(self, doc_store):
with pytest.raises(Exception, match=sync_method_exception_str):
doc_store.set_document_hash("test_doc_id", "test_doc_hash")

async def test_set_document_hashes(self, doc_store):
with pytest.raises(Exception, match=sync_method_exception_str):
doc_store.set_document_hashes({"test_doc_id": "test_doc_hash"})

async def test_get_document_hash(self, doc_store):
with pytest.raises(Exception, match=sync_method_exception_str):
doc_store.get_document_hash(doc_id="test_doc_id")

async def test_get_all_document_hashes(self, doc_store):
with pytest.raises(Exception, match=sync_method_exception_str):
doc_store.get_all_document_hashes()

async def test_get_all_ref_doc_info(self, doc_store):
with pytest.raises(Exception, match=sync_method_exception_str):
doc_store.get_all_ref_doc_info()

async def test_get_ref_doc_info(self, doc_store):
with pytest.raises(Exception, match=sync_method_exception_str):
doc_store.get_ref_doc_info(ref_doc_id="test_doc_id")

async def test_delete_ref_doc(self, doc_store):
with pytest.raises(Exception, match=sync_method_exception_str):
doc_store.delete_ref_doc(ref_doc_id="test_doc_id", raise_error=False)
with pytest.raises(Exception, match=sync_method_exception_str):
doc_store.delete_ref_doc(ref_doc_id="test_doc_id", raise_error=True)
27 changes: 26 additions & 1 deletion tests/test_async_index_store.py
@@ -26,6 +26,7 @@
from llama_index_cloud_sql_pg.async_index_store import AsyncPostgresIndexStore

default_table_name_async = "index_store_" + str(uuid.uuid4())
sync_method_exception_str = "Sync methods are not implemented for AsyncPostgresIndexStore . Use PostgresIndexStore interface instead."


async def aexecute(engine: PostgresEngine, query: str) -> None:
@@ -102,9 +103,16 @@ async def index_store(self, async_engine):
await aexecute(async_engine, query)

async def test_init_with_constructor(self, async_engine):
key = object()
with pytest.raises(Exception):
AsyncPostgresIndexStore(
engine=async_engine, table_name=default_table_name_async
key, engine=async_engine, table_name=default_table_name_async
)

async def test_create_without_table(self, async_engine):
with pytest.raises(ValueError):
await AsyncPostgresIndexStore.create(
engine=async_engine, table_name="non-existent-table"
)

async def test_add_and_delete_index(self, index_store, async_engine):
@@ -162,3 +170,20 @@ async def test_warning(self, index_store):
assert "No struct_id specified and more than one struct exists." in str(
w[-1].message
)

async def test_index_structs(self, index_store):
with pytest.raises(Exception, match=sync_method_exception_str):
index_store.index_structs()

async def test_add_index_struct(self, index_store):
index_struct = IndexGraph()
with pytest.raises(Exception, match=sync_method_exception_str):
index_store.add_index_struct(index_struct)

async def test_delete_index_struct(self, index_store):
with pytest.raises(Exception, match=sync_method_exception_str):
index_store.delete_index_struct("non_existent_key")

async def test_get_index_struct(self, index_store):
with pytest.raises(Exception, match=sync_method_exception_str):
index_store.get_index_struct(struct_id="non_existent_id")
72 changes: 69 additions & 3 deletions tests/test_async_vector_store.py
@@ -14,6 +14,7 @@

import os
import uuid
import warnings
from typing import Sequence

import pytest
@@ -109,8 +110,8 @@ async def engine(self, db_project, db_region, db_instance, db_name):
)

yield engine
await aexecute(engine, f'DROP TABLE "{DEFAULT_TABLE}"')
await aexecute(engine, f'DROP TABLE "{DEFAULT_TABLE_CUSTOM_VS}"')
await aexecute(engine, f'DROP TABLE IF EXISTS "{DEFAULT_TABLE}"')
await aexecute(engine, f'DROP TABLE IF EXISTS "{DEFAULT_TABLE_CUSTOM_VS}"')
await engine.close()
averikitsch (Collaborator) commented on Jan 6, 2025:

nit: we should also close the connector like await engine._connector.close_async().


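A hedged sketch of how the reviewer's suggestion could be folded into the fixture teardown. The engine construction is elided in this diff, so it is shown only as a placeholder, and the private _connector attribute with close_async() is assumed from the comment rather than from a documented public API:

@pytest_asyncio.fixture(scope="class")
async def engine(self, db_project, db_region, db_instance, db_name):
    engine = ...  # engine construction as in the existing fixture (elided in this diff)
    yield engine
    # Teardown: drop the test tables, then release the engine.
    await aexecute(engine, f'DROP TABLE IF EXISTS "{DEFAULT_TABLE}"')
    await aexecute(engine, f'DROP TABLE IF EXISTS "{DEFAULT_TABLE_CUSTOM_VS}"')
    await engine.close()
    # Suggested addition: also close the underlying Cloud SQL connector.
    await engine._connector.close_async()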
@pytest_asyncio.fixture(scope="class")
@@ -153,8 +154,9 @@ async def custom_vs(self, engine):
yield vs

async def test_init_with_constructor(self, engine):
key = object()
with pytest.raises(Exception):
AsyncPostgresVectorStore(engine, table_name=DEFAULT_TABLE)
AsyncPostgresVectorStore(key, engine, table_name=DEFAULT_TABLE)

async def test_validate_id_column_create(self, engine, vs):
test_id_column = "test_id_column"
@@ -313,6 +315,70 @@ async def test_aquery(self, engine, vs):
assert len(results.nodes) == 3
assert results.nodes[0].get_content(metadata_mode=MetadataMode.NONE) == "foo"

async def test_aquery_filters(self, engine, custom_vs):
# Note: To be migrated to a pytest dependency on test_async_add
# Blocked due to unexpected fixtures reloads while running integration test suite
await aexecute(engine, f'TRUNCATE TABLE "{DEFAULT_TABLE_CUSTOM_VS}"')
# setting extra metadata to be indexed in separate column
for node in nodes:
node.metadata["len"] = len(node.text)

await custom_vs.async_add(nodes)

filters = MetadataFilters(
filters=[
MetadataFilter(
key="some_test_column",
value=["value_should_be_ignored"],
operator=FilterOperator.CONTAINS,
),
MetadataFilter(
key="len",
value=3,
operator=FilterOperator.LTE,
),
MetadataFilter(
key="len",
value=3,
operator=FilterOperator.GTE,
),
MetadataFilter(
key="len",
value=2,
operator=FilterOperator.GT,
),
MetadataFilter(
key="len",
value=4,
operator=FilterOperator.LT,
),
MetadataFilters(
filters=[
MetadataFilter(
key="len",
value=6.0,
operator=FilterOperator.NE,
),
],
condition=FilterCondition.OR,
),
],
condition=FilterCondition.AND,
)
query = VectorStoreQuery(
query_embedding=[1.0] * VECTOR_SIZE, filters=filters, similarity_top_k=-1
)
with warnings.catch_warnings(record=True) as w:
results = await custom_vs.aquery(query)

assert len(w) == 1
assert "Expecting a scalar in the filter value" in str(w[-1].message)

assert results.nodes is not None
assert results.ids is not None
assert results.similarities is not None
assert len(results.nodes) == 3

async def test_aclear(self, engine, vs):
# Note: To be migrated to a pytest dependency on test_adelete
# Blocked due to unexpected fixtures reloads while running integration test suite
3 changes: 2 additions & 1 deletion tests/test_document_store.py
@@ -117,9 +117,10 @@ async def doc_store(self, async_engine):
await aexecute(async_engine, query)

async def test_init_with_constructor(self, async_engine):
key = object()
with pytest.raises(Exception):
PostgresDocumentStore(
engine=async_engine, table_name=default_table_name_async
key, engine=async_engine, table_name=default_table_name_async
)

async def test_async_add_document(self, async_engine, doc_store):