@@ -183,7 +183,7 @@ def test_vector_db_insert_from_url_and_query(
    assert any("llama2" in chunk.content.lower() for chunk in response2.chunks)


-def test_rag_tool_openai_api_integration(client_with_empty_registry, embedding_model_id, embedding_dimension):
+def test_rag_tool_openai_apis(client_with_empty_registry, embedding_model_id, embedding_dimension):
    vector_db_id = "test_openai_vector_db"

    client_with_empty_registry.vector_dbs.register(
@@ -221,6 +221,16 @@ def test_rag_tool_openai_api_integration(client_with_empty_registry, embedding_m
        chunk_size_in_tokens=256,
    )

+    files_list = client_with_empty_registry.files.list()
+    assert len(files_list.data) >= len(documents), (
+        f"Expected at least {len(documents)} files, got {len(files_list.data)}"
+    )
+
+    vector_store_files = client_with_empty_registry.vector_io.openai_list_files_in_vector_store(
+        vector_store_id=actual_vector_db_id
+    )
+    assert len(vector_store_files.data) >= len(documents), f"Expected at least {len(documents)} files in vector store"
+
    response = client_with_empty_registry.tool_runtime.rag_tool.query(
        vector_db_ids=[actual_vector_db_id],
        content="Tell me about machine learning and deep learning",
@@ -231,8 +241,8 @@ def test_rag_tool_openai_api_integration(client_with_empty_registry, embedding_m
    assert "machine learning" in content_text or "deep learning" in content_text


-def test_rag_tool_error_resilience(client_with_empty_registry, embedding_model_id, embedding_dimension):
-    vector_db_id = "test_error_resilience"
+def test_rag_tool_exception_handling(client_with_empty_registry, embedding_model_id, embedding_dimension):
+    vector_db_id = "test_exception_handling"

    client_with_empty_registry.vector_dbs.register(
        vector_db_id=vector_db_id,