@@ -3,14 +3,14 @@
 from typing import TYPE_CHECKING, Any, Optional
 
 from llama_cloud import PipelineType
-from pydantic import BaseModel, Field, field_validator
-
 from llama_index.core.callbacks import CallbackManager
 from llama_index.core.ingestion.api_utils import (
     get_client as llama_cloud_get_client,
 )
 from llama_index.core.settings import Settings
 from llama_index.indices.managed.llama_cloud import LlamaCloudIndex
+from llama_index.server.api.models import ChatRequest
+from pydantic import BaseModel, Field, field_validator
 
 if TYPE_CHECKING:
     from llama_cloud.client import LlamaCloud
@@ -87,13 +87,26 @@ def to_index_kwargs(self) -> dict:
             "callback_manager": self.callback_manager,
         }
 
+    @classmethod
+    def from_chat_request(cls, chat_request: Optional[ChatRequest]) -> "IndexConfig":
+        default_config = cls()
+        if chat_request is not None:
+            llamacloud_config = (chat_request.data or {}).get("llamaCloudPipeline")
+            if llamacloud_config is not None:
+                default_config.llama_cloud_pipeline_config.pipeline = llamacloud_config[
+                    "pipeline"
+                ]
+                default_config.llama_cloud_pipeline_config.project = llamacloud_config[
+                    "project"
+                ]
+        return default_config
+
 
 def get_index(
-    config: Optional[IndexConfig] = None,
+    chat_request: Optional[ChatRequest] = None,
     create_if_missing: bool = False,
 ) -> Optional[LlamaCloudIndex]:
-    if config is None:
-        config = IndexConfig()
+    config = IndexConfig.from_chat_request(chat_request)
     # Check whether the index exists
     try:
         index = LlamaCloudIndex(**config.to_index_kwargs())
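For context, here is a minimal usage sketch of the new call path (not part of the diff). The `ChatRequest` constructor fields used below are assumptions for illustration, and `get_index` is assumed to be imported from the module shown above; only the `data["llamaCloudPipeline"]` payload with its `pipeline` and `project` keys comes from this change.

```python
from llama_index.server.api.models import ChatRequest

# Hypothetical request in which the client pins a specific LlamaCloud
# pipeline/project via the "llamaCloudPipeline" entry of the data payload.
request = ChatRequest(
    messages=[],  # assumed field; the chat history is irrelevant here
    data={"llamaCloudPipeline": {"pipeline": "my-pipeline", "project": "my-project"}},
)

# get_index now builds its IndexConfig from the request via
# IndexConfig.from_chat_request(...).
index = get_index(chat_request=request)

# With no request (or no "llamaCloudPipeline" entry), it falls back to the
# default IndexConfig.
default_index = get_index()
```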