Skip to content

Commit db0065f

Browse files
committed
add llamacloud config api
1 parent bef630a commit db0065f

File tree

3 files changed

+32
-2
lines changed

3 files changed

+32
-2
lines changed

llama-index-server/llama_index/server/api/routers/chat.py

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
import asyncio
22
import logging
3+
import os
34
from typing import AsyncGenerator, Callable, Union
45

56
from fastapi import APIRouter, BackgroundTasks, HTTPException
@@ -15,6 +16,7 @@
1516
from llama_index.server.api.callbacks.stream_handler import StreamHandler
1617
from llama_index.server.api.models import ChatRequest
1718
from llama_index.server.api.utils.vercel_stream import VercelStreamResponse
19+
from llama_index.server.services.llamacloud import LlamaCloudFileService
1820

1921

2022
def chat_router(
@@ -57,6 +59,27 @@ async def chat(
5759
logger.error(e)
5860
raise HTTPException(status_code=500, detail=str(e))
5961

62+
if LlamaCloudFileService.is_configured():

    @router.get("/config/llamacloud")
    async def chat_llama_cloud_config():
        """Report the LlamaCloud projects/pipelines available to the chat UI.

        Returns a dict with all projects (each with its pipelines) and, when
        both LLAMA_CLOUD_INDEX_NAME and LLAMA_CLOUD_PROJECT_NAME are set, the
        currently selected pipeline/project pair (otherwise ``None``).

        Raises:
            HTTPException: 500 when LLAMA_CLOUD_API_KEY is absent from the
                environment.
        """
        # Defensive re-check at request time: the router-level guard only ran
        # once at startup, but the key could be missing when a request arrives.
        if not os.getenv("LLAMA_CLOUD_API_KEY"):
            raise HTTPException(
                status_code=500, detail="LlamaCloud API KEY is not configured"
            )
        all_projects = LlamaCloudFileService.get_all_projects_with_pipelines()
        index_name = os.getenv("LLAMA_CLOUD_INDEX_NAME")
        project_name = os.getenv("LLAMA_CLOUD_PROJECT_NAME")
        # Only surface a selection when BOTH env vars are present.
        selected_pipeline = (
            {
                "pipeline": index_name,
                "project": project_name,
            }
            if index_name and project_name
            else None
        )
        return {
            "projects": all_projects,
            "pipeline": selected_pipeline,
        }
82+
6083
return router
6184

6285

llama-index-server/llama_index/server/server.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -70,6 +70,7 @@ def _ui_config(self) -> dict:
7070
return {
7171
"CHAT_API": f"{self.api_prefix}/chat",
7272
"STARTER_QUESTIONS": self.starter_questions,
73+
"LLAMA_CLOUD_API": "/api/chat/config/llamacloud",
7374
}
7475

7576
# Default routers

llama-index-server/llama_index/server/services/llamacloud/file.py

Lines changed: 8 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,11 +8,10 @@
88
import requests
99
from fastapi import BackgroundTasks
1010
from llama_cloud import ManagedIngestionStatus, PipelineFileCreateCustomMetadataValue
11-
from pydantic import BaseModel
12-
1311
from llama_index.core.schema import NodeWithScore
1412
from llama_index.server.api.models import SourceNodes
1513
from llama_index.server.services.llamacloud.index import get_client
14+
from pydantic import BaseModel
1615

1716
logger = logging.getLogger("uvicorn")
1817

@@ -176,3 +175,10 @@ def _download_file(cls, url: str, local_file_path: str) -> None:
176175
for chunk in r.iter_content(chunk_size=8192):
177176
f.write(chunk)
178177
logger.info("File downloaded successfully")
178+
179+
@classmethod
def is_configured(cls) -> bool:
    """Return True when a LlamaCloud API key is present in the environment.

    The presence of ``LLAMA_CLOUD_API_KEY`` is the sole configuration signal
    checked here.
    """
    # os.environ.get never raises for a missing key, so the original
    # try/except Exception wrapper was dead weight (a silent-swallow that
    # could only hide real bugs); the direct expression is equivalent.
    return os.environ.get("LLAMA_CLOUD_API_KEY") is not None

0 commit comments

Comments
 (0)