3 files changed under python/llama-index-server: +12 -12 lines.

@@ -18,7 +18,7 @@ def create_file_tool(chat_request: ChatRequest) -> Optional[FunctionTool]:
     Create a tool to read file if the user uploads a file.
     """
     file_ids = []
-    for file in get_file_attachments(chat_request):
+    for file in get_file_attachments(chat_request.messages):
         file_ids.append(file.id)
     if len(file_ids) == 0:
         return None
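The call site now passes chat_request.messages instead of the whole request, matching the List[ChatAPIMessage] parameter of get_file_attachments (updated in the last diff below). A minimal sketch of the new calling convention; the module path for get_file_attachments is an assumption, not confirmed by this diff, and ChatRequest is assumed to live in the same models module as the other chat types:

    from typing import List

    from llama_index.server.models.chat import ChatRequest  # assumed import path
    from llama_index.server.utils.chat_attachments import get_file_attachments  # hypothetical module path


    def uploaded_file_ids(chat_request: ChatRequest) -> List[str]:
        # The helper now expects the message list, not the request object itself.
        return [file.id for file in get_file_attachments(chat_request.messages)]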
@@ -29,7 +29,7 @@ def create_file_tool(chat_request: ChatRequest) -> Optional[FunctionTool]:
     )
 
     def read_file(file_id: str) -> str:
-        file_path = FileService.get_private_file_path(file_id)
+        file_path = FileService.get_file_path(file_id)
         try:
             with open(file_path, "r") as file:
                 return file.read()
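Only the FileService call changes here: get_private_file_path becomes get_file_path. The except branch of read_file lies outside the hunk, so the sketch below fills it in as an assumption; FileService is whatever the surrounding module already imports:

    def read_file(file_id: str) -> str:
        # Resolve the uploaded file's path via the renamed FileService API.
        file_path = FileService.get_file_path(file_id)
        try:
            with open(file_path, "r") as file:
                return file.read()
        except OSError as exc:
            # Assumption: the real except clause is not shown in the hunk.
            return f"Error reading file {file_id}: {exc}"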
@@ -57,7 +57,7 @@ def create_app() -> FastAPI:
         workflow_factory=create_workflow,
         suggest_next_questions=False,
         ui_config=UIConfig(
-            file_upload_enabled=True,
+            enable_file_upload=True,
             component_dir="components",
         ),
     )
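The UIConfig keyword is renamed from file_upload_enabled to enable_file_upload. A minimal create_app sketch using the renamed argument; the LlamaIndexServer constructor and import path follow the llama-index-server examples and are assumptions here, as is the app.workflow module providing create_workflow:

    from fastapi import FastAPI
    from llama_index.server import LlamaIndexServer, UIConfig  # assumed import path

    from app.workflow import create_workflow  # hypothetical module for the workflow factory


    def create_app() -> FastAPI:
        # LlamaIndexServer is assumed to be a FastAPI subclass, as in the package examples.
        return LlamaIndexServer(
            workflow_factory=create_workflow,
            suggest_next_questions=False,
            ui_config=UIConfig(
                enable_file_upload=True,  # renamed keyword (was file_upload_enabled)
                component_dir="components",
            ),
        )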

@@ -37,10 +37,7 @@ def prepare_user_message(chat_request: ChatRequest) -> ChatMessage:
         raise ValueError("Last message must be from user")
 
     # Add attached files to the user message
-    user_messages = [
-        message for message in chat_request.messages if message.role == MessageRole.USER
-    ]
-    attachment_files = get_file_attachments(user_messages)
+    attachment_files = get_file_attachments(chat_request.messages)
     last_message.blocks += [
         DocumentBlock(
             path=file.path or FileService.get_file_path(file.id),
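Filtering by MessageRole.USER now happens inside get_file_attachments, so prepare_user_message simply passes chat_request.messages. The hunk is truncated after the path= argument; presumably the block closes as a list comprehension, roughly as sketched below (the closing lines are an assumption, and DocumentBlock, FileService, and get_file_attachments are the names already imported in that module):

    attachment_files = get_file_attachments(chat_request.messages)
    # Attach each uploaded file to the last user message as a document block.
    last_message.blocks += [
        DocumentBlock(
            path=file.path or FileService.get_file_path(file.id),
        )
        for file in attachment_files  # assumed continuation; not shown in the hunk
    ]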

@@ -1,23 +1,26 @@
 from typing import List
 
+from llama_index.core.base.message import MessageRole
 from llama_index.server.models.chat import ChatAPIMessage, FileAnnotation, ServerFile
 
 
 def get_file_attachments(messages: List[ChatAPIMessage]) -> List[ServerFile]:
     """
-    Extract all file attachments from the chat request.
+    Extract all file attachments from user messages.
 
     Args:
         messages (List[ChatAPIMessage]): The list of messages.
 
     Returns:
-        List[PrivateFile]: The list of private files.
+        List[ServerFile]: The list of private files.
     """
-    message_annotations = [
-        message.annotations for message in messages if message.annotations
+    user_message_annotations = [
+        message.annotations
+        for message in messages
+        if message.annotations and message.role == MessageRole.USER
     ]
     files: List[ServerFile] = []
-    for annotation in message_annotations:
+    for annotation in user_message_annotations:
         if isinstance(annotation, list):
             for item in annotation:
                 if isinstance(item, FileAnnotation):
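Taken together, the helper now ignores annotations on non-user messages. A self-contained sketch of the updated function; everything after the isinstance(item, FileAnnotation) check lies outside the hunk, so the final extend and the item.data.files attribute layout are hypothetical:

    from typing import List

    from llama_index.core.base.message import MessageRole  # import added by this PR
    from llama_index.server.models.chat import ChatAPIMessage, FileAnnotation, ServerFile


    def get_file_attachments(messages: List[ChatAPIMessage]) -> List[ServerFile]:
        """Extract all file attachments from user messages."""
        user_message_annotations = [
            message.annotations
            for message in messages
            if message.annotations and message.role == MessageRole.USER
        ]
        files: List[ServerFile] = []
        for annotation in user_message_annotations:
            if isinstance(annotation, list):
                for item in annotation:
                    if isinstance(item, FileAnnotation):
                        # Hypothetical: the hunk does not show how a FileAnnotation
                        # yields ServerFile objects; an attached-files field is assumed.
                        files.extend(item.data.files)
        return files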