|
1 | 1 | from typing import Any
|
2 | 2 |
|
3 | 3 | from fastapi import FastAPI
|
4 |
| - |
5 | 4 | from llama_index.core.agent.workflow.workflow_events import AgentStream
|
6 | 5 | from llama_index.core.llms import LLM
|
7 | 6 | from llama_index.core.prompts import PromptTemplate
|
@@ -52,7 +51,33 @@ def __init__(
|
52 | 51 | @step
|
53 | 52 | async def read_files(self, ctx: Context, ev: StartEvent) -> FileHelpEvent:
|
54 | 53 | user_msg = ev.user_msg
|
55 |
| - last_file = self.uploaded_files[-1] |
| 54 | + |
| 55 | + # 1. Access through workflow instance as is |
| 56 | + # last_file = self.uploaded_files[-1] |
| 57 | + |
| 58 | + |
| 59 | + # 2. Access through user_msg (if it's a ChatMessage) |
| 60 | + # llama_index supports ChatMessage with DocumentBlock, which is mostly the same as our ServerFile.
| 61 | + # (though I suspect we'll still have to deal with the separate problem of
| 62 | + # passing other kinds of data to the workflow later)
| 63 | + # e.g: |
| 64 | + # files = [ |
| 65 | + # ServerFile.from_document_block(block) |
| 66 | + # for block in user_msg.blocks |
| 67 | + # if isinstance(block, DocumentBlock) |
| 68 | + # ] |
| 69 | + # |
| 70 | + # or they can just use files: List[DocumentBlock] as is. |
| 71 | + |
| 72 | + |
| 73 | + # 3. Introduce server start event with additional fields |
| 74 | + # e.g: |
| 75 | + # class ChatStartEvent(StartEvent): |
| 76 | + # user_msg: Union[str, ChatMessage] |
| 77 | + # chat_history: list[ChatMessage] |
| 78 | + # attachments: list[ServerFile] |
| 79 | + # Then the user can clearly see what they have available on the StartEvent
| 80 | + |
56 | 81 | file_path = FileService.get_private_file_path(last_file.id)
|
57 | 82 | with open(file_path, "r", encoding="utf-8") as f:
|
58 | 83 | file_content = f.read()
|
|
0 commit comments