"""Imports and LLM backend selection for the report-analysis agent.

Exactly one ``llm`` assignment should be active; the Anthropic backend is the
default, with OpenAI and Ollama alternatives left commented out for easy
switching.
"""
import os

from llama_index.core.memory import ChatMemoryBuffer
from llama_index.core.tools import FunctionTool
from llama_index.llms.anthropic import Anthropic
from llama_index.llms.ollama import Ollama
from llama_index.llms.openai import OpenAI

from tools import PDFExtractor, AskVisionModel, StructuredFileReader, Report

# Prefer the environment variable so real keys are never committed to source
# control; the original placeholder string is kept as the fallback, so behavior
# is unchanged when the variable is unset.
CLAUDE_API_KEY = os.environ.get("ANTHROPIC_API_KEY", "<ANTROPIC-API-KEY>")

llm = Anthropic(model="claude-3-opus-20240229",  # you can change the model
                api_key=CLAUDE_API_KEY)

OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", "<OPENAI_API_KEY>")
#llm = OpenAI(model="gpt-4", api_key=OPENAI_API_KEY)

#llm = Ollama(model="llama3", request_timeout=360)
28
33
name = "Report" ,
29
34
description = """Useful when you are done with analysis and want to save the final version of the report.""" )
30
35
31
- with open ("claude_prompt.txt" , "r" ) as f :
36
+ with open ("prompts/ claude_prompt.txt" , "r" ) as f :
32
37
prompt = f .readlines ()
33
38
34
39
agent = ReActAgent .from_tools (llm = llm ,
35
- max_iterations = 20 ,
40
+ max_iterations = 20 ,
36
41
tools = [pdf_tool , askvision_tool , directory_tool , report_tool ],
37
42
verbose = True ,
38
43
memory = ChatMemoryBuffer .from_defaults (llm = llm ),
39
44
chat_history = [
40
45
{"role" :"system" , "content" : prompt }])
41
46
42
- #text = "I want you to read files provided and create a complete analysis of my company's stats"
47
+ #text = "I want you to read files provided and create a complete analysis of my company's stats. "
43
48
text = input ("Ask: " )
44
- print (agent .chat (text ))
49
+ print (agent .chat (text ))
50
+
51
+
52
+ # Optional: saving Agent's memory to a file
53
+ #memory_path = "memory.txt"
54
+ #with open(memory_path, "w") as file:
55
+ # file.write(str(agent.memory.to_string()))
56
+ #print(f"Memory saved to {memory_path}")