Skip to content
Open
Show file tree
Hide file tree
Changes from 13 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions financial-analyst-deepseek/financial-analyst-langgraph/.env
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
AZURE_OPENAI_API_KEY="2pq............."
AZURE_OPENAI_ENDPOINT="https:........."
AZURE_OPENAI_DEPLOYMENT_NAME="gpt-4o"
AZURE_OPENAI_API_VERSION="...."
71 changes: 71 additions & 0 deletions financial-analyst-deepseek/financial-analyst-langgraph/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
# MCP-powered Financial Analyst using LangGraph and Azure OpenAI GPT-4o

This project implements a financial analysis agentic workflow that analyzes stock market data and provides insights.

We use:
- Langgraph for multi-agent orchestration.
- Azure OpenAI (GPT-4o) as the language model.
- Cursor IDE as the MCP host.

---
## Setup and installation

**Clone the repository and navigate into the project directory:**

**Fill Your Environment Variables**

A `.env` template file is already included in the project.
Open the file and fill in your actual API keys. **Never commit real API keys to version control** — add `.env` to `.gitignore`.

```env
AZURE_OPENAI_API_KEY="your-api-key"
AZURE_OPENAI_ENDPOINT="https://your-resource.openai.azure.com/"
AZURE_OPENAI_DEPLOYMENT_NAME="gpt-4o"
AZURE_OPENAI_API_VERSION="your-api-version"
```
**Install Dependencies**

Ensure you have Python 3.12 or later installed.
```
pip install -r requirements.txt
```

---

## Run the project

First, set up your MCP server as follows:
- Go to Cursor settings
- Select MCP
- Add new global MCP server.

In the JSON file, add this:
```json
{
"mcpServers": {
"financial-analyst": {
"command": "uv",
"args": [
"--directory",
"absolute/path/to/project_root",
"run",
"server.py"
]
}
}
}
```

You should now be able to see the MCP server listed in the MCP settings.

In Cursor MCP settings make sure to toggle the button to connect the server to the host. Done! Your server is now up and running.

You can now chat with Cursor and analyze stock market data. Simply provide the stock symbol and timeframe you want to analyze, and watch the magic unfold.

**Example queries**:
- "Show me Tesla's stock performance over the last 3 months"
- "Compare Apple and Microsoft stocks for the past year"
- "Analyze the trading volume of Amazon stock for the last month"

---



Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
@@ -0,0 +1,103 @@
from langgraph.graph import StateGraph, END ,START
from pydantic import BaseModel, Field
from langchain_core.runnables import Runnable
from typing import TypedDict, Literal, Optional
from langchain_core.messages import AIMessage, HumanMessage
import ast
from IPython.display import Image, display
from typing import Dict
from langchain_openai import AzureChatOpenAI
import os
from dotenv import load_dotenv
from langgraph.errors import NodeInterrupt
import yfinance as yf
load_dotenv()


llm = AzureChatOpenAI(
azure_endpoint=os.environ['AZURE_OPENAI_ENDPOINT'],
azure_deployment=os.environ['AZURE_OPENAI_DEPLOYMENT_NAME'],
openai_api_version=os.environ['AZURE_OPENAI_API_VERSION'],
openai_api_key=os.environ['AZURE_OPENAI_API_KEY']
)



class QueryFields(BaseModel):
symbol: str = Field(..., description="Stock ticker symbol (e.g., TSLA, AAPL).")
timeframe: str = Field(..., description="Time period (e.g., '1d', '1mo', '1y').")
action: str = Field(..., description="Action to be performed (e.g., 'fetch', 'plot').")

class QueryAnalysisOutput(BaseModel):
result: QueryFields
class StockAnalysisState(TypedDict):
query: str
parsed_output: QueryAnalysisOutput
generated_code: Optional[str]
execution_result: Optional[str]



def query_parser_node(state: StockAnalysisState):
query = state["query"]
prompt = """You are a Stock Data Analyst. Extract stock details from this user query: {query}.

"""
finalprompt=prompt.format(query=query)
llm_with_struc=llm.with_structured_output(QueryAnalysisOutput)
response = llm_with_struc.invoke(finalprompt)

return {"parsed_output": response}

def code_writer_node(state: StockAnalysisState):
parsed = state["parsed_output"]
if isinstance(parsed, dict):
raise NodeInterrupt("recieved wrong type")
fprompt = """You are a Senior Python Developer. Generate code to {action} the stock data.
Stock: {symbol}
Timeframe: {timeframe}

Use yfinance, pandas, and matplotlib libraries. Output should be a clean, executable .py Python script for stock visualization without explanations or AI-generated messages—just the direct script content. without ''' or any code blockers
"""
action=parsed.result.action
symbol=parsed.result.symbol
time=parsed.result.timeframe
ffprompt=fprompt.format(action=action,symbol=symbol,timeframe=time)
code = llm.invoke(ffprompt)
return {"generated_code": code}


def code_result(state: StockAnalysisState):

ans=StockAnalysisState["generated_code"]
return {"execution_result": ans}


graph = StateGraph(StockAnalysisState)

graph.add_node("QueryParser", query_parser_node)
graph.add_node("CodeWriter", code_writer_node)
graph.add_node("CodeExecutor", code_result)


graph.add_edge(START,"QueryParser")
graph.add_edge("QueryParser", "CodeWriter")
graph.add_edge("CodeWriter", "CodeExecutor")
graph.add_edge("CodeExecutor", END)


workflow = graph.compile()

#visual representation of our graph
#display(Image(workflow.get_graph(xray=1).draw_mermaid_png()))


# Function to be wrapped inside MCP tool
def run_financial_analysis(query):
result = workflow.invoke({"query": query})

return result["generated_code"].content

if __name__ == "__main__":
res=run_financial_analysis("Plot YTD stock gain of Tesla")
print(res)
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Copy link
Owner

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Can you please use UV as a package manager? rest of the things look good

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

yes definnetly, i will update

Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
langgraph
langchain
langchain-core
langchain-openai
pydantic
ipython
python-dotenv
yfinance
matplotlib
mcp
63 changes: 63 additions & 0 deletions financial-analyst-deepseek/financial-analyst-langgraph/server.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
from mcp.server.fastmcp import FastMCP
from finance_langgraph import run_financial_analysis

# create FastMCP instance
mcp = FastMCP("financial-analyst")

@mcp.tool()
def analyze_stock(query: str) -> str:
"""
Analyzes stock market data based on the query and generates executable Python code for analysis and visualization.
Returns a formatted Python script ready for execution.

The query is a string that must contain the stock symbol (e.g., TSLA, AAPL, NVDA, etc.),
timeframe (e.g., 1d, 1mo, 1y), and action to perform (e.g., plot, analyze, compare).

Example queries:
- "Show me Tesla's stock performance over the last 3 months"
- "Compare Apple and Microsoft stocks for the past year"
- "Analyze the trading volume of Amazon stock for the last month"

Args:
query (str): The query to analyze the stock market data.

Returns:
str: A nicely formatted python code as a string.
"""
try:
result = run_financial_analysis(query)
return result
except Exception as e:
return f"Error: {e}"


@mcp.tool()
def save_code(code: str) -> str:
"""
Expects a nicely formatted, working and executable python code as input in form of a string.
Save the given code to a file stock_analysis.py, make sure the code is a valid python file, nicely formatted and ready to execute.

Args:
code (str): The nicely formatted, working and executable python code as string.

Returns:
str: A message indicating the code was saved successfully.
"""
try:
with open('stock_analysis.py', 'w') as f:
f.write(code)
return "Code saved to stock_analysis.py"
except Exception as e:
return f"Error: {e}"

@mcp.tool()
def run_code_and_show_plot() -> str:
"""
Run the code in stock_analysis.py and generate the plot
"""
with open('stock_analysis.py', 'r') as f:
exec(f.read())

# Run the server locally
if __name__ == "__main__":
mcp.run(transport='stdio')