Skip to content

Commit 53b892f

Browse files
furqan-shaikh-devFurqan Shaikh
andauthored
Add support for OCI OpenAI Responses API with Langchain/Langgraph (#61)
* Add OCIChatOpenAI for supporting OpenAI Responses API * remove oci values from example file * incorporate pr review comments * refactoring, adding openai dependency * add oci-openai as a dependency * move ChatOCIOpenAI to oci_generative_ai class, address other comments * address review comments * make conversation store id optional * fix lint errors * fix lint errors * fix lint errors in python 3.12 * fixing import error * fixing incorrect requires mark * add _resolve_base_url in try..except block --------- Co-authored-by: Furqan Shaikh <[email protected]>
1 parent 1203a01 commit 53b892f

File tree

13 files changed

+1266
-48
lines changed

13 files changed

+1266
-48
lines changed

libs/oci/README.md

Lines changed: 47 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -79,6 +79,53 @@ structured_llm = llm.with_structured_output(Joke)
7979
structured_llm.invoke("Tell me a joke about programming")
8080
```
8181

82+
### 5. Use OpenAI Responses API
83+
`ChatOCIOpenAI` supports the OpenAI Responses API.
84+
85+
```python
86+
from oci_openai import (
87+
OciSessionAuth,
88+
)
89+
from langchain_oci import ChatOCIOpenAI
90+
client = ChatOCIOpenAI(
91+
auth=OciSessionAuth(profile_name="MY_PROFILE_NAME"),
92+
compartment_id="MY_COMPARTMENT_ID",
93+
region="us-chicago-1",
94+
model="openai.gpt-4.1",
95+
conversation_store_id="MY_CONVERSATION_STORE_ID"
96+
)
97+
messages = [
98+
(
99+
"system",
100+
"You are a helpful translator. Translate the user sentence to French.",
101+
),
102+
("human", "I love programming."),
103+
]
104+
response = client.invoke(messages)
105+
```
106+
NOTE: By default, the `store` argument is set to `True`, which requires passing a `conversation_store_id`. If you set `store` to `False`, you can omit `conversation_store_id`.
107+
```python
108+
from oci_openai import (
109+
OciSessionAuth,
110+
)
111+
from langchain_oci import ChatOCIOpenAI
112+
client = ChatOCIOpenAI(
113+
auth=OciSessionAuth(profile_name="MY_PROFILE_NAME"),
114+
compartment_id="MY_COMPARTMENT_ID",
115+
region="us-chicago-1",
116+
model="openai.gpt-4.1",
117+
store=False
118+
)
119+
messages = [
120+
(
121+
"system",
122+
"You are a helpful translator. Translate the user sentence to French.",
123+
),
124+
("human", "I love programming."),
125+
]
126+
response = client.invoke(messages)
127+
```
128+
82129

83130
## OCI Data Science Model Deployment Examples
84131

libs/oci/examples/__init__.py

Whitespace-only changes.

libs/oci/examples/chat_models/__init__.py

Whitespace-only changes.
Lines changed: 118 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,118 @@
1+
from langchain_core.prompts import ChatPromptTemplate
2+
from oci_openai import OciSessionAuth
3+
from pydantic import BaseModel, Field
4+
from rich import print
5+
6+
from langchain_oci import ChatOCIOpenAI
7+
8+
# --- Example configuration ---------------------------------------------------
# Replace the placeholder OCIDs below with values from your own tenancy.

# Compartment that owns the Generative AI resources.
COMPARTMENT_ID = "ocid1.compartment.oc1..aaaaaaaaexample"

# Conversation store used when `store=True` (the default) on ChatOCIOpenAI.
CONVERSATION_STORE_ID = (
    "ocid1.generativeaiconversationstore.oc1.us-chicago-1.aaaaaaaaexample"
)

# Regional Generative AI inference endpoint and its region identifier.
SERVICE_ENDPOINT = "https://inference.generativeai.us-chicago-1.oci.oraclecloud.com"
REGION = "us-chicago-1"

# Model to invoke and the OCI CLI/session profile used for authentication.
MODEL = "openai.gpt-4o"
PROFILE_NAME = "oc1"
16+
17+
18+
def get_oci_openai_client():
    """Construct a ChatOCIOpenAI client from the module-level constants.

    Uses OCI session-based authentication (`OciSessionAuth`) with the
    configured profile, and points the client at the configured
    compartment, service endpoint, model, and conversation store.
    """
    session_auth = OciSessionAuth(profile_name=PROFILE_NAME)
    return ChatOCIOpenAI(
        auth=session_auth,
        compartment_id=COMPARTMENT_ID,
        service_endpoint=SERVICE_ENDPOINT,
        model=MODEL,
        conversation_store_id=CONVERSATION_STORE_ID,
    )
26+
27+
28+
def do_model_invoke():
    """Run a single chat invocation with a system + human message pair."""
    llm = get_oci_openai_client()
    system_msg = (
        "system",
        "You are a helpful translator. Translate the user sentence to French.",
    )
    human_msg = ("human", "I love programming.")
    return llm.invoke([system_msg, human_msg])
39+
40+
41+
def do_prompt_chaining():
    """Pipe a ChatPromptTemplate into the model and run one translation."""
    llm = get_oci_openai_client()
    system_template = (
        "You are a helpful assistant that translates {input_language}"
        " to {output_language}."
    )
    prompt = ChatPromptTemplate.from_messages(
        [("system", system_template), ("human", "{input}")]
    )
    # LCEL composition: the prompt's output feeds directly into the model.
    translation_chain = prompt | llm
    chain_inputs = {
        "input_language": "English",
        "output_language": "German",
        "input": "I love programming.",
    }
    return translation_chain.invoke(chain_inputs)
63+
64+
65+
def do_function_calling():
    """Bind a pydantic tool schema to the model and ask a weather question."""

    class GetWeather(BaseModel):
        """Get the current weather in a given location"""

        # The field description is forwarded to the model as part of the
        # tool's JSON schema, so the wording matters.
        location: str = Field(
            ..., description="The city and state, e.g. San Francisco, CA"
        )

    llm = get_oci_openai_client()
    tool_llm = llm.bind_tools([GetWeather])
    return tool_llm.invoke("what is the weather like in San Francisco")
79+
80+
81+
def do_web_search():
    """Enable the hosted web_search tool and ask for a recent news story."""
    llm = get_oci_openai_client()
    web_search_tool = {"type": "web_search"}
    search_llm = llm.bind_tools([web_search_tool])
    return search_llm.invoke("What was a positive news story from today?")
88+
89+
90+
def do_hosted_mcp_calling():
    """Attach a hosted MCP server (deepwiki) as a tool and query it."""
    llm = get_oci_openai_client()
    mcp_tool = {
        "type": "mcp",
        "server_label": "deepwiki",
        "server_url": "https://mcp.deepwiki.com/mcp",
        # "never" lets the server's tools run without per-call approval.
        "require_approval": "never",
    }
    mcp_llm = llm.bind_tools([mcp_tool])
    question = (
        "What transport protocols does the 2025-03-26 version of the MCP "
        "spec (modelcontextprotocol/modelcontextprotocol) support?"
    )
    return mcp_llm.invoke(question)
107+
108+
109+
def main():
    """Run every demo scenario in order and print each response."""
    demos = (
        do_model_invoke,
        do_prompt_chaining,
        do_function_calling,
        do_web_search,
        do_hosted_mcp_calling,
    )
    for demo in demos:
        print(demo())
115+
116+
117+
# Allow running this example module directly as a script.
if __name__ == "__main__":
    main()

libs/oci/langchain_oci/__init__.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66
ChatOCIModelDeploymentTGI,
77
ChatOCIModelDeploymentVLLM,
88
)
9-
from langchain_oci.chat_models.oci_generative_ai import ChatOCIGenAI
9+
from langchain_oci.chat_models.oci_generative_ai import ChatOCIGenAI, ChatOCIOpenAI
1010
from langchain_oci.embeddings.oci_data_science_model_deployment_endpoint import (
1111
OCIModelDeploymentEndpointEmbeddings,
1212
)
@@ -32,4 +32,5 @@
3232
"OCIModelDeploymentLLM",
3333
"OCIModelDeploymentTGI",
3434
"OCIModelDeploymentVLLM",
35+
"ChatOCIOpenAI",
3536
]

0 commit comments

Comments
 (0)