Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
83 changes: 62 additions & 21 deletions js/plugins/compat-oai/src/model.ts
Original file line number Diff line number Diff line change
Expand Up @@ -277,19 +277,36 @@ export function fromOpenAIChoice(
const toolRequestParts = choice.message.tool_calls?.map((toolCall) =>
fromOpenAIToolCall(toolCall, choice)
);

// Build content array based on what's present in the message
let content: Part[] = [];

if (toolRequestParts) {
content = toolRequestParts as ToolRequestPart[];
} else {
// Handle reasoning_content if present
if (
'reasoning_content' in choice.message &&
choice.message.reasoning_content
) {
content.push({ reasoning: choice.message.reasoning_content as string });
}

// Handle regular content if present
if (choice.message.content) {
content.push(
jsonMode
? { data: JSON.parse(choice.message.content!) }
: { text: choice.message.content! }
);
}
}

return {
finishReason: finishReasonMap[choice.finish_reason] || 'other',
message: {
role: 'model',
content: toolRequestParts
? // Note: Not sure why I have to cast here exactly.
// Otherwise it thinks toolRequest must be 'undefined' if provided
(toolRequestParts as ToolRequestPart[])
: [
jsonMode
? { data: JSON.parse(choice.message.content!) }
: { text: choice.message.content! },
],
content,
},
};
}
Expand All @@ -308,21 +325,35 @@ export function fromOpenAIChunkChoice(
const toolRequestParts = choice.delta.tool_calls?.map((toolCall) =>
fromOpenAIToolCall(toolCall, choice)
);

// Build content array based on what's present in the delta
let content: Part[] = [];

if (toolRequestParts) {
content = toolRequestParts as ToolRequestPart[];
} else {
// Handle reasoning_content if present
if ('reasoning_content' in choice.delta && choice.delta.reasoning_content) {
content.push({ reasoning: choice.delta.reasoning_content as string });
}

// Handle regular content if present
if (choice.delta.content) {
content.push(
jsonMode
? { data: JSON.parse(choice.delta.content!) }
: { text: choice.delta.content! }
);
}
}

return {
finishReason: choice.finish_reason
? finishReasonMap[choice.finish_reason] || 'other'
: 'unknown',
message: {
role: 'model',
content: toolRequestParts
? // Note: Not sure why I have to cast here exactly.
// Otherwise it thinks toolRequest must be 'undefined' if provided
(toolRequestParts as ToolRequestPart[])
: [
jsonMode
? { data: JSON.parse(choice.delta.content!) }
: { text: choice.delta.content! },
],
content,
},
};
}
Expand Down Expand Up @@ -383,9 +414,19 @@ export function toOpenAIRequestBody(
}
const response_format = request.output?.format;
if (response_format === 'json') {
body.response_format = {
type: 'json_object',
};
if (request.output?.schema) {
body.response_format = {
type: 'json_schema',
json_schema: {
name: 'output',
schema: request.output!.schema,
},
};
Comment on lines +417 to +424
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I could not find API reference for json_schema for DeepSeek. Are you sure this works?

http://api-docs.deepseek.com/api/create-chat-completion#request

Copy link
Author

@neokn neokn Oct 17, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

hi @ssbushi, thanks for your review. Yes, DeepSeek doesn't support json_schema,
so there is still a fallback to json_object.
When integrating with DeepSeek, the user can set output.format without a JSON schema,
e.g. ai.generate({output: {format: 'json'}});

then it will fall back to

body.response_format = {
    type: 'json_object',
};

I followed these documents when implementing the json_schema part:
LiteLLM
OpenAI
xAI

} else {
body.response_format = {
type: 'json_object',
};
}
} else if (response_format === 'text') {
body.response_format = {
type: 'text',
Expand Down
106 changes: 106 additions & 0 deletions js/plugins/compat-oai/tests/compat_oai_test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -412,6 +412,48 @@ describe('fromOpenAiChoice', () => {
finishReason: 'stop',
},
},
{
should: 'should work with reasoning_content',
choice: {
index: 0,
message: {
role: 'assistant',
content: null,
reasoning_content: 'Let me think about this step by step...',
refusal: null,
} as any,
finish_reason: 'stop',
logprobs: null,
},
expectedOutput: {
finishReason: 'stop',
message: {
role: 'model',
content: [{ reasoning: 'Let me think about this step by step...' }],
},
},
},
{
should: 'should work with both reasoning_content and content',
choice: {
index: 0,
message: {
role: 'assistant',
content: 'Final answer',
reasoning_content: 'Let me think...',
refusal: null,
} as any,
finish_reason: 'stop',
logprobs: null,
},
expectedOutput: {
finishReason: 'stop',
message: {
role: 'model',
content: [{ reasoning: 'Let me think...' }, { text: 'Final answer' }],
},
},
},
];

for (const test of testCases) {
Expand Down Expand Up @@ -503,6 +545,43 @@ describe('fromOpenAiChunkChoice', () => {
finishReason: 'stop',
},
},
{
should: 'should work with reasoning_content',
chunkChoice: {
index: 0,
delta: {
role: 'assistant',
reasoning_content: 'Let me think about this step by step...',
} as any,
finish_reason: null,
},
expectedOutput: {
finishReason: 'unknown',
message: {
role: 'model',
content: [{ reasoning: 'Let me think about this step by step...' }],
},
},
},
{
should: 'should work with both reasoning_content and content',
chunkChoice: {
index: 0,
delta: {
role: 'assistant',
reasoning_content: 'Let me think...',
content: 'Final answer',
} as any,
finish_reason: 'stop',
},
expectedOutput: {
finishReason: 'stop',
message: {
role: 'model',
content: [{ reasoning: 'Let me think...' }, { text: 'Final answer' }],
},
},
},
];

for (const test of testCases) {
Expand Down Expand Up @@ -1284,6 +1363,33 @@ describe('toOpenAiRequestBody', () => {
},
});
});
it('sets json_schema response_format when an output schema is provided', () => {
const schema = {
type: 'object',
properties: { foo: { type: 'string' } },
required: ['foo'],
additionalProperties: false,
};
const request = {
messages: [{ role: 'user', content: [{ text: 'hello' }] }],
output: { format: 'json', schema },
} as unknown as GenerateRequest;

const actualOutput = toOpenAIRequestBody('gpt-4o', request) as unknown as {
response_format?: {
type: string;
json_schema?: { name: string; schema: unknown };
};
};

expect(actualOutput.response_format).toStrictEqual({
type: 'json_schema',
json_schema: {
name: 'output',
schema,
},
});
});
});

describe('openAIModelRunner', () => {
Expand Down