Skip to content

Commit 8b3e72d

Browse files
authored
fix (provider/xai): handle new reasoning text chunk parts (#12362)
## Background

The xAI provider introduced a new type of reasoning text chunk in their API responses that needs to be properly handled in our SDK.

## Summary

This PR adds support for handling the new `response.reasoning_text.delta` and `response.reasoning_text.done` chunk types in the xAI provider. These chunks represent streaming reasoning text that should be processed as reasoning deltas in our standardized output format.

The implementation:
- Adds the new chunk types to the xAI responses schema
- Updates the language model to process these new chunk types
- Adds tests to verify the correct handling of these new chunk formats

An example has also been added to demonstrate streaming text with the grok-code-fast-1 model, showing how to display reasoning text in a different color.

## Manual Verification

Tested with the new example that streams text from the grok-code-fast-1 model, confirming that reasoning text chunks are properly processed and displayed.

## Checklist

- [x] Tests have been added / updated (for bug fixes / features)
- [x] A _patch_ changeset for relevant packages has been added (for bug fixes / features - run `pnpm changeset` in the project root)
- [x] I have reviewed this pull request (self-review)

## Related Issues

Fixes handling of new reasoning text chunk parts from the xAI provider.
1 parent ff0a2e4 commit 8b3e72d

File tree

5 files changed

+216
-0
lines changed

5 files changed

+216
-0
lines changed

.changeset/fresh-turkeys-clap.md

Lines changed: 5 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,5 @@
1+
---
2+
'@ai-sdk/xai': patch
3+
---
4+
5+
fix (provider/xai): handle new reasoning text chunk parts
Lines changed: 21 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,21 @@
1+
import { xai } from '@ai-sdk/xai';
2+
import { streamText } from 'ai';
3+
import { run } from '../lib/run';
4+
5+
run(async () => {
6+
const result = streamText({
7+
model: xai.responses('grok-code-fast-1'),
8+
prompt: 'Invent a new holiday and describe its traditions.',
9+
});
10+
11+
for await (const part of result.fullStream) {
12+
if (part.type === 'reasoning-delta') {
13+
process.stdout.write(`\x1b[34m${part.text}\x1b[0m`);
14+
} else if (part.type === 'text-delta') {
15+
process.stdout.write(part.text);
16+
}
17+
}
18+
console.log();
19+
console.log('Token usage:', await result.usage);
20+
console.log('Finish reason:', await result.finishReason);
21+
});

packages/xai/src/responses/xai-responses-api.ts

Lines changed: 14 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -341,6 +341,20 @@ export const xaiResponsesChunkSchema = z.union([
341341
summary_index: z.number(),
342342
text: z.string(),
343343
}),
344+
z.object({
345+
type: z.literal('response.reasoning_text.delta'),
346+
item_id: z.string(),
347+
output_index: z.number(),
348+
content_index: z.number(),
349+
delta: z.string(),
350+
}),
351+
z.object({
352+
type: z.literal('response.reasoning_text.done'),
353+
item_id: z.string(),
354+
output_index: z.number(),
355+
content_index: z.number(),
356+
text: z.string(),
357+
}),
344358
z.object({
345359
type: z.literal('response.web_search_call.in_progress'),
346360
item_id: z.string(),

packages/xai/src/responses/xai-responses-language-model.test.ts

Lines changed: 142 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -1573,6 +1573,148 @@ describe('XaiResponsesLanguageModel', () => {
15731573
expect(reasoningStartIndex).toBeLessThan(reasoningEndIndex);
15741574
});
15751575

1576+
it('should stream reasoning text deltas (response.reasoning_text.delta)', async () => {
1577+
prepareStreamChunks([
1578+
JSON.stringify({
1579+
type: 'response.created',
1580+
response: {
1581+
id: 'resp_123',
1582+
object: 'response',
1583+
model: 'grok-code-fast-1',
1584+
output: [],
1585+
},
1586+
}),
1587+
JSON.stringify({
1588+
type: 'response.output_item.added',
1589+
item: {
1590+
type: 'reasoning',
1591+
id: 'rs_456',
1592+
status: 'in_progress',
1593+
summary: [],
1594+
},
1595+
output_index: 0,
1596+
}),
1597+
JSON.stringify({
1598+
type: 'response.reasoning_text.delta',
1599+
item_id: 'rs_456',
1600+
output_index: 0,
1601+
content_index: 0,
1602+
delta: 'First',
1603+
}),
1604+
JSON.stringify({
1605+
type: 'response.reasoning_text.delta',
1606+
item_id: 'rs_456',
1607+
output_index: 0,
1608+
content_index: 0,
1609+
delta: ', analyze the question.',
1610+
}),
1611+
JSON.stringify({
1612+
type: 'response.reasoning_text.done',
1613+
item_id: 'rs_456',
1614+
output_index: 0,
1615+
content_index: 0,
1616+
text: 'First, analyze the question.',
1617+
}),
1618+
JSON.stringify({
1619+
type: 'response.output_item.done',
1620+
item: {
1621+
type: 'reasoning',
1622+
id: 'rs_456',
1623+
status: 'completed',
1624+
summary: [
1625+
{ type: 'summary_text', text: 'First, analyze the question.' },
1626+
],
1627+
},
1628+
output_index: 0,
1629+
}),
1630+
JSON.stringify({
1631+
type: 'response.output_item.added',
1632+
item: {
1633+
type: 'message',
1634+
id: 'msg_789',
1635+
role: 'assistant',
1636+
status: 'in_progress',
1637+
content: [],
1638+
},
1639+
output_index: 1,
1640+
}),
1641+
JSON.stringify({
1642+
type: 'response.output_text.delta',
1643+
item_id: 'msg_789',
1644+
output_index: 1,
1645+
content_index: 0,
1646+
delta: 'The answer.',
1647+
}),
1648+
JSON.stringify({
1649+
type: 'response.done',
1650+
response: {
1651+
id: 'resp_123',
1652+
object: 'response',
1653+
model: 'grok-code-fast-1',
1654+
status: 'completed',
1655+
output: [],
1656+
usage: {
1657+
input_tokens: 10,
1658+
output_tokens: 20,
1659+
output_tokens_details: { reasoning_tokens: 15 },
1660+
},
1661+
},
1662+
}),
1663+
]);
1664+
1665+
const { stream } = await createModel('grok-code-fast-1').doStream({
1666+
prompt: TEST_PROMPT,
1667+
});
1668+
1669+
const parts = await convertReadableStreamToArray(stream);
1670+
1671+
const reasoningStart = parts.find(
1672+
part => part.type === 'reasoning-start',
1673+
);
1674+
expect(reasoningStart).toMatchInlineSnapshot(`
1675+
{
1676+
"id": "reasoning-rs_456",
1677+
"providerMetadata": {
1678+
"xai": {
1679+
"itemId": "rs_456",
1680+
},
1681+
},
1682+
"type": "reasoning-start",
1683+
}
1684+
`);
1685+
1686+
const reasoningDeltas = parts.filter(
1687+
part => part.type === 'reasoning-delta',
1688+
);
1689+
expect(reasoningDeltas).toHaveLength(2);
1690+
expect(reasoningDeltas[0].delta).toBe('First');
1691+
expect(reasoningDeltas[1].delta).toBe(', analyze the question.');
1692+
1693+
const reasoningEnd = parts.find(part => part.type === 'reasoning-end');
1694+
expect(reasoningEnd).toMatchInlineSnapshot(`
1695+
{
1696+
"id": "reasoning-rs_456",
1697+
"providerMetadata": {
1698+
"xai": {
1699+
"itemId": "rs_456",
1700+
},
1701+
},
1702+
"type": "reasoning-end",
1703+
}
1704+
`);
1705+
1706+
// Verify ordering: reasoning-start < reasoning-deltas < reasoning-end < text
1707+
const startIdx = parts.findIndex(p => p.type === 'reasoning-start');
1708+
const firstDeltaIdx = parts.findIndex(
1709+
p => p.type === 'reasoning-delta',
1710+
);
1711+
const endIdx = parts.findIndex(p => p.type === 'reasoning-end');
1712+
const textIdx = parts.findIndex(p => p.type === 'text-delta');
1713+
expect(startIdx).toBeLessThan(firstDeltaIdx);
1714+
expect(firstDeltaIdx).toBeLessThan(endIdx);
1715+
expect(endIdx).toBeLessThan(textIdx);
1716+
});
1717+
15761718
it('should stream x_search tool call', async () => {
15771719
prepareChunksFixtureResponse('xai-x-search-tool');
15781720

packages/xai/src/responses/xai-responses-language-model.ts

Lines changed: 34 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -529,6 +529,40 @@ export class XaiResponsesLanguageModel implements LanguageModelV3 {
529529
return;
530530
}
531531

532+
if (event.type === 'response.reasoning_text.delta') {
533+
const blockId = `reasoning-${event.item_id}`;
534+
535+
if (activeReasoning[event.item_id] == null) {
536+
activeReasoning[event.item_id] = {};
537+
controller.enqueue({
538+
type: 'reasoning-start',
539+
id: blockId,
540+
providerMetadata: {
541+
xai: {
542+
itemId: event.item_id,
543+
},
544+
},
545+
});
546+
}
547+
548+
controller.enqueue({
549+
type: 'reasoning-delta',
550+
id: blockId,
551+
delta: event.delta,
552+
providerMetadata: {
553+
xai: {
554+
itemId: event.item_id,
555+
},
556+
},
557+
});
558+
559+
return;
560+
}
561+
562+
if (event.type === 'response.reasoning_text.done') {
563+
return;
564+
}
565+
532566
if (event.type === 'response.output_text.delta') {
533567
const blockId = `text-${event.item_id}`;
534568

0 commit comments

Comments
 (0)