Commit b5e7a31
Backport: fix (provider/xai): handle new reasoning text chunk parts (#12363)
This is an automated backport of #12362 to the release-v5.0 branch. FYI @shaper. This backport has conflicts that need to be resolved manually.

### `git cherry-pick` output

```
CONFLICT (file location): examples/ai-functions/src/stream-text/xai-responses-grok-code-fast-1.ts added in 8b3e72d (fix (provider/xai): handle new reasoning text chunk parts (#12362)) inside a directory that was renamed in HEAD, suggesting it should perhaps be moved to examples/ai-core/src/stream-text/xai-responses-grok-code-fast-1.ts.
Auto-merging packages/xai/src/responses/xai-responses-api.ts
Auto-merging packages/xai/src/responses/xai-responses-language-model.test.ts
CONFLICT (content): Merge conflict in packages/xai/src/responses/xai-responses-language-model.test.ts
Auto-merging packages/xai/src/responses/xai-responses-language-model.ts
error: could not apply 8b3e72d... fix (provider/xai): handle new reasoning text chunk parts (#12362)
hint: After resolving the conflicts, mark them with
hint: "git add/rm <pathspec>", then run
hint: "git cherry-pick --continue".
hint: You can instead skip this commit with "git cherry-pick --skip".
hint: To abort and get back to the state before "git cherry-pick",
hint: run "git cherry-pick --abort".
hint: Disable this message with "git config set advice.mergeConflict false"
```

---------

Co-authored-by: Walter Korman <shaper@vercel.com>
1 parent 85c1f30 commit b5e7a31

File tree

6 files changed: +371 −0 lines changed

.changeset/fresh-turkeys-clap.md

Lines changed: 5 additions & 0 deletions

```md
---
'@ai-sdk/xai': patch
---

fix (provider/xai): handle new reasoning text chunk parts
```
examples/ai-core/src/stream-text/xai-responses-grok-code-fast-1.ts

Lines changed: 21 additions & 0 deletions

```ts
import { xai } from '@ai-sdk/xai';
import { streamText } from 'ai';
import { run } from '../lib/run';

run(async () => {
  const result = streamText({
    model: xai.responses('grok-code-fast-1'),
    prompt: 'Invent a new holiday and describe its traditions.',
  });

  for await (const part of result.fullStream) {
    if (part.type === 'reasoning-delta') {
      process.stdout.write(`\x1b[34m${part.text}\x1b[0m`);
    } else if (part.type === 'text-delta') {
      process.stdout.write(part.text);
    }
  }
  console.log();
  console.log('Token usage:', await result.usage);
  console.log('Finish reason:', await result.finishReason);
});
```
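The example prints reasoning in blue and answer text in plain form as the parts stream in. For callers that want the full strings instead of incremental output, a small variation can accumulate the parts. This is a hedged sketch using the same imports, part types, and model id as the example above; it is not part of this commit:

```ts
import { xai } from '@ai-sdk/xai';
import { streamText } from 'ai';

// Sketch (not from this commit): collect reasoning and answer text from the
// full stream rather than writing them to stdout as they arrive.
async function collectReasoningAndText(prompt: string) {
  const result = streamText({
    model: xai.responses('grok-code-fast-1'),
    prompt,
  });

  let reasoning = '';
  let text = '';
  for await (const part of result.fullStream) {
    if (part.type === 'reasoning-delta') {
      reasoning += part.text;
    } else if (part.type === 'text-delta') {
      text += part.text;
    }
  }
  return { reasoning, text };
}
```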

packages/xai/src/responses/__snapshots__/xai-responses-language-model.test.ts.snap

Lines changed: 27 additions & 0 deletions

```diff
@@ -879,6 +879,15 @@ exports[`XaiResponsesLanguageModel > doStream > text streaming > should stream t
   },
   "type": "reasoning-delta",
 },
+{
+  "id": "reasoning-rs_769f3302-64f9-4c72-2b48-860c87fd9b2a",
+  "providerMetadata": {
+    "xai": {
+      "itemId": "rs_769f3302-64f9-4c72-2b48-860c87fd9b2a",
+    },
+  },
+  "type": "reasoning-start",
+},
 {
   "id": "reasoning-rs_769f3302-64f9-4c72-2b48-860c87fd9b2a",
   "providerMetadata": {
@@ -4724,6 +4733,15 @@ exports[`XaiResponsesLanguageModel > doStream > text streaming > should stream t
   },
   "type": "reasoning-delta",
 },
+{
+  "id": "reasoning-rs_0b824fe9-3250-2588-0bbf-0810402fc822",
+  "providerMetadata": {
+    "xai": {
+      "itemId": "rs_0b824fe9-3250-2588-0bbf-0810402fc822",
+    },
+  },
+  "type": "reasoning-start",
+},
 {
   "id": "reasoning-rs_0b824fe9-3250-2588-0bbf-0810402fc822",
   "providerMetadata": {
@@ -8354,6 +8372,15 @@ exports[`XaiResponsesLanguageModel > doStream > text streaming > should stream t
   },
   "type": "reasoning-delta",
 },
+{
+  "id": "reasoning-rs_bf3b2b34-79d4-a45c-7be8-d1e5f96386c2",
+  "providerMetadata": {
+    "xai": {
+      "itemId": "rs_bf3b2b34-79d4-a45c-7be8-d1e5f96386c2",
+    },
+  },
+  "type": "reasoning-start",
+},
 {
   "id": "reasoning-rs_bf3b2b34-79d4-a45c-7be8-d1e5f96386c2",
   "providerMetadata": {
```

packages/xai/src/responses/xai-responses-api.ts

Lines changed: 14 additions & 0 deletions

```diff
@@ -290,6 +290,20 @@ export const xaiResponsesChunkSchema = z.union([
     summary_index: z.number(),
     text: z.string(),
   }),
+  z.object({
+    type: z.literal('response.reasoning_text.delta'),
+    item_id: z.string(),
+    output_index: z.number(),
+    content_index: z.number(),
+    delta: z.string(),
+  }),
+  z.object({
+    type: z.literal('response.reasoning_text.done'),
+    item_id: z.string(),
+    output_index: z.number(),
+    content_index: z.number(),
+    text: z.string(),
+  }),
   z.object({
     type: z.literal('response.web_search_call.in_progress'),
     item_id: z.string(),
```
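These schema entries only make the new chunk types parseable; the mapping to stream parts happens in `packages/xai/src/responses/xai-responses-language-model.ts`, which was auto-merged above and whose diff is not shown here. As a hedged sketch of what that mapping involves (names such as `emit` and `startedReasoningIds` are illustrative, not the file's actual code), the handler needs to open a reasoning block once per item id, forward each delta, and close the block when the text is done:

```ts
// Hypothetical sketch of reasoning-text chunk handling; the real logic lives
// in xai-responses-language-model.ts and may differ in detail.
type ReasoningChunk =
  | { type: 'response.reasoning_text.delta'; item_id: string; delta: string }
  | { type: 'response.reasoning_text.done'; item_id: string; text: string };

const startedReasoningIds = new Set<string>(); // item ids already opened

function handleReasoningChunk(
  chunk: ReasoningChunk,
  emit: (part: Record<string, unknown>) => void, // hypothetical part sink
): void {
  const id = `reasoning-${chunk.item_id}`;

  // Emit reasoning-start exactly once per reasoning item, before any delta.
  if (!startedReasoningIds.has(chunk.item_id)) {
    startedReasoningIds.add(chunk.item_id);
    emit({
      type: 'reasoning-start',
      id,
      providerMetadata: { xai: { itemId: chunk.item_id } },
    });
  }

  if (chunk.type === 'response.reasoning_text.delta') {
    emit({ type: 'reasoning-delta', id, delta: chunk.delta });
  } else {
    // 'done' carries the full text; the deltas already streamed it.
    emit({
      type: 'reasoning-end',
      id,
      providerMetadata: { xai: { itemId: chunk.item_id } },
    });
  }
}
```

The tests below exercise both paths: encrypted reasoning with no summary events at all, and the new `response.reasoning_text.*` stream.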

packages/xai/src/responses/xai-responses-language-model.test.ts

Lines changed: 256 additions & 0 deletions

```diff
@@ -1257,6 +1257,262 @@ describe('XaiResponsesLanguageModel', () => {
     `);
   });
 
+  it('should emit reasoning-start before reasoning-end when reasoning_summary_part.added is not sent', async () => {
+    // This test covers the case where xAI sends encrypted reasoning without
+    // streaming the reasoning summary text (no reasoning_summary_part.added events)
+    prepareStreamChunks([
+      JSON.stringify({
+        type: 'response.created',
+        response: {
+          id: 'resp_123',
+          object: 'response',
+          model: 'grok-4-fast',
+          output: [],
+        },
+      }),
+      JSON.stringify({
+        type: 'response.output_item.added',
+        item: {
+          type: 'reasoning',
+          id: 'rs_456',
+          status: 'in_progress',
+          summary: [],
+        },
+        output_index: 0,
+      }),
+      // Note: No response.reasoning_summary_part.added event
+      // This happens with encrypted reasoning when store=false
+      JSON.stringify({
+        type: 'response.output_item.done',
+        item: {
+          type: 'reasoning',
+          id: 'rs_456',
+          status: 'completed',
+          summary: [],
+          encrypted_content: 'encrypted_reasoning_content_xyz',
+        },
+        output_index: 0,
+      }),
+      JSON.stringify({
+        type: 'response.output_item.added',
+        item: {
+          type: 'message',
+          id: 'msg_789',
+          role: 'assistant',
+          status: 'in_progress',
+          content: [],
+        },
+        output_index: 1,
+      }),
+      JSON.stringify({
+        type: 'response.output_text.delta',
+        item_id: 'msg_789',
+        output_index: 1,
+        content_index: 0,
+        delta: 'The answer is 42.',
+      }),
+      JSON.stringify({
+        type: 'response.done',
+        response: {
+          id: 'resp_123',
+          object: 'response',
+          model: 'grok-4-fast',
+          status: 'completed',
+          output: [],
+          usage: { input_tokens: 10, output_tokens: 20 },
+        },
+      }),
+    ]);
+
+    const { stream } = await createModel().doStream({
+      prompt: TEST_PROMPT,
+    });
+
+    const parts = await convertReadableStreamToArray(stream);
+
+    // Verify reasoning-start is emitted before reasoning-end
+    const reasoningStart = parts.find(
+      part => part.type === 'reasoning-start',
+    );
+    const reasoningEnd = parts.find(part => part.type === 'reasoning-end');
+
+    expect(reasoningStart).toMatchInlineSnapshot(`
+      {
+        "id": "reasoning-rs_456",
+        "providerMetadata": {
+          "xai": {
+            "itemId": "rs_456",
+          },
+        },
+        "type": "reasoning-start",
+      }
+    `);
+
+    expect(reasoningEnd).toMatchInlineSnapshot(`
+      {
+        "id": "reasoning-rs_456",
+        "providerMetadata": {
+          "xai": {
+            "itemId": "rs_456",
+            "reasoningEncryptedContent": "encrypted_reasoning_content_xyz",
+          },
+        },
+        "type": "reasoning-end",
+      }
+    `);
+
+    // Verify reasoning-start comes before reasoning-end in the stream
+    const reasoningStartIndex = parts.findIndex(
+      part => part.type === 'reasoning-start',
+    );
+    const reasoningEndIndex = parts.findIndex(
+      part => part.type === 'reasoning-end',
+    );
+    expect(reasoningStartIndex).toBeLessThan(reasoningEndIndex);
+  });
+
+  it('should stream reasoning text deltas (response.reasoning_text.delta)', async () => {
+    prepareStreamChunks([
+      JSON.stringify({
+        type: 'response.created',
+        response: {
+          id: 'resp_123',
+          object: 'response',
+          model: 'grok-code-fast-1',
+          output: [],
+        },
+      }),
+      JSON.stringify({
+        type: 'response.output_item.added',
+        item: {
+          type: 'reasoning',
+          id: 'rs_456',
+          status: 'in_progress',
+          summary: [],
+        },
+        output_index: 0,
+      }),
+      JSON.stringify({
+        type: 'response.reasoning_text.delta',
+        item_id: 'rs_456',
+        output_index: 0,
+        content_index: 0,
+        delta: 'First',
+      }),
+      JSON.stringify({
+        type: 'response.reasoning_text.delta',
+        item_id: 'rs_456',
+        output_index: 0,
+        content_index: 0,
+        delta: ', analyze the question.',
+      }),
+      JSON.stringify({
+        type: 'response.reasoning_text.done',
+        item_id: 'rs_456',
+        output_index: 0,
+        content_index: 0,
+        text: 'First, analyze the question.',
+      }),
+      JSON.stringify({
+        type: 'response.output_item.done',
+        item: {
+          type: 'reasoning',
+          id: 'rs_456',
+          status: 'completed',
+          summary: [
+            { type: 'summary_text', text: 'First, analyze the question.' },
+          ],
+        },
+        output_index: 0,
+      }),
+      JSON.stringify({
+        type: 'response.output_item.added',
+        item: {
+          type: 'message',
+          id: 'msg_789',
+          role: 'assistant',
+          status: 'in_progress',
+          content: [],
+        },
+        output_index: 1,
+      }),
+      JSON.stringify({
+        type: 'response.output_text.delta',
+        item_id: 'msg_789',
+        output_index: 1,
+        content_index: 0,
+        delta: 'The answer.',
+      }),
+      JSON.stringify({
+        type: 'response.done',
+        response: {
+          id: 'resp_123',
+          object: 'response',
+          model: 'grok-code-fast-1',
+          status: 'completed',
+          output: [],
+          usage: {
+            input_tokens: 10,
+            output_tokens: 20,
+            output_tokens_details: { reasoning_tokens: 15 },
+          },
+        },
+      }),
+    ]);
+
+    const { stream } = await createModel('grok-code-fast-1').doStream({
+      prompt: TEST_PROMPT,
+    });
+
+    const parts = await convertReadableStreamToArray(stream);
+
+    const reasoningStart = parts.find(
+      part => part.type === 'reasoning-start',
+    );
+    expect(reasoningStart).toMatchInlineSnapshot(`
+      {
+        "id": "reasoning-rs_456",
+        "providerMetadata": {
+          "xai": {
+            "itemId": "rs_456",
+          },
+        },
+        "type": "reasoning-start",
+      }
+    `);
+
+    const reasoningDeltas = parts.filter(
+      part => part.type === 'reasoning-delta',
+    );
+    expect(reasoningDeltas).toHaveLength(2);
+    expect(reasoningDeltas[0].delta).toBe('First');
+    expect(reasoningDeltas[1].delta).toBe(', analyze the question.');
+
+    const reasoningEnd = parts.find(part => part.type === 'reasoning-end');
+    expect(reasoningEnd).toMatchInlineSnapshot(`
+      {
+        "id": "reasoning-rs_456",
+        "providerMetadata": {
+          "xai": {
+            "itemId": "rs_456",
+          },
+        },
+        "type": "reasoning-end",
+      }
+    `);
+
+    // Verify ordering: reasoning-start < reasoning-deltas < reasoning-end < text
+    const startIdx = parts.findIndex(p => p.type === 'reasoning-start');
+    const firstDeltaIdx = parts.findIndex(
+      p => p.type === 'reasoning-delta',
+    );
+    const endIdx = parts.findIndex(p => p.type === 'reasoning-end');
+    const textIdx = parts.findIndex(p => p.type === 'text-delta');
+    expect(startIdx).toBeLessThan(firstDeltaIdx);
+    expect(firstDeltaIdx).toBeLessThan(endIdx);
+    expect(endIdx).toBeLessThan(textIdx);
+  });
+
   it('should stream x_search tool call', async () => {
     prepareChunksFixtureResponse('xai-x-search-tool');
```
