Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,112 @@
import { instrumentAnthropicAiClient } from '@sentry/core';
import * as Sentry from '@sentry/node';

function createMockStreamEvents(model = 'claude-3-haiku-20240307') {
async function* generator() {
// initial message metadata with id/model and input tokens
yield {
type: 'content_block_start',
message: {
id: 'msg_stream_tool_1',
type: 'message',
role: 'assistant',
model,
content: [],
stop_reason: 'end_turn',
usage: { input_tokens: 11 },
},
};

// streamed text
yield { type: 'content_block_delta', delta: { text: 'Starting tool...' } };

// tool_use streamed via partial json
yield {
type: 'content_block_start',
index: 0,
content_block: { type: 'tool_use', id: 'tool_weather_2', name: 'weather' },
};
yield { type: 'content_block_delta', index: 0, delta: { partial_json: '{"city":' } };
yield { type: 'content_block_delta', index: 0, delta: { partial_json: '"Paris"}' } };
yield { type: 'content_block_stop', index: 0 };

// more text
yield { type: 'content_block_delta', delta: { text: 'Done.' } };

// final usage
yield { type: 'message_delta', usage: { output_tokens: 9 } };
}
return generator();
}

class MockAnthropic {
  /**
   * Minimal stand-in for the Anthropic SDK client. Only the surface the
   * instrumentation touches exists: `messages.create` and `messages.stream`.
   */
  constructor(config) {
    this.apiKey = config.apiKey;
    this.messages = {
      create: this._messagesCreate.bind(this),
      stream: this._messagesStream.bind(this),
    };
  }

  // Resolves to a plain message object, or to the mock event stream when the
  // caller passed `stream: true`.
  async _messagesCreate(params) {
    await new Promise(done => setTimeout(done, 5));
    if (params?.stream) {
      return createMockStreamEvents(params.model);
    }
    return {
      id: 'msg_mock_no_stream',
      type: 'message',
      model: params.model,
      role: 'assistant',
      content: [{ type: 'text', text: 'No stream' }],
      usage: { input_tokens: 2, output_tokens: 3 },
    };
  }

  // Dedicated streaming entry point: always returns the mock event stream.
  async _messagesStream(params) {
    await new Promise(done => setTimeout(done, 5));
    return createMockStreamEvents(params?.model);
  }
}

async function run() {
  await Sentry.startSpan({ op: 'function', name: 'main' }, async () => {
    const client = instrumentAnthropicAiClient(new MockAnthropic({ apiKey: 'mock-api-key' }));

    // Request pieces shared by both streaming variants.
    const model = 'claude-3-haiku-20240307';
    const messages = [{ role: 'user', content: 'Need the weather' }];
    const tools = [
      {
        name: 'weather',
        description: 'Get weather',
        input_schema: { type: 'object', properties: { city: { type: 'string' } }, required: ['city'] },
      },
    ];

    // Fully consume an async iterable, discarding each event.
    const drain = async iterable => {
      for await (const event of iterable) {
        void event;
      }
    };

    // Variant 1: stream via create({ stream: true }).
    await drain(await client.messages.create({ model, messages, tools, stream: true }));

    // Variant 2: stream via the dedicated messages.stream helper.
    await drain(await client.messages.stream({ model, messages, tools }));
  });
}

run();
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
import { instrumentAnthropicAiClient } from '@sentry/core';
import * as Sentry from '@sentry/node';

class MockAnthropic {
constructor(config) {
this.apiKey = config.apiKey;

this.messages = {
create: this._messagesCreate.bind(this),
};
}

async _messagesCreate(params) {
await new Promise(resolve => setTimeout(resolve, 5));

return {
id: 'msg_mock_tool_1',
type: 'message',
model: params.model,
role: 'assistant',
content: [
{ type: 'text', text: 'Let me check the weather.' },
{
type: 'tool_use',
id: 'tool_weather_1',
name: 'weather',
input: { city: 'Paris' },
},
{ type: 'text', text: 'It is sunny.' },
],
stop_reason: 'end_turn',
stop_sequence: null,
usage: {
input_tokens: 5,
output_tokens: 7,
},
};
}
}

async function run() {
  await Sentry.startSpan({ op: 'function', name: 'main' }, async () => {
    const client = instrumentAnthropicAiClient(new MockAnthropic({ apiKey: 'mock-api-key' }));

    // Single non-streaming request that advertises one available tool; the
    // instrumentation should record both the tool list and the tool call.
    const weatherTool = {
      name: 'weather',
      description: 'Get the weather by city',
      input_schema: {
        type: 'object',
        properties: { city: { type: 'string' } },
        required: ['city'],
      },
    };

    await client.messages.create({
      model: 'claude-3-haiku-20240307',
      messages: [{ role: 'user', content: 'What is the weather?' }],
      tools: [weatherTool],
    });
  });
}

run();
Original file line number Diff line number Diff line change
Expand Up @@ -293,4 +293,59 @@ describe('Anthropic integration', () => {
await createRunner().ignore('event').expect({ transaction: EXPECTED_STREAM_SPANS_PII_TRUE }).start().completed();
});
});

// Non-streaming tool calls + available tools (PII true)
createEsmAndCjsTests(__dirname, 'scenario-tools.mjs', 'instrument-with-pii.mjs', (createRunner, test) => {
  test('non-streaming sets available tools and tool calls with PII', async () => {
    const availableToolsJson =
      '[{"name":"weather","description":"Get the weather by city","input_schema":{"type":"object","properties":{"city":{"type":"string"}},"required":["city"]}}]';
    const toolCallsJson =
      '[{"type":"tool_use","id":"tool_weather_1","name":"weather","input":{"city":"Paris"}}]';

    // The gen_ai.messages span must carry both tool attributes as JSON strings.
    const expectedSpan = expect.objectContaining({
      op: 'gen_ai.messages',
      data: expect.objectContaining({
        'gen_ai.request.available_tools': availableToolsJson,
        'gen_ai.response.tool_calls': toolCallsJson,
      }),
    });

    await createRunner()
      .ignore('event')
      .expect({ transaction: { spans: expect.arrayContaining([expectedSpan]) } })
      .start()
      .completed();
  });
});

// Streaming tool calls + available tools (PII true)
createEsmAndCjsTests(__dirname, 'scenario-stream-tools.mjs', 'instrument-with-pii.mjs', (createRunner, test) => {
  test('streaming sets available tools and tool calls with PII', async () => {
    const availableToolsJson =
      '[{"name":"weather","description":"Get weather","input_schema":{"type":"object","properties":{"city":{"type":"string"}},"required":["city"]}}]';
    const toolCallsJson =
      '[{"type":"tool_use","id":"tool_weather_2","name":"weather","input":{"city":"Paris"}}]';

    // The streamed gen_ai.messages span must carry both tool attributes and a
    // description marking it as a stream response.
    const expectedSpan = expect.objectContaining({
      description: expect.stringContaining('stream-response'),
      op: 'gen_ai.messages',
      data: expect.objectContaining({
        'gen_ai.request.available_tools': availableToolsJson,
        'gen_ai.response.tool_calls': toolCallsJson,
      }),
    });

    await createRunner()
      .ignore('event')
      .expect({ transaction: { spans: expect.arrayContaining([expectedSpan]) } })
      .start()
      .completed();
  });
});
});
1 change: 1 addition & 0 deletions packages/core/src/utils/anthropic-ai/constants.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,4 +9,5 @@ export const ANTHROPIC_AI_INSTRUMENTED_METHODS = [
'models.get',
'completions.create',
'models.retrieve',
'beta.messages.create',
] as const;
21 changes: 19 additions & 2 deletions packages/core/src/utils/anthropic-ai/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ import {
ANTHROPIC_AI_RESPONSE_TIMESTAMP_ATTRIBUTE,
GEN_AI_OPERATION_NAME_ATTRIBUTE,
GEN_AI_PROMPT_ATTRIBUTE,
GEN_AI_REQUEST_AVAILABLE_TOOLS_ATTRIBUTE,
GEN_AI_REQUEST_FREQUENCY_PENALTY_ATTRIBUTE,
GEN_AI_REQUEST_MAX_TOKENS_ATTRIBUTE,
GEN_AI_REQUEST_MESSAGES_ATTRIBUTE,
Expand All @@ -19,6 +20,7 @@ import {
GEN_AI_RESPONSE_ID_ATTRIBUTE,
GEN_AI_RESPONSE_MODEL_ATTRIBUTE,
GEN_AI_RESPONSE_TEXT_ATTRIBUTE,
GEN_AI_RESPONSE_TOOL_CALLS_ATTRIBUTE,
GEN_AI_SYSTEM_ATTRIBUTE,
} from '../ai/gen-ai-attributes';
import { buildMethodPath, getFinalOperationName, getSpanOperation, setTokenUsageAttributes } from '../ai/utils';
Expand All @@ -31,6 +33,7 @@ import type {
AnthropicAiOptions,
AnthropicAiResponse,
AnthropicAiStreamingEvent,
ContentBlock,
} from './types';
import { shouldInstrument } from './utils';

Expand All @@ -46,6 +49,9 @@ function extractRequestAttributes(args: unknown[], methodPath: string): Record<s

if (args.length > 0 && typeof args[0] === 'object' && args[0] !== null) {
const params = args[0] as Record<string, unknown>;
if (params.tools && Array.isArray(params.tools)) {
attributes[GEN_AI_REQUEST_AVAILABLE_TOOLS_ATTRIBUTE] = JSON.stringify(params.tools);
}

attributes[GEN_AI_REQUEST_MODEL_ATTRIBUTE] = params.model ?? 'unknown';
if ('temperature' in params) attributes[GEN_AI_REQUEST_TEMPERATURE_ATTRIBUTE] = params.temperature;
Expand Down Expand Up @@ -96,10 +102,21 @@ function addResponseAttributes(span: Span, response: AnthropicAiResponse, record
if (Array.isArray(response.content)) {
span.setAttributes({
[GEN_AI_RESPONSE_TEXT_ATTRIBUTE]: response.content
.map((item: { text: string | undefined }) => item.text)
.filter((text): text is string => text !== undefined)
.map((item: ContentBlock) => item.text)
.filter(text => !!text)
.join(''),
});

const toolCalls: Array<ContentBlock> = [];

for (const item of response.content) {
if (item.type === 'tool_use' || item.type === 'server_tool_use') {
toolCalls.push(item);
}
}
if (toolCalls.length > 0) {
span.setAttributes({ [GEN_AI_RESPONSE_TOOL_CALLS_ATTRIBUTE]: JSON.stringify(toolCalls) });
}
}
}
// Completions.create
Expand Down
Loading
Loading