Skip to content
Merged
7 changes: 6 additions & 1 deletion static/app/gettingStartedDocs/node/utils.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ import type {
} from 'sentry/components/onboarding/gettingStartedDoc/types';
import {StepType} from 'sentry/components/onboarding/gettingStartedDoc/types';
import {t, tct} from 'sentry/locale';
import {CopyLLMPromptButton} from 'sentry/views/insights/pages/agents/llmOnboardingInstructions';

function getInstallSnippet({
params,
Expand Down Expand Up @@ -605,14 +606,18 @@ Sentry.init({
{
type: 'text',
text: tct(
'Then follow the [link:manual instrumentation guide] to instrument your AI calls.',
'Then follow the [link:manual instrumentation guide] to instrument your AI calls, or use an AI coding agent to do it for you.',
{
link: (
<ExternalLink href="https://docs.sentry.io/platforms/node/tracing/instrumentation/ai-agents-module/#manual-instrumentation" />
),
}
),
},
{
type: 'custom',
content: <CopyLLMPromptButton />,
},
];

const selected = (params.platformOptions as any)?.integration ?? 'vercel_ai';
Expand Down
7 changes: 6 additions & 1 deletion static/app/gettingStartedDocs/python/agentMonitoring.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ import type {
} from 'sentry/components/onboarding/gettingStartedDoc/types';
import {StepType} from 'sentry/components/onboarding/gettingStartedDoc/types';
import {t, tct} from 'sentry/locale';
import {CopyLLMPromptButton} from 'sentry/views/insights/pages/agents/llmOnboardingInstructions';

import {getPythonInstallCodeBlock} from './utils';

Expand Down Expand Up @@ -307,14 +308,18 @@ sentry_sdk.init(
{
type: 'text',
text: tct(
'Then follow the [link:manual instrumentation guide] to instrument your AI calls.',
'Then follow the [link:manual instrumentation guide] to instrument your AI calls, or use an AI coding agent to do it for you.',
{
link: (
<ExternalLink href="https://docs.sentry.io/platforms/python/tracing/instrumentation/custom-instrumentation/ai-agents-module/" />
),
}
),
},
{
type: 'custom',
content: <CopyLLMPromptButton />,
},
],
};

Expand Down
2 changes: 2 additions & 0 deletions static/app/utils/analytics/agentMonitoringAnalyticsEvents.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ export type AgentMonitoringEventParameters = {
direction: 'asc' | 'desc';
table: string;
};
'agent-monitoring.copy-llm-prompt-click': Record<string, unknown>;
'agent-monitoring.drawer.open': Record<string, unknown>;
'agent-monitoring.drawer.span-select': Record<string, unknown>;
'agent-monitoring.drawer.view-full-trace-click': Record<string, unknown>;
Expand All @@ -25,6 +26,7 @@ export const agentMonitoringEventMap: Record<
keyof AgentMonitoringEventParameters,
string
> = {
'agent-monitoring.copy-llm-prompt-click': 'Agent Monitoring: Copy LLM Prompt Click',
'agent-monitoring.page-view': 'Agent Monitoring: Page View',
'agent-monitoring.table-switch': 'Agent Monitoring: Table Switch',
'agent-monitoring.column-sort': 'Agent Monitoring: Column Sort',
Expand Down
197 changes: 197 additions & 0 deletions static/app/views/insights/pages/agents/llmOnboardingInstructions.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,197 @@
import {Button} from 'sentry/components/core/button';
import {IconCopy} from 'sentry/icons';
import {t} from 'sentry/locale';
import {trackAnalytics} from 'sentry/utils/analytics';
import useCopyToClipboard from 'sentry/utils/useCopyToClipboard';
import useOrganization from 'sentry/utils/useOrganization';

/**
 * Button that copies the AI-agent instrumentation prompt
 * (`LLM_ONBOARDING_INSTRUCTIONS`) to the user's clipboard and records an
 * analytics event for each click.
 */
export function CopyLLMPromptButton() {
  const organization = useOrganization();
  const {copy} = useCopyToClipboard();

  // Record the click first, then write the prompt to the clipboard.
  const handleClick = () => {
    trackAnalytics('agent-monitoring.copy-llm-prompt-click', {organization});
    copy(LLM_ONBOARDING_INSTRUCTIONS, {
      successMessage: t('Copied instrumentation prompt to clipboard'),
    });
  };

  return (
    <Button size="sm" icon={<IconCopy />} onClick={handleClick}>
      {t('Copy Prompt for AI Agent')}
    </Button>
  );
}

// Markdown prompt copied to the clipboard by `CopyLLMPromptButton` above.
// It is a runtime string shown to/pasted into an AI coding agent, so treat
// every character as user-facing content — do not reflow or "fix" it without
// intending to change what users paste into their agent.
// NOTE(review): the docs URLs and the integration table presumably need to be
// kept in sync with the official Sentry AI Agent Monitoring docs — verify
// against docs.sentry.io when editing.
const LLM_ONBOARDING_INSTRUCTIONS = `
# Instrument Sentry AI Agent Monitoring

Follow this decision tree to instrument AI Agent Monitoring.

## 1. Verify Sentry + Tracing

**Search for Sentry initialization:**
- JS/TS: \`Sentry.init\` in entry points, \`@sentry/*\` in package.json
- Python: \`sentry_sdk.init\` in entry points, \`sentry-sdk\` in requirements

**If not found:** Set up Sentry first following the official docs:
- JS/TS: https://docs.sentry.io/platforms/javascript/guides/node/
- Python: https://docs.sentry.io/platforms/python/

**Verify tracing is enabled** (REQUIRED for AI monitoring):
\`\`\`javascript
// JS - must have tracesSampleRate > 0, min SDK version 10.28.0
Sentry.init({ dsn: "...", tracesSampleRate: 1.0, sendDefaultPii: true })
\`\`\`
\`\`\`python
# Python - must have traces_sample_rate > 0
sentry_sdk.init(dsn="...", traces_sample_rate=1.0, send_default_pii=True)
\`\`\`

If missing, add \`tracesSampleRate: 1.0\` / \`traces_sample_rate=1.0\` and \`sendDefaultPii: true\` / \`send_default_pii=True\`.

## 2. Check for Supported AI Libraries

Check in this order - **use the highest-level framework found** (e.g., if using Vercel AI SDK with OpenAI provider, use Vercel integration, not OpenAI):

| Library (check in order) | JS Integration | Python Integration | Python Extra |
|--------------------------|---------------|-------------------|--------------|
| Vercel AI SDK | \`Sentry.vercelAIIntegration()\` | - | - |
| LangGraph | \`Sentry.langGraphIntegration()\` | Auto-enabled | \`sentry-sdk[langgraph]\` |
| LangChain | \`Sentry.langChainIntegration()\` | Auto-enabled | \`sentry-sdk[langchain]\` |
| OpenAI Agents | - | Auto-enabled | - |
| Pydantic AI | - | Auto-enabled | \`sentry-sdk[pydantic_ai]\` |
| LiteLLM | - | \`LiteLLMIntegration()\` | \`sentry-sdk[litellm]\` |
| OpenAI | \`Sentry.openAIIntegration()\` | Auto-enabled | - |
| Anthropic | \`Sentry.anthropicAIIntegration()\` | Auto-enabled | - |
| Google GenAI | \`Sentry.googleGenAIIntegration()\` | \`GoogleGenAIIntegration()\` | \`sentry-sdk[google_genai]\` |

**If supported library found → Step 3A**
**If no supported library → Step 3B**

## 3A. Enable Automatic Integration

### JavaScript

Add to Sentry.init integrations array with \`recordInputs\` and \`recordOutputs\`:

\`\`\`javascript
import * as Sentry from "@sentry/node";

Sentry.init({
dsn: "...",
tracesSampleRate: 1.0,
sendDefaultPii: true,
integrations: [
Sentry.openAIIntegration({ recordInputs: true, recordOutputs: true }),
// OR other integration as needed
],
});
\`\`\`

**Vercel AI SDK Extra Step:** Pass \`experimental_telemetry\` with \`functionId\` to every call:
\`\`\`javascript
const result = await generateText({
model: openai("gpt-4o"),
prompt: "Tell me a joke",
experimental_telemetry: {
isEnabled: true,
functionId: "generate-joke", // Name your functions for better tracing
recordInputs: true,
recordOutputs: true,
},
});
\`\`\`

### Python

Install with extras if needed:
\`\`\`bash
pip install sentry-sdk[langchain] # or [langgraph], [litellm], [google_genai], [pydantic_ai]
\`\`\`

Configure (some integrations auto-enable, some need explicit import):

\`\`\`python
import sentry_sdk
# Only import if NOT auto-enabled (see table above)
from sentry_sdk.integrations.openai_agents import OpenAIAgentsIntegration

sentry_sdk.init(
dsn="...",
traces_sample_rate=1.0,
send_default_pii=True, # Required to capture inputs/outputs
integrations=[
OpenAIAgentsIntegration(), # Only if explicit integration needed
],
)
\`\`\`

**Done.** SDK auto-instruments AI calls.

## 3B. Manual Instrumentation

Create spans with these exact \`op\` values and attributes:

### AI Request (LLM call)
- **op:** \`"gen_ai.request"\`
- **name:** \`"chat <model>"\`
- **Required:** \`gen_ai.request.model\`
- **Recommended:** \`gen_ai.usage.input_tokens\`, \`gen_ai.usage.output_tokens\`

\`\`\`python
with sentry_sdk.start_span(op="gen_ai.request", name=f"chat {model}") as span:
span.set_data("gen_ai.request.model", model)
result = llm.generate(messages)
span.set_data("gen_ai.usage.input_tokens", result.input_tokens)
span.set_data("gen_ai.usage.output_tokens", result.output_tokens)
span.set_data("gen_ai.usage.input_tokens.cached", result.cached_tokens)

\`\`\`

### Invoke Agent
- **op:** \`"gen_ai.invoke_agent"\`
- **name:** \`"invoke_agent <AgentName>"\`
- **Required:** \`gen_ai.request.model\`, \`gen_ai.agent.name\`

\`\`\`python
with sentry_sdk.start_span(op="gen_ai.invoke_agent", name=f"invoke_agent {agent_name}") as span:
span.set_data("gen_ai.agent.name", agent_name)
span.set_data("gen_ai.request.model", model)
result = agent.run()
\`\`\`

### Execute Tool
- **op:** \`"gen_ai.execute_tool"\`
- **name:** \`"execute_tool <tool_name>"\`
- **Required:** \`gen_ai.tool.name\`

\`\`\`python
with sentry_sdk.start_span(op="gen_ai.execute_tool", name=f"execute_tool {tool_name}") as span:
span.set_data("gen_ai.tool.name", tool_name)
span.set_data("gen_ai.tool.input", json.dumps(inputs))
result = tool(**inputs)
span.set_data("gen_ai.tool.output", json.dumps(result))
\`\`\`

### Handoff (agent-to-agent)
- **op:** \`"gen_ai.handoff"\`
- **name:** \`"handoff from <A> to <B>"\`

\`\`\`python
with sentry_sdk.start_span(op="gen_ai.handoff", name=f"handoff from {a} to {b}"):
pass
\`\`\`

## Key Rules

1. **All complex data must be JSON-stringified** - span attributes only accept primitives
2. **\`gen_ai.request.model\` is required** on \`gen_ai.request\` and \`gen_ai.invoke_agent\` spans
3. **Nest spans correctly:** \`gen_ai.invoke_agent\` spans should contain \`gen_ai.request\` and \`gen_ai.execute_tool\` spans as children
4. **JS min version:** \`@sentry/[email protected]\` or later
5. **Enable PII:** \`sendDefaultPii: true\` (JS) / \`send_default_pii=True\` (Python) to capture inputs/outputs
`;
Loading
Loading