Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
import { ChatAnthropic } from '@langchain/anthropic';
import { HumanMessage, SystemMessage } from '@langchain/core/messages';
import { createReactAgent } from '@langchain/langgraph/prebuilt';
import * as Sentry from '@sentry/node';
import express from 'express';

/**
 * Spins up an in-process Express server that mimics the Anthropic
 * `/v1/messages` endpoint with a single canned text completion.
 * Resolves with the `http.Server` once it is listening on a random port.
 */
function startMockAnthropicServer() {
  const app = express();
  app.use(express.json());

  // Always answer with the same fixed completion; echo the requested model back.
  app.post('/v1/messages', ({ body }, res) => {
    const reply = {
      id: 'msg_react_agent_123',
      type: 'message',
      role: 'assistant',
      content: [{ type: 'text', text: 'Paris is the capital of France.' }],
      model: body.model,
      stop_reason: 'end_turn',
      stop_sequence: null,
      usage: { input_tokens: 20, output_tokens: 10 },
    };
    res.json(reply);
  });

  return new Promise(resolve => {
    const server = app.listen(0, () => resolve(server));
  });
}

/**
 * Drives the scenario: start the mock Anthropic API, invoke a tool-less
 * createReactAgent inside a `main` span, then flush Sentry events and
 * shut the mock server down.
 */
async function run() {
  const server = await startMockAnthropicServer();
  const { port } = server.address();
  const mockBaseUrl = `http://localhost:${port}`;

  await Sentry.startSpan({ op: 'function', name: 'main' }, async () => {
    // Point the Anthropic client at the local mock instead of the real API.
    const llm = new ChatAnthropic({
      model: 'claude-3-5-sonnet-20241022',
      apiKey: 'mock-api-key',
      clientOptions: {
        baseURL: mockBaseUrl,
      },
    });

    const agent = createReactAgent({ llm, tools: [], name: 'helpful_assistant' });

    const messages = [
      new SystemMessage('You are a helpful assistant.'),
      new HumanMessage('What is the capital of France?'),
    ];
    await agent.invoke({ messages });
  });

  // Give Sentry up to 2s to deliver buffered events before tearing down.
  await Sentry.flush(2000);
  server.close();
}

run();
Original file line number Diff line number Diff line change
@@ -0,0 +1,122 @@
import { tool } from '@langchain/core/tools';
import { ChatAnthropic } from '@langchain/anthropic';
import { createReactAgent } from '@langchain/langgraph/prebuilt';
import { HumanMessage } from '@langchain/core/messages';
import * as Sentry from '@sentry/node';
import express from 'express';
import { z } from 'zod';

// Counts /v1/messages requests so the mock can script the multi-turn
// ReAct conversation below: add -> multiply -> final answer.
let callCount = 0;

/**
 * Starts an in-process Express server faking the Anthropic `/v1/messages`
 * endpoint. Responses are scripted by call order:
 *   call 1  -> tool_use "add" with { a: 3, b: 5 }
 *   call 2  -> tool_use "multiply" with { a: 8, b: 4 }
 *   call 3+ -> plain text final answer
 * Resolves with the listening `http.Server` (random port).
 */
function startMockAnthropicServer() {
  const app = express();
  app.use(express.json());

  app.post('/v1/messages', (req, res) => {
    callCount += 1;
    const { model } = req.body;

    switch (callCount) {
      case 1:
        // First call: model decides to call the "add" tool
        res.json({
          id: 'msg_1',
          type: 'message',
          role: 'assistant',
          content: [
            {
              type: 'tool_use',
              id: 'toolu_add_1',
              name: 'add',
              input: { a: 3, b: 5 },
            },
          ],
          model,
          stop_reason: 'tool_use',
          usage: { input_tokens: 20, output_tokens: 10 },
        });
        break;

      case 2:
        // Second call: model sees add result=8, calls "multiply"
        res.json({
          id: 'msg_2',
          type: 'message',
          role: 'assistant',
          content: [
            {
              type: 'tool_use',
              id: 'toolu_mul_1',
              name: 'multiply',
              input: { a: 8, b: 4 },
            },
          ],
          model,
          stop_reason: 'tool_use',
          usage: { input_tokens: 30, output_tokens: 10 },
        });
        break;

      default:
        // Third call: model returns final answer
        res.json({
          id: 'msg_3',
          type: 'message',
          role: 'assistant',
          content: [{ type: 'text', text: 'The result is 32.' }],
          model,
          stop_reason: 'end_turn',
          usage: { input_tokens: 40, output_tokens: 10 },
        });
    }
  });

  return new Promise(resolve => {
    const server = app.listen(0, () => resolve(server));
  });
}

/**
 * Drives the tool-calling scenario: spin up the mock Anthropic API, run a
 * createReactAgent equipped with "add" and "multiply" tools inside a
 * `main` span, then flush Sentry and stop the server.
 */
async function run() {
  const server = await startMockAnthropicServer();
  const { port } = server.address();
  const baseUrl = `http://localhost:${port}`;

  await Sentry.startSpan({ op: 'function', name: 'main' }, async () => {
    const llm = new ChatAnthropic({
      model: 'claude-3-5-sonnet-20241022',
      apiKey: 'mock-api-key',
      clientOptions: { baseURL: baseUrl },
    });

    // Both tools share the same { a, b } numeric schema; build them from a
    // small factory so name, description, and operation stay together.
    const makeBinaryTool = (name, description, compute) =>
      tool(async ({ a, b }) => String(compute(a, b)), {
        name,
        description,
        schema: z.object({ a: z.number(), b: z.number() }),
      });

    const addTool = makeBinaryTool('add', 'Add two numbers', (a, b) => a + b);
    const multiplyTool = makeBinaryTool('multiply', 'Multiply two numbers', (a, b) => a * b);

    const agent = createReactAgent({
      llm,
      tools: [addTool, multiplyTool],
      name: 'math_assistant',
    });

    await agent.invoke({
      messages: [new HumanMessage('Calculate (3 + 5) * 4')],
    });
  });

  // Allow up to 2s for buffered Sentry events to be delivered.
  await Sentry.flush(2000);
  server.close();
}

run();
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
import * as Sentry from '@sentry/node';
import { loggingTransport } from '@sentry-internal/node-integration-tests';

// Drop transactions generated by the mock Express server itself so only the
// scenario's own `main` transaction reaches the test assertions.
function dropMockServerTransactions(event) {
  if (event.transaction?.includes('/v1/messages')) {
    return null;
  }
  return event;
}

Sentry.init({
  dsn: 'https://public@dsn.ingest.sentry.io/1337',
  release: '1.0',
  tracesSampleRate: 1.0,
  sendDefaultPii: true,
  transport: loggingTransport,
  beforeSendTransaction: dropMockServerTransactions,
});
Original file line number Diff line number Diff line change
Expand Up @@ -445,4 +445,88 @@ describe('LangGraph integration', () => {
});
},
);

// createReactAgent tests: expected transaction for the tool-less scenario
// (agent-scenario.mjs). Asserts one invoke_agent span tagged with the agent
// name plus a chat span carrying the same agent name; no create_agent span
// is expected for createReactAgent.
const EXPECTED_TRANSACTION_REACT_AGENT = {
transaction: 'main',
spans: expect.arrayContaining([
// invoke_agent span (no create_agent span expected)
expect.objectContaining({
data: expect.objectContaining({
[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'invoke_agent',
[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.invoke_agent',
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.langgraph',
[GEN_AI_AGENT_NAME_ATTRIBUTE]: 'helpful_assistant',
[GEN_AI_PIPELINE_NAME_ATTRIBUTE]: 'helpful_assistant',
}),
description: 'invoke_agent helpful_assistant',
op: 'gen_ai.invoke_agent',
origin: 'auto.ai.langgraph',
status: 'ok',
}),
// chat span (from Anthropic integration) should be a child with inherited agent name
expect.objectContaining({
data: expect.objectContaining({
[GEN_AI_AGENT_NAME_ATTRIBUTE]: 'helpful_assistant',
}),
op: 'gen_ai.chat',
}),
]),
};

createEsmAndCjsTests(__dirname, 'agent-scenario.mjs', 'instrument-agent.mjs', (createRunner, test) => {
  test('should instrument createReactAgent with agent and chat spans', { timeout: 30000 }, async () => {
    // Ignore plain events; only the transaction envelope is asserted on.
    const runner = createRunner()
      .ignore('event')
      .expect({ transaction: EXPECTED_TRANSACTION_REACT_AGENT });
    await runner.start().completed();
  });
});

// createReactAgent with tools - verifies tool execution spans.
// Expected transaction for agent-tools-scenario.mjs: one invoke_agent span
// plus one execute_tool span per scripted tool call ("add", then "multiply").
const EXPECTED_TRANSACTION_REACT_AGENT_TOOLS = {
transaction: 'main',
spans: expect.arrayContaining([
// invoke_agent span
expect.objectContaining({
data: expect.objectContaining({
[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'invoke_agent',
[GEN_AI_AGENT_NAME_ATTRIBUTE]: 'math_assistant',
}),
op: 'gen_ai.invoke_agent',
status: 'ok',
}),
// execute_tool span for "add"
expect.objectContaining({
data: expect.objectContaining({
[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'execute_tool',
'gen_ai.tool.name': 'add',
}),
description: 'execute_tool add',
op: 'gen_ai.execute_tool',
status: 'ok',
}),
// execute_tool span for "multiply"
expect.objectContaining({
data: expect.objectContaining({
[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'execute_tool',
'gen_ai.tool.name': 'multiply',
}),
description: 'execute_tool multiply',
op: 'gen_ai.execute_tool',
status: 'ok',
}),
]),
};

createEsmAndCjsTests(__dirname, 'agent-tools-scenario.mjs', 'instrument-agent.mjs', (createRunner, test) => {
  test('should create tool execution spans for createReactAgent with tools', { timeout: 30000 }, async () => {
    // Ignore plain events; only the transaction envelope is asserted on.
    const runner = createRunner()
      .ignore('event')
      .expect({ transaction: EXPECTED_TRANSACTION_REACT_AGENT_TOOLS });
    await runner.start().completed();
  });
});
});
1 change: 1 addition & 0 deletions packages/browser/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,7 @@ export {
instrumentOpenAiClient,
instrumentGoogleGenAIClient,
instrumentLangGraph,
instrumentCreateReactAgent,
createLangChainCallbackHandler,
instrumentLangChainEmbeddings,
logger,
Expand Down
1 change: 1 addition & 0 deletions packages/cloudflare/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -110,6 +110,7 @@ export {
withStreamedSpan,
spanStreamingIntegration,
instrumentLangGraph,
instrumentCreateReactAgent,
} from '@sentry/core';

export { withSentry } from './withSentry';
Expand Down
2 changes: 1 addition & 1 deletion packages/core/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -178,7 +178,7 @@ export type { GoogleGenAIResponse } from './tracing/google-genai/types';
export { createLangChainCallbackHandler, instrumentLangChainEmbeddings } from './tracing/langchain';
export { LANGCHAIN_INTEGRATION_NAME } from './tracing/langchain/constants';
export type { LangChainOptions, LangChainIntegration } from './tracing/langchain/types';
export { instrumentStateGraphCompile, instrumentLangGraph } from './tracing/langgraph';
export { instrumentStateGraphCompile, instrumentCreateReactAgent, instrumentLangGraph } from './tracing/langgraph';
export { LANGGRAPH_INTEGRATION_NAME } from './tracing/langgraph/constants';
export type { LangGraphOptions, LangGraphIntegration, CompiledGraph } from './tracing/langgraph/types';
export type { OpenAiClient, OpenAiOptions, InstrumentedMethod } from './tracing/openai/types';
Expand Down
12 changes: 12 additions & 0 deletions packages/core/src/tracing/ai/gen-ai-attributes.ts
Original file line number Diff line number Diff line change
Expand Up @@ -262,3 +262,15 @@ export const GEN_AI_TOOL_OUTPUT_ATTRIBUTE = 'gen_ai.tool.output';
* @see https://opentelemetry.io/docs/specs/semconv/registry/attributes/gen-ai/#gen-ai-tool-description
*/
export const GEN_AI_TOOL_DESCRIPTION_ATTRIBUTE = 'gen_ai.tool.description';

/**
 * The arguments passed to an executed tool, serialized as a JSON string.
 *
 * Preferred over `gen_ai.tool.input` (GEN_AI_TOOL_INPUT_ATTRIBUTE) for new
 * instrumentation.
 * NOTE(review): no OTel semconv `@see` link here, unlike the sibling
 * attributes above — confirm whether this is a Sentry-specific convention.
 */
export const GEN_AI_TOOL_CALL_ARGUMENTS_ATTRIBUTE = 'gen_ai.tool.call.arguments';

/**
 * The result returned by an executed tool.
 *
 * Preferred over `gen_ai.tool.output` (GEN_AI_TOOL_OUTPUT_ATTRIBUTE) for new
 * instrumentation.
 * NOTE(review): no OTel semconv `@see` link here, unlike the sibling
 * attributes above — confirm whether this is a Sentry-specific convention.
 */
export const GEN_AI_TOOL_CALL_RESULT_ATTRIBUTE = 'gen_ai.tool.call.result';
Loading
Loading