
Commit 81f75c9

feat(core): Instrument LangChain AI
1 parent da08d49 commit 81f75c9

File tree: 19 files changed, +1608 −9 lines

dev-packages/node-integration-tests/package.json

Lines changed: 2 additions & 0 deletions
@@ -26,6 +26,8 @@
  "@anthropic-ai/sdk": "0.63.0",
  "@aws-sdk/client-s3": "^3.552.0",
  "@google/genai": "^1.20.0",
+ "@langchain/anthropic": "^0.3.10",
+ "@langchain/core": "^0.3.28",
  "@hapi/hapi": "^21.3.10",
  "@hono/node-server": "^1.19.4",
  "@nestjs/common": "^11",
instrument-with-pii.mjs

Lines changed: 17 additions & 0 deletions
@@ -0,0 +1,17 @@
import * as Sentry from '@sentry/node';
import { loggingTransport } from '@sentry-internal/node-integration-tests';

Sentry.init({
  dsn: 'https://public@dsn.ingest.sentry.io/1337',
  release: '1.0',
  tracesSampleRate: 1.0,
  sendDefaultPii: true,
  transport: loggingTransport,
  beforeSendTransaction: event => {
    // Filter out mock express server transactions
    if (event.transaction.includes('/v1/messages')) {
      return null;
    }
    return event;
  },
});
instrument.mjs

Lines changed: 17 additions & 0 deletions
@@ -0,0 +1,17 @@
import * as Sentry from '@sentry/node';
import { loggingTransport } from '@sentry-internal/node-integration-tests';

Sentry.init({
  dsn: 'https://public@dsn.ingest.sentry.io/1337',
  release: '1.0',
  tracesSampleRate: 1.0,
  sendDefaultPii: false,
  transport: loggingTransport,
  beforeSendTransaction: event => {
    // Filter out mock express server transactions
    if (event.transaction.includes('/v1/messages')) {
      return null;
    }
    return event;
  },
});
scenario.mjs

Lines changed: 108 additions & 0 deletions
@@ -0,0 +1,108 @@
import { ChatAnthropic } from '@langchain/anthropic';
import * as Sentry from '@sentry/node';
import express from 'express';

function startMockAnthropicServer() {
  const app = express();
  app.use(express.json());

  app.post('/v1/messages', (req, res) => {
    const model = req.body.model;

    if (model === 'error-model') {
      res
        .status(400)
        .set('request-id', 'mock-request-123')
        .json({
          type: 'error',
          error: {
            type: 'invalid_request_error',
            message: 'Model not found',
          },
        });
      return;
    }

    // Simulate basic response
    res.json({
      id: 'msg_test123',
      type: 'message',
      role: 'assistant',
      content: [
        {
          type: 'text',
          text: 'Mock response from Anthropic!',
        },
      ],
      model: model,
      stop_reason: 'end_turn',
      stop_sequence: null,
      usage: {
        input_tokens: 10,
        output_tokens: 15,
      },
    });
  });

  return new Promise(resolve => {
    const server = app.listen(0, () => {
      resolve(server);
    });
  });
}

async function run() {
  const server = await startMockAnthropicServer();
  const baseUrl = `http://localhost:${server.address().port}`;

  await Sentry.startSpan({ op: 'function', name: 'main' }, async () => {
    // Test 1: Basic chat model invocation
    const model1 = new ChatAnthropic({
      model: 'claude-3-5-sonnet-20241022',
      temperature: 0.7,
      maxTokens: 100,
      apiKey: 'mock-api-key',
      clientOptions: {
        baseURL: baseUrl,
      },
    });

    await model1.invoke('Tell me a joke');

    // Test 2: Chat with different model
    const model2 = new ChatAnthropic({
      model: 'claude-3-opus-20240229',
      temperature: 0.9,
      topP: 0.95,
      maxTokens: 200,
      apiKey: 'mock-api-key',
      clientOptions: {
        baseURL: baseUrl,
      },
    });

    await model2.invoke([
      { role: 'system', content: 'You are a helpful assistant' },
      { role: 'user', content: 'What is the capital of France?' },
    ]);

    // Test 3: Error handling
    const errorModel = new ChatAnthropic({
      model: 'error-model',
      apiKey: 'mock-api-key',
      clientOptions: {
        baseURL: baseUrl,
      },
    });

    try {
      await errorModel.invoke('This will fail');
    } catch (error) {
      // Expected error
    }
  });

  server.close();
}

run();
Lines changed: 163 additions & 0 deletions
@@ -0,0 +1,163 @@
import { afterAll, describe, expect } from 'vitest';
import { cleanupChildProcesses, createEsmAndCjsTests } from '../../../utils/runner';

describe('LangChain integration', () => {
  afterAll(() => {
    cleanupChildProcesses();
  });

  const EXPECTED_TRANSACTION_DEFAULT_PII_FALSE = {
    transaction: 'main',
    spans: expect.arrayContaining([
      // First span - chat model with claude-3-5-sonnet
      expect.objectContaining({
        data: expect.objectContaining({
          'gen_ai.operation.name': 'chat',
          'sentry.op': 'gen_ai.chat',
          'sentry.origin': 'auto.ai.langchain',
          'gen_ai.system': '"anthropic"',
          'gen_ai.request.model': '"claude-3-5-sonnet-20241022"',
          'gen_ai.request.temperature': 0.7,
          'gen_ai.request.max_tokens': 100,
          'gen_ai.usage.input_tokens': 10,
          'gen_ai.usage.output_tokens': 15,
          'gen_ai.usage.total_tokens': 25,
          'gen_ai.response.id': expect.any(String),
          'gen_ai.response.model': expect.any(String),
          'gen_ai.response.stop_reason': expect.any(String),
        }),
        description: 'chat "claude-3-5-sonnet-20241022"',
        op: 'gen_ai.chat',
        origin: 'auto.ai.langchain',
        status: 'ok',
      }),
      // Second span - chat model with claude-3-opus
      expect.objectContaining({
        data: expect.objectContaining({
          'gen_ai.operation.name': 'chat',
          'sentry.op': 'gen_ai.chat',
          'sentry.origin': 'auto.ai.langchain',
          'gen_ai.system': '"anthropic"',
          'gen_ai.request.model': '"claude-3-opus-20240229"',
          'gen_ai.request.temperature': 0.9,
          'gen_ai.request.top_p': 0.95,
          'gen_ai.request.max_tokens': 200,
          'gen_ai.usage.input_tokens': 10,
          'gen_ai.usage.output_tokens': 15,
          'gen_ai.usage.total_tokens': 25,
          'gen_ai.response.id': expect.any(String),
          'gen_ai.response.model': expect.any(String),
          'gen_ai.response.stop_reason': expect.any(String),
        }),
        description: 'chat "claude-3-opus-20240229"',
        op: 'gen_ai.chat',
        origin: 'auto.ai.langchain',
        status: 'ok',
      }),
      // Third span - error handling
      expect.objectContaining({
        data: expect.objectContaining({
          'gen_ai.operation.name': 'chat',
          'sentry.op': 'gen_ai.chat',
          'sentry.origin': 'auto.ai.langchain',
          'gen_ai.system': '"anthropic"',
          'gen_ai.request.model': '"error-model"',
        }),
        description: 'chat "error-model"',
        op: 'gen_ai.chat',
        origin: 'auto.ai.langchain',
        status: 'unknown_error',
      }),
    ]),
  };

  const EXPECTED_TRANSACTION_DEFAULT_PII_TRUE = {
    transaction: 'main',
    spans: expect.arrayContaining([
      // First span - chat model with PII
      expect.objectContaining({
        data: expect.objectContaining({
          'gen_ai.operation.name': 'chat',
          'sentry.op': 'gen_ai.chat',
          'sentry.origin': 'auto.ai.langchain',
          'gen_ai.system': '"anthropic"',
          'gen_ai.request.model': '"claude-3-5-sonnet-20241022"',
          'gen_ai.request.temperature': 0.7,
          'gen_ai.request.max_tokens': 100,
          'gen_ai.request.messages': expect.any(String), // Should include messages when recordInputs: true
          'gen_ai.response.text': expect.any(String), // Should include response when recordOutputs: true
          'gen_ai.response.id': expect.any(String),
          'gen_ai.response.model': expect.any(String),
          'gen_ai.response.stop_reason': expect.any(String),
          'gen_ai.usage.input_tokens': 10,
          'gen_ai.usage.output_tokens': 15,
          'gen_ai.usage.total_tokens': 25,
        }),
        description: 'chat "claude-3-5-sonnet-20241022"',
        op: 'gen_ai.chat',
        origin: 'auto.ai.langchain',
        status: 'ok',
      }),
      // Second span - chat model with PII
      expect.objectContaining({
        data: expect.objectContaining({
          'gen_ai.operation.name': 'chat',
          'sentry.op': 'gen_ai.chat',
          'sentry.origin': 'auto.ai.langchain',
          'gen_ai.system': '"anthropic"',
          'gen_ai.request.model': '"claude-3-opus-20240229"',
          'gen_ai.request.temperature': 0.9,
          'gen_ai.request.top_p': 0.95,
          'gen_ai.request.max_tokens': 200,
          'gen_ai.request.messages': expect.any(String), // Should include messages when recordInputs: true
          'gen_ai.response.text': expect.any(String), // Should include response when recordOutputs: true
          'gen_ai.response.id': expect.any(String),
          'gen_ai.response.model': expect.any(String),
          'gen_ai.response.stop_reason': expect.any(String),
          'gen_ai.usage.input_tokens': 10,
          'gen_ai.usage.output_tokens': 15,
          'gen_ai.usage.total_tokens': 25,
        }),
        description: 'chat "claude-3-opus-20240229"',
        op: 'gen_ai.chat',
        origin: 'auto.ai.langchain',
        status: 'ok',
      }),
      // Third span - error handling with PII
      expect.objectContaining({
        data: expect.objectContaining({
          'gen_ai.operation.name': 'chat',
          'sentry.op': 'gen_ai.chat',
          'sentry.origin': 'auto.ai.langchain',
          'gen_ai.system': '"anthropic"',
          'gen_ai.request.model': '"error-model"',
          'gen_ai.request.messages': expect.any(String), // Should include messages when recordInputs: true
        }),
        description: 'chat "error-model"',
        op: 'gen_ai.chat',
        origin: 'auto.ai.langchain',
        status: 'unknown_error',
      }),
    ]),
  };

  createEsmAndCjsTests(__dirname, 'scenario.mjs', 'instrument.mjs', (createRunner, test) => {
    test('creates langchain related spans with sendDefaultPii: false', async () => {
      await createRunner()
        .ignore('event')
        .expect({ transaction: EXPECTED_TRANSACTION_DEFAULT_PII_FALSE })
        .start()
        .completed();
    });
  });

  createEsmAndCjsTests(__dirname, 'scenario.mjs', 'instrument-with-pii.mjs', (createRunner, test) => {
    test('creates langchain related spans with sendDefaultPii: true', async () => {
      await createRunner()
        .ignore('event')
        .expect({ transaction: EXPECTED_TRANSACTION_DEFAULT_PII_TRUE })
        .start()
        .completed();
    });
  });
});

packages/astro/src/index.server.ts

Lines changed: 1 addition & 0 deletions
@@ -93,6 +93,7 @@ export {
  onUncaughtExceptionIntegration,
  onUnhandledRejectionIntegration,
  openAIIntegration,
+ langChainIntegration,
  parameterize,
  pinoIntegration,
  postgresIntegration,

packages/aws-serverless/src/index.ts

Lines changed: 1 addition & 0 deletions
@@ -56,6 +56,7 @@ export {
  onUncaughtExceptionIntegration,
  onUnhandledRejectionIntegration,
  openAIIntegration,
+ langChainIntegration,
  modulesIntegration,
  contextLinesIntegration,
  nodeContextIntegration,

packages/bun/src/index.ts

Lines changed: 1 addition & 0 deletions
@@ -76,6 +76,7 @@ export {
  onUncaughtExceptionIntegration,
  onUnhandledRejectionIntegration,
  openAIIntegration,
+ langChainIntegration,
  modulesIntegration,
  contextLinesIntegration,
  nodeContextIntegration,
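
Taken together, the astro, aws-serverless, and bun entry points now re-export langChainIntegration alongside the other AI integrations. A minimal, hypothetical usage sketch follows; it assumes @sentry/node re-exports the integration in the same way and that the integration accepts the recordInputs/recordOutputs options referenced in the test comments above (the exact option bag is not shown in this commit excerpt).

// Hypothetical usage sketch — not part of this commit's diff.
// Assumes langChainIntegration follows the shape of the other AI integrations
// (e.g. openAIIntegration) and accepts recordInputs/recordOutputs options.
import * as Sentry from '@sentry/node';

Sentry.init({
  dsn: '__YOUR_DSN__', // placeholder
  tracesSampleRate: 1.0,
  // Per the tests above, sendDefaultPii: true is what turns on message/response capture.
  sendDefaultPii: true,
  integrations: [
    // Explicit options are an assumption; the integration should also work with defaults.
    Sentry.langChainIntegration({ recordInputs: true, recordOutputs: true }),
  ],
});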

packages/core/src/index.ts

Lines changed: 11 additions & 0 deletions
@@ -143,6 +143,8 @@ export { ANTHROPIC_AI_INTEGRATION_NAME } from './utils/anthropic-ai/constants';
  export { instrumentGoogleGenAIClient } from './utils/google-genai';
  export { GOOGLE_GENAI_INTEGRATION_NAME } from './utils/google-genai/constants';
  export type { GoogleGenAIResponse } from './utils/google-genai/types';
+ export { createLangChainCallbackHandler } from './utils/langchain';
+ export { LANGCHAIN_INTEGRATION_NAME } from './utils/langchain/constants';
  export type { OpenAiClient, OpenAiOptions, InstrumentedMethod } from './utils/openai/types';
  export type {
    AnthropicAiClient,

@@ -156,6 +158,15 @@ export type {
    GoogleGenAIOptions,
    GoogleGenAIIstrumentedMethod,
  } from './utils/google-genai/types';
+ export type {
+   LangChainOptions,
+   LangChainIntegration,
+   LangChainSerializedLLM,
+   LangChainMessage,
+   LangChainLLMResult,
+   LangChainTool,
+   LangChainDocument,
+ } from './utils/langchain/types';
  export type { FeatureFlag } from './utils/featureFlags';

  export {
packages/core/src/utils/langchain/constants.ts

Lines changed: 24 additions & 0 deletions
@@ -0,0 +1,24 @@
export const LANGCHAIN_INTEGRATION_NAME = 'LangChain';
export const LANGCHAIN_ORIGIN = 'auto.ai.langchain';

/**
 * LangChain event types we instrument
 * Based on LangChain.js callback system
 * @see https://js.langchain.com/docs/concepts/callbacks/
 */
export const LANGCHAIN_EVENT_TYPES = {
  CHAT_MODEL_START: 'handleChatModelStart',
  LLM_START: 'handleLLMStart',
  LLM_NEW_TOKEN: 'handleLLMNewToken',
  LLM_END: 'handleLLMEnd',
  LLM_ERROR: 'handleLLMError',
  CHAIN_START: 'handleChainStart',
  CHAIN_END: 'handleChainEnd',
  CHAIN_ERROR: 'handleChainError',
  TOOL_START: 'handleToolStart',
  TOOL_END: 'handleToolEnd',
  TOOL_ERROR: 'handleToolError',
  RETRIEVER_START: 'handleRetrieverStart',
  RETRIEVER_END: 'handleRetrieverEnd',
  RETRIEVER_ERROR: 'handleRetrieverError',
} as const;
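
These constants map Sentry's instrumentation onto the LangChain.js callback-handler methods. The core package above also exports createLangChainCallbackHandler; a hedged sketch of how such a handler might be wired into a model's callbacks is shown below. Only the export name and the event-type names come from this commit — the factory's options (recordInputs/recordOutputs, taken from the test comments) and its exact signature are assumptions.

// Hypothetical wiring sketch — createLangChainCallbackHandler's options are assumed.
import { createLangChainCallbackHandler } from '@sentry/core';
import { ChatAnthropic } from '@langchain/anthropic';

async function main(): Promise<void> {
  // Assumption: the factory returns a LangChain callback handler implementing
  // the handleChatModelStart / handleLLMEnd / ... methods listed in LANGCHAIN_EVENT_TYPES.
  const sentryHandler = createLangChainCallbackHandler({
    recordInputs: true, // option names inferred from the test comments above (assumption)
    recordOutputs: true,
  });

  const model = new ChatAnthropic({
    model: 'claude-3-5-sonnet-20241022',
    apiKey: process.env.ANTHROPIC_API_KEY,
    // LangChain.js chat models accept callback handlers via the `callbacks` field.
    callbacks: [sentryHandler],
  });

  await model.invoke('Tell me a joke');
}

void main();

In practice the langChainIntegration shown in the export diffs above is expected to attach such a handler automatically; manual wiring like this would only be needed if the automatic instrumentation is bypassed.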
