Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
import { ChatAnthropic } from '@langchain/anthropic';
import * as Sentry from '@sentry/node';
import express from 'express';

/**
 * Starts a local Express server that impersonates Anthropic's
 * `/v1/messages` endpoint, always answering with the same canned
 * assistant reply (echoing back the requested model name).
 *
 * Binds to port 0 so the OS picks a free ephemeral port, which keeps
 * parallel test runs from colliding.
 *
 * @returns {Promise<import('http').Server>} resolves once the server is listening
 */
function startMockAnthropicServer() {
  const app = express();
  app.use(express.json());

  app.post('/v1/messages', (req, res) => {
    const { model } = req.body;

    res.json({
      id: 'msg_truncation_test',
      type: 'message',
      role: 'assistant',
      content: [{ type: 'text', text: 'Response to truncated messages' }],
      model,
      stop_reason: 'end_turn',
      stop_sequence: null,
      usage: { input_tokens: 10, output_tokens: 15 },
    });
  });

  return new Promise(resolve => {
    const server = app.listen(0, () => resolve(server));
  });
}

/**
 * Drives two oversized chat invocations through the mock Anthropic server so
 * the Sentry LangChain instrumentation records `gen_ai.request.messages`
 * and applies its byte-limit truncation.
 *
 * The mock server is always closed — even when an invocation or the Sentry
 * flush rejects — so the test process can exit instead of hanging on the
 * open listener.
 */
async function run() {
  const server = await startMockAnthropicServer();
  const baseUrl = `http://localhost:${server.address().port}`;

  try {
    await Sentry.startSpan({ op: 'function', name: 'main' }, async () => {
      const model = new ChatAnthropic({
        model: 'claude-3-5-sonnet-20241022',
        apiKey: 'mock-api-key',
        clientOptions: {
          baseURL: baseUrl,
        },
      });

      const largeContent1 = 'A'.repeat(15000); // ~15KB
      const largeContent2 = 'B'.repeat(15000); // ~15KB
      const largeContent3 = 'C'.repeat(25000); // ~25KB (will be truncated)

      // Create one very large string that gets truncated to only include Cs
      await model.invoke(largeContent3 + largeContent2);

      // Create an array of messages that gets truncated to only include the
      // last message (result should again contain only Cs)
      await model.invoke([
        { role: 'system', content: largeContent1 },
        { role: 'user', content: largeContent2 },
        { role: 'user', content: largeContent3 },
      ]);
    });

    await Sentry.flush(2000);
  } finally {
    // Release the ephemeral port regardless of success or failure; without
    // this, a rejected invoke would leave the listener open and the process
    // alive.
    server.close();
  }
}

run();
Original file line number Diff line number Diff line change
Expand Up @@ -194,4 +194,55 @@ describe('LangChain integration', () => {
await createRunner().ignore('event').expect({ transaction: EXPECTED_TRANSACTION_TOOL_CALLS }).start().completed();
});
});

// Matcher for one gen_ai.chat span whose recorded request messages were
// truncated down to a single user message consisting solely of 'C'
// characters (the tail of the oversized input).
const truncatedChatSpan = expect.objectContaining({
  data: expect.objectContaining({
    'gen_ai.operation.name': 'chat',
    'sentry.op': 'gen_ai.chat',
    'sentry.origin': 'auto.ai.langchain',
    'gen_ai.system': 'anthropic',
    'gen_ai.request.model': 'claude-3-5-sonnet-20241022',
    // Truncation must leave a JSON array holding exactly one
    // `{"role":"user","content":"C…C"}` entry.
    'gen_ai.request.messages': expect.stringMatching(/^\[\{"role":"user","content":"C+"\}\]$/),
  }),
  description: 'chat claude-3-5-sonnet-20241022',
  op: 'gen_ai.chat',
  origin: 'auto.ai.langchain',
  status: 'ok',
});

// One span per `model.invoke` call in the scenario: the concatenated-string
// invocation and the message-array invocation both truncate to only Cs.
const EXPECTED_TRANSACTION_MESSAGE_TRUNCATION = {
  transaction: 'main',
  spans: expect.arrayContaining([truncatedChatSpan, truncatedChatSpan]),
};

createEsmAndCjsTests(
  __dirname,
  'scenario-message-truncation.mjs',
  'instrument-with-pii.mjs',
  (createRunner, test) => {
    test('truncates messages when they exceed byte limit', async () => {
      // Ignore non-transaction events; assert only on the truncated spans.
      const runner = createRunner()
        .ignore('event')
        .expect({ transaction: EXPECTED_TRANSACTION_MESSAGE_TRUNCATION });
      await runner.start().completed();
    });
  },
);
});
4 changes: 3 additions & 1 deletion packages/core/src/utils/langchain/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ import {
GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE,
GEN_AI_USAGE_TOTAL_TOKENS_ATTRIBUTE,
} from '../ai/gen-ai-attributes';
import { truncateGenAiMessages } from '../ai/messageTruncation';
import { LANGCHAIN_ORIGIN, ROLE_MAP } from './constants';
import type { LangChainLLMResult, LangChainMessage, LangChainSerialized } from './types';

Expand Down Expand Up @@ -281,7 +282,8 @@ export function extractChatModelRequestAttributes(

if (recordInputs && Array.isArray(langChainMessages) && langChainMessages.length > 0) {
const normalized = normalizeLangChainMessages(langChainMessages.flat());
setIfDefined(attrs, GEN_AI_REQUEST_MESSAGES_ATTRIBUTE, asString(normalized));
const truncated = truncateGenAiMessages(normalized);
setIfDefined(attrs, GEN_AI_REQUEST_MESSAGES_ATTRIBUTE, asString(truncated));
}

return attrs;
Expand Down