Skip to content

Commit ddcee8e

Browse files
committed
quick refactors
1 parent b4eedd9 commit ddcee8e

File tree

11 files changed

+336
-336
lines changed

dev-packages/node-integration-tests/suites/tracing/langchain/instrument-with-pii.mjs

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,8 @@ Sentry.init({
77
tracesSampleRate: 1.0,
88
sendDefaultPii: true,
99
transport: loggingTransport,
10+
// Filter out Anthropic integration to avoid duplicate spans with LangChain
11+
integrations: integrations => integrations.filter(integration => integration.name !== 'Anthropic_AI'),
1012
beforeSendTransaction: event => {
1113
// Filter out mock express server transactions
1214
if (event.transaction.includes('/v1/messages')) {

dev-packages/node-integration-tests/suites/tracing/langchain/instrument.mjs

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,8 @@ Sentry.init({
77
tracesSampleRate: 1.0,
88
sendDefaultPii: false,
99
transport: loggingTransport,
10+
// Filter out Anthropic integration to avoid duplicate spans with LangChain
11+
integrations: integrations => integrations.filter(integration => integration.name !== 'Anthropic_AI'),
1012
beforeSendTransaction: event => {
1113
// Filter out mock express server transactions
1214
if (event.transaction.includes('/v1/messages')) {

dev-packages/node-integration-tests/suites/tracing/langchain/test.ts

Lines changed: 18 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -15,8 +15,8 @@ describe('LangChain integration', () => {
1515
'gen_ai.operation.name': 'chat',
1616
'sentry.op': 'gen_ai.chat',
1717
'sentry.origin': 'auto.ai.langchain',
18-
'gen_ai.system': '"anthropic"',
19-
'gen_ai.request.model': '"claude-3-5-sonnet-20241022"',
18+
'gen_ai.system': 'anthropic',
19+
'gen_ai.request.model': 'claude-3-5-sonnet-20241022',
2020
'gen_ai.request.temperature': 0.7,
2121
'gen_ai.request.max_tokens': 100,
2222
'gen_ai.usage.input_tokens': 10,
@@ -26,7 +26,7 @@ describe('LangChain integration', () => {
2626
'gen_ai.response.model': expect.any(String),
2727
'gen_ai.response.stop_reason': expect.any(String),
2828
}),
29-
description: 'chat "claude-3-5-sonnet-20241022"',
29+
description: 'chat claude-3-5-sonnet-20241022',
3030
op: 'gen_ai.chat',
3131
origin: 'auto.ai.langchain',
3232
status: 'ok',
@@ -37,8 +37,8 @@ describe('LangChain integration', () => {
3737
'gen_ai.operation.name': 'chat',
3838
'sentry.op': 'gen_ai.chat',
3939
'sentry.origin': 'auto.ai.langchain',
40-
'gen_ai.system': '"anthropic"',
41-
'gen_ai.request.model': '"claude-3-opus-20240229"',
40+
'gen_ai.system': 'anthropic',
41+
'gen_ai.request.model': 'claude-3-opus-20240229',
4242
'gen_ai.request.temperature': 0.9,
4343
'gen_ai.request.top_p': 0.95,
4444
'gen_ai.request.max_tokens': 200,
@@ -49,7 +49,7 @@ describe('LangChain integration', () => {
4949
'gen_ai.response.model': expect.any(String),
5050
'gen_ai.response.stop_reason': expect.any(String),
5151
}),
52-
description: 'chat "claude-3-opus-20240229"',
52+
description: 'chat claude-3-opus-20240229',
5353
op: 'gen_ai.chat',
5454
origin: 'auto.ai.langchain',
5555
status: 'ok',
@@ -60,10 +60,10 @@ describe('LangChain integration', () => {
6060
'gen_ai.operation.name': 'chat',
6161
'sentry.op': 'gen_ai.chat',
6262
'sentry.origin': 'auto.ai.langchain',
63-
'gen_ai.system': '"anthropic"',
64-
'gen_ai.request.model': '"error-model"',
63+
'gen_ai.system': 'anthropic',
64+
'gen_ai.request.model': 'error-model',
6565
}),
66-
description: 'chat "error-model"',
66+
description: 'chat error-model',
6767
op: 'gen_ai.chat',
6868
origin: 'auto.ai.langchain',
6969
status: 'unknown_error',
@@ -80,8 +80,8 @@ describe('LangChain integration', () => {
8080
'gen_ai.operation.name': 'chat',
8181
'sentry.op': 'gen_ai.chat',
8282
'sentry.origin': 'auto.ai.langchain',
83-
'gen_ai.system': '"anthropic"',
84-
'gen_ai.request.model': '"claude-3-5-sonnet-20241022"',
83+
'gen_ai.system': 'anthropic',
84+
'gen_ai.request.model': 'claude-3-5-sonnet-20241022',
8585
'gen_ai.request.temperature': 0.7,
8686
'gen_ai.request.max_tokens': 100,
8787
'gen_ai.request.messages': expect.any(String), // Should include messages when recordInputs: true
@@ -93,7 +93,7 @@ describe('LangChain integration', () => {
9393
'gen_ai.usage.output_tokens': 15,
9494
'gen_ai.usage.total_tokens': 25,
9595
}),
96-
description: 'chat "claude-3-5-sonnet-20241022"',
96+
description: 'chat claude-3-5-sonnet-20241022',
9797
op: 'gen_ai.chat',
9898
origin: 'auto.ai.langchain',
9999
status: 'ok',
@@ -104,8 +104,8 @@ describe('LangChain integration', () => {
104104
'gen_ai.operation.name': 'chat',
105105
'sentry.op': 'gen_ai.chat',
106106
'sentry.origin': 'auto.ai.langchain',
107-
'gen_ai.system': '"anthropic"',
108-
'gen_ai.request.model': '"claude-3-opus-20240229"',
107+
'gen_ai.system': 'anthropic',
108+
'gen_ai.request.model': 'claude-3-opus-20240229',
109109
'gen_ai.request.temperature': 0.9,
110110
'gen_ai.request.top_p': 0.95,
111111
'gen_ai.request.max_tokens': 200,
@@ -118,7 +118,7 @@ describe('LangChain integration', () => {
118118
'gen_ai.usage.output_tokens': 15,
119119
'gen_ai.usage.total_tokens': 25,
120120
}),
121-
description: 'chat "claude-3-opus-20240229"',
121+
description: 'chat claude-3-opus-20240229',
122122
op: 'gen_ai.chat',
123123
origin: 'auto.ai.langchain',
124124
status: 'ok',
@@ -129,11 +129,11 @@ describe('LangChain integration', () => {
129129
'gen_ai.operation.name': 'chat',
130130
'sentry.op': 'gen_ai.chat',
131131
'sentry.origin': 'auto.ai.langchain',
132-
'gen_ai.system': '"anthropic"',
133-
'gen_ai.request.model': '"error-model"',
132+
'gen_ai.system': 'anthropic',
133+
'gen_ai.request.model': 'error-model',
134134
'gen_ai.request.messages': expect.any(String), // Should include messages when recordInputs: true
135135
}),
136-
description: 'chat "error-model"',
136+
description: 'chat error-model',
137137
op: 'gen_ai.chat',
138138
origin: 'auto.ai.langchain',
139139
status: 'unknown_error',

packages/core/src/index.ts

Lines changed: 1 addition & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -159,15 +159,7 @@ export type {
159159
GoogleGenAIOptions,
160160
GoogleGenAIIstrumentedMethod,
161161
} from './utils/google-genai/types';
162-
export type {
163-
LangChainOptions,
164-
LangChainIntegration,
165-
LangChainSerializedLLM,
166-
LangChainMessage,
167-
LangChainLLMResult,
168-
LangChainTool,
169-
LangChainDocument,
170-
} from './utils/langchain/types';
162+
export type { LangChainOptions, LangChainIntegration } from './utils/langchain/types';
171163
export type { FeatureFlag } from './utils/featureFlags';
172164

173165
export {

packages/core/src/utils/ai/gen-ai-attributes.ts

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -80,6 +80,11 @@ export const GEN_AI_RESPONSE_MODEL_ATTRIBUTE = 'gen_ai.response.model';
8080
*/
8181
export const GEN_AI_RESPONSE_ID_ATTRIBUTE = 'gen_ai.response.id';
8282

83+
/**
84+
* The reason why the model stopped generating tokens
85+
*/
86+
export const GEN_AI_RESPONSE_STOP_REASON_ATTRIBUTE = 'gen_ai.response.stop_reason';
87+
8388
/**
8489
* The number of tokens used in the prompt
8590
*/
@@ -129,6 +134,16 @@ export const GEN_AI_RESPONSE_STREAMING_ATTRIBUTE = 'gen_ai.response.streaming';
129134
*/
130135
export const GEN_AI_RESPONSE_TOOL_CALLS_ATTRIBUTE = 'gen_ai.response.tool_calls';
131136

137+
/**
138+
* The number of cache creation input tokens used
139+
*/
140+
export const GEN_AI_USAGE_CACHE_CREATION_INPUT_TOKENS_ATTRIBUTE = 'gen_ai.usage.cache_creation_input_tokens';
141+
142+
/**
143+
* The number of cache read input tokens used
144+
*/
145+
export const GEN_AI_USAGE_CACHE_READ_INPUT_TOKENS_ATTRIBUTE = 'gen_ai.usage.cache_read_input_tokens';
146+
132147
// =============================================================================
133148
// OPENAI-SPECIFIC ATTRIBUTES
134149
// =============================================================================
Lines changed: 8 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -1,24 +1,11 @@
11
export const LANGCHAIN_INTEGRATION_NAME = 'LangChain';
22
export const LANGCHAIN_ORIGIN = 'auto.ai.langchain';
33

4-
/**
5-
* LangChain event types we instrument
6-
* Based on LangChain.js callback system
7-
* @see https://js.langchain.com/docs/concepts/callbacks/
8-
*/
9-
export const LANGCHAIN_EVENT_TYPES = {
10-
CHAT_MODEL_START: 'handleChatModelStart',
11-
LLM_START: 'handleLLMStart',
12-
LLM_NEW_TOKEN: 'handleLLMNewToken',
13-
LLM_END: 'handleLLMEnd',
14-
LLM_ERROR: 'handleLLMError',
15-
CHAIN_START: 'handleChainStart',
16-
CHAIN_END: 'handleChainEnd',
17-
CHAIN_ERROR: 'handleChainError',
18-
TOOL_START: 'handleToolStart',
19-
TOOL_END: 'handleToolEnd',
20-
TOOL_ERROR: 'handleToolError',
21-
RETRIEVER_START: 'handleRetrieverStart',
22-
RETRIEVER_END: 'handleRetrieverEnd',
23-
RETRIEVER_ERROR: 'handleRetrieverError',
24-
} as const;
4+
export const ROLE_MAP: Record<string, string> = {
5+
human: 'user',
6+
ai: 'assistant',
7+
assistant: 'assistant',
8+
system: 'system',
9+
function: 'function',
10+
tool: 'tool',
11+
};

0 commit comments

Comments (0)