Commit 062d684

ref(core): Avoid looking up anthropic-ai integration options (#17694)
This avoids looking up the integration for anthropic-ai instrumentation, instead relying on the options being passed in (which they already are). When manually instrumenting the client, you need to pass in the options directly.

1. Node: Options are passed from the integration to `instrumentAnthropicAiClient` anyhow, so nothing changes.
2. cloudflare/vercel-edge: There is no integration; users need to manually call `instrumentAnthropicAiClient()` and pass in the options anyhow (there is no integration to look anything up from).

This required updating the tests to actually use auto instrumentation properly instead of manual client wrapping, which is overall a good change anyhow IMHO.
1 parent 804f7a7 commit 062d684
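For cloudflare/vercel-edge, where no integration exists to read options from, manual instrumentation would look roughly like the sketch below. This is a minimal, hypothetical example: the exact signature of `instrumentAnthropicAiClient` is not spelled out in this diff, and the sketch assumes the options object (`recordInputs`/`recordOutputs`) is accepted as a second argument.

import Anthropic from '@anthropic-ai/sdk';
import { instrumentAnthropicAiClient } from '@sentry/core';

const anthropic = new Anthropic({ apiKey: 'my-api-key' });

// Options are passed directly to the instrumentation helper instead of being
// looked up from the anthropic-ai integration (second argument is assumed).
const client = instrumentAnthropicAiClient(anthropic, {
  recordInputs: true,
  recordOutputs: true,
});

// Calls on the wrapped client are traced as usual.
await client.messages.create({
  model: 'claude-3-haiku-20240307',
  max_tokens: 100,
  messages: [{ role: 'user', content: 'Hello!' }],
});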

File tree

11 files changed: +235 −117 lines changed


dev-packages/node-integration-tests/package.json

Lines changed: 1 addition & 0 deletions
@@ -23,6 +23,7 @@
     "test:watch": "yarn test --watch"
   },
   "dependencies": {
+    "@anthropic-ai/sdk": "0.63.0",
     "@aws-sdk/client-s3": "^3.552.0",
     "@google/genai": "^1.20.0",
     "@hapi/hapi": "^21.3.10",

dev-packages/node-integration-tests/suites/tracing/anthropic/instrument-with-options.mjs

Lines changed: 7 additions & 2 deletions
@@ -1,5 +1,4 @@
 import * as Sentry from '@sentry/node';
-import { nodeContextIntegration } from '@sentry/node-core';
 import { loggingTransport } from '@sentry-internal/node-integration-tests';
 
 Sentry.init({
@@ -13,6 +12,12 @@ Sentry.init({
       recordInputs: true,
       recordOutputs: true,
     }),
-    nodeContextIntegration(),
   ],
+  beforeSendTransaction: event => {
+    // Filter out mock express server transactions
+    if (event.transaction.includes('/anthropic/v1/')) {
+      return null;
+    }
+    return event;
+  },
 });
Lines changed: 7 additions & 2 deletions
@@ -1,5 +1,4 @@
 import * as Sentry from '@sentry/node';
-import { nodeContextIntegration } from '@sentry/node-core';
 import { loggingTransport } from '@sentry-internal/node-integration-tests';
 
 Sentry.init({
@@ -8,5 +7,11 @@ Sentry.init({
   tracesSampleRate: 1.0,
   sendDefaultPii: true,
   transport: loggingTransport,
-  integrations: [Sentry.anthropicAIIntegration(), nodeContextIntegration()],
+  beforeSendTransaction: event => {
+    // Filter out mock express server transactions
+    if (event.transaction.includes('/anthropic/v1/')) {
+      return null;
+    }
+    return event;
+  },
 });
Lines changed: 7 additions & 3 deletions
@@ -1,5 +1,4 @@
 import * as Sentry from '@sentry/node';
-import { nodeContextIntegration } from '@sentry/node-core';
 import { loggingTransport } from '@sentry-internal/node-integration-tests';
 
 Sentry.init({
@@ -8,6 +7,11 @@ Sentry.init({
   tracesSampleRate: 1.0,
   sendDefaultPii: false,
   transport: loggingTransport,
-  // Force include the integration
-  integrations: [Sentry.anthropicAIIntegration(), nodeContextIntegration()],
+  beforeSendTransaction: event => {
+    // Filter out mock express server transactions
+    if (event.transaction.includes('/anthropic/v1/')) {
+      return null;
+    }
+    return event;
+  },
 });
Lines changed: 115 additions & 0 deletions
@@ -0,0 +1,115 @@
+import { instrumentAnthropicAiClient } from '@sentry/core';
+import * as Sentry from '@sentry/node';
+
+class MockAnthropic {
+  constructor(config) {
+    this.apiKey = config.apiKey;
+
+    // Create messages object with create and countTokens methods
+    this.messages = {
+      create: this._messagesCreate.bind(this),
+      countTokens: this._messagesCountTokens.bind(this),
+    };
+
+    this.models = {
+      retrieve: this._modelsRetrieve.bind(this),
+    };
+  }
+
+  /**
+   * Create a mock message
+   */
+  async _messagesCreate(params) {
+    // Simulate processing time
+    await new Promise(resolve => setTimeout(resolve, 10));
+
+    if (params.model === 'error-model') {
+      const error = new Error('Model not found');
+      error.status = 404;
+      error.headers = { 'x-request-id': 'mock-request-123' };
+      throw error;
+    }
+
+    return {
+      id: 'msg_mock123',
+      type: 'message',
+      model: params.model,
+      role: 'assistant',
+      content: [
+        {
+          type: 'text',
+          text: 'Hello from Anthropic mock!',
+        },
+      ],
+      stop_reason: 'end_turn',
+      stop_sequence: null,
+      usage: {
+        input_tokens: 10,
+        output_tokens: 15,
+      },
+    };
+  }
+
+  async _messagesCountTokens() {
+    // Simulate processing time
+    await new Promise(resolve => setTimeout(resolve, 10));
+
+    // For countTokens, just return input_tokens
+    return {
+      input_tokens: 15,
+    };
+  }
+
+  async _modelsRetrieve(modelId) {
+    // Simulate processing time
+    await new Promise(resolve => setTimeout(resolve, 10));
+
+    // Match what the actual implementation would return
+    return {
+      id: modelId,
+      name: modelId,
+      created_at: 1715145600,
+      model: modelId, // Add model field to match the check in addResponseAttributes
+    };
+  }
+}
+
+async function run() {
+  await Sentry.startSpan({ op: 'function', name: 'main' }, async () => {
+    const mockClient = new MockAnthropic({
+      apiKey: 'mock-api-key',
+    });
+
+    const client = instrumentAnthropicAiClient(mockClient);
+
+    // First test: basic message completion
+    await client.messages.create({
+      model: 'claude-3-haiku-20240307',
+      system: 'You are a helpful assistant.',
+      messages: [{ role: 'user', content: 'What is the capital of France?' }],
+      temperature: 0.7,
+      max_tokens: 100,
+    });
+
+    // Second test: error handling
+    try {
+      await client.messages.create({
+        model: 'error-model',
+        messages: [{ role: 'user', content: 'This will fail' }],
+      });
+    } catch {
+      // Error is expected and handled
+    }
+
+    // Third test: count tokens with cached tokens
+    await client.messages.countTokens({
+      model: 'claude-3-haiku-20240307',
+      messages: [{ role: 'user', content: 'What is the capital of France?' }],
+    });
+
+    // Fourth test: models.retrieve
+    await client.models.retrieve('claude-3-haiku-20240307');
+  });
+}
+
+run();
Lines changed: 36 additions & 54 deletions
@@ -1,39 +1,40 @@
-import { instrumentAnthropicAiClient } from '@sentry/core';
+import Anthropic from '@anthropic-ai/sdk';
 import * as Sentry from '@sentry/node';
+import express from 'express';
 
-class MockAnthropic {
-  constructor(config) {
-    this.apiKey = config.apiKey;
+const PORT = 3333;
 
-    // Create messages object with create and countTokens methods
-    this.messages = {
-      create: this._messagesCreate.bind(this),
-      countTokens: this._messagesCountTokens.bind(this),
-    };
+function startMockAnthropicServer() {
+  const app = express();
+  app.use(express.json());
 
-    this.models = {
-      retrieve: this._modelsRetrieve.bind(this),
-    };
-  }
+  app.post('/anthropic/v1/messages/count_tokens', (req, res) => {
+    res.send({
+      input_tokens: 15,
+    });
+  });
 
-  /**
-   * Create a mock message
-   */
-  async _messagesCreate(params) {
-    // Simulate processing time
-    await new Promise(resolve => setTimeout(resolve, 10));
+  app.get('/anthropic/v1/models/:model', (req, res) => {
+    res.send({
+      id: req.params.model,
+      name: req.params.model,
+      created_at: 1715145600,
+      model: req.params.model,
+    });
+  });
+
+  app.post('/anthropic/v1/messages', (req, res) => {
+    const model = req.body.model;
 
-    if (params.model === 'error-model') {
-      const error = new Error('Model not found');
-      error.status = 404;
-      error.headers = { 'x-request-id': 'mock-request-123' };
-      throw error;
+    if (model === 'error-model') {
+      res.status(404).set('x-request-id', 'mock-request-123').send('Model not found');
+      return;
     }
 
-    return {
+    res.send({
      id: 'msg_mock123',
      type: 'message',
-      model: params.model,
+      model,
      role: 'assistant',
      content: [
        {
@@ -47,41 +48,20 @@ class MockAnthropic {
        input_tokens: 10,
        output_tokens: 15,
      },
-    };
-  }
-
-  async _messagesCountTokens() {
-    // Simulate processing time
-    await new Promise(resolve => setTimeout(resolve, 10));
-
-    // For countTokens, just return input_tokens
-    return {
-      input_tokens: 15,
-    };
-  }
-
-  async _modelsRetrieve(modelId) {
-    // Simulate processing time
-    await new Promise(resolve => setTimeout(resolve, 10));
-
-    // Match what the actual implementation would return
-    return {
-      id: modelId,
-      name: modelId,
-      created_at: 1715145600,
-      model: modelId, // Add model field to match the check in addResponseAttributes
-    };
-  }
+    });
+  });
+  return app.listen(PORT);
 }
 
 async function run() {
+  const server = startMockAnthropicServer();
+
   await Sentry.startSpan({ op: 'function', name: 'main' }, async () => {
-    const mockClient = new MockAnthropic({
+    const client = new Anthropic({
       apiKey: 'mock-api-key',
+      baseURL: `http://localhost:${PORT}/anthropic`,
     });
 
-    const client = instrumentAnthropicAiClient(mockClient);
-
     // First test: basic message completion
     await client.messages.create({
       model: 'claude-3-haiku-20240307',
@@ -110,6 +90,8 @@ async function run() {
     // Fourth test: models.retrieve
     await client.models.retrieve('claude-3-haiku-20240307');
   });
+
+  server.close();
 }
 
 run();

dev-packages/node-integration-tests/suites/tracing/anthropic/test.ts

Lines changed: 10 additions & 0 deletions
@@ -189,6 +189,16 @@ describe('Anthropic integration', () => {
     ]),
   };
 
+  createEsmAndCjsTests(__dirname, 'scenario-manual-client.mjs', 'instrument.mjs', (createRunner, test) => {
+    test('creates anthropic related spans when manually instrumenting client', async () => {
+      await createRunner()
+        .ignore('event')
+        .expect({ transaction: EXPECTED_TRANSACTION_DEFAULT_PII_FALSE })
+        .start()
+        .completed();
+    });
+  });
+
   createEsmAndCjsTests(__dirname, 'scenario.mjs', 'instrument.mjs', (createRunner, test) => {
     test('creates anthropic related spans with sendDefaultPii: false', async () => {
       await createRunner()

0 commit comments