1 change: 1 addition & 0 deletions dev-packages/node-integration-tests/package.json
@@ -23,6 +23,7 @@
"test:watch": "yarn test --watch"
},
"dependencies": {
"@anthropic-ai/sdk": "0.63.0",
"@aws-sdk/client-s3": "^3.552.0",
"@hapi/hapi": "^21.3.10",
"@nestjs/common": "11.1.3",
@@ -1,5 +1,4 @@
import * as Sentry from '@sentry/node';
import { nodeContextIntegration } from '@sentry/node-core';
import { loggingTransport } from '@sentry-internal/node-integration-tests';

Sentry.init({
@@ -13,6 +12,12 @@ Sentry.init({
recordInputs: true,
recordOutputs: true,
}),
nodeContextIntegration(),
],
beforeSendTransaction: event => {
// Filter out mock express server transactions
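// (returning null here drops the http.server transactions emitted for the mock API routes,
// so only the scenario's own transaction reaches the test assertions)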
if (event.transaction.includes('/anthropic/v1/')) {
return null;
}
return event;
},
});
@@ -1,5 +1,4 @@
import * as Sentry from '@sentry/node';
import { nodeContextIntegration } from '@sentry/node-core';
import { loggingTransport } from '@sentry-internal/node-integration-tests';

Sentry.init({
@@ -8,5 +7,11 @@ Sentry.init({
tracesSampleRate: 1.0,
sendDefaultPii: true,
transport: loggingTransport,
integrations: [Sentry.anthropicAIIntegration(), nodeContextIntegration()],
beforeSendTransaction: event => {
// Filter out mock express server transactions
if (event.transaction.includes('/anthropic/v1/')) {
return null;
}
return event;
},
});
@@ -1,5 +1,4 @@
import * as Sentry from '@sentry/node';
import { nodeContextIntegration } from '@sentry/node-core';
import { loggingTransport } from '@sentry-internal/node-integration-tests';

Sentry.init({
@@ -8,6 +7,11 @@ Sentry.init({
tracesSampleRate: 1.0,
sendDefaultPii: false,
transport: loggingTransport,
// Force include the integration
integrations: [Sentry.anthropicAIIntegration(), nodeContextIntegration()],
beforeSendTransaction: event => {
// Filter out mock express server transactions
if (event.transaction.includes('/anthropic/v1/')) {
return null;
}
return event;
},
});
@@ -0,0 +1,115 @@
import { instrumentAnthropicAiClient } from '@sentry/core';
import * as Sentry from '@sentry/node';

class MockAnthropic {
constructor(config) {
this.apiKey = config.apiKey;

// Create messages object with create and countTokens methods
this.messages = {
create: this._messagesCreate.bind(this),
countTokens: this._messagesCountTokens.bind(this),
};

this.models = {
retrieve: this._modelsRetrieve.bind(this),
};
}

/**
* Create a mock message
*/
async _messagesCreate(params) {
// Simulate processing time
await new Promise(resolve => setTimeout(resolve, 10));

if (params.model === 'error-model') {
const error = new Error('Model not found');
error.status = 404;
error.headers = { 'x-request-id': 'mock-request-123' };
throw error;
}

return {
id: 'msg_mock123',
type: 'message',
model: params.model,
role: 'assistant',
content: [
{
type: 'text',
text: 'Hello from Anthropic mock!',
},
],
stop_reason: 'end_turn',
stop_sequence: null,
usage: {
input_tokens: 10,
output_tokens: 15,
},
};
}

async _messagesCountTokens() {
// Simulate processing time
await new Promise(resolve => setTimeout(resolve, 10));

// For countTokens, just return input_tokens
return {
input_tokens: 15,
};
}

async _modelsRetrieve(modelId) {
// Simulate processing time
await new Promise(resolve => setTimeout(resolve, 10));

// Match what the actual implementation would return
return {
id: modelId,
name: modelId,
created_at: 1715145600,
model: modelId, // Add model field to match the check in addResponseAttributes
};
}
}

async function run() {
await Sentry.startSpan({ op: 'function', name: 'main' }, async () => {
const mockClient = new MockAnthropic({
apiKey: 'mock-api-key',
});

const client = instrumentAnthropicAiClient(mockClient);
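// Wrapping the mock client manually with instrumentAnthropicAiClient (rather than relying on
// the auto-enabled integration) should still produce anthropic spans for the calls below.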

// First test: basic message completion
await client.messages.create({
model: 'claude-3-haiku-20240307',
system: 'You are a helpful assistant.',
messages: [{ role: 'user', content: 'What is the capital of France?' }],
temperature: 0.7,
max_tokens: 100,
});

// Second test: error handling
try {
await client.messages.create({
model: 'error-model',
messages: [{ role: 'user', content: 'This will fail' }],
});
} catch {
// Error is expected and handled
}

// Third test: count tokens
await client.messages.countTokens({
model: 'claude-3-haiku-20240307',
messages: [{ role: 'user', content: 'What is the capital of France?' }],
});

// Fourth test: models.retrieve
await client.models.retrieve('claude-3-haiku-20240307');
});
}

run();
@@ -1,39 +1,40 @@
import { instrumentAnthropicAiClient } from '@sentry/core';
import Anthropic from '@anthropic-ai/sdk';
import * as Sentry from '@sentry/node';
import express from 'express';

class MockAnthropic {
constructor(config) {
this.apiKey = config.apiKey;
const PORT = 3333;

// Create messages object with create and countTokens methods
this.messages = {
create: this._messagesCreate.bind(this),
countTokens: this._messagesCountTokens.bind(this),
};
function startMockAnthropicServer() {
const app = express();
app.use(express.json());

this.models = {
retrieve: this._modelsRetrieve.bind(this),
};
}
app.post('/anthropic/v1/messages/count_tokens', (req, res) => {
res.send({
input_tokens: 15,
});
});

/**
* Create a mock message
*/
async _messagesCreate(params) {
// Simulate processing time
await new Promise(resolve => setTimeout(resolve, 10));
app.get('/anthropic/v1/models/:model', (req, res) => {
res.send({
id: req.params.model,
name: req.params.model,
created_at: 1715145600,
model: req.params.model,
});
});

app.post('/anthropic/v1/messages', (req, res) => {
const model = req.body.model;

if (params.model === 'error-model') {
const error = new Error('Model not found');
error.status = 404;
error.headers = { 'x-request-id': 'mock-request-123' };
throw error;
if (model === 'error-model') {
res.status(404).set('x-request-id', 'mock-request-123').send('Model not found');
return;
}

return {
res.send({
id: 'msg_mock123',
type: 'message',
model: params.model,
model,
role: 'assistant',
content: [
{
@@ -47,41 +48,20 @@ class MockAnthropic {
input_tokens: 10,
output_tokens: 15,
},
};
}

async _messagesCountTokens() {
// Simulate processing time
await new Promise(resolve => setTimeout(resolve, 10));

// For countTokens, just return input_tokens
return {
input_tokens: 15,
};
}

async _modelsRetrieve(modelId) {
// Simulate processing time
await new Promise(resolve => setTimeout(resolve, 10));

// Match what the actual implementation would return
return {
id: modelId,
name: modelId,
created_at: 1715145600,
model: modelId, // Add model field to match the check in addResponseAttributes
};
}
});
});
return app.listen(PORT);
}

async function run() {
const server = startMockAnthropicServer();
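// The real @anthropic-ai/sdk client below talks to this local server via baseURL, so the test
// exercises actual SDK request paths without any network access or real API key.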

await Sentry.startSpan({ op: 'function', name: 'main' }, async () => {
const mockClient = new MockAnthropic({
const client = new Anthropic({
apiKey: 'mock-api-key',
baseURL: `http://localhost:${PORT}/anthropic`,
});

const client = instrumentAnthropicAiClient(mockClient);

// First test: basic message completion
await client.messages.create({
model: 'claude-3-haiku-20240307',
@@ -110,6 +90,8 @@ async function run() {
// Fourth test: models.retrieve
await client.models.retrieve('claude-3-haiku-20240307');
});

server.close();
}

run();
@@ -189,6 +189,16 @@ describe('Anthropic integration', () => {
]),
};

createEsmAndCjsTests(__dirname, 'scenario-manual-client.mjs', 'instrument.mjs', (createRunner, test) => {
test('creates anthropic related spans when manually instrumenting the client', async () => {
await createRunner()
.ignore('event')
.expect({ transaction: EXPECTED_TRANSACTION_DEFAULT_PII_FALSE })
.start()
.completed();
});
});

createEsmAndCjsTests(__dirname, 'scenario.mjs', 'instrument.mjs', (createRunner, test) => {
test('creates anthropic related spans with sendDefaultPii: false', async () => {
await createRunner()