diff --git a/dev-packages/e2e-tests/test-applications/nextjs-15/tests/ai-test.test.ts b/dev-packages/e2e-tests/test-applications/nextjs-15/tests/ai-test.test.ts
index 8f08a4a60841..c63716a34fad 100644
--- a/dev-packages/e2e-tests/test-applications/nextjs-15/tests/ai-test.test.ts
+++ b/dev-packages/e2e-tests/test-applications/nextjs-15/tests/ai-test.test.ts
@@ -21,7 +21,7 @@ test('should create AI spans with correct attributes', async ({ page }) => {
   // TODO: For now, this is sadly not fully working - the monkey patching of the ai package is not working
   // because of this, only spans that are manually opted-in at call time will be captured
   // this may be fixed by https://github.com/vercel/ai/pull/6716 in the future
-  const aiPipelineSpans = spans.filter(span => span.op === 'ai.pipeline.generate_text');
+  const aiPipelineSpans = spans.filter(span => span.op === 'gen_ai.invoke_agent');
   const aiGenerateSpans = spans.filter(span => span.op === 'gen_ai.generate_text');
   const toolCallSpans = spans.filter(span => span.op === 'gen_ai.execute_tool');
 
diff --git a/dev-packages/node-integration-tests/suites/tracing/vercelai/test.ts b/dev-packages/node-integration-tests/suites/tracing/vercelai/test.ts
index 946e2067212b..a10d602ee93a 100644
--- a/dev-packages/node-integration-tests/suites/tracing/vercelai/test.ts
+++ b/dev-packages/node-integration-tests/suites/tracing/vercelai/test.ts
@@ -26,11 +26,11 @@ describe('Vercel AI integration', () => {
             'gen_ai.usage.output_tokens': 20,
             'gen_ai.usage.total_tokens': 30,
             'operation.name': 'ai.generateText',
-            'sentry.op': 'ai.pipeline.generate_text',
+            'sentry.op': 'gen_ai.invoke_agent',
             'sentry.origin': 'auto.vercelai.otel',
           },
           description: 'generateText',
-          op: 'ai.pipeline.generate_text',
+          op: 'gen_ai.invoke_agent',
           origin: 'auto.vercelai.otel',
           status: 'ok',
         }),
@@ -83,11 +83,11 @@ describe('Vercel AI integration', () => {
             'gen_ai.usage.output_tokens': 20,
             'gen_ai.usage.total_tokens': 30,
             'operation.name': 'ai.generateText',
-            'sentry.op': 'ai.pipeline.generate_text',
+            'sentry.op': 'gen_ai.invoke_agent',
             'sentry.origin': 'auto.vercelai.otel',
           },
           description: 'generateText',
-          op: 'ai.pipeline.generate_text',
+          op: 'gen_ai.invoke_agent',
           origin: 'auto.vercelai.otel',
           status: 'ok',
         }),
@@ -140,11 +140,11 @@ describe('Vercel AI integration', () => {
             'gen_ai.usage.output_tokens': 25,
             'gen_ai.usage.total_tokens': 40,
             'operation.name': 'ai.generateText',
-            'sentry.op': 'ai.pipeline.generate_text',
+            'sentry.op': 'gen_ai.invoke_agent',
             'sentry.origin': 'auto.vercelai.otel',
           },
           description: 'generateText',
-          op: 'ai.pipeline.generate_text',
+          op: 'gen_ai.invoke_agent',
           origin: 'auto.vercelai.otel',
           status: 'ok',
         }),
@@ -220,11 +220,11 @@ describe('Vercel AI integration', () => {
             'gen_ai.usage.output_tokens': 20,
             'gen_ai.usage.total_tokens': 30,
             'operation.name': 'ai.generateText',
-            'sentry.op': 'ai.pipeline.generate_text',
+            'sentry.op': 'gen_ai.invoke_agent',
             'sentry.origin': 'auto.vercelai.otel',
           },
           description: 'generateText',
-          op: 'ai.pipeline.generate_text',
+          op: 'gen_ai.invoke_agent',
           origin: 'auto.vercelai.otel',
           status: 'ok',
         }),
@@ -280,11 +280,11 @@ describe('Vercel AI integration', () => {
             'gen_ai.usage.output_tokens': 20,
             'gen_ai.usage.total_tokens': 30,
             'operation.name': 'ai.generateText',
-            'sentry.op': 'ai.pipeline.generate_text',
+            'sentry.op': 'gen_ai.invoke_agent',
             'sentry.origin': 'auto.vercelai.otel',
           },
           description: 'generateText',
-          op: 'ai.pipeline.generate_text',
+          op: 'gen_ai.invoke_agent',
           origin: 'auto.vercelai.otel',
           status: 'ok',
         }),
@@ -341,11 +341,11 @@ describe('Vercel AI integration', () => {
             'gen_ai.usage.output_tokens': 25,
             'gen_ai.usage.total_tokens': 40,
             'operation.name': 'ai.generateText',
-            'sentry.op': 'ai.pipeline.generate_text',
+            'sentry.op': 'gen_ai.invoke_agent',
             'sentry.origin': 'auto.vercelai.otel',
           },
           description: 'generateText',
-          op: 'ai.pipeline.generate_text',
+          op: 'gen_ai.invoke_agent',
           origin: 'auto.vercelai.otel',
           status: 'ok',
         }),
diff --git a/packages/node/src/integrations/tracing/vercelai/index.ts b/packages/node/src/integrations/tracing/vercelai/index.ts
index 9557d62b5e04..8ba6cb5af905 100644
--- a/packages/node/src/integrations/tracing/vercelai/index.ts
+++ b/packages/node/src/integrations/tracing/vercelai/index.ts
@@ -102,7 +102,7 @@ const _vercelAIIntegration = ((options: VercelAiOptions = {}) => {
 
       // Generate Spans
       if (name === 'ai.generateText') {
-        span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.pipeline.generate_text');
+        span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.invoke_agent');
         return;
       }
 
@@ -113,7 +113,7 @@ const _vercelAIIntegration = ((options: VercelAiOptions = {}) => {
       }
 
       if (name === 'ai.streamText') {
-        span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.pipeline.stream_text');
+        span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.invoke_agent');
         return;
       }
 
@@ -124,7 +124,7 @@ const _vercelAIIntegration = ((options: VercelAiOptions = {}) => {
       }
 
      if (name === 'ai.generateObject') {
-        span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.pipeline.generate_object');
+        span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.invoke_agent');
         return;
       }
 
@@ -135,7 +135,7 @@ const _vercelAIIntegration = ((options: VercelAiOptions = {}) => {
       }
 
      if (name === 'ai.streamObject') {
-        span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.pipeline.stream_object');
+        span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.invoke_agent');
         return;
       }
 
@@ -146,7 +146,7 @@ const _vercelAIIntegration = ((options: VercelAiOptions = {}) => {
       }
 
      if (name === 'ai.embed') {
-        span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.pipeline.embed');
+        span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.invoke_agent');
         return;
       }
 
@@ -157,7 +157,7 @@ const _vercelAIIntegration = ((options: VercelAiOptions = {}) => {
       }
 
      if (name === 'ai.embedMany') {
-        span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.pipeline.embed_many');
+        span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.invoke_agent');
         return;
       }
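
Note on the TODO carried as context in the nextjs-15 e2e test above: because automatic patching of the ai package is currently broken, only calls that opt into telemetry at call time produce the spans these assertions filter for. Below is a minimal sketch of that call-time opt-in, assuming the AI SDK's experimental_telemetry option and the @ai-sdk/openai provider; the DSN, model id, and prompt are illustrative placeholders, not values taken from this change:

    import * as Sentry from '@sentry/node';
    import { openai } from '@ai-sdk/openai';
    import { generateText } from 'ai';

    Sentry.init({
      dsn: '__DSN__', // placeholder
      tracesSampleRate: 1.0,
      // vercelAIIntegration is the integration patched in index.ts above;
      // listing it explicitly just makes the dependency visible.
      integrations: [Sentry.vercelAIIntegration()],
    });

    const { text } = await generateText({
      model: openai('gpt-4o-mini'), // illustrative model choice
      prompt: 'Tell me a joke',
      // The call-time opt-in the TODO refers to: until upstream patching
      // works, this flag is what emits the `ai.generateText` span whose
      // `sentry.op` the integration now sets to 'gen_ai.invoke_agent'.
      experimental_telemetry: { isEnabled: true },
    });

    console.log(text);

With the flag set, the resulting ai.generateText span flows through the hook changed in packages/node/src/integrations/tracing/vercelai/index.ts, so its sentry.op arrives as gen_ai.invoke_agent, which is what both test suites now assert.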