From bdc354336053b87c6a7092c27fa7949da30f4fe4 Mon Sep 17 00:00:00 2001 From: Abhijeet Prasad Date: Mon, 7 Oct 2024 08:58:23 -0400 Subject: [PATCH 1/5] feat(node): Add @vercel/ai instrumentation --- .../node-integration-tests/package.json | 1 + .../suites/tracing/ai/scenario.js | 59 ++++++ .../suites/tracing/ai/test.ts | 129 ++++++++++++ .../node/src/integrations/tracing/index.ts | 3 + .../integrations/tracing/vercelai/index.ts | 195 ++++++++++++++++++ .../tracing/vercelai/instrumentation.ts | 81 ++++++++ .../integrations/tracing/vercelai/types.ts | 44 ++++ yarn.lock | 118 ++++++++++- 8 files changed, 624 insertions(+), 6 deletions(-) create mode 100644 dev-packages/node-integration-tests/suites/tracing/ai/scenario.js create mode 100644 dev-packages/node-integration-tests/suites/tracing/ai/test.ts create mode 100644 packages/node/src/integrations/tracing/vercelai/index.ts create mode 100644 packages/node/src/integrations/tracing/vercelai/instrumentation.ts create mode 100644 packages/node/src/integrations/tracing/vercelai/types.ts diff --git a/dev-packages/node-integration-tests/package.json b/dev-packages/node-integration-tests/package.json index f58bef2a0e45..21ae4f35d14c 100644 --- a/dev-packages/node-integration-tests/package.json +++ b/dev-packages/node-integration-tests/package.json @@ -38,6 +38,7 @@ "@types/mongodb": "^3.6.20", "@types/mysql": "^2.15.21", "@types/pg": "^8.6.5", + "ai": "^4.0.6", "amqplib": "^0.10.4", "apollo-server": "^3.11.1", "axios": "^1.7.7", diff --git a/dev-packages/node-integration-tests/suites/tracing/ai/scenario.js b/dev-packages/node-integration-tests/suites/tracing/ai/scenario.js new file mode 100644 index 000000000000..43747cf1986c --- /dev/null +++ b/dev-packages/node-integration-tests/suites/tracing/ai/scenario.js @@ -0,0 +1,59 @@ +const { loggingTransport } = require('@sentry-internal/node-integration-tests'); +const Sentry = require('@sentry/node'); + +Sentry.init({ + debug: true, + dsn: 'https://public@dsn.ingest.sentry.io/1337', + release: '1.0', + tracesSampleRate: 1.0, + transport: loggingTransport, +}); + +const { generateText } = require('ai'); +const { MockLanguageModelV1 } = require('ai/test'); + +async function run() { + await Sentry.startSpan({ op: 'function', name: 'main' }, async () => { + await generateText({ + model: new MockLanguageModelV1({ + doGenerate: async () => ({ + rawCall: { rawPrompt: null, rawSettings: {} }, + finishReason: 'stop', + usage: { promptTokens: 10, completionTokens: 20 }, + text: 'First span here!', + }), + }), + prompt: 'Where is the first span?', + }); + + // This span should have input and output prompts attached because telemetry is explicitly enabled. 
+ await generateText({ + experimental_telemetry: { isEnabled: true }, + model: new MockLanguageModelV1({ + doGenerate: async () => ({ + rawCall: { rawPrompt: null, rawSettings: {} }, + finishReason: 'stop', + usage: { promptTokens: 10, completionTokens: 20 }, + text: 'Second span here!', + }), + }), + prompt: 'Where is the second span?', + }); + + // This span should not be captured because we've disabled telemetry + await generateText({ + experimental_telemetry: { isEnabled: false }, + model: new MockLanguageModelV1({ + doGenerate: async () => ({ + rawCall: { rawPrompt: null, rawSettings: {} }, + finishReason: 'stop', + usage: { promptTokens: 10, completionTokens: 20 }, + text: 'Third span here!', + }), + }), + prompt: 'Where is the third span?', + }); + }); +} + +run(); diff --git a/dev-packages/node-integration-tests/suites/tracing/ai/test.ts b/dev-packages/node-integration-tests/suites/tracing/ai/test.ts new file mode 100644 index 000000000000..bd9fb6890441 --- /dev/null +++ b/dev-packages/node-integration-tests/suites/tracing/ai/test.ts @@ -0,0 +1,129 @@ +import { cleanupChildProcesses, createRunner } from '../../../utils/runner'; + +describe('ai', () => { + afterAll(() => { + cleanupChildProcesses(); + }); + + test('creates ai related spans', done => { + const EXPECTED_TRANSACTION = { + transaction: 'main', + spans: expect.arrayContaining([ + expect.objectContaining({ + data: expect.objectContaining({ + 'ai.completion_tokens.used': 20, + 'ai.model.id': 'mock-model-id', + 'ai.model.provider': 'mock-provider', + 'ai.model_id': 'mock-model-id', + 'ai.operationId': 'ai.generateText', + 'ai.pipeline.name': 'generateText', + 'ai.prompt_tokens.used': 10, + 'ai.response.finishReason': 'stop', + 'ai.settings.maxRetries': 2, + 'ai.settings.maxSteps': 1, + 'ai.streaming': false, + 'ai.tokens.used': 30, + 'ai.usage.completionTokens': 20, + 'ai.usage.promptTokens': 10, + 'operation.name': 'ai.generateText', + 'sentry.op': 'ai.pipeline.generateText', + 'sentry.origin': 'auto.vercelai.otel', + }), + description: 'generateText', + op: 'ai.pipeline.generateText', + origin: 'auto.vercelai.otel', + status: 'ok', + }), + expect.objectContaining({ + data: expect.objectContaining({ + 'sentry.origin': 'auto.vercelai.otel', + 'sentry.op': 'ai.run.doGenerate', + 'operation.name': 'ai.generateText.doGenerate', + 'ai.operationId': 'ai.generateText.doGenerate', + 'ai.model.provider': 'mock-provider', + 'ai.model.id': 'mock-model-id', + 'ai.settings.maxRetries': 2, + 'gen_ai.system': 'mock-provider', + 'gen_ai.request.model': 'mock-model-id', + 'ai.pipeline.name': 'generateText.doGenerate', + 'ai.model_id': 'mock-model-id', + 'ai.streaming': false, + 'ai.response.finishReason': 'stop', + 'ai.response.model': 'mock-model-id', + 'ai.usage.promptTokens': 10, + 'ai.usage.completionTokens': 20, + 'gen_ai.response.finish_reasons': ['stop'], + 'gen_ai.usage.input_tokens': 10, + 'gen_ai.usage.output_tokens': 20, + 'ai.completion_tokens.used': 20, + 'ai.prompt_tokens.used': 10, + 'ai.tokens.used': 30, + }), + description: 'generateText.doGenerate', + op: 'ai.run.doGenerate', + origin: 'auto.vercelai.otel', + status: 'ok', + }), + expect.objectContaining({ + data: expect.objectContaining({ + 'ai.completion_tokens.used': 20, + 'ai.model.id': 'mock-model-id', + 'ai.model.provider': 'mock-provider', + 'ai.model_id': 'mock-model-id', + 'ai.prompt': '{"prompt":"Where is the second span?"}', + 'ai.operationId': 'ai.generateText', + 'ai.pipeline.name': 'generateText', + 'ai.prompt_tokens.used': 10, + 'ai.response.finishReason': 
'stop', + 'ai.input_messages': '{"prompt":"Where is the second span?"}', + 'ai.settings.maxRetries': 2, + 'ai.settings.maxSteps': 1, + 'ai.streaming': false, + 'ai.tokens.used': 30, + 'ai.usage.completionTokens': 20, + 'ai.usage.promptTokens': 10, + 'operation.name': 'ai.generateText', + 'sentry.op': 'ai.pipeline.generateText', + 'sentry.origin': 'auto.vercelai.otel', + }), + description: 'generateText', + op: 'ai.pipeline.generateText', + origin: 'auto.vercelai.otel', + status: 'ok', + }), + expect.objectContaining({ + data: expect.objectContaining({ + 'sentry.origin': 'auto.vercelai.otel', + 'sentry.op': 'ai.run.doGenerate', + 'operation.name': 'ai.generateText.doGenerate', + 'ai.operationId': 'ai.generateText.doGenerate', + 'ai.model.provider': 'mock-provider', + 'ai.model.id': 'mock-model-id', + 'ai.settings.maxRetries': 2, + 'gen_ai.system': 'mock-provider', + 'gen_ai.request.model': 'mock-model-id', + 'ai.pipeline.name': 'generateText.doGenerate', + 'ai.model_id': 'mock-model-id', + 'ai.streaming': false, + 'ai.response.finishReason': 'stop', + 'ai.response.model': 'mock-model-id', + 'ai.usage.promptTokens': 10, + 'ai.usage.completionTokens': 20, + 'gen_ai.response.finish_reasons': ['stop'], + 'gen_ai.usage.input_tokens': 10, + 'gen_ai.usage.output_tokens': 20, + 'ai.completion_tokens.used': 20, + 'ai.prompt_tokens.used': 10, + 'ai.tokens.used': 30, + }), + description: 'generateText.doGenerate', + op: 'ai.run.doGenerate', + origin: 'auto.vercelai.otel', + status: 'ok', + }), + ]), + }; + + createRunner(__dirname, 'scenario.js').expect({ transaction: EXPECTED_TRANSACTION }).start(done); + }); +}); diff --git a/packages/node/src/integrations/tracing/index.ts b/packages/node/src/integrations/tracing/index.ts index c8f6348aeee0..03a0217438a5 100644 --- a/packages/node/src/integrations/tracing/index.ts +++ b/packages/node/src/integrations/tracing/index.ts @@ -19,6 +19,7 @@ import { instrumentNest, nestIntegration } from './nest/nest'; import { instrumentPostgres, postgresIntegration } from './postgres'; import { instrumentRedis, redisIntegration } from './redis'; import { instrumentTedious, tediousIntegration } from './tedious'; +import { instrumentVercelAi, vercelAIIntegration } from './vercelai'; /** * With OTEL, all performance integrations will be added, as OTEL only initializes them when the patched package is actually required. 
@@ -48,6 +49,7 @@ export function getAutoPerformanceIntegrations(): Integration[] { kafkaIntegration(), amqplibIntegration(), lruMemoizerIntegration(), + vercelAIIntegration(), ]; } @@ -78,5 +80,6 @@ export function getOpenTelemetryInstrumentationToPreload(): (((options?: any) => instrumentTedious, instrumentGenericPool, instrumentAmqplib, + instrumentVercelAi, ]; } diff --git a/packages/node/src/integrations/tracing/vercelai/index.ts b/packages/node/src/integrations/tracing/vercelai/index.ts new file mode 100644 index 000000000000..38eeadf2e4e2 --- /dev/null +++ b/packages/node/src/integrations/tracing/vercelai/index.ts @@ -0,0 +1,195 @@ +/* eslint-disable complexity */ +import { SEMANTIC_ATTRIBUTE_SENTRY_OP, defineIntegration, spanToJSON } from '@sentry/core'; +import type { IntegrationFn } from '@sentry/types'; +import { generateInstrumentOnce } from '../../../otel/instrument'; +import { addOriginToSpan } from '../../../utils/addOriginToSpan'; +import { SentryVercelAiInstrumentation } from './instrumentation'; + +const sentryVercelAiInstance = new SentryVercelAiInstrumentation({}); + +export const instrumentVercelAi = generateInstrumentOnce('vercelAI', () => sentryVercelAiInstance); + +const _vercelAIIntegration = (() => { + return { + name: 'vercelAI', + setupOnce() { + instrumentVercelAi(); + }, + preprocessEvent(event) { + if (!sentryVercelAiInstance.patchIsActive) { + return; + } + + if (event.type === 'transaction' && event.spans?.length) { + for (const span of event.spans) { + const { data: attributes, description: name } = span; + + if (!attributes || !name || span.origin !== 'auto.vercelai.otel') { + continue; + } + + // attributes around token usage can only be set on span finish + span.data = span.data || {}; + + if (attributes['ai.usage.completionTokens'] != undefined) { + span.data['ai.completion_tokens.used'] = attributes['ai.usage.completionTokens']; + } + if (attributes['ai.usage.promptTokens'] != undefined) { + span.data['ai.prompt_tokens.used'] = attributes['ai.usage.promptTokens']; + } + if ( + attributes['ai.usage.completionTokens'] != undefined && + attributes['ai.usage.promptTokens'] != undefined + ) { + span.data['ai.tokens.used'] = attributes['ai.usage.completionTokens'] + attributes['ai.usage.promptTokens']; + } + } + } + }, + setup(client) { + client.on('spanStart', span => { + if (!sentryVercelAiInstance.patchIsActive) { + return; + } + + const { data: attributes, description: name } = spanToJSON(span); + + if (!attributes || !name) { + return; + } + + // The id of the model + const aiModelId: string | undefined = attributes['ai.model.id']; + + // the provider of the model + const aiModelProvider: string | undefined = attributes['ai.model.provider']; + + // both of these must be defined for the integration to work + if (!aiModelId || !aiModelProvider) { + return; + } + + let isPipelineSpan = false; + + switch (name) { + case 'ai.generateText': { + span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.pipeline.generateText'); + isPipelineSpan = true; + break; + } + case 'ai.generateText.doGenerate': { + span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.run.doGenerate'); + break; + } + case 'ai.streamText': { + span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.pipeline.streamText'); + isPipelineSpan = true; + break; + } + case 'ai.streamText.doStream': { + span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.run.doStream'); + break; + } + case 'ai.generateObject': { + span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.pipeline.generateObject'); + isPipelineSpan 
= true; + break; + } + case 'ai.generateObject.doGenerate': { + span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.run.doGenerate'); + break; + } + case 'ai.streamObject': { + span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.pipeline.streamObject'); + isPipelineSpan = true; + break; + } + case 'ai.streamObject.doStream': { + span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.run.doStream'); + break; + } + case 'ai.embed': { + span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.pipeline.embed'); + isPipelineSpan = true; + break; + } + case 'ai.embed.doEmbed': { + span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.embeddings'); + break; + } + case 'ai.embedMany': { + span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.pipeline.embedMany'); + isPipelineSpan = true; + break; + } + case 'ai.embedMany.doEmbed': { + span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.embeddings'); + break; + } + case 'ai.toolCall': + case 'ai.stream.firstChunk': + case 'ai.stream.finish': + span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.run'); + break; + } + + addOriginToSpan(span, 'auto.vercelai.otel'); + + const nameWthoutAi = name.replace('ai.', ''); + span.setAttribute('ai.pipeline.name', nameWthoutAi); + span.updateName(nameWthoutAi); + + // If a Telemetry name is set and it is a pipeline span, use that as the operation name + if (attributes['ai.telemetry.functionId'] && isPipelineSpan) { + span.updateName(attributes['ai.telemetry.functionId']); + span.setAttribute('ai.pipeline.name', attributes['ai.telemetry.functionId']); + } + + if (attributes['ai.prompt']) { + span.setAttribute('ai.input_messages', attributes['ai.prompt']); + } + if (attributes['ai.model.id']) { + span.setAttribute('ai.model_id', attributes['ai.model.id']); + } + span.setAttribute('ai.streaming', name.includes('stream')); + }); + }, + }; +}) satisfies IntegrationFn; + +/** + * Adds Sentry tracing instrumentation for the [ai](https://www.npmjs.com/package/ai) library. + * + * For more information, see the [`ai` documentation](https://sdk.vercel.ai/docs/ai-sdk-core/telemetry). + * + * @example + * ```javascript + * const Sentry = require('@sentry/node'); + * + * Sentry.init({ + * integrations: [Sentry.vercelAIIntegration()], + * }); + * ``` + * + * By default this integration adds tracing support to all `ai` function calls. If you need to disable + * collecting spans for a specific call, you can do so by setting `experimental_telemetry.isEnabled` to + * `false` in the first argument of the function call. + * + * ```javascript + * const result = await generateText({ + * model: openai('gpt-4-turbo'), + * experimental_telemetry: { isEnabled: false }, + * }); + * ``` + * + * If you want to collect inputs and outputs for a specific call, you must specifically opt-in to each + * function call by setting `experimental_telemetry.recordInputs` and `experimental_telemetry.recordOutputs` + * to `true`. 
+ * + * ```javascript + * const result = await generateText({ + * model: openai('gpt-4-turbo'), + * experimental_telemetry: { isEnabled: true, recordInputs: true, recordOutputs: true }, + * }); + */ +export const vercelAIIntegration = defineIntegration(_vercelAIIntegration); diff --git a/packages/node/src/integrations/tracing/vercelai/instrumentation.ts b/packages/node/src/integrations/tracing/vercelai/instrumentation.ts new file mode 100644 index 000000000000..5d5fab46a18d --- /dev/null +++ b/packages/node/src/integrations/tracing/vercelai/instrumentation.ts @@ -0,0 +1,81 @@ +import { InstrumentationBase, InstrumentationNodeModuleDefinition } from '@opentelemetry/instrumentation'; +import type { InstrumentationConfig, InstrumentationModuleDefinition } from '@opentelemetry/instrumentation'; +import { SDK_VERSION } from '@sentry/core'; +import type { TelemetrySettings } from './types'; + +// List of patched methods +// From: https://sdk.vercel.ai/docs/ai-sdk-core/telemetry#collected-data +const INSTRUMENTED_METHODS = [ + 'generateText', + 'streamText', + 'generateObject', + 'streamObject', + 'embed', + 'embedMany', +] as const; + +interface MethodFirstArg extends Record { + experimental_telemetry?: TelemetrySettings; +} + +type MethodArgs = [MethodFirstArg, ...unknown[]]; + +type PatchedModuleExports = Record<(typeof INSTRUMENTED_METHODS)[number], (...args: MethodArgs) => unknown> & + Record; + +/** + * This detects is added by the Sentry Vercel AI Integration to detect if the integration should + * be enabled. + * + * It also patches the `ai` module to enable Vercel AI telemetry automatically for all methods. + */ +export class SentryVercelAiInstrumentation extends InstrumentationBase { + public patchIsActive: boolean = false; + + public constructor(config: InstrumentationConfig = {}) { + super('sentry-vercel-ai', SDK_VERSION, config); + } + + /** + * Initializes the instrumentation by defining the modules to be patched. + */ + public init(): InstrumentationModuleDefinition { + const module = new InstrumentationNodeModuleDefinition('ai', ['>=3.0.0 <5'], this._patch.bind(this)); + return module; + } + + /** + * Patches module exports to enable Vercel AI telemetry. + */ + private _patch(moduleExports: PatchedModuleExports): unknown { + this.patchIsActive = true; + + function generatePatch(name: string) { + return (...args: MethodArgs) => { + const existingExperimentalTelemetry = args[0].experimental_telemetry || {}; + const isEnabled = existingExperimentalTelemetry.isEnabled; + + // if `isEnabled` is not explicitly set to `true` or `false`, enable telemetry + // but disable capturing inputs and outputs by default + if (isEnabled === undefined) { + args[0].experimental_telemetry = { + isEnabled: true, + recordInputs: false, + recordOutputs: false, + ...existingExperimentalTelemetry, + }; + } + + // @ts-expect-error we know that the method exists + return moduleExports[name].apply(this, args); + }; + } + + const patchedModuleExports = INSTRUMENTED_METHODS.reduce((acc, curr) => { + acc[curr] = generatePatch(curr); + return acc; + }, {} as PatchedModuleExports); + + return { ...moduleExports, ...patchedModuleExports }; + } +} diff --git a/packages/node/src/integrations/tracing/vercelai/types.ts b/packages/node/src/integrations/tracing/vercelai/types.ts new file mode 100644 index 000000000000..8773f84d52c6 --- /dev/null +++ b/packages/node/src/integrations/tracing/vercelai/types.ts @@ -0,0 +1,44 @@ +/** + * Telemetry configuration. 
+ */ +export type TelemetrySettings = { + /** + * Enable or disable telemetry. Disabled by default while experimental. + */ + isEnabled?: boolean; + /** + * Enable or disable input recording. Enabled by default. + * + * You might want to disable input recording to avoid recording sensitive + * information, to reduce data transfers, or to increase performance. + */ + recordInputs?: boolean; + /** + * Enable or disable output recording. Enabled by default. + * + * You might want to disable output recording to avoid recording sensitive + * information, to reduce data transfers, or to increase performance. + */ + recordOutputs?: boolean; + /** + * Identifier for this function. Used to group telemetry data by function. + */ + functionId?: string; + /** + * Additional information to include in the telemetry data. + */ + metadata?: Record; +}; + +/** + * Attribute values may be any non-nullish primitive value except an object. + * + * null or undefined attribute values are invalid and will result in undefined behavior. + */ +export declare type AttributeValue = + | string + | number + | boolean + | Array + | Array + | Array; diff --git a/yarn.lock b/yarn.lock index 320f57f45de4..962e62fc4748 100644 --- a/yarn.lock +++ b/yarn.lock @@ -94,6 +94,42 @@ resolved "https://registry.yarnpkg.com/@adobe/css-tools/-/css-tools-4.4.0.tgz#728c484f4e10df03d5a3acd0d8adcbbebff8ad63" integrity sha512-Ff9+ksdQQB3rMncgqDK78uLznstjyfIf2Arnh22pW8kBpLs6rpKDwgnZT46hin5Hl1WzazzK64DOrhSwYpS7bQ== +"@ai-sdk/provider-utils@2.0.2": + version "2.0.2" + resolved "https://registry.yarnpkg.com/@ai-sdk/provider-utils/-/provider-utils-2.0.2.tgz#ea9d510be442b38bd40ae50dbf5b64ffc396952b" + integrity sha512-IAvhKhdlXqiSmvx/D4uNlFYCl8dWT+M9K+IuEcSgnE2Aj27GWu8sDIpAf4r4Voc+wOUkOECVKQhFo8g9pozdjA== + dependencies: + "@ai-sdk/provider" "1.0.1" + eventsource-parser "^3.0.0" + nanoid "^3.3.7" + secure-json-parse "^2.7.0" + +"@ai-sdk/provider@1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@ai-sdk/provider/-/provider-1.0.1.tgz#8172a3cbbfa61bb40b88512165f70fe3c186cb60" + integrity sha512-mV+3iNDkzUsZ0pR2jG0sVzU6xtQY5DtSCBy3JFycLp6PwjyLw/iodfL3MwdmMCRJWgs3dadcHejRnMvF9nGTBg== + dependencies: + json-schema "^0.4.0" + +"@ai-sdk/react@1.0.3": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@ai-sdk/react/-/react-1.0.3.tgz#b9bc24e20bdc5768cbb0d9c65471fb60ab2675ec" + integrity sha512-Mak7qIRlbgtP4I7EFoNKRIQTlABJHhgwrN8SV2WKKdmsfWK2RwcubQWz1hp88cQ0bpF6KxxjSY1UUnS/S9oR5g== + dependencies: + "@ai-sdk/provider-utils" "2.0.2" + "@ai-sdk/ui-utils" "1.0.2" + swr "^2.2.5" + throttleit "2.1.0" + +"@ai-sdk/ui-utils@1.0.2": + version "1.0.2" + resolved "https://registry.yarnpkg.com/@ai-sdk/ui-utils/-/ui-utils-1.0.2.tgz#2b5ad527f821b055663ddc60f2c45a82956091a0" + integrity sha512-hHrUdeThGHu/rsGZBWQ9PjrAU9Htxgbo9MFyR5B/aWoNbBeXn1HLMY1+uMEnXL5pRPlmyVRjgIavWg7UgeNDOw== + dependencies: + "@ai-sdk/provider" "1.0.1" + "@ai-sdk/provider-utils" "2.0.2" + zod-to-json-schema "^3.23.5" + "@ampproject/remapping@2.2.0": version "2.2.0" resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.2.0.tgz#56c133824780de3174aed5ab6834f3026790154d" @@ -7520,6 +7556,11 @@ dependencies: "@opentelemetry/api" "^1.3.0" +"@opentelemetry/api@1.9.0", "@opentelemetry/api@^1.0.0", "@opentelemetry/api@^1.3.0", "@opentelemetry/api@^1.8", "@opentelemetry/api@^1.9.0": + version "1.9.0" + resolved "https://registry.yarnpkg.com/@opentelemetry/api/-/api-1.9.0.tgz#d03eba68273dc0f7509e2a3d5cba21eae10379fe" + integrity 
sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg== + "@opentelemetry/api@^0.12.0": version "0.12.0" resolved "https://registry.yarnpkg.com/@opentelemetry/api/-/api-0.12.0.tgz#0359c3926e8f16fdcd8c78f196bd1e9fc4e66777" @@ -7527,11 +7568,6 @@ dependencies: "@opentelemetry/context-base" "^0.12.0" -"@opentelemetry/api@^1.0.0", "@opentelemetry/api@^1.3.0", "@opentelemetry/api@^1.8", "@opentelemetry/api@^1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@opentelemetry/api/-/api-1.9.0.tgz#d03eba68273dc0f7509e2a3d5cba21eae10379fe" - integrity sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg== - "@opentelemetry/context-async-hooks@^1.25.1": version "1.25.1" resolved "https://registry.yarnpkg.com/@opentelemetry/context-async-hooks/-/context-async-hooks-1.25.1.tgz#810bff2fcab84ec51f4684aff2d21f6c057d9e73" @@ -9637,6 +9673,11 @@ dependencies: "@types/ms" "*" +"@types/diff-match-patch@^1.0.36": + version "1.0.36" + resolved "https://registry.yarnpkg.com/@types/diff-match-patch/-/diff-match-patch-1.0.36.tgz#dcef10a69d357fe9d43ac4ff2eca6b85dbf466af" + integrity sha512-xFdR6tkm0MWvBfO8xXCSsinYxHcqkQUlcHeSpMC2ukzOb6lwQAfDmW+Qt0AvlGd8HpsS28qKsB+oPeJn9I39jg== + "@types/duplexify@^3.6.0": version "3.6.0" resolved "https://registry.yarnpkg.com/@types/duplexify/-/duplexify-3.6.0.tgz#dfc82b64bd3a2168f5bd26444af165bf0237dcd8" @@ -11670,6 +11711,19 @@ aggregate-error@^3.0.0: clean-stack "^2.0.0" indent-string "^4.0.0" +ai@^4.0.6: + version "4.0.6" + resolved "https://registry.yarnpkg.com/ai/-/ai-4.0.6.tgz#94ef793df8525c01043e15a60030ce88d7b5c7d5" + integrity sha512-TD7fH0LymjIYWmdQViB5SoBb1iuuDPOZ7RMU3W9r4SeUf68RzWyixz118QHQTENNqPiGA6vs5NDVAmZOnhzqYA== + dependencies: + "@ai-sdk/provider" "1.0.1" + "@ai-sdk/provider-utils" "2.0.2" + "@ai-sdk/react" "1.0.3" + "@ai-sdk/ui-utils" "1.0.2" + "@opentelemetry/api" "1.9.0" + jsondiffpatch "0.6.0" + zod-to-json-schema "^3.23.5" + ajv-formats@2.1.1, ajv-formats@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/ajv-formats/-/ajv-formats-2.1.1.tgz#6e669400659eb74973bbf2e33327180a0996b520" @@ -14436,7 +14490,7 @@ cli-width@^3.0.0: resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-3.0.0.tgz#a2f48437a2caa9a22436e794bf071ec9e61cedf6" integrity sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw== -client-only@0.0.1: +client-only@0.0.1, client-only@^0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/client-only/-/client-only-0.0.1.tgz#38bba5d403c41ab150bff64a95c85013cf73bca1" integrity sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA== @@ -16002,6 +16056,11 @@ devlop@^1.0.0: dependencies: dequal "^2.0.0" +diff-match-patch@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/diff-match-patch/-/diff-match-patch-1.0.5.tgz#abb584d5f10cd1196dfc55aa03701592ae3f7b37" + integrity sha512-IayShXAgj/QMXgB0IWmKx+rOPuGMhqm5w6jvFxmVenXKIzRqTAAsbBPT3kWQeGANj3jGgvcvv4yK6SxqYmikgw== + diff-sequences@^27.5.1: version "27.5.1" resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-27.5.1.tgz#eaecc0d327fd68c8d9672a1e64ab8dccb2ef5327" @@ -18137,6 +18196,11 @@ events@^3.0.0, events@^3.2.0, events@^3.3.0: resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400" integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== +eventsource-parser@^3.0.0: + 
version "3.0.0" + resolved "https://registry.yarnpkg.com/eventsource-parser/-/eventsource-parser-3.0.0.tgz#9303e303ef807d279ee210a17ce80f16300d9f57" + integrity sha512-T1C0XCUimhxVQzW4zFipdx0SficT651NnkR0ZSH3yQwh+mFMdLfgjABVi4YtMTtaL4s168593DaoaRLMqryavA== + exec-sh@^0.3.2, exec-sh@^0.3.4: version "0.3.6" resolved "https://registry.yarnpkg.com/exec-sh/-/exec-sh-0.3.6.tgz#ff264f9e325519a60cb5e273692943483cca63bc" @@ -22466,6 +22530,11 @@ json-schema-traverse@^1.0.0: resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2" integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== +json-schema@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.4.0.tgz#f7de4cf6efab838ebaeb3236474cbba5a1930ab5" + integrity sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA== + json-stable-stringify-without-jsonify@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" @@ -22515,6 +22584,15 @@ jsonc-parser@3.2.0, jsonc-parser@^3.0.0, jsonc-parser@^3.2.0: resolved "https://registry.yarnpkg.com/jsonc-parser/-/jsonc-parser-3.2.0.tgz#31ff3f4c2b9793f89c67212627c51c6394f88e76" integrity sha512-gfFQZrcTc8CnKXp6Y4/CBT3fTc0OVuDofpre4aEeEpSBPV5X5v4+Vmx+8snU7RLPrNHPKSgLxGo9YuQzz20o+w== +jsondiffpatch@0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/jsondiffpatch/-/jsondiffpatch-0.6.0.tgz#daa6a25bedf0830974c81545568d5f671c82551f" + integrity sha512-3QItJOXp2AP1uv7waBkao5nCvhEv+QmJAd38Ybq7wNI74Q+BBmnLn4EDKz6yI9xGAIQoUF87qHt+kc1IVxB4zQ== + dependencies: + "@types/diff-match-patch" "^1.0.36" + chalk "^5.3.0" + diff-match-patch "^1.0.5" + jsonfile@^2.1.0: version "2.4.0" resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-2.4.0.tgz#3736a2b428b87bbda0cc83b53fa3d633a35c2ae8" @@ -30027,6 +30105,11 @@ section-matter@^1.0.0: extend-shallow "^2.0.1" kind-of "^6.0.0" +secure-json-parse@^2.7.0: + version "2.7.0" + resolved "https://registry.yarnpkg.com/secure-json-parse/-/secure-json-parse-2.7.0.tgz#5a5f9cd6ae47df23dba3151edd06855d47e09862" + integrity sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw== + select-hose@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca" @@ -31515,6 +31598,14 @@ svgo@^3.3.2: csso "^5.0.5" picocolors "^1.0.0" +swr@^2.2.5: + version "2.2.5" + resolved "https://registry.yarnpkg.com/swr/-/swr-2.2.5.tgz#063eea0e9939f947227d5ca760cc53696f46446b" + integrity sha512-QtxqyclFeAsxEUeZIYmsaQ0UjimSq1RZ9Un7I68/0ClKK/U3LoyQunwkQfJZr2fc22DfIXLNDc2wFyTEikCUpg== + dependencies: + client-only "^0.0.1" + use-sync-external-store "^1.2.0" + symbol-observable@4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-4.0.0.tgz#5b425f192279e87f2f9b937ac8540d1984b39205" @@ -31867,6 +31958,11 @@ throat@^6.0.1: resolved "https://registry.yarnpkg.com/throat/-/throat-6.0.1.tgz#d514fedad95740c12c2d7fc70ea863eb51ade375" integrity sha512-8hmiGIJMDlwjg7dlJ4yKGLK8EsYqKgPWbG3b4wjJddKNwc7N7Dpn08Df4szr/sZdMVeOstrdYSsqzX6BYbcB+w== +throttleit@2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/throttleit/-/throttleit-2.1.0.tgz#a7e4aa0bf4845a5bd10daa39ea0c783f631a07b4" + integrity 
sha512-nt6AMGKW1p/70DF/hGBdJB57B8Tspmbp5gfJ8ilhLnt7kkr2ye7hzD6NVG8GGErk2HWF34igrL2CXmNIkzKqKw== + through2@^2.0.0: version "2.0.5" resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd" @@ -33176,6 +33272,11 @@ urlpattern-polyfill@8.0.2: resolved "https://registry.yarnpkg.com/urlpattern-polyfill/-/urlpattern-polyfill-8.0.2.tgz#99f096e35eff8bf4b5a2aa7d58a1523d6ebc7ce5" integrity sha512-Qp95D4TPJl1kC9SKigDcqgyM2VDVO4RiJc2d4qe5GrYm+zbIQCWWKAFaJNQ4BhdFeDGwBmAxqJBwWSJDb9T3BQ== +use-sync-external-store@^1.2.0: + version "1.2.2" + resolved "https://registry.yarnpkg.com/use-sync-external-store/-/use-sync-external-store-1.2.2.tgz#c3b6390f3a30eba13200d2302dcdf1e7b57b2ef9" + integrity sha512-PElTlVMwpblvbNqQ82d2n6RjStvdSoNe9FG28kNfz3WiXilJm4DdNkEzRhCZuIDwY8U08WVihhGR5iRqAwfDiw== + use@^3.1.0: version "3.1.1" resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f" @@ -34574,6 +34675,11 @@ zip-stream@^6.0.1: compress-commons "^6.0.2" readable-stream "^4.0.0" +zod-to-json-schema@^3.23.5: + version "3.23.5" + resolved "https://registry.yarnpkg.com/zod-to-json-schema/-/zod-to-json-schema-3.23.5.tgz#ec23def47dcafe3a4d640eba6a346b34f9a693a5" + integrity sha512-5wlSS0bXfF/BrL4jPAbz9da5hDlDptdEppYfe+x4eIJ7jioqKG9uUxOwPzqof09u/XeVdrgFu29lZi+8XNDJtA== + zod@^3.22.3: version "3.23.8" resolved "https://registry.yarnpkg.com/zod/-/zod-3.23.8.tgz#e37b957b5d52079769fb8097099b592f0ef4067d" From 280747e95c17ad5299f670fbdcf85668e26305a1 Mon Sep 17 00:00:00 2001 From: Abhijeet Prasad Date: Thu, 5 Dec 2024 08:51:11 -0500 Subject: [PATCH 2/5] fix: Use right total_tokens key --- .../node-integration-tests/suites/tracing/ai/test.ts | 8 ++++---- packages/node/src/integrations/tracing/vercelai/index.ts | 3 ++- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/dev-packages/node-integration-tests/suites/tracing/ai/test.ts b/dev-packages/node-integration-tests/suites/tracing/ai/test.ts index bd9fb6890441..ea08ba55a5c7 100644 --- a/dev-packages/node-integration-tests/suites/tracing/ai/test.ts +++ b/dev-packages/node-integration-tests/suites/tracing/ai/test.ts @@ -22,7 +22,7 @@ describe('ai', () => { 'ai.settings.maxRetries': 2, 'ai.settings.maxSteps': 1, 'ai.streaming': false, - 'ai.tokens.used': 30, + 'ai.total_tokens.used': 30, 'ai.usage.completionTokens': 20, 'ai.usage.promptTokens': 10, 'operation.name': 'ai.generateText', @@ -57,7 +57,7 @@ describe('ai', () => { 'gen_ai.usage.output_tokens': 20, 'ai.completion_tokens.used': 20, 'ai.prompt_tokens.used': 10, - 'ai.tokens.used': 30, + 'ai.total_tokens.used': 30, }), description: 'generateText.doGenerate', op: 'ai.run.doGenerate', @@ -79,7 +79,7 @@ describe('ai', () => { 'ai.settings.maxRetries': 2, 'ai.settings.maxSteps': 1, 'ai.streaming': false, - 'ai.tokens.used': 30, + 'ai.total_tokens.used': 30, 'ai.usage.completionTokens': 20, 'ai.usage.promptTokens': 10, 'operation.name': 'ai.generateText', @@ -114,7 +114,7 @@ describe('ai', () => { 'gen_ai.usage.output_tokens': 20, 'ai.completion_tokens.used': 20, 'ai.prompt_tokens.used': 10, - 'ai.tokens.used': 30, + 'ai.total_tokens.used': 30, }), description: 'generateText.doGenerate', op: 'ai.run.doGenerate', diff --git a/packages/node/src/integrations/tracing/vercelai/index.ts b/packages/node/src/integrations/tracing/vercelai/index.ts index 38eeadf2e4e2..655e066f5601 100644 --- a/packages/node/src/integrations/tracing/vercelai/index.ts +++ b/packages/node/src/integrations/tracing/vercelai/index.ts @@ -41,7 +41,8 @@ const 
_vercelAIIntegration = (() => { attributes['ai.usage.completionTokens'] != undefined && attributes['ai.usage.promptTokens'] != undefined ) { - span.data['ai.tokens.used'] = attributes['ai.usage.completionTokens'] + attributes['ai.usage.promptTokens']; + span.data['ai.total_tokens.used'] = + attributes['ai.usage.completionTokens'] + attributes['ai.usage.promptTokens']; } } } From 8a581cfb0a69507ab32e029a6720ae1ace65ec99 Mon Sep 17 00:00:00 2001 From: Abhijeet Prasad Date: Thu, 5 Dec 2024 09:08:25 -0500 Subject: [PATCH 3/5] fix types --- packages/node/src/integrations/tracing/vercelai/index.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/node/src/integrations/tracing/vercelai/index.ts b/packages/node/src/integrations/tracing/vercelai/index.ts index 655e066f5601..bbafc7669f0e 100644 --- a/packages/node/src/integrations/tracing/vercelai/index.ts +++ b/packages/node/src/integrations/tracing/vercelai/index.ts @@ -1,6 +1,6 @@ /* eslint-disable complexity */ import { SEMANTIC_ATTRIBUTE_SENTRY_OP, defineIntegration, spanToJSON } from '@sentry/core'; -import type { IntegrationFn } from '@sentry/types'; +import type { IntegrationFn } from '@sentry/core'; import { generateInstrumentOnce } from '../../../otel/instrument'; import { addOriginToSpan } from '../../../utils/addOriginToSpan'; import { SentryVercelAiInstrumentation } from './instrumentation'; From 562e082a012207e37f01eac7b2dee27712833a2b Mon Sep 17 00:00:00 2001 From: Abhijeet Prasad Date: Thu, 5 Dec 2024 14:30:10 -0500 Subject: [PATCH 4/5] fix closure for instrumentation --- .../multiple-routers/common-infix/server.ts | 1 + .../suites/tracing/ai/scenario.js | 1 - .../src/integrations/tracing/vercelai/index.ts | 16 ++++++---------- .../tracing/vercelai/instrumentation.ts | 8 ++++---- 4 files changed, 11 insertions(+), 15 deletions(-) diff --git a/dev-packages/node-integration-tests/suites/express/multiple-routers/common-infix/server.ts b/dev-packages/node-integration-tests/suites/express/multiple-routers/common-infix/server.ts index eff1564d3f0a..24073af67fa4 100644 --- a/dev-packages/node-integration-tests/suites/express/multiple-routers/common-infix/server.ts +++ b/dev-packages/node-integration-tests/suites/express/multiple-routers/common-infix/server.ts @@ -2,6 +2,7 @@ import { loggingTransport } from '@sentry-internal/node-integration-tests'; import * as Sentry from '@sentry/node'; Sentry.init({ + debug: true, dsn: 'https://public@dsn.ingest.sentry.io/1337', release: '1.0', tracesSampleRate: 1.0, diff --git a/dev-packages/node-integration-tests/suites/tracing/ai/scenario.js b/dev-packages/node-integration-tests/suites/tracing/ai/scenario.js index 43747cf1986c..780e322c0639 100644 --- a/dev-packages/node-integration-tests/suites/tracing/ai/scenario.js +++ b/dev-packages/node-integration-tests/suites/tracing/ai/scenario.js @@ -2,7 +2,6 @@ const { loggingTransport } = require('@sentry-internal/node-integration-tests'); const Sentry = require('@sentry/node'); Sentry.init({ - debug: true, dsn: 'https://public@dsn.ingest.sentry.io/1337', release: '1.0', tracesSampleRate: 1.0, diff --git a/packages/node/src/integrations/tracing/vercelai/index.ts b/packages/node/src/integrations/tracing/vercelai/index.ts index bbafc7669f0e..d3fafc33bb02 100644 --- a/packages/node/src/integrations/tracing/vercelai/index.ts +++ b/packages/node/src/integrations/tracing/vercelai/index.ts @@ -3,11 +3,9 @@ import { SEMANTIC_ATTRIBUTE_SENTRY_OP, defineIntegration, spanToJSON } from '@se import type { IntegrationFn } from '@sentry/core'; 
import { generateInstrumentOnce } from '../../../otel/instrument'; import { addOriginToSpan } from '../../../utils/addOriginToSpan'; -import { SentryVercelAiInstrumentation } from './instrumentation'; +import { SentryVercelAiInstrumentation, sentryVercelAiPatched } from './instrumentation'; -const sentryVercelAiInstance = new SentryVercelAiInstrumentation({}); - -export const instrumentVercelAi = generateInstrumentOnce('vercelAI', () => sentryVercelAiInstance); +export const instrumentVercelAi = generateInstrumentOnce('vercelAI', () => new SentryVercelAiInstrumentation({})); const _vercelAIIntegration = (() => { return { @@ -15,11 +13,7 @@ const _vercelAIIntegration = (() => { setupOnce() { instrumentVercelAi(); }, - preprocessEvent(event) { - if (!sentryVercelAiInstance.patchIsActive) { - return; - } - + processEvent(event) { if (event.type === 'transaction' && event.spans?.length) { for (const span of event.spans) { const { data: attributes, description: name } = span; @@ -46,10 +40,12 @@ const _vercelAIIntegration = (() => { } } } + + return event; }, setup(client) { client.on('spanStart', span => { - if (!sentryVercelAiInstance.patchIsActive) { + if (!sentryVercelAiPatched) { return; } diff --git a/packages/node/src/integrations/tracing/vercelai/instrumentation.ts b/packages/node/src/integrations/tracing/vercelai/instrumentation.ts index 5d5fab46a18d..97721eaee15d 100644 --- a/packages/node/src/integrations/tracing/vercelai/instrumentation.ts +++ b/packages/node/src/integrations/tracing/vercelai/instrumentation.ts @@ -23,6 +23,8 @@ type MethodArgs = [MethodFirstArg, ...unknown[]]; type PatchedModuleExports = Record<(typeof INSTRUMENTED_METHODS)[number], (...args: MethodArgs) => unknown> & Record; +export let sentryVercelAiPatched = false; + /** * This detects is added by the Sentry Vercel AI Integration to detect if the integration should * be enabled. @@ -30,10 +32,8 @@ type PatchedModuleExports = Record<(typeof INSTRUMENTED_METHODS)[number], (...ar * It also patches the `ai` module to enable Vercel AI telemetry automatically for all methods. */ export class SentryVercelAiInstrumentation extends InstrumentationBase { - public patchIsActive: boolean = false; - public constructor(config: InstrumentationConfig = {}) { - super('sentry-vercel-ai', SDK_VERSION, config); + super('@sentry/instrumentation-vercel-ai', SDK_VERSION, config); } /** @@ -48,7 +48,7 @@ export class SentryVercelAiInstrumentation extends InstrumentationBase { * Patches module exports to enable Vercel AI telemetry. 
*/ private _patch(moduleExports: PatchedModuleExports): unknown { - this.patchIsActive = true; + sentryVercelAiPatched = true; function generatePatch(name: string) { return (...args: MethodArgs) => { From 24dc093f080680dec60277c811ef56429ebcb96e Mon Sep 17 00:00:00 2001 From: Abhijeet Prasad Date: Thu, 5 Dec 2024 17:22:19 -0500 Subject: [PATCH 5/5] conditionally run test for Node 18 --- dev-packages/node-integration-tests/suites/tracing/ai/test.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/dev-packages/node-integration-tests/suites/tracing/ai/test.ts b/dev-packages/node-integration-tests/suites/tracing/ai/test.ts index ea08ba55a5c7..e269f9da9db3 100644 --- a/dev-packages/node-integration-tests/suites/tracing/ai/test.ts +++ b/dev-packages/node-integration-tests/suites/tracing/ai/test.ts @@ -1,6 +1,8 @@ +import { conditionalTest } from '../../../utils'; import { cleanupChildProcesses, createRunner } from '../../../utils/runner'; -describe('ai', () => { +// `ai` SDK only support Node 18+ +conditionalTest({ min: 18 })('ai', () => { afterAll(() => { cleanupChildProcesses(); });
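---

As a cover note, a minimal end-to-end sketch of how the new integration is intended to be used, assembled from the JSDoc added in patch 1 and the test scenario above. The `@ai-sdk/openai` provider, the `gpt-4-turbo` model id, and the DSN are illustrative placeholders and not part of this patch set; the integration is also registered automatically via `getAutoPerformanceIntegrations()`, so the explicit `integrations` entry is optional.

```javascript
// Illustrative usage sketch, not part of the committed patches.
// Assumes `@sentry/node`, `ai`, and the `@ai-sdk/openai` provider are installed;
// the DSN and model id are placeholders.
const Sentry = require('@sentry/node');

Sentry.init({
  dsn: 'https://public@dsn.ingest.sentry.io/1337',
  tracesSampleRate: 1.0,
  // Added automatically by getAutoPerformanceIntegrations(); listed here for clarity.
  integrations: [Sentry.vercelAIIntegration()],
});

// Require `ai` after Sentry.init so the instrumentation can patch the module,
// mirroring the ordering used in scenario.js.
const { generateText } = require('ai');
const { openai } = require('@ai-sdk/openai');

async function main() {
  // The instrumentation enables telemetry by default, but inputs and outputs
  // are only recorded when explicitly opted in per call.
  const result = await generateText({
    model: openai('gpt-4-turbo'),
    prompt: 'Where is the span?',
    experimental_telemetry: { isEnabled: true, recordInputs: true, recordOutputs: true },
  });
  return result.text;
}

main().then(text => console.log(text));
```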