Skip to content

Commit

Permalink
fix(sdk): properly initialize token enrich value for instrumentations
Browse files Browse the repository at this point in the history
  • Loading branch information
nirga authored Jul 28, 2024
1 parent 1e404b7 commit 143bc66
Show file tree
Hide file tree
Showing 5 changed files with 39 additions and 12 deletions.
1 change: 1 addition & 0 deletions packages/sample-app/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
"run:prompt_mgmt": "npm run build && node dist/src/sample_prompt_mgmt.js",
"run:sample_vision": "npm run build && node dist/src/sample_vision_prompt.js",
"run:sample_azure": "npm run build && node dist/src/sample_azure.js",
"run:openai_streaming": "npm run build && node dist/src/sample_openai_streaming.js",
"run:sampler": "npm run build && node dist/src/sample_sampler.js",
"run:llamaindex": "npm run build && node dist/src/sample_llamaindex.js",
"run:pinecone": "npm run build && node dist/src/sample_pinecone.js",
Expand Down
32 changes: 32 additions & 0 deletions packages/sample-app/src/sample_openai_streaming.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
import * as traceloop from "@traceloop/node-server-sdk";
import OpenAI from "openai";

// Initialize the Traceloop SDK before any OpenAI calls so they are auto-instrumented.
// disableBatch: true flushes each span immediately — useful for short-lived sample scripts.
traceloop.initialize({
appName: "sample_openai_streaming",
apiKey: process.env.TRACELOOP_API_KEY,
disableBatch: true,
});
// Shared OpenAI client; reads OPENAI_API_KEY from the environment by default.
const openai = new OpenAI();

/**
 * Requests a joke about OpenTelemetry as a streamed chat completion,
 * reassembles the streamed fragments, logs the full text, and returns it.
 */
async function create_joke() {
  // Run the streaming completion inside a Traceloop task span named "joke_creation".
  const responseStream = await traceloop.withTask(
    { name: "joke_creation" },
    () =>
      openai.chat.completions.create({
        model: "gpt-3.5-turbo",
        messages: [
          { role: "user", content: "Tell me a joke about opentelemetry" },
        ],
        stream: true,
      }),
  );

  // Collect each delta fragment as it arrives, then join once at the end.
  const fragments: string[] = [];
  for await (const chunk of responseStream) {
    fragments.push(chunk.choices[0]?.delta?.content || "");
  }
  const joke = fragments.join("");
  console.log(joke);
  return joke;
}

// Kick off the sample. The original bare call left the returned promise
// floating, so any API/network error surfaced as an unhandled rejection;
// catch it, log it, and mark the process as failed instead.
create_joke().catch((err) => {
  console.error(err);
  process.exitCode = 1;
});
4 changes: 0 additions & 4 deletions packages/traceloop-sdk/src/lib/configuration/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -54,10 +54,6 @@ export const initialize = (options: InitializeOptions) => {
options.traceloopSyncDevPollingInterval =
Number(process.env.TRACELOOP_SYNC_DEV_POLLING_INTERVAL) || 5;
}

if (options.shouldEnrichMetrics === undefined) {
options.shouldEnrichMetrics = true;
}
}

validateConfiguration(options);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -49,12 +49,6 @@ export interface InitializeOptions {
*/
logLevel?: "debug" | "info" | "warn" | "error";

/**
* Whether to enrich metrics with additional data like OpenAI token usage for streaming requests. Optional.
* Defaults to true.
*/
shouldEnrichMetrics?: boolean;

/**
* Whether to log prompts, completions and embeddings on traces. Optional.
* Defaults to true.
Expand Down
8 changes: 6 additions & 2 deletions packages/traceloop-sdk/src/lib/tracing/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -56,9 +56,11 @@ const instrumentations: Instrumentation[] = [];

export const initInstrumentations = () => {
const exceptionLogger = (e: Error) => Telemetry.getInstance().logException(e);
const enrichTokens =
(process.env.TRACELOOP_ENRICH_TOKENS || "true").toLowerCase() === "true";

openAIInstrumentation = new OpenAIInstrumentation({
enrichTokens: _configuration?.shouldEnrichMetrics,
enrichTokens,
exceptionLogger,
});
instrumentations.push(openAIInstrumentation);
Expand Down Expand Up @@ -109,13 +111,15 @@ export const manuallyInitInstrumentations = (
instrumentModules: InitializeOptions["instrumentModules"],
) => {
const exceptionLogger = (e: Error) => Telemetry.getInstance().logException(e);
const enrichTokens =
(process.env.TRACELOOP_ENRICH_TOKENS || "true").toLowerCase() === "true";

// Clear the instrumentations array that was initialized by default
instrumentations.length = 0;

if (instrumentModules?.openAI) {
openAIInstrumentation = new OpenAIInstrumentation({
enrichTokens: _configuration?.shouldEnrichMetrics,
enrichTokens,
exceptionLogger,
});
instrumentations.push(openAIInstrumentation);
Expand Down

0 comments on commit 143bc66

Please sign in to comment.