Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 0 additions & 2 deletions .github/workflows/ci-nodejs-openai-sampleagent.yml
Original file line number Diff line number Diff line change
Expand Up @@ -35,8 +35,6 @@ jobs:
uses: actions/setup-node@v4
with:
node-version: ${{ matrix.node-version }}
cache: 'npm'
cache-dependency-path: '**/package-lock.json'

- name: Install dependencies
run: npm install
Expand Down
31 changes: 19 additions & 12 deletions nodejs/langchain/sample-agent/Agent-Code-Walkthrough.md
Original file line number Diff line number Diff line change
Expand Up @@ -243,7 +243,7 @@ class LangChainClient implements Client {
return agentMessage;
}

async invokeAgentWithScope(prompt: string): Promise<string> {
async invokeInferenceScope(prompt: string) {
const inferenceDetails: InferenceDetails = {
operationName: InferenceOperationType.CHAT,
model: "gpt-4o-mini",
Expand All @@ -259,18 +259,25 @@ class LangChainClient implements Client {
tenantId: 'typescript-sample-tenant',
};

let response = '';
const scope = InferenceScope.start(inferenceDetails, agentDetails, tenantDetails);

const response = await this.invokeAgent(prompt);

// Record the inference response with token usage
scope?.recordOutputMessages([response]);
scope?.recordInputMessages([prompt]);
scope?.recordResponseId(`resp-${Date.now()}`);
scope?.recordInputTokens(45);
scope?.recordOutputTokens(78);
scope?.recordFinishReasons(['stop']);

try {
await scope.withActiveSpanAsync(async () => {
response = await this.invokeAgent(prompt);
// Record the inference response with token usage
scope.recordOutputMessages([response]);
scope.recordInputMessages([prompt]);
scope.recordResponseId(`resp-${Date.now()}`);
scope.recordInputTokens(45);
scope.recordOutputTokens(78);
scope.recordFinishReasons(['stop']);
});
} catch (error) {
scope.recordError(error as Error);
throw error;
} finally {
scope.dispose();
}
return response;
}
}
Expand Down
44 changes: 36 additions & 8 deletions nodejs/langchain/sample-agent/src/agent.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,12 @@ import { ActivityTypes } from '@microsoft/agents-activity';
// Notification Imports
import '@microsoft/agents-a365-notifications';
import { AgentNotificationActivity } from '@microsoft/agents-a365-notifications';

// Observability Imports
import {
AgentDetails,
TenantDetails,
} from '@microsoft/agents-a365-observability';
import { BaggageBuilder } from '@microsoft/agents-a365-observability';
import { Client, getClient } from './client';

export class A365Agent extends AgentApplication<TurnState> {
Expand Down Expand Up @@ -42,14 +47,37 @@ export class A365Agent extends AgentApplication<TurnState> {
return;
}

const agentDetails: AgentDetails = {
agentId: 'typescript-compliance-agent',
agentName: 'TypeScript Compliance Agent',
conversationId: 'conv-12345',
};

const tenantDetails: TenantDetails = {
tenantId: 'typescript-sample-tenant',
};
const baggageScope = new BaggageBuilder()
.tenantId(tenantDetails.tenantId)
.agentId(agentDetails.agentId)
.agentName(agentDetails.agentName)
.conversationId(agentDetails.conversationId)
.correlationId(`corr-${Date.now()}`)
.build();

try {
const client: Client = await getClient(this.authorization, A365Agent.authHandlerName, turnContext);
const response = await client.invokeAgentWithScope(userMessage);
await turnContext.sendActivity(response);
} catch (error) {
console.error('LLM query error:', error);
const err = error as any;
await turnContext.sendActivity(`Error: ${err.message || err}`);
await baggageScope.run(async () => {
try {
const client: Client = await getClient(this.authorization, A365Agent.authHandlerName, turnContext);
const response = await client.invokeInferenceScope(userMessage);
await turnContext.sendActivity(response);
} catch (error) {
console.error('LLM query error:', error);
const err = error as any;
await turnContext.sendActivity(`Error: ${err.message || err}`);
}
});
} finally {
baggageScope.dispose();
}
}

Expand Down
33 changes: 20 additions & 13 deletions nodejs/langchain/sample-agent/src/client.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ import {
} from '@microsoft/agents-a365-observability';

export interface Client {
invokeAgentWithScope(prompt: string): Promise<string>;
invokeInferenceScope(prompt: string): Promise<string>;
}

const sdk = ObservabilityManager.configure(
Expand Down Expand Up @@ -132,7 +132,7 @@ class LangChainClient implements Client {
return agentMessage;
}

async invokeAgentWithScope(prompt: string) {
async invokeInferenceScope(prompt: string) {
const inferenceDetails: InferenceDetails = {
operationName: InferenceOperationType.CHAT,
model: "gpt-4o-mini",
Expand All @@ -148,18 +148,25 @@ class LangChainClient implements Client {
tenantId: 'typescript-sample-tenant',
};

let response = '';
const scope = InferenceScope.start(inferenceDetails, agentDetails, tenantDetails);

const response = await this.invokeAgent(prompt);

// Record the inference response with token usage
scope?.recordOutputMessages([response]);
scope?.recordInputMessages([prompt]);
scope?.recordResponseId(`resp-${Date.now()}`);
scope?.recordInputTokens(45);
scope?.recordOutputTokens(78);
scope?.recordFinishReasons(['stop']);

try {
await scope.withActiveSpanAsync(async () => {
response = await this.invokeAgent(prompt);
// Record the inference response with token usage
scope.recordOutputMessages([response]);
scope.recordInputMessages([prompt]);
scope.recordResponseId(`resp-${Date.now()}`);
scope.recordInputTokens(45);
scope.recordOutputTokens(78);
scope.recordFinishReasons(['stop']);
});
} catch (error) {
scope.recordError(error as Error);
throw error;
} finally {
scope.dispose();
}
return response;
}
}
28 changes: 17 additions & 11 deletions nodejs/openai/sample-agent/AGENT-CODE-WALKTHROUGH.md
Original file line number Diff line number Diff line change
Expand Up @@ -199,17 +199,23 @@ async invokeAgentWithScope(prompt: string) {
};

const scope = InferenceScope.start(inferenceDetails, agentDetails, tenantDetails);

const response = await this.invokeAgent(prompt);

// Record the inference response with token usage
scope?.recordOutputMessages([response]);
scope?.recordInputMessages([prompt]);
scope?.recordResponseId(`resp-${Date.now()}`);
scope?.recordInputTokens(45);
scope?.recordOutputTokens(78);
scope?.recordFinishReasons(['stop']);

  let response = '';
  try {
    await scope.withActiveSpanAsync(async () => {
      try {
        response = await this.invokeAgent(prompt);

        // Record the inference response with token usage
        scope.recordOutputMessages([response]);
        scope.recordInputMessages([prompt]);
        scope.recordResponseId(`resp-${Date.now()}`);
        scope.recordInputTokens(45);
        scope.recordOutputTokens(78);
        scope.recordFinishReasons(['stop']);
      } catch (error) {
        scope.recordError(error as Error);
        scope.recordFinishReasons(['error']);
        throw error;
      }
    });
  } finally {
    scope.dispose();
  }
return response;
}
```
Expand Down
1 change: 1 addition & 0 deletions nodejs/openai/sample-agent/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
"@microsoft/agents-a365-runtime": "^0.1.0-preview.30",
"@microsoft/agents-a365-tooling": "^0.1.0-preview.30",
"@microsoft/agents-a365-tooling-extensions-openai": "^0.1.0-preview.30",
"@microsoft/agents-a365-observability-extensions-openai": "^0.1.0-preview.30",
"@openai/agents": "*",
"dotenv": "^17.2.2",
"express": "^5.1.0"
Expand Down
19 changes: 10 additions & 9 deletions nodejs/openai/sample-agent/src/agent.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ import '@microsoft/agents-a365-notifications';
import { AgentNotificationActivity } from '@microsoft/agents-a365-notifications';

import { Client, getClient } from './client';
import { BaggageBuilder } from '@microsoft/agents-a365-observability';

export class MyAgent extends AgentApplication<TurnState> {
static authHandlerName: string = 'agentic';
Expand Down Expand Up @@ -45,15 +46,15 @@ export class MyAgent extends AgentApplication<TurnState> {
return;
}

try {
const client: Client = await getClient(this.authorization, MyAgent.authHandlerName, turnContext);
const response = await client.invokeAgentWithScope(userMessage);
await turnContext.sendActivity(response);
} catch (error) {
console.error('LLM query error:', error);
const err = error as any;
await turnContext.sendActivity(`Error: ${err.message || err}`);
}
try {
const client: Client = await getClient(this.authorization, MyAgent.authHandlerName, turnContext);
const response = await client.invokeAgentWithScope(userMessage);
await turnContext.sendActivity(response);
} catch (error) {
console.error('LLM query error:', error);
const err = error as any;
await turnContext.sendActivity(`Error: ${err.message || err}`);
}
}

async handleAgentNotificationActivity(context: TurnContext, state: TurnState, agentNotificationActivity: AgentNotificationActivity) {
Expand Down
44 changes: 32 additions & 12 deletions nodejs/openai/sample-agent/src/client.ts
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ import {
TenantDetails,
InferenceDetails
} from '@microsoft/agents-a365-observability';
import { OpenAIAgentsTraceInstrumentor } from '@microsoft/agents-a365-observability-extensions-openai';

export interface Client {
invokeAgentWithScope(prompt: string): Promise<string>;
Expand All @@ -27,7 +28,15 @@ const sdk = ObservabilityManager.configure(
.withService('TypeScript Sample Agent', '1.0.0')
);

// Initialize OpenAI Agents instrumentation
const openAIAgentsTraceInstrumentor = new OpenAIAgentsTraceInstrumentor({
enabled: true,
tracerName: 'openai-agent-auto-instrumentation',
tracerVersion: '1.0.0'
});

sdk.start();
openAIAgentsTraceInstrumentor.enable();

const toolService = new McpToolRegistrationService();

Expand Down Expand Up @@ -98,6 +107,7 @@ class OpenAIClient implements Client {
}

async invokeAgentWithScope(prompt: string) {
let response = '';
const inferenceDetails: InferenceDetails = {
operationName: InferenceOperationType.CHAT,
model: this.agent.model.toString(),
Expand All @@ -112,19 +122,29 @@ class OpenAIClient implements Client {
const tenantDetails: TenantDetails = {
tenantId: 'typescript-sample-tenant',
};

const scope = InferenceScope.start(inferenceDetails, agentDetails, tenantDetails);

const response = await this.invokeAgent(prompt);

// Record the inference response with token usage
scope?.recordOutputMessages([response]);
scope?.recordInputMessages([prompt]);
scope?.recordResponseId(`resp-${Date.now()}`);
scope?.recordInputTokens(45);
scope?.recordOutputTokens(78);
scope?.recordFinishReasons(['stop']);

try {
await scope.withActiveSpanAsync(async () => {
try {
response = await this.invokeAgent(prompt);

// Record the inference response with token usage
scope.recordOutputMessages([response]);
scope.recordInputMessages([prompt]);
scope.recordResponseId(`resp-${Date.now()}`);
scope.recordInputTokens(45);
scope.recordOutputTokens(78);
scope.recordFinishReasons(['stop']);
} catch (error) {
scope.recordError(error as Error);
scope.recordFinishReasons(['error']);
throw error;
}
});
} finally {
scope.dispose();
}
return response;
}

Expand Down
33 changes: 21 additions & 12 deletions nodejs/vercel-sdk/sample-agent/src/client.ts
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,7 @@ class VercelAiClient implements Client {
return agentMessage;
}

async invokeAgentWithScope(prompt: string) {
async invokeAgentWithScope(prompt: string): Promise<string> {
const inferenceDetails: InferenceDetails = {
operationName: InferenceOperationType.CHAT,
model: modelName,
Expand All @@ -111,18 +111,27 @@ class VercelAiClient implements Client {
tenantId: 'typescript-sample-tenant',
};

let response = '';
const scope = InferenceScope.start(inferenceDetails, agentDetails, tenantDetails);

const response = await this.invokeAgent(prompt);

// Record the inference response with token usage
scope?.recordOutputMessages([response]);
scope?.recordInputMessages([prompt]);
scope?.recordResponseId(`resp-${Date.now()}`);
scope?.recordInputTokens(45);
scope?.recordOutputTokens(78);
scope?.recordFinishReasons(['stop']);

try {
await scope.withActiveSpanAsync(async () => {
try {
response = await this.invokeAgent(prompt);
scope.recordOutputMessages([response]);
scope.recordInputMessages([prompt]);
scope.recordResponseId(`resp-${Date.now()}`);
scope.recordInputTokens(45);
scope.recordOutputTokens(78);
scope.recordFinishReasons(['stop']);
} catch (error) {
scope.recordError(error as Error);
scope.recordFinishReasons(['error']);
throw error;
}
});
} finally {
scope.dispose();
}
return response;
}
}