diff --git a/nodejs/langchain/quickstart-before/.env.template b/nodejs/langchain/quickstart-before/.env.template new file mode 100644 index 00000000..7bb80719 --- /dev/null +++ b/nodejs/langchain/quickstart-before/.env.template @@ -0,0 +1,2 @@ +# OpenAI Configuration +OPENAI_API_KEY= \ No newline at end of file diff --git a/nodejs/langchain/quickstart-before/AGENT-CODE-WALKTHROUGH.md b/nodejs/langchain/quickstart-before/AGENT-CODE-WALKTHROUGH.md new file mode 100644 index 00000000..2d06a77b --- /dev/null +++ b/nodejs/langchain/quickstart-before/AGENT-CODE-WALKTHROUGH.md @@ -0,0 +1,309 @@ +# Agent Code Walkthrough + +Step-by-step walkthrough of the implementation in `src/agent.ts`. +This is a quickstart starting point for building a LangChain agent with the Microsoft 365 Agents SDK. + +## Overview + +| Component | Purpose | +|-----------|---------| +| **LangChain** | Core AI orchestration framework | +| **Microsoft 365 Agents SDK** | Enterprise hosting and authentication integration | + +## File Structure and Organization + +``` +quickstart-before/ +├── src/ +│ ├── agent.ts # Main agent implementation +│ ├── client.ts # LangChain client wrapper +│ └── index.ts # Express server entry point +├── package.json # Dependencies and scripts +├── tsconfig.json # TypeScript configuration +└── .env # Configuration (not committed) +``` + +--- + +--- + +## Step 1: Dependency Imports + +### agent.ts imports: +```typescript +import { TurnState, AgentApplication, TurnContext } from '@microsoft/agents-hosting'; +import { ActivityTypes } from '@microsoft/agents-activity'; +``` + +### client.ts imports: +```typescript +import { createAgent, ReactAgent } from "langchain"; +import { ChatOpenAI } from "@langchain/openai"; +``` + +**What it does**: Brings in all the external libraries and tools the agent needs to work. + +**Key Imports**: +- **@microsoft/agents-hosting**: Bot Framework integration for hosting and turn management +- **@microsoft/agents-activity**: Activity types for different message formats +- **langchain**: LangChain framework for building AI agents +- **@langchain/openai**: OpenAI chat model integration for LangChain + +--- + +## Step 2: Agent Initialization + +```typescript +class MyAgent extends AgentApplication { + constructor() { + super(); + + this.onActivity(ActivityTypes.Message, async (context: TurnContext, state: TurnState) => { + await this.handleAgentMessageActivity(context, state); + }); + } +} +``` + +**What it does**: Creates the main AI agent and sets up its basic behavior. + +**What happens**: +1. **Extends AgentApplication**: Inherits Bot Framework hosting capabilities +2. **Event Routing**: Registers a handler for incoming messages + +--- + +## Step 3: Agent Creation + +The agent client wrapper is defined in `client.ts`: + +```typescript +export async function getClient(): Promise { + // Create the model + const model = new ChatOpenAI({ + model: "gpt-4o-mini", + }); + + // Create the agent + const agent = createAgent({ + model: model, + tools: [], + name: 'My Custom Agent', + }); + + return new LangChainClient(agent); +} +``` + +**What it does**: Creates a LangChain React agent with an OpenAI model. + +**What happens**: +1. **Model Creation**: Initializes ChatOpenAI with the specified model (gpt-4o-mini) +2. **Agent Creation**: Creates a React agent with the model and tools +3. 
**Returns Client**: Wraps the agent in a client interface + +--- + +## Step 4: Message Processing + +```typescript +async handleAgentMessageActivity(turnContext: TurnContext, state: TurnState): Promise { + const userMessage = turnContext.activity.text?.trim() || ''; + + if (!userMessage) { + await turnContext.sendActivity('Please send me a message and I\'ll help you!'); + return; + } + + try { + const client: Client = await getClient(); + const response = await client.invokeAgent(userMessage); + await turnContext.sendActivity(response); + } catch (error) { + console.error('LLM query error:', error); + const err = error as any; + await turnContext.sendActivity(`Error: ${err.message || err}`); + } +} +``` + +**What it does**: Handles regular chat messages from users. + +**What happens**: +1. **Extract Message**: Gets the user's text from the activity +2. **Validate Input**: Checks for non-empty message +3. **Create Client**: Gets LangChain client +4. **Invoke Agent**: Calls agent with user message +5. **Send Response**: Returns AI-generated response to user +6. **Error Handling**: Catches problems and returns friendly error messages +--- + +## Step 5: Agent Invocation + +Agent invocation is handled in `client.ts`: + +```typescript +async invokeAgent(userMessage: string): Promise { + const result = await this.agent.invoke({ + messages: [ + { + role: "user", + content: userMessage, + }, + ], + }); + + let agentMessage: any = ''; + + // Extract the content from the LangChain response + if (result.messages && result.messages.length > 0) { + const lastMessage = result.messages[result.messages.length - 1]; + agentMessage = lastMessage.content || "No content in response"; + } + + // Fallback if result is already a string + if (typeof result === 'string') { + agentMessage = result; + } + + if (!agentMessage) { + return "Sorry, I couldn't get a response from the agent :("; + } + + return agentMessage; +} +``` + +**What it does**: Invokes the LangChain agent with the user's message and extracts the response. + +**What happens**: +1. **Invoke Agent**: Calls the LangChain agent with the user message +2. **Extract Response**: Gets the agent's response from the result +3. **Handle Fallbacks**: Returns a friendly message if no response is available +4. **Return Result**: Returns the agent's response as a string +--- + +## Step 6: Main Entry Point + +The main entry point is in `index.ts`: + +```typescript +import { configDotenv } from 'dotenv'; + +configDotenv(); + +import { AuthConfiguration, authorizeJWT, CloudAdapter, Request } from '@microsoft/agents-hosting'; +import express, { Response } from 'express' +import { agentApplication } from './agent'; + +const authConfig: AuthConfiguration = {}; + +const server = express() +server.use(express.json()) +server.use(authorizeJWT(authConfig)) + +server.post('/api/messages', (req: Request, res: Response) => { + const adapter = agentApplication.adapter as CloudAdapter; + adapter.process(req, res, async (context) => { + await agentApplication.run(context) + }) +}) + +const port = process.env.PORT || 3978 +server.listen(port, async () => { + console.log(`\nServer listening to port ${port} for appId ${authConfig.clientId} debug ${process.env.DEBUG}`) +}) +``` + +**What it does**: Starts the HTTP server and sets up Bot Framework integration. + +**What happens**: +1. **Load Environment**: Reads .env file before importing other modules +2. **Create Express Server**: Sets up HTTP server with JSON parsing +3. **JWT Authorization**: Adds authentication middleware +4. 
**Bot Framework Endpoint**: Creates /api/messages endpoint for Bot Framework +5. **Start Server**: Listens on configured port (default 3978) + +**Why it's useful**: This is the entry point that makes your agent accessible via HTTP! +--- + +## Design Patterns and Best Practices + +### 1. **Factory Pattern** + +Clean client creation through factory function: + +```typescript +const client = await getClient(); +``` + +### 2. **Event-Driven Architecture** + +Bot Framework event routing: + +```typescript +this.onActivity(ActivityTypes.Message, async (context, state) => { + await this.handleAgentMessageActivity(context, state); +}); +``` + +--- + +## Extension Points + +### 1. **Adding Tools** + +Extend the agent with LangChain tools: + +```typescript +const agent = createAgent({ + model: model, + tools: [myCustomTool], + name: 'My Custom Agent', +}); +``` + +### 2. **Customizing the Model** + +Change model parameters: + +```typescript +const model = new ChatOpenAI({ + model: "gpt-4o", + temperature: 0.7, +}); +``` + +--- + +## Performance Considerations + +### 1. **Async Operations** +- All I/O operations are asynchronous +- Proper promise handling throughout + +### 2. **Error Recovery** +- User-friendly error messages +- Comprehensive error logging + +--- + +## Debugging Guide + +### 1. **Enable Debug Logging** + +Set DEBUG environment variable: + +```bash +DEBUG=* +``` + +### 2. **Test Agent Response** + +Check agent invocation: + +```typescript +console.log('Agent response:', response); +``` + +This architecture provides a solid foundation for building AI agents with LangChain while maintaining flexibility for customization and extension. \ No newline at end of file diff --git a/nodejs/langchain/quickstart-before/README.md b/nodejs/langchain/quickstart-before/README.md new file mode 100644 index 00000000..75017a2b --- /dev/null +++ b/nodejs/langchain/quickstart-before/README.md @@ -0,0 +1,72 @@ +# Sample Agent - Node.js LangChain + +This directory contains a quickstart agent implementation using Node.js and LangChain. + +## Demonstrates + +This sample is used to demonstrate how to build an agent using the Agent365 framework with Node.js and LangChain. The sample includes basic LangChain Agent SDK usage hosted with Agents SDK that is testable on [agentsplayground](https://learn.microsoft.com/en-us/microsoft-365/agents-sdk/test-with-toolkit-project?tabs=windows). +Please refer to this [quickstart guide](https://review.learn.microsoft.com/en-us/microsoft-agent-365/developer/quickstart-nodejs-langchain?branch=main) on how to extend your agent using Agent365 SDK. + +## Prerequisites + +- Node.js 18+ +- LangChain +- Agents SDK + +## How to run this sample + +1. **Setup environment variables** + ```bash + # Copy the example environment file + cp .env.template .env + ``` + +2. **Install dependencies** + ```bash + npm install + ``` + +3. **Build the project** + ```bash + npm run build + ``` + +4. **Start the agent** + ```bash + npm start + ``` + +5. **Optionally, while testing you can run in dev mode** + ```bash + npm run dev + ``` + +6. **Start AgentsPlayground to chat with your agent** + ```bash + agentsplayground + ``` + +The agent will start and be ready to receive requests through the configured hosting mechanism. 
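
Before you chat with the agent, make sure the `.env` file you copied from `.env.template` contains a valid OpenAI key. The sample's `ChatOpenAI` model reads it from the environment, and `OPENAI_API_KEY` is the only value the template defines. A minimal `.env` might look like this (placeholder value shown):

```bash
# OpenAI Configuration
OPENAI_API_KEY=<your-openai-api-key>
```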
+ +## Documentation + +For detailed information about this sample, please refer to: + +- **[AGENT-CODE-WALKTHROUGH.md](AGENT-CODE-WALKTHROUGH.md)** - Detailed code explanation and architecture walkthrough + +## 📚 Related Documentation + +- [LangChain Agent SDK Documentation](https://docs.langchain.com/oss/javascript/langchain/overview) +- [Microsoft 365 Agents SDK](https://github.com/microsoft/Agents-for-js/tree/main) +- [Model Context Protocol (MCP)](https://github.com/modelcontextprotocol/typescript-sdk/tree/main) + +## 🤝 Contributing + +1. Follow the existing code patterns and structure +2. Add comprehensive logging and error handling +3. Update documentation for new features +4. Test thoroughly with different authentication methods + +## 📄 License + +This project is licensed under the MIT License - see the [LICENSE](../../../LICENSE.md) file for details. \ No newline at end of file diff --git a/nodejs/langchain/quickstart-before/package.json b/nodejs/langchain/quickstart-before/package.json new file mode 100644 index 00000000..448e3a5e --- /dev/null +++ b/nodejs/langchain/quickstart-before/package.json @@ -0,0 +1,43 @@ +{ + "name": "langchain-sample", + "version": "2025.11.6", + "description": "Sample agent integrating LangChain Agents with Microsoft 365 Agents SDK and Agent365 SDK", + "main": "src/index.ts", + "scripts": { + "preinstall": "node preinstall-local-packages.js", + "start": "node dist/index.js", + "dev": "nodemon --watch src/*.ts --exec ts-node src/index.ts", + "test-tool": "agentsplayground", + "eval": "node --env-file .env src/evals/index.js", + "build": "tsc" + }, + "keywords": [ + "langchain", + "microsoft-365", + "agent", + "ai" + ], + "license": "MIT", + "dependencies": { + "@langchain/core": "*", + "@langchain/langgraph": "*", + "@langchain/mcp-adapters": "*", + "@langchain/openai": "*", + "@microsoft/agents-activity": "^1.1.0-alpha.85", + "@microsoft/agents-hosting": "^1.1.0-alpha.85", + "dotenv": "^17.2.3", + "express": "^5.1.0", + "langchain": "^1.0.1", + "node-fetch": "^3.3.2", + "uuid": "^9.0.0" + }, + "devDependencies": { + "@azure/monitor-opentelemetry-exporter": "^1.0.0-beta.32", + "@babel/cli": "^7.28.3", + "@babel/core": "^7.28.4", + "@babel/preset-env": "^7.28.3", + "@microsoft/m365agentsplayground": "^0.2.16", + "nodemon": "^3.1.10", + "ts-node": "^10.9.2" + } +} diff --git a/nodejs/langchain/quickstart-before/src/agent.ts b/nodejs/langchain/quickstart-before/src/agent.ts new file mode 100644 index 00000000..5cd4d607 --- /dev/null +++ b/nodejs/langchain/quickstart-before/src/agent.ts @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +import { TurnState, AgentApplication, TurnContext } from '@microsoft/agents-hosting'; +import { ActivityTypes } from '@microsoft/agents-activity'; + +import { Client, getClient } from './client'; + +class MyAgent extends AgentApplication { + constructor() { + super(); + + this.onActivity(ActivityTypes.Message, async (context: TurnContext, state: TurnState) => { + await this.handleAgentMessageActivity(context, state); + }); + } + + /** + * Handles incoming user messages and sends responses. 
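   *
   * Validates the incoming text, invokes the LangChain client, and replies with either
   * the agent's answer or a friendly error message.
   *
   * @param turnContext - Turn context for the incoming message activity.
   * @param state - Turn state for the conversation (not used in this quickstart).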
+ */ + async handleAgentMessageActivity(turnContext: TurnContext, state: TurnState): Promise { + const userMessage = turnContext.activity.text?.trim() || ''; + + if (!userMessage) { + await turnContext.sendActivity('Please send me a message and I\'ll help you!'); + return; + } + + try { + const client: Client = await getClient(); + const response = await client.invokeAgent(userMessage); + await turnContext.sendActivity(response); + } catch (error) { + console.error('LLM query error:', error); + const err = error as any; + await turnContext.sendActivity(`Error: ${err.message || err}`); + } + } +} + +export const agentApplication = new MyAgent(); \ No newline at end of file diff --git a/nodejs/langchain/quickstart-before/src/client.ts b/nodejs/langchain/quickstart-before/src/client.ts new file mode 100644 index 00000000..4a950edc --- /dev/null +++ b/nodejs/langchain/quickstart-before/src/client.ts @@ -0,0 +1,89 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +import { createAgent, ReactAgent } from "langchain"; +import { ChatOpenAI } from "@langchain/openai"; + +export interface Client { + invokeAgent(prompt: string): Promise; +} + +/** + * Creates and configures a LangChain client with Agent365 MCP tools. + * + * This factory function initializes a LangChain React agent with access to + * Microsoft 365 tools through MCP (Model Context Protocol) servers. It handles + * tool discovery, authentication, and agent configuration. + * + * @returns Promise - Configured LangChain client ready for agent interactions + * + * @example + * ```typescript + * const client = await getClient(authorization, turnContext); + * const response = await client.invokeAgent("Send an email to john@example.com"); + * ``` + */ +export async function getClient(): Promise { + // Create the model + const model = new ChatOpenAI({ + model: "gpt-4o-mini", + }); + + // Create the agent + const agent = createAgent({ + model: model, + tools: [], + name: 'My Custom Agent', + }); + + return new LangChainClient(agent); +} + +/** + * LangChainClient provides an interface to interact with LangChain agents. + * It creates a React agent with tools and exposes an invokeAgent method. + */ +class LangChainClient implements Client { + private agent: ReactAgent; + + constructor(agent: ReactAgent) { + this.agent = agent; + } + + /** + * Sends a user message to the LangChain agent and returns the AI's response. + * Handles streaming results and error reporting. + * + * @param {string} userMessage - The message or prompt to send to the agent. + * @returns {Promise} The response from the agent, or an error message if the query fails. 
+ */ + async invokeAgent(userMessage: string): Promise { + const result = await this.agent.invoke({ + messages: [ + { + role: "user", + content: userMessage, + }, + ], + }); + + let agentMessage: any = ''; + + // Extract the content from the LangChain response + if (result.messages && result.messages.length > 0) { + const lastMessage = result.messages[result.messages.length - 1]; + agentMessage = lastMessage.content || "No content in response"; + } + + // Fallback if result is already a string + if (typeof result === 'string') { + agentMessage = result; + } + + if (!agentMessage) { + return "Sorry, I couldn't get a response from the agent :("; + } + + return agentMessage; + } +} \ No newline at end of file diff --git a/nodejs/langchain/quickstart-before/src/index.ts b/nodejs/langchain/quickstart-before/src/index.ts new file mode 100644 index 00000000..00b702b2 --- /dev/null +++ b/nodejs/langchain/quickstart-before/src/index.ts @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +// It is important to load environment variables before importing other modules +import { configDotenv } from 'dotenv'; + +configDotenv(); + +import { AuthConfiguration, authorizeJWT, CloudAdapter, Request } from '@microsoft/agents-hosting'; +import express, { Response } from 'express'; +import { agentApplication } from './agent'; + +const authConfig: AuthConfiguration = {}; + +const server = express(); +server.use(express.json()); +server.use(authorizeJWT(authConfig)); + +server.post('/api/messages', (req: Request, res: Response) => { + const adapter = agentApplication.adapter as CloudAdapter; + adapter.process(req, res, async (context) => { + await agentApplication.run(context); + }); +}); + +const port = process.env.PORT || 3978; +server.listen(port, async () => { + console.log(`\nServer listening to port ${port} for appId ${authConfig.clientId} debug ${process.env.DEBUG}`); +}).on('error', async (err) => { + console.error(err); + process.exit(1); +}).on('close', async () => { + console.log('Server closed'); + process.exit(0); +}); \ No newline at end of file diff --git a/nodejs/langchain/quickstart-before/tsconfig.json b/nodejs/langchain/quickstart-before/tsconfig.json new file mode 100644 index 00000000..6f07ceb7 --- /dev/null +++ b/nodejs/langchain/quickstart-before/tsconfig.json @@ -0,0 +1,20 @@ +{ + "compilerOptions": { + "incremental": true, + "lib": ["ES2021"], + "target": "es2019", + "module": "node16", + "declaration": true, + "sourceMap": true, + "composite": true, + "strict": true, + "moduleResolution": "node16", + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "resolveJsonModule": true, + "rootDir": "src", + "outDir": "dist", + "tsBuildInfoFile": "dist/.tsbuildinfo" + } +} \ No newline at end of file