This repository has been archived by the owner on Sep 9, 2024. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 38
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #67 from OcularEngineering/AddOpenAIPlugin
Add Open AI Service As Second LLM Provider
- Loading branch information
Showing
16 changed files
with
325 additions
and
11 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,9 @@ | ||
/lib | ||
node_modules | ||
.DS_store | ||
.env* | ||
/*.js | ||
!index.js | ||
yarn.lock | ||
.turbo | ||
/dist |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,13 @@ | ||
/lib | ||
node_modules | ||
.DS_store | ||
.env* | ||
/*.js | ||
!index.js | ||
package-lock.json | ||
yarn.lock | ||
src | ||
.gitignore | ||
.eslintrc | ||
.babelrc | ||
.prettierrc |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,31 @@ | ||
# Open AI
|
||
Integrate Ocular with Open AI Service.
|
||
|
||
## Features | ||
|
||
- Allow Ocular To Communicate With Open AI Service. Ocular uses an LLMService such as Open AI to generate embeddings and perform Chat completion.
|
||
|
||
## How to Install | ||
|
||
1\. In `ocular/core-config.js` add the following at the end of the `plugins` array: | ||
|
||
```js | ||
const plugins = [ | ||
// ... | ||
{ | ||
resolve: `open-ai`, | ||
options: { | ||
      // open_ai_key: process.env.OPEN_AI_KEY,
      // embedding_model: process.env.OPEN_AI_EMBEDDING_MODEL,
      // chat_model: process.env.OPEN_AI_CHAT_MODEL,
} | ||
}, | ||
] | ||
``` |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,19 @@ | ||
module.exports = { | ||
globals: { | ||
preset: 'ts-jest', | ||
testEnvironment: 'node', | ||
"ts-jest": { | ||
tsconfig: "tsconfig.spec.json", | ||
isolatedModules: false, | ||
}, | ||
}, | ||
transform: { | ||
"^.+\\.[jt]s?$": "ts-jest", | ||
}, | ||
transformIgnorePatterns: [ | ||
"/node_modules/(?!(axios)/).*", | ||
"/dist/" | ||
], | ||
testEnvironment: `node`, | ||
moduleFileExtensions: [`js`, `jsx`, `ts`, `tsx`, `json`], | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,26 @@ | ||
{ | ||
"name": "open-ai", | ||
"version": "0.0.0", | ||
"description": "Ocular Open AI", | ||
"main": "dist/index.js", | ||
"types": "dist/index.d.ts", | ||
"author": "Louis Murerwa", | ||
"devDependencies": { | ||
"cross-env": "^5.2.1", | ||
"typeorm": "^0.3.16", | ||
"typescript": "^4.9.5" | ||
}, | ||
"scripts": { | ||
"prepare": "cross-env NODE_ENV=production npm run build", | ||
"test": "jest src", | ||
"build": "tsc", | ||
"watch": "tsc --watch", | ||
"clean": "rimraf dist node_modules" | ||
}, | ||
"dependencies": { | ||
"@ocular/types": "*", | ||
"@ocular/utils": "*", | ||
"openai": "^4.29.2", | ||
"tiktoken": "^1.0.13" | ||
} | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,52 @@ | ||
// Unit tests for OpenAIService (embeddings / chat completion plugin).
// NOTE(review): 'openai' is never mocked here (no jest.mock('openai') call),
// so the `expect(OpenAI).toHaveBeenCalledWith(...)` and the literal [1, 2, 3]
// embedding expectations below can only pass if a manual __mocks__ module is
// configured elsewhere — TODO confirm.
import { OpenAI } from 'openai';
import OpenAIService from '../services/open-ai';

describe('OpenAIService', () => {
  let service;

  beforeEach(() => {
    // Container stub providing only what the service reads: the rate-limiter
    // request queue used by createEmbeddings.
    service = new OpenAIService({
      rateLimiterService: {
        getRequestQueue: jest.fn().mockReturnValue({
          removeTokens: jest.fn()
        })
      }
    }, {
      // Models/key come from env vars; the assertions below expect the
      // literal values 'test-key' / 'test-model' — presumably these envs are
      // set to those literals in test setup. TODO confirm.
      embedding_model: process.env.OPEN_AI_EMBEDDING_MODEL,
      open_ai_key: process.env.OPEN_AI_KEY,
      chat_model: process.env.OPEN_AI_CHAT_MODEL
    });
  });

  it('should create embeddings', async () => {
    const doc = { content: 't' }; // NOTE(review): unused local.
    const embeddings = await service.createEmbeddings("test content");
    expect(embeddings).toEqual([1, 2, 3]);
    // NOTE(review): these constructor arguments (api-version query, api-key
    // header, deployment baseURL) describe the *Azure* OpenAI client, but the
    // service under test constructs OpenAI with { apiKey } only — this looks
    // copied from the Azure plugin's spec; verify.
    expect(OpenAI).toHaveBeenCalledWith({
      apiKey: 'test-key',
      defaultQuery: { 'api-version': 'test-version' },
      defaultHeaders: { 'api-key': 'test-key' },
      baseURL: 'test-endpoint/openai/deployments/test-deployment'
    });
    // NOTE(review): the service's client field is named `openAI_`, not
    // `openai_` — this property access is likely undefined; verify.
    expect(service.openai_.embeddings.create).toHaveBeenCalledWith({
      input: 'test content',
      model: 'test-model'
    });
  });

  it('should return empty array if doc content is not provided', async () => {
    // NOTE(review): `doc` is unused, and the test name says "content not
    // provided" yet a non-empty string is passed — the service as written has
    // no empty-input guard, so [] is only returned via the error path; verify.
    const doc = {};
    const embeddings = await service.createEmbeddings("test");
    expect(embeddings).toEqual([]);
  });

  // Disabled: hits the live chat API and asserts a non-deterministic
  // translation — keep commented out unless completions are mocked.
  // it('should complete chat', async () => {
  //   const messages = [
  //     {role: 'system', content: 'You are a helpful assistant.'},
  //     {role: 'user', content: 'Translate the following English text to French: "Hello, how are you?"'}
  //   ];
  //   const result = await service.completeChat(messages);
  //   expect(result).toEqual( "\"Bonjour, comment vas-tu ?\"");
  // });

});
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,17 @@ | ||
import { AppNameDefinitions, RateLimiterOpts, PluginNameDefinitions} from "@ocular/types"; | ||
import { RateLimiterService } from "@ocular/ocular"; | ||
|
||
export default async (container, options) => { | ||
try { | ||
// Register Rate Limiter For Google Drive | ||
if (!options.rate_limiter_opts) { | ||
throw new Error("No options provided for rate limiter") | ||
} | ||
const rateLimiterOpts: RateLimiterOpts = options.rate_limiter_opts | ||
const rateLimiterService: RateLimiterService = container.resolve("rateLimiterService") | ||
console.log("Registering Rate Limiter For OpenAI",options) | ||
await rateLimiterService.register(PluginNameDefinitions.OPENAI,rateLimiterOpts.requests, rateLimiterOpts.interval); | ||
} catch (err) { | ||
console.log(err) | ||
} | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,81 @@ | ||
import { IndexableDocument, AbstractLLMService, IndexableDocChunk, Message, PluginNameDefinitions } from "@ocular/types"; | ||
import { OpenAI } from 'openai'; | ||
import { encoding_for_model, type TiktokenModel } from 'tiktoken'; | ||
import { RateLimiterService } from "@ocular/ocular"; | ||
import { RateLimiterQueue } from "rate-limiter-flexible" | ||
|
||
export default class OpenAIService extends AbstractLLMService { | ||
|
||
static identifier = PluginNameDefinitions.OPENAI | ||
|
||
protected openAIKey_: string | ||
protected openAI_: OpenAI | ||
protected embeddingModel_: string | ||
protected chatModel_: string | ||
protected tokenLimit_:number = 4096 | ||
protected rateLimiterService_: RateLimiterService; | ||
protected requestQueue_: RateLimiterQueue | ||
|
||
constructor(container, options) { | ||
super(arguments[0],options) | ||
|
||
// Rate Limiter Service | ||
this.rateLimiterService_ = container.rateLimiterService; | ||
this.requestQueue_ = this.rateLimiterService_.getRequestQueue(PluginNameDefinitions.OPENAI); | ||
|
||
// Models | ||
this.embeddingModel_ = options.embedding_model | ||
this.chatModel_ = options.chat_model | ||
|
||
// Chat Deployment | ||
this.openAIKey_ = options.open_ai_key | ||
this.openAI_ = new OpenAI({ | ||
apiKey: this.openAIKey_ || "" | ||
}) | ||
} | ||
|
||
async createEmbeddings(text:string): Promise<number[]> { | ||
try{ | ||
// Rate Limiter Limits On Token Count | ||
const tokenCount = this.getChatModelTokenCount(text) | ||
await this.requestQueue_.removeTokens(tokenCount,PluginNameDefinitions.OPENAI) | ||
const result = await this.openAI_.embeddings.create({ | ||
model: this.embeddingModel_, | ||
input: text | ||
}) | ||
return result.data[0].embedding; | ||
} catch(error){ | ||
console.log("Open AI: Error",error) | ||
} | ||
} | ||
|
||
async completeChat(messages: Message[]): Promise<string> { | ||
try{ | ||
const result = await this.openAI_.chat.completions.create({ | ||
model: this.chatModel_, | ||
messages, | ||
temperature: 0.3, | ||
max_tokens: 1024, | ||
n: 1, | ||
}); | ||
console.log("Result Open AI",result.choices[0].message.content) | ||
return result.choices[0].message.content; | ||
}catch(error){ | ||
console.log("Open AI: Error",error) | ||
} | ||
} | ||
|
||
getChatModelTokenCount(content : string): number { | ||
const encoder = encoding_for_model(this.chatModel_ as TiktokenModel); | ||
let tokens = 2; | ||
for (const value of Object.values(content)) { | ||
tokens += encoder.encode(value).length; | ||
} | ||
encoder.free(); | ||
return tokens; | ||
} | ||
|
||
getTokenLimit(): number { | ||
return this.tokenLimit_; | ||
} | ||
} |
Oops, something went wrong.