Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Add RedPill API Support #198

Merged
merged 2 commits into from
Nov 5, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions core/.env.example
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
DISCORD_APPLICATION_ID=
DISCORD_API_TOKEN= # Bot token
OPENAI_API_KEY=sk-* # OpenAI API key, starting with sk-
REDPILL_API_KEY= # RedPill API key
GROQ_API_KEY=gsk_*

ELEVENLABS_XI_API_KEY= # API key from elevenlabs
Expand Down
5 changes: 5 additions & 0 deletions core/src/cli/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -103,6 +103,11 @@ export function getTokenForProvider(
character.settings?.secrets?.CLAUDE_API_KEY ||
settings.CLAUDE_API_KEY
);
case ModelProvider.REDPILL:
return (
character.settings?.secrets?.REDPILL_API_KEY ||
settings.REDPILL_API_KEY
);
}
}
export function initializeDatabase() {
Expand Down
45 changes: 35 additions & 10 deletions core/src/core/generation.ts
Original file line number Diff line number Diff line change
Expand Up @@ -134,7 +134,7 @@ export async function generateText({
case ModelProvider.GROQ: {
console.log("Initializing Groq model.");
const groq = createGroq({ apiKey });

const { text: groqResponse } = await aiGenerateText({
model: groq.languageModel(model),
prompt: context,
Expand All @@ -143,26 +143,51 @@ export async function generateText({
frequencyPenalty: frequency_penalty,
presencePenalty: presence_penalty,
});

response = groqResponse;
console.log("Received response from Groq model.");
break;
}

case ModelProvider.LLAMALOCAL:
case ModelProvider.LLAMALOCAL: {
prettyConsole.log(
"Using local Llama model for text completion."
"Using local Llama model for text completion."
);
response = await runtime.llamaService.queueTextCompletion(
context,
temperature,
_stop,
frequency_penalty,
presence_penalty,
max_response_length
context,
temperature,
_stop,
frequency_penalty,
presence_penalty,
max_response_length
);
prettyConsole.log("Received response from local Llama model.");
break;
}

case ModelProvider.REDPILL: {
prettyConsole.log("Initializing RedPill model.");
const serverUrl = models[provider].endpoint;
const openai = createOpenAI({ apiKey, baseURL: serverUrl });

console.log('****** MODEL\n', model)
console.log('****** CONTEXT\n', context)

const { text: openaiResponse } = await aiGenerateText({
model: openai.languageModel(model),
prompt: context,
temperature: temperature,
maxTokens: max_response_length,
frequencyPenalty: frequency_penalty,
presencePenalty: presence_penalty,
});

console.log("****** RESPONSE\n", openaiResponse);

response = openaiResponse;
prettyConsole.log("Received response from OpenAI model.");
break;
}

default: {
const errorMessage = `Unsupported provider: ${provider}`;
Expand Down
20 changes: 20 additions & 0 deletions core/src/core/models.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ type Models = {
[ModelProvider.LLAMALOCAL]: Model;
[ModelProvider.GOOGLE]: Model;
[ModelProvider.CLAUDE_VERTEX]: Model;
[ModelProvider.REDPILL]: Model;
// TODO: add OpenRouter - feel free to do this :)
};

Expand Down Expand Up @@ -149,6 +150,25 @@ const models: Models = {
[ModelClass.EMBEDDING]: "text-embedding-004",
},
},
[ModelProvider.REDPILL]: {
endpoint: "https://api.red-pill.ai/v1",
settings: {
stop: [],
maxInputTokens: 128000,
maxOutputTokens: 8192,
frequency_penalty: 0.0,
presence_penalty: 0.0,
temperature: 0.6,
},
// Available models: https://docs.red-pill.ai/get-started/supported-models
// To test other models, change the models below
model: {
[ModelClass.SMALL]: "gpt-4o-mini", // [ModelClass.SMALL]: "claude-3-5-sonnet-20241022",
[ModelClass.MEDIUM]: "gpt-4o", // [ModelClass.MEDIUM]: "claude-3-5-sonnet-20241022",
[ModelClass.LARGE]: "gpt-4o", // [ModelClass.LARGE]: "claude-3-opus-20240229",
[ModelClass.EMBEDDING]: "text-embedding-3-small",
},
},
};

export function getModel(provider: ModelProvider, type: ModelClass) {
Expand Down
1 change: 1 addition & 0 deletions core/src/core/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -107,6 +107,7 @@ export enum ModelProvider {
LLAMALOCAL = "llama_local",
GOOGLE = "google",
CLAUDE_VERTEX = "claude_vertex",
REDPILL = "redpill"
}

/**
Expand Down
Loading