Skip to content

Commit

Permalink
Merge pull request #1008 from proteanx/venice
Browse files Browse the repository at this point in the history
feat: add venice.ai api model provider
  • Loading branch information
lalalune authored Dec 13, 2024
2 parents 5a3d348 + d3e0f78 commit 9192179
Show file tree
Hide file tree
Showing 5 changed files with 50 additions and 0 deletions.
6 changes: 6 additions & 0 deletions .env.example
Original file line number Diff line number Diff line change
Expand Up @@ -225,6 +225,12 @@ WALLET_SECRET_SALT= # ONLY DEFINE IF YOU WANT TO USE TEE Plugin, otherwise it wi
# Galadriel Configuration
GALADRIEL_API_KEY=gal-* # Get from https://dashboard.galadriel.com/

# Venice Configuration
VENICE_API_KEY= # Generate an API key from your Venice account settings
SMALL_VENICE_MODEL= # Default: llama-3.3-70b
MEDIUM_VENICE_MODEL= # Default: llama-3.3-70b
LARGE_VENICE_MODEL= # Default: llama-3.1-405b

# fal.ai Configuration
FAL_API_KEY=
FAL_AI_LORA_PATH=
Expand Down
5 changes: 5 additions & 0 deletions agent/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -286,6 +286,11 @@ export function getTokenForProvider(
character.settings?.secrets?.HYPERBOLIC_API_KEY ||
settings.HYPERBOLIC_API_KEY
);
// Venice: prefer the per-character secret, falling back to the
// global VENICE_API_KEY setting (same pattern as the other providers above).
case ModelProviderName.VENICE:
return (
character.settings?.secrets?.VENICE_API_KEY ||
settings.VENICE_API_KEY
);
}
}

Expand Down
23 changes: 23 additions & 0 deletions packages/core/src/generation.ts
Original file line number Diff line number Diff line change
Expand Up @@ -533,6 +533,29 @@ export async function generateText({
break;
}

// Venice is driven through the OpenAI-compatible client: the code below
// points createOpenAI at the Venice endpoint and reuses the standard
// aiGenerateText flow.
case ModelProviderName.VENICE: {
elizaLogger.debug("Initializing Venice model.");
// NOTE(review): apiKey/endpoint come from the enclosing generateText
// scope — endpoint is presumably the Venice baseURL resolved from the
// models table; confirm against the provider lookup above.
const venice = createOpenAI({
apiKey: apiKey,
baseURL: endpoint
});

const { text: veniceResponse } = await aiGenerateText({
model: venice.languageModel(model),
prompt: context,
// Character-level system prompt wins over the global SYSTEM_PROMPT
// setting; undefined lets the SDK omit the system message entirely.
system:
runtime.character.system ??
settings.SYSTEM_PROMPT ??
undefined,
temperature: temperature,
maxTokens: max_response_length,
});

response = veniceResponse;
elizaLogger.debug("Received response from Venice model.");
break;
}

default: {
const errorMessage = `Unsupported provider: ${provider}`;
elizaLogger.error(errorMessage);
Expand Down
14 changes: 14 additions & 0 deletions packages/core/src/models.ts
Original file line number Diff line number Diff line change
Expand Up @@ -451,6 +451,20 @@ export const models: Models = {
[ModelClass.IMAGE]: settings.IMAGE_HYPERBOLIC_MODEL || "FLUX.1-dev",
},
},
// Venice provider configuration. The endpoint is consumed by the
// OpenAI-compatible client in generation.ts; model names can be
// overridden per size class via the *_VENICE_MODEL env settings.
[ModelProviderName.VENICE]: {
endpoint: "https://api.venice.ai/api/v1",
settings: {
stop: [],
// NOTE(review): 128k input / 8k output presumably match the default
// Llama models' context limits — confirm against Venice's docs if
// the default models change.
maxInputTokens: 128000,
maxOutputTokens: 8192,
temperature: 0.6,
},
model: {
[ModelClass.SMALL]: settings.SMALL_VENICE_MODEL || "llama-3.3-70b",
[ModelClass.MEDIUM]: settings.MEDIUM_VENICE_MODEL || "llama-3.3-70b",
[ModelClass.LARGE]: settings.LARGE_VENICE_MODEL || "llama-3.1-405b",
},
},
};

export function getModel(provider: ModelProviderName, type: ModelClass) {
Expand Down
2 changes: 2 additions & 0 deletions packages/core/src/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -207,6 +207,7 @@ export type Models = {
[ModelProviderName.VOLENGINE]: Model;
[ModelProviderName.NANOGPT]: Model;
[ModelProviderName.HYPERBOLIC]: Model;
[ModelProviderName.VENICE]: Model;
};

/**
Expand Down Expand Up @@ -234,6 +235,7 @@ export enum ModelProviderName {
VOLENGINE = "volengine",
NANOGPT = "nanogpt",
HYPERBOLIC = "hyperbolic",
VENICE = "venice",
}

/**
Expand Down

0 comments on commit 9192179

Please sign in to comment.