Skip to content

Commit

Permalink
Merge pull request #2220 from ai16z-demirix/fix/issue-2164
Browse files Browse the repository at this point in the history
improvement: using strict types to avoid errors like issue 2164
  • Loading branch information
monilpat authored Jan 16, 2025
2 parents 15b5d25 + 016cfff commit 9cc6c51
Showing 1 changed file with 10 additions and 8 deletions.
18 changes: 10 additions & 8 deletions packages/core/src/generation.ts
Original file line number Diff line number Diff line change
Expand Up @@ -531,20 +531,23 @@ export async function generateText({
const openai = createOpenAI({
apiKey,
baseURL: endpoint,
fetch: async (url: string, options: any) => {
fetch: async (input: RequestInfo | URL, init?: RequestInit): Promise<Response> => {
const url = typeof input === 'string' ? input : input.toString();
const chain_id =
runtime.getSetting("ETERNALAI_CHAIN_ID") || "45762";

const options: RequestInit = { ...init };
if (options?.body) {
const body = JSON.parse(options.body);
const body = JSON.parse(options.body as string);
body.chain_id = chain_id;
options.body = JSON.stringify(body);
}

const fetching = await runtime.fetch(url, options);
if (
parseBooleanFromText(
runtime.getSetting("ETERNALAI_LOG")
)
) {

if (parseBooleanFromText(
runtime.getSetting("ETERNALAI_LOG")
)) {
elizaLogger.info(
"Request data: ",
JSON.stringify(options, null, 2)
Expand Down Expand Up @@ -1195,7 +1198,6 @@ export async function splitChunks(
* @param opts.presence_penalty The presence penalty to apply (0.0 to 2.0)
* @param opts.temperature The temperature to control randomness (0.0 to 2.0)
* @param opts.serverUrl The URL of the API server
* @param opts.token The API token for authentication
* @param opts.max_context_length Maximum allowed context length in tokens
* @param opts.max_response_length Maximum allowed response length in tokens
* @returns Promise resolving to a boolean value parsed from the model's response
Expand Down

0 comments on commit 9cc6c51

Please sign in to comment.