Commit

prettier
lalalune committed Nov 7, 2024
1 parent ad34b78 commit 84f2da0
Showing 59 changed files with 1,776 additions and 1,238 deletions.
2 changes: 1 addition & 1 deletion core/src/actions/imageGeneration.ts
@@ -14,7 +14,7 @@ export const imageGeneration: Action = {
description: "Generate an image to go along with the message.",
validate: async (runtime: IAgentRuntime, message: Memory) => {
// TODO: Abstract this to an image provider thing

const anthropicApiKeyOk = !!runtime.getSetting("ANTHROPIC_API_KEY");
const togetherApiKeyOk = !!runtime.getSetting("TOGETHER_API_KEY");

1 change: 0 additions & 1 deletion core/src/adapters/sqlite.ts
@@ -152,7 +152,6 @@ export class SqliteDatabaseAdapter extends DatabaseAdapter {
tableName: string;
agentId?: UUID;
}): Promise<Memory[]> {

if (!params.tableName) {
// default to messages
params.tableName = "messages";
2 changes: 1 addition & 1 deletion core/src/clients/discord/actions/summarize_conversation.ts
@@ -288,7 +288,7 @@ const summarizeAction = {
"gpt-4o-mini"
),
});

const summary = await generateText({
runtime,
context,
52 changes: 25 additions & 27 deletions core/src/clients/twitter/interactions.ts
@@ -311,38 +311,36 @@ export class TwitterInteractionClient extends ClientBase {

if (response.text) {
try {
const callback: HandlerCallback = async (
response: Content
) => {
const memories = await sendTweetChunks(
this,
response,
message.roomId,
this.runtime.getSetting("TWITTER_USERNAME"),
tweet.id
);
return memories;
};
const callback: HandlerCallback = async (response: Content) => {
const memories = await sendTweetChunks(
this,
response,
message.roomId,
this.runtime.getSetting("TWITTER_USERNAME"),
tweet.id
);
return memories;
};

const responseMessages = await callback(response);
const responseMessages = await callback(response);

state = (await this.runtime.updateRecentMessageState(
state
)) as State;
state = (await this.runtime.updateRecentMessageState(
state
)) as State;

for (const responseMessage of responseMessages) {
await this.runtime.messageManager.createMemory(
responseMessage
);
}
for (const responseMessage of responseMessages) {
await this.runtime.messageManager.createMemory(
responseMessage
);
}

await this.runtime.evaluate(message, state);
await this.runtime.evaluate(message, state);

await this.runtime.processActions(
message,
responseMessages,
state
);
await this.runtime.processActions(
message,
responseMessages,
state
);
const responseInfo = `Context:\n\n${context}\n\nSelected Post: ${tweet.id} - ${tweet.username}: ${tweet.text}\nAgent's Output:\n${response.text}`;
// if the tweets folder doesn't exist, create it
if (!fs.existsSync("tweets")) {
5 changes: 2 additions & 3 deletions core/src/clients/twitter/post.ts
@@ -113,7 +113,7 @@ export class TwitterPostClient extends ClientBase {
let content = slice.slice(0, contentLength);
// if it's bigger than 280, delete the last line
if (content.length > 280) {
content = content.slice(0, content.lastIndexOf("\n"));
content = content.slice(0, content.lastIndexOf("\n"));
}
if (content.length > contentLength) {
// slice at the last period
@@ -130,8 +130,7 @@
);
// read the body of the response
const body = await result.json();
const tweetResult =
body.data.create_tweet.tweet_results.result;
const tweetResult = body.data.create_tweet.tweet_results.result;

const tweet = {
id: tweetResult.rest_id,
2 changes: 1 addition & 1 deletion core/src/core/defaultCharacter.ts
@@ -398,4 +398,4 @@ export const defaultCharacter: Character = {
"dive deeper into stuff when its interesting",
],
},
};
};
12 changes: 7 additions & 5 deletions core/src/core/generation.ts
@@ -44,7 +44,8 @@ export async function generateText({
}

const provider = runtime.modelProvider;
const endpoint = runtime.character.modelEndpointOverride || models[provider].endpoint;
const endpoint =
runtime.character.modelEndpointOverride || models[provider].endpoint;
const model = models[provider].model[modelClass];
const temperature = models[provider].settings.temperature;
const frequency_penalty = models[provider].settings.frequency_penalty;
@@ -161,9 +162,7 @@
}

case ModelProvider.LLAMALOCAL: {
elizaLogger.log(
"Using local Llama model for text completion."
);
elizaLogger.log("Using local Llama model for text completion.");
response = await runtime.llamaService.queueTextCompletion(
context,
temperature,
@@ -258,7 +257,10 @@ export async function generateShouldRespond({
let retryDelay = 1000;
while (true) {
try {
elizaLogger.log("Attempting to generate text with context:", context);
elizaLogger.log(
"Attempting to generate text with context:",
context
);
const response = await generateText({
runtime,
context,
10 changes: 7 additions & 3 deletions core/src/core/runtime.ts
@@ -263,7 +263,6 @@ export class AgentRuntime implements IAgentRuntime {
this.token = opts.token;

(opts.character.plugins ?? []).forEach((plugin) => {

plugin.actions.forEach((action) => {
this.registerAction(action);
});
@@ -482,14 +481,19 @@
);
if (simileAction) {
action = _action;
elizaLogger.success(`Action found in similes: ${action.name}`);
elizaLogger.success(
`Action found in similes: ${action.name}`
);
break;
}
}
}

if (!action) {
elizaLogger.error("No action found for", responses[0].content.action);
elizaLogger.error(
"No action found for",
responses[0].content.action
);
return;
}

4 changes: 1 addition & 3 deletions core/src/services/llama.ts
@@ -74,7 +74,6 @@ class LlamaService {
"https://huggingface.co/NousResearch/Hermes-3-Llama-3.1-8B-GGUF/resolve/main/Hermes-3-Llama-3.1-8B.Q8_0.gguf?download=true";
const modelName = "model.gguf";
this.modelPath = path.join(__dirname, modelName);

}
private async ensureInitialized() {
if (!this.modelInitialized) {
@@ -116,7 +115,7 @@
this.model = await this.llama.loadModel({
modelPath: this.modelPath,
});

this.ctx = await this.model.createContext({ contextSize: 8192 });
this.sequence = this.ctx.getSequence();

@@ -391,4 +390,3 @@
}

export default LlamaService;

18 changes: 4 additions & 14 deletions core/tsconfig.json
@@ -3,10 +3,7 @@
"compilerOptions": {
"target": "ESNext",
"module": "ESNext",
"lib": [
"ESNext",
"dom"
],
"lib": ["ESNext", "dom"],
"moduleResolution": "Bundler",
"outDir": "./dist",
"rootDir": "./src",
@@ -30,13 +27,6 @@
"./node_modules/jest/types"
]
},
"include": [
"src/**/*"
],
"exclude": [
"node_modules",
"dist",
"src/**/*.d.ts",
"types/**/*.test.ts"
]
}
"include": ["src/**/*"],
"exclude": ["node_modules", "dist", "src/**/*.d.ts", "types/**/*.test.ts"]
}
2 changes: 1 addition & 1 deletion core/tsup.config.ts
@@ -11,7 +11,7 @@ export default defineConfig({
"fs", // Externalize fs to use Node.js built-in module
"path", // Externalize other built-ins if necessary
"http",
"https"
"https",
// Add other modules you want to externalize
],
});
46 changes: 23 additions & 23 deletions docs/docs/api/_media/README_CN.md
@@ -4,39 +4,39 @@

## Features

- 🛠 Discord / Twitter / Telegram connectors
- 👥 Multimodal agent support
- 📚 Easily ingest documents and interact with them
- 💾 Retrievable memory and document store
- 🚀 Highly extensible: define your own clients and actions to extend functionality
- ☁️ Supports many models, including Llama, OpenAI, Grok, and Anthropic
- 📦 Simple to use

What can you do with Eliza?

- 🤖 Chatbots
- 🕵️ Autonomous agents
- 📈 Business process automation
- 🎮 Game NPCs

# Getting Started

**Prerequisites (required):**

- [Node.js 22+](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm)
    - Install Node.js
- [pnpm](https://pnpm.io/installation)
    - Use pnpm

### Edit the .env file

- Copy .env.example to .env and fill in the appropriate values
- Set the Twitter variables with your Twitter account and password; see the sketch below
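
A hypothetical minimal `.env` sketch. Only `ANTHROPIC_API_KEY`, `TOGETHER_API_KEY`, and `TWITTER_USERNAME` are confirmed by this commit (they are read via `runtime.getSetting`); the other keys and all values are illustrative placeholders, and `.env.example` remains the authoritative list:

```env
# Model provider keys read via runtime.getSetting in this commit
ANTHROPIC_API_KEY=sk-...
TOGETHER_API_KEY=...

# Twitter client settings (TWITTER_PASSWORD is an illustrative placeholder)
TWITTER_USERNAME=your_handle
TWITTER_PASSWORD=your_password
```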

### Edit the character file

- Check out `src/core/defaultCharacter.ts` - you can modify it
- You can also load characters with `node --loader ts-node/esm src/index.ts --characters="path/to/your/character.json"` and run multiple bots at the same time; a sketch of such a file follows
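
A hypothetical minimal `character.json` sketch, for illustration only. The authoritative shape is the `Character` type behind `core/src/core/defaultCharacter.ts`; the only character field confirmed elsewhere in this diff is `modelEndpointOverride` (read in `core/src/core/generation.ts`), so the remaining field names and every value here are assumptions:

```json
{
    "name": "MyAgent",
    "modelEndpointOverride": "https://api.example.com/v1",
    "bio": ["A helpful example agent."],
    "style": {
        "all": ["concise and direct"]
    }
}
```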

After you have configured your account and character file, start your bot with the following command line:

@@ -163,9 +163,9 @@ pnpm test:sqljs # Run tests with SQL.js

Tests are written with Jest and live in `src/**/*.test.ts` files. The test environment is configured as follows:

- Environment variables are loaded from .env.test
- A 2-minute timeout is used for long-running tests
- ESM modules are supported
- Tests run sequentially (--runInBand)

To create a new test, add a `.test.ts` file next to the code you want to test; a minimal sketch follows.
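
A minimal sketch of such a test file. `truncateToNewline` is a hypothetical local helper that mirrors the tweet-length check in `core/src/clients/twitter/post.ts` above; it is not an exported API of the project:

```ts
// truncate.test.ts
// Hypothetical helper mirroring the 280-character check in post.ts.
function truncateToNewline(content: string, max = 280): string {
    return content.length > max
        ? content.slice(0, content.lastIndexOf("\n"))
        : content;
}

describe("truncateToNewline", () => {
    it("keeps short content unchanged", () => {
        expect(truncateToNewline("hello world")).toBe("hello world");
    });

    it("cuts at the last newline when the limit is exceeded", () => {
        const long = "a".repeat(200) + "\n" + "b".repeat(200);
        expect(truncateToNewline(long)).toBe("a".repeat(200));
    });
});
```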