31 changes: 31 additions & 0 deletions deploy/docker-compose.tools.yaml
@@ -0,0 +1,31 @@
# Tools container configuration
# Used to run maintenance scripts in a Docker environment
#
# Usage (run from the project root):
# docker compose -f docker-compose.yaml -f deploy/docker-compose.tools.yaml run --rm tools bun run scripts/clear-session-bindings.ts
#
# Examples with arguments:
# docker compose -f docker-compose.yaml -f deploy/docker-compose.tools.yaml run --rm tools bun run scripts/clear-session-bindings.ts --priority 10 --dry-run
# docker compose -f docker-compose.yaml -f deploy/docker-compose.tools.yaml run --rm tools bun run scripts/clear-session-bindings.ts --id 1,2,3 --yes
#
# Notes:
# - The main docker-compose.yaml must also be specified so the network is shared
# - The tools service automatically joins the main services' network and can reach postgres and redis directly

services:
tools:
image: oven/bun:1.3.2-slim
working_dir: /app
init: true
volumes:
- .:/app
env_file:
- .env
environment:
DSN: postgresql://${DB_USER:-postgres}:${DB_PASSWORD:-postgres}@postgres:5432/${DB_NAME:-claude_code_hub}
Contributor

medium

Interpolating ${DB_PASSWORD} directly into the DSN connection string is risky. If the password contains special characters (such as @, :, or /), it can break the structure of the connection string and cause the database connection to fail. A safer approach is to ensure the password is URL-encoded before it is used.
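A minimal sketch of the suggested fix, assuming the DSN were assembled inside one of the project's Bun/TypeScript scripts rather than interpolated in the compose file; the buildDsn helper and the DB_HOST/DB_PORT variables are illustrative, not part of the repository.

```ts
// Hypothetical helper: builds the Postgres DSN with credentials percent-encoded,
// so characters like "@", ":" or "/" in DB_PASSWORD cannot break the URL structure.
function buildDsn(): string {
  const user = process.env.DB_USER ?? "postgres";
  const password = process.env.DB_PASSWORD ?? "postgres";
  const host = process.env.DB_HOST ?? "postgres";
  const port = process.env.DB_PORT ?? "5432";
  const db = process.env.DB_NAME ?? "claude_code_hub";

  // encodeURIComponent escapes the reserved URL characters in the credential parts.
  return `postgresql://${encodeURIComponent(user)}:${encodeURIComponent(password)}@${host}:${port}/${db}`;
}

// Example: DB_PASSWORD="p@ss:word/1" yields ...p%40ss%3Aword%2F1... in the DSN.
console.log(buildDsn());
```

With this approach the compose file would only need to pass the raw DB_* variables through, leaving DSN assembly and encoding to the script.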

REDIS_URL: redis://redis:6379
TZ: Asia/Shanghai
profiles:
- tools
stdin_open: true
tty: true
131 changes: 0 additions & 131 deletions public/seed/litellm-prices.json
@@ -2907,29 +2907,6 @@
"supports_tool_choice": true,
"supports_vision": true
},
"azure/gpt-5.1-codex-max": {
"cache_read_input_token_cost": 1.25e-7,
"input_cost_per_token": 1.25e-6,
"litellm_provider": "azure",
"max_input_tokens": 400000,
"max_output_tokens": 128000,
"max_tokens": 128000,
"mode": "responses",
"output_cost_per_token": 1e-5,
"supported_endpoints": ["/v1/responses"],
"supported_modalities": ["text", "image"],
"supported_output_modalities": ["text"],
"supports_function_calling": true,
"supports_native_streaming": true,
"supports_parallel_function_calling": true,
"supports_pdf_input": true,
"supports_prompt_caching": true,
"supports_reasoning": true,
"supports_response_schema": true,
"supports_system_messages": false,
"supports_tool_choice": true,
"supports_vision": true
},
"azure/gpt-5.1-codex-mini": {
"cache_read_input_token_cost": 2.5e-8,
"input_cost_per_token": 2.5e-7,
@@ -4527,19 +4504,6 @@
"supports_function_calling": true,
"supports_tool_choice": true
},
"azure_ai/mistral-large-3": {
"input_cost_per_token": 5e-7,
"litellm_provider": "azure_ai",
"max_input_tokens": 256000,
"max_output_tokens": 8191,
"max_tokens": 8191,
"mode": "chat",
"output_cost_per_token": 1.5e-6,
"source": "https://azure.microsoft.com/en-us/blog/introducing-mistral-large-3-in-microsoft-foundry-open-capable-and-ready-for-production-workloads/",
"supports_function_calling": true,
"supports_tool_choice": true,
"supports_vision": true
},
"azure_ai/mistral-medium-2505": {
"input_cost_per_token": 4e-7,
"litellm_provider": "azure_ai",
@@ -11151,7 +11115,6 @@
"max_tokens": 65536,
"mode": "image_generation",
"output_cost_per_image": 0.134,
"output_cost_per_image_token": 1.2e-4,
"output_cost_per_token": 1.2e-5,
"output_cost_per_token_batches": 6e-6,
"source": "https://ai.google.dev/gemini-api/docs/pricing",
@@ -12602,7 +12565,6 @@
"max_tokens": 65536,
"mode": "image_generation",
"output_cost_per_image": 0.134,
"output_cost_per_image_token": 1.2e-4,
"output_cost_per_token": 1.2e-5,
"rpm": 1000,
"tpm": 4000000,
@@ -14770,29 +14732,6 @@
"supports_tool_choice": true,
"supports_vision": true
},
"gpt-5.1-codex-max": {
"cache_read_input_token_cost": 1.25e-7,
"input_cost_per_token": 1.25e-6,
"litellm_provider": "openai",
"max_input_tokens": 400000,
"max_output_tokens": 128000,
"max_tokens": 128000,
"mode": "responses",
"output_cost_per_token": 1e-5,
"supported_endpoints": ["/v1/responses"],
"supported_modalities": ["text", "image"],
"supported_output_modalities": ["text"],
"supports_function_calling": true,
"supports_native_streaming": true,
"supports_parallel_function_calling": true,
"supports_pdf_input": true,
"supports_prompt_caching": true,
"supports_reasoning": true,
"supports_response_schema": true,
"supports_system_messages": false,
"supports_tool_choice": true,
"supports_vision": true
},
"gpt-5.1-codex-mini": {
"cache_read_input_token_cost": 2.5e-8,
"cache_read_input_token_cost_priority": 4.5e-8,
@@ -15201,60 +15140,6 @@
"supports_response_schema": true,
"supports_tool_choice": true
},
"amazon-nova/nova-micro-v1": {
"input_cost_per_token": 3.5e-8,
"litellm_provider": "amazon_nova",
"max_input_tokens": 128000,
"max_output_tokens": 10000,
"max_tokens": 10000,
"mode": "chat",
"output_cost_per_token": 1.4e-7,
"supports_function_calling": true,
"supports_prompt_caching": true,
"supports_response_schema": true
},
"amazon-nova/nova-lite-v1": {
"input_cost_per_token": 6e-8,
"litellm_provider": "amazon_nova",
"max_input_tokens": 300000,
"max_output_tokens": 10000,
"max_tokens": 10000,
"mode": "chat",
"output_cost_per_token": 2.4e-7,
"supports_function_calling": true,
"supports_pdf_input": true,
"supports_prompt_caching": true,
"supports_response_schema": true,
"supports_vision": true
},
"amazon-nova/nova-premier-v1": {
"input_cost_per_token": 2.5e-6,
"litellm_provider": "amazon_nova",
"max_input_tokens": 1000000,
"max_output_tokens": 10000,
"max_tokens": 10000,
"mode": "chat",
"output_cost_per_token": 1.25e-5,
"supports_function_calling": true,
"supports_pdf_input": true,
"supports_prompt_caching": false,
"supports_response_schema": true,
"supports_vision": true
},
"amazon-nova/nova-pro-v1": {
"input_cost_per_token": 8e-7,
"litellm_provider": "amazon_nova",
"max_input_tokens": 300000,
"max_output_tokens": 10000,
"max_tokens": 10000,
"mode": "chat",
"output_cost_per_token": 3.2e-6,
"supports_function_calling": true,
"supports_pdf_input": true,
"supports_prompt_caching": true,
"supports_response_schema": true,
"supports_vision": true
},
"groq/deepseek-r1-distill-llama-70b": {
"input_cost_per_token": 7.5e-7,
"litellm_provider": "groq",
@@ -16996,21 +16881,6 @@
"supports_response_schema": true,
"supports_tool_choice": true
},
"mistral/mistral-large-3": {
"input_cost_per_token": 5e-7,
"litellm_provider": "mistral",
"max_input_tokens": 256000,
"max_output_tokens": 8191,
"max_tokens": 8191,
"mode": "chat",
"output_cost_per_token": 1.5e-6,
"source": "https://docs.mistral.ai/models/mistral-large-3-25-12",
"supports_assistant_prefill": true,
"supports_function_calling": true,
"supports_response_schema": true,
"supports_tool_choice": true,
"supports_vision": true
},
"mistral/mistral-medium": {
"input_cost_per_token": 2.7e-6,
"litellm_provider": "mistral",
@@ -23874,7 +23744,6 @@
"max_tokens": 65536,
"mode": "image_generation",
"output_cost_per_image": 0.134,
"output_cost_per_image_token": 1.2e-4,
"output_cost_per_token": 1.2e-5,
"output_cost_per_token_batches": 6e-6,
"source": "https://docs.cloud.google.com/vertex-ai/generative-ai/docs/models/gemini/3-pro-image"