@@ -30,25 +30,40 @@ echo_green() {
30
30
# Global Variables & Model Definitions #
31
31
# ##############################################################################
32
32
33
- export ACSH_VERSION=0.4.2
33
+ export ACSH_VERSION=0.4.3
34
34
35
35
# Supported models defined in an associative array.
36
36
unset _autocomplete_modellist
37
37
declare -A _autocomplete_modellist
38
38
# OpenAI models
39
- _autocomplete_modellist[' openai: gpt-4o' ]=' { "completion_cost":0.0000150, "prompt_cost":0.00000500, "endpoint": "https://api.openai.com/v1/chat/completions", "model": "gpt-4o", "provider": "openai" }'
40
- _autocomplete_modellist[' openai: gpt-4o-mini' ]=' { "completion_cost":0.0000006, "prompt_cost":0.00000015, "endpoint": "https://api.openai.com/v1/chat/completions", "model": "gpt-4o-mini", "provider": "openai" }'
41
- _autocomplete_modellist[' openai: gpt-3.5-turbo-0125' ]=' { "completion_cost":0.0000015, "prompt_cost":0.00000050, "endpoint": "https://api.openai.com/v1/chat/completions", "model": "gpt-3.5-turbo-0125", "provider": "openai" }'
39
+ _autocomplete_modellist[' openai: gpt-4o' ]=' { "completion_cost":0.0000100, "prompt_cost":0.00000250, "endpoint": "https://api.openai.com/v1/chat/completions", "model": "gpt-4o", "provider": "openai" }'
40
+ _autocomplete_modellist[' openai: gpt-4o-mini' ]=' { "completion_cost":0.0000006, "prompt_cost":0.00000015, "endpoint": "https://api.openai.com/v1/chat/completions", "model": "gpt-4o-mini", "provider": "openai" }'
41
+ _autocomplete_modellist[' openai: o1' ]=' { "completion_cost":0.0000600, "prompt_cost":0.00001500, "endpoint": "https://api.openai.com/v1/chat/completions", "model": "o1", "provider": "openai" }'
42
+ _autocomplete_modellist[' openai: o1-mini' ]=' { "completion_cost":0.0000044, "prompt_cost":0.00000110, "endpoint": "https://api.openai.com/v1/chat/completions", "model": "o1-mini", "provider": "openai" }'
43
+ _autocomplete_modellist[' openai: o3-mini' ]=' { "completion_cost":0.0000044, "prompt_cost":0.00000110, "endpoint": "https://api.openai.com/v1/chat/completions", "model": "o3-mini", "provider": "openai" }'
42
44
# Anthropic models
43
- _autocomplete_modellist[' anthropic: claude-3-5 -sonnet-20240620 ' ]=' { "completion_cost":0.0000150, "prompt_cost":0.0000030, "endpoint": "https://api.anthropic.com/v1/messages", "model": "claude-3-5 -sonnet-20240620 ", "provider": "anthropic" }'
44
- _autocomplete_modellist[' anthropic: claude-3-opus-20240229 ' ]=' { "completion_cost":0.0000750 , "prompt_cost":0.0000150 , "endpoint": "https://api.anthropic.com/v1/messages", "model": "claude-3-opus-20240229 ", "provider": "anthropic" }'
45
- _autocomplete_modellist[' anthropic: claude-3-haiku-20240307 ' ]=' { "completion_cost":0.00000125 , "prompt_cost":0.00000025 , "endpoint": "https://api.anthropic.com/v1/messages", "model": "claude-3-haiku-20240307 ", "provider": "anthropic" }'
45
+ _autocomplete_modellist[' anthropic: claude-3-7 -sonnet-20250219 ' ]=' { "completion_cost":0.0000150, "prompt_cost":0.0000030, "endpoint": "https://api.anthropic.com/v1/messages", "model": "claude-3-7 -sonnet-20250219 ", "provider": "anthropic" }'
46
+ _autocomplete_modellist[' anthropic: claude-3-5-sonnet-20241022 ' ]=' { "completion_cost":0.0000150 , "prompt_cost":0.0000030 , "endpoint": "https://api.anthropic.com/v1/messages", "model": "claude-3-5-sonnet-20241022 ", "provider": "anthropic" }'
47
+ _autocomplete_modellist[' anthropic: claude-3-5- haiku-20241022 ' ]=' { "completion_cost":0.0000040 , "prompt_cost":0.0000008 , "endpoint": "https://api.anthropic.com/v1/messages", "model": "claude-3-5- haiku-20241022 ", "provider": "anthropic" }'
46
48
# Groq models
49
+ # Production Models
47
50
_autocomplete_modellist[' groq: llama3-8b-8192' ]=' { "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "llama3-8b-8192", "provider": "groq" }'
48
51
_autocomplete_modellist[' groq: llama3-70b-8192' ]=' { "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "llama3-70b-8192", "provider": "groq" }'
52
+ _autocomplete_modellist[' groq: llama-3.3-70b-versatile' ]=' { "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "llama-3.3-70b-versatile", "provider": "groq" }'
53
+ _autocomplete_modellist[' groq: llama-3.1-8b-instant' ]=' { "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "llama-3.1-8b-instant", "provider": "groq" }'
54
+ _autocomplete_modellist[' groq: llama-guard-3-8b' ]=' { "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "llama-guard-3-8b", "provider": "groq" }'
49
55
_autocomplete_modellist[' groq: mixtral-8x7b-32768' ]=' { "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "mixtral-8x7b-32768", "provider": "groq" }'
50
- _autocomplete_modellist[' groq: gemma-7b-it' ]=' { "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "gemma-7b-it", "provider": "groq" }'
51
56
_autocomplete_modellist[' groq: gemma2-9b-it' ]=' { "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "gemma2-9b-it", "provider": "groq" }'
57
+ # Groq models
58
+ # Preview Models
59
+ _autocomplete_modellist[' groq: mistral-saba-24b' ]=' { "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "mistral-saba-24b", "provider": "groq" }'
60
+ _autocomplete_modellist[' groq: qwen-2.5-coder-32b' ]=' { "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "qwen-2.5-coder-32b", "provider": "groq" }'
61
+ _autocomplete_modellist[' groq: deepseek-r1-distill-qwen-32b' ]=' { "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "deepseek-r1-distill-qwen-32b", "provider": "groq" }'
62
+ _autocomplete_modellist[' groq: deepseek-r1-distill-llama-70b-specdec' ]=' { "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "deepseek-r1-distill-llama-70b-specdec", "provider": "groq" }'
63
+ _autocomplete_modellist[' groq: llama-3.3-70b-specdec' ]=' { "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "llama-3.3-70b-specdec", "provider": "groq" }'
64
+ _autocomplete_modellist[' groq: llama-3.2-1b-preview' ]=' { "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "llama-3.2-1b-preview", "provider": "groq" }'
65
+ _autocomplete_modellist[' groq: llama-3.2-3b-preview' ]=' { "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "llama-3.2-3b-preview", "provider": "groq" }'
66
+
52
67
# Ollama model
53
68
_autocomplete_modellist[' ollama: codellama' ]=' { "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "http://localhost:11434/api/chat", "model": "codellama", "provider": "ollama" }'
54
69
0 commit comments