Skip to content

Commit 11054fb

Browse files
committed
♻️ (autocomplete.sh): replace exit with return in error_exit for better handling in completion context
⬆️ (autocomplete.sh, install.sh): bump ACSH_VERSION from 0.4.3 to 0.4.4
♻️ (autocomplete.sh): improve error handling by using echo_error instead of error_exit
♻️ (autocomplete.sh): use heredoc in _get_help_message for better formatting
♻️ (autocomplete.sh): add support for custom API key mapping to OLLAMA
♻️ (autocomplete.sh): add non-interactive option for remove_command function
1 parent ecc4cd9 commit 11054fb

File tree

2 files changed

+29
-18
lines changed

autocomplete.sh

Lines changed: 28 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,8 @@
1515

1616
error_exit() {
1717
echo -e "\e[31mAutocomplete.sh - $1\e[0m" >&2
18-
exit 1
18+
# In a completion context, exit is too severe. Use return instead.
19+
return 1
1920
}
2021

2122
echo_error() {
@@ -30,9 +31,8 @@ echo_green() {
3031
# Global Variables & Model Definitions #
3132
###############################################################################
3233

33-
export ACSH_VERSION=0.4.3
34+
export ACSH_VERSION=0.4.4
3435

35-
# Supported models defined in an associative array.
3636
unset _autocomplete_modellist
3737
declare -A _autocomplete_modellist
3838
# OpenAI models
@@ -46,24 +46,21 @@ _autocomplete_modellist['anthropic: claude-3-7-sonnet-20250219']='{ "completion_
4646
_autocomplete_modellist['anthropic: claude-3-5-sonnet-20241022']='{ "completion_cost":0.0000150, "prompt_cost":0.0000030, "endpoint": "https://api.anthropic.com/v1/messages", "model": "claude-3-5-sonnet-20241022", "provider": "anthropic" }'
4747
_autocomplete_modellist['anthropic: claude-3-5-haiku-20241022']='{ "completion_cost":0.0000040, "prompt_cost":0.0000008, "endpoint": "https://api.anthropic.com/v1/messages", "model": "claude-3-5-haiku-20241022", "provider": "anthropic" }'
4848
# Groq models
49-
# Production Models
5049
_autocomplete_modellist['groq: llama3-8b-8192']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "llama3-8b-8192", "provider": "groq" }'
5150
_autocomplete_modellist['groq: llama3-70b-8192']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "llama3-70b-8192", "provider": "groq" }'
5251
_autocomplete_modellist['groq: llama-3.3-70b-versatile']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "llama-3.3-70b-versatile", "provider": "groq" }'
5352
_autocomplete_modellist['groq: llama-3.1-8b-instant']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "llama-3.1-8b-instant", "provider": "groq" }'
5453
_autocomplete_modellist['groq: llama-guard-3-8b']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "llama-guard-3-8b", "provider": "groq" }'
5554
_autocomplete_modellist['groq: mixtral-8x7b-32768']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "mixtral-8x7b-32768", "provider": "groq" }'
5655
_autocomplete_modellist['groq: gemma2-9b-it']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "gemma2-9b-it", "provider": "groq" }'
57-
# Groq models
58-
# Preview Models
56+
# Groq preview models
5957
_autocomplete_modellist['groq: mistral-saba-24b']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "mistral-saba-24b", "provider": "groq" }'
6058
_autocomplete_modellist['groq: qwen-2.5-coder-32b']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "qwen-2.5-coder-32b", "provider": "groq" }'
6159
_autocomplete_modellist['groq: deepseek-r1-distill-qwen-32b']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "deepseek-r1-distill-qwen-32b", "provider": "groq" }'
6260
_autocomplete_modellist['groq: deepseek-r1-distill-llama-70b-specdec']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "deepseek-r1-distill-llama-70b-specdec", "provider": "groq" }'
6361
_autocomplete_modellist['groq: llama-3.3-70b-specdec']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "llama-3.3-70b-specdec", "provider": "groq" }'
6462
_autocomplete_modellist['groq: llama-3.2-1b-preview']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "llama-3.2-1b-preview", "provider": "groq" }'
6563
_autocomplete_modellist['groq: llama-3.2-3b-preview']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "llama-3.2-3b-preview", "provider": "groq" }'
66-
6764
# Ollama model
6865
_autocomplete_modellist['ollama: codellama']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "http://localhost:11434/api/chat", "model": "codellama", "provider": "ollama" }'
6966

@@ -79,7 +76,7 @@ _get_terminal_info() {
7976
* Operating system: \$OSTYPE=$OSTYPE
8077
* Shell: \$BASH=$BASH
8178
* Terminal type: \$TERM=$TERM
82-
* Hostname: \$HOSTNAME=$HOSTNAME"
79+
* Hostname: \$HOSTNAME"
8380
echo "$terminal_info"
8481
}
8582

@@ -145,15 +142,19 @@ _get_recent_files() {
145142
find . -maxdepth 1 -type f -exec ls -ld {} + | sort -r | head -n "$FILE_LIMIT"
146143
}
147144

145+
# Rewritten _get_help_message using a heredoc to preserve formatting.
148146
_get_help_message() {
149147
local COMMAND HELP_INFO
150148
COMMAND=$(echo "$1" | awk '{print $1}')
151149
HELP_INFO=""
152150
{
153151
set +e
154-
HELP_INFO=$($COMMAND --help 2>&1 || true)
152+
HELP_INFO=$(cat <<EOF
153+
$($COMMAND --help 2>&1 || true)
154+
EOF
155+
)
155156
set -e
156-
} || HELP_INFO="Error: '$COMMAND --help' not available"
157+
} || HELP_INFO="'$COMMAND --help' not available"
157158
echo "$HELP_INFO"
158159
}
159160

@@ -341,7 +342,8 @@ openai_completion() {
341342
user_input=${*:-$default_user_input}
342343

343344
if [[ -z "$ACSH_ACTIVE_API_KEY" && ${ACSH_PROVIDER^^} != "OLLAMA" ]]; then
344-
error_exit "ACSH_ACTIVE_API_KEY not set. Please set it with: export ${ACSH_PROVIDER^^}_API_KEY=<your-api-key>"
345+
echo_error "ACSH_ACTIVE_API_KEY not set. Please set it with: export ${ACSH_PROVIDER^^}_API_KEY=<your-api-key>"
346+
return
345347
fi
346348
api_key="${ACSH_ACTIVE_API_KEY:-$OPENAI_API_KEY}"
347349
payload=$(_build_payload "$user_input")
@@ -350,15 +352,15 @@ openai_completion() {
350352
attempt=1
351353
while [ $attempt -le $max_attempts ]; do
352354
if [[ "${ACSH_PROVIDER^^}" == "ANTHROPIC" ]]; then
353-
response=$(curl -s -m "$timeout" -w "\n%{http_code}" "$endpoint" \
355+
response=$(\curl -s -m "$timeout" -w "\n%{http_code}" "$endpoint" \
354356
-H "content-type: application/json" \
355357
-H "anthropic-version: 2023-06-01" \
356358
-H "x-api-key: $api_key" \
357359
--data "$payload")
358360
elif [[ "${ACSH_PROVIDER^^}" == "OLLAMA" ]]; then
359-
response=$(curl -s -m "$timeout" -w "\n%{http_code}" "$endpoint" --data "$payload")
361+
response=$(\curl -s -m "$timeout" -w "\n%{http_code}" "$endpoint" --data "$payload")
360362
else
361-
response=$(curl -s -m "$timeout" -w "\n%{http_code}" "$endpoint" \
363+
response=$(\curl -s -m "$timeout" -w "\n%{http_code}" "$endpoint" \
362364
-H "Content-Type: application/json" \
363365
-H "Authorization: Bearer $api_key" \
364366
-d "$payload")
@@ -716,12 +718,16 @@ acsh_load_config() {
716718
if [[ -z "$ACSH_OLLAMA_API_KEY" && -n "$LLM_API_KEY" ]]; then
717719
export ACSH_OLLAMA_API_KEY="$LLM_API_KEY"
718720
fi
721+
# If the custom API key was set, map it to OLLAMA if needed.
722+
if [[ -z "$ACSH_OLLAMA_API_KEY" && -n "$ACSH_CUSTOM_API_KEY" ]]; then
723+
export ACSH_OLLAMA_API_KEY="$ACSH_CUSTOM_API_KEY"
724+
fi
719725
case "${ACSH_PROVIDER:-openai}" in
720726
"openai") export ACSH_ACTIVE_API_KEY="$ACSH_OPENAI_API_KEY" ;;
721727
"anthropic") export ACSH_ACTIVE_API_KEY="$ACSH_ANTHROPIC_API_KEY" ;;
722728
"groq") export ACSH_ACTIVE_API_KEY="$ACSH_GROQ_API_KEY" ;;
723729
"ollama") export ACSH_ACTIVE_API_KEY="$ACSH_OLLAMA_API_KEY" ;;
724-
*) error_exit "Unknown provider: $ACSH_PROVIDER" ;;
730+
*) echo_error "Unknown provider: $ACSH_PROVIDER" ;;
725731
esac
726732
else
727733
echo "Configuration file not found: $config_file"
@@ -787,10 +793,15 @@ remove_command() {
787793
autocomplete_script=$(command -v autocomplete)
788794
if [ -n "$autocomplete_script" ]; then
789795
echo "Autocomplete script is at: $autocomplete_script"
790-
read -r -p "Remove the autocomplete script? (y/n): " confirm
791-
if [[ $confirm == "y" ]]; then
796+
if [ "$1" == "-y" ]; then
792797
rm "$autocomplete_script"
793798
echo "Removed: $autocomplete_script"
799+
else
800+
read -r -p "Remove the autocomplete script? (y/n): " confirm
801+
if [[ $confirm == "y" ]]; then
802+
rm "$autocomplete_script"
803+
echo "Removed: $autocomplete_script"
804+
fi
794805
fi
795806
fi
796807
echo "Uninstallation complete."

docs/install.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
# This install script downloads the latest version of the LLMs
55

66
# The URL of the latest version of the LLMs
7-
ACSH_VERSION="v0.4.3"
7+
ACSH_VERSION="v0.4.4"
88
BRANCH_OR_VERSION=${1:-$ACSH_VERSION}
99

1010
# The default location to install the LLMs

0 commit comments

Comments (0)