typo fix

2025-12-02 09:54:46 +01:00
parent 7d9ff5c2e1
commit 0e1ea99c34
3 changed files with 5 additions and 5 deletions


@@ -21,7 +21,7 @@ Change settings:
```bash
finish config set temperature 0.5
-finish config set endpoint http://localhost:1234/v1/chat/completions
+finish config set endpoint http://plato.lan:1234/v1/chat/completions
finish config set model your-model-name
```
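
The stored endpoint can also be overridden per shell session: later in this diff, `openai_completion` reads `ACSH_ENDPOINT` before falling back to the built-in default. A minimal sketch (the export itself is not part of this commit):

```bash
# Session-level override; openai_completion falls back to the
# plato.lan default when this variable is unset.
export ACSH_ENDPOINT="http://plato.lan:1234/v1/chat/completions"
```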


@@ -26,7 +26,7 @@ export ACSH_VERSION=0.5.0
unset _finish_modellist
declare -A _finish_modellist
# LM-Studio models
-_finish_modellist['lmstudio: darkidol-llama-3.1-8b-instruct-1.3-uncensored_gguf:2']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "http://localhost:1234/v1/chat/completions", "model": "darkidol-llama-3.1-8b-instruct-1.3-uncensored_gguf:2", "provider": "lmstudio" }'
+_finish_modellist['lmstudio: darkidol-llama-3.1-8b-instruct-1.3-uncensored_gguf:2']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "http://plato.lan:1234/v1/chat/completions", "model": "darkidol-llama-3.1-8b-instruct-1.3-uncensored_gguf:2", "provider": "lmstudio" }'
# Ollama model
_finish_modellist['ollama: codellama']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "http://localhost:11434/api/chat", "model": "codellama", "provider": "ollama" }'
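
Each value in `_finish_modellist` is a JSON string, so fields such as the endpoint can be read back with a JSON tool. A minimal sketch, assuming `jq` is available (not part of this commit):

```bash
# Pull the endpoint field out of the LM-Studio registry entry.
key='lmstudio: darkidol-llama-3.1-8b-instruct-1.3-uncensored_gguf:2'
printf '%s' "${_finish_modellist[$key]}" | jq -r '.endpoint'
# -> http://plato.lan:1234/v1/chat/completions
```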
@@ -246,7 +246,7 @@ log_request() {
openai_completion() {
local content status_code response_body default_user_input user_input api_key payload endpoint timeout attempt max_attempts
-endpoint=${ACSH_ENDPOINT:-"http://localhost:1234/v1/chat/completions"}
+endpoint=${ACSH_ENDPOINT:-"http://plato.lan:1234/v1/chat/completions"}
timeout=${ACSH_TIMEOUT:-30}
default_user_input="Write two to six most likely commands given the provided information"
user_input=${*:-$default_user_input}
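
The endpoint speaks the OpenAI-compatible chat-completions protocol (LM-Studio's `/v1/chat/completions`). A hedged sketch of the request shape, useful for smoke-testing the new default; the exact payload is an assumption, not the function's literal body:

```bash
# Send the script's default prompt to the configured endpoint.
curl -s "${ACSH_ENDPOINT:-http://plato.lan:1234/v1/chat/completions}" \
  -H "Content-Type: application/json" \
  -d '{
        "model": "darkidol-llama-3.1-8b-instruct-1.3-uncensored_gguf:2",
        "messages": [
          {"role": "user",
           "content": "Write two to six most likely commands given the provided information"}
        ]
      }'
```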
@@ -560,7 +560,7 @@ build_config() {
provider: lmstudio
model: darkidol-llama-3.1-8b-instruct-1.3-uncensored_gguf:2
temperature: 0.0
-endpoint: http://localhost:1234/v1/chat/completions
+endpoint: http://plato.lan:1234/v1/chat/completions
api_prompt_cost: 0.000000
api_completion_cost: 0.000000


@@ -9,7 +9,7 @@ setup() {
# Configure for local LM-Studio
finish config set provider lmstudio
-finish config set endpoint http://localhost:1234/v1/chat/completions
+finish config set endpoint http://plato.lan:1234/v1/chat/completions
finish config set model darkidol-llama-3.1-8b-instruct-1.3-uncensored_gguf:2
}
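
A hedged sketch of a Bats test that could follow this `setup()`; the test name and assertion are assumptions, since only `finish config set` appears in the diff:

```bash
@test "config set accepts the plato.lan endpoint" {
  # setup() has already pointed the tool at the LM-Studio server.
  run finish config set endpoint http://plato.lan:1234/v1/chat/completions
  [ "$status" -eq 0 ]
}
```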