typo fix
USAGE.md | 2 +-

@@ -21,7 +21,7 @@ Change settings:
 ```bash
 finish config set temperature 0.5
-finish config set endpoint http://localhost:1234/v1/chat/completions
+finish config set endpoint http://plato.lan:1234/v1/chat/completions
 finish config set model your-model-name
 ```
 
 
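
The endpoint in this snippet is an OpenAI-compatible chat-completions URL, so the configured server can be checked before running `finish` at all. A minimal sketch with plain `curl`; the hostname `plato.lan` and the placeholder model name are simply the values from this diff, not requirements.

```bash
# Sketch: confirm the configured endpoint answers an OpenAI-style
# chat completion request (hostname and model taken from the diff).
curl -s -X POST "http://plato.lan:1234/v1/chat/completions" \
  -H "Content-Type: application/json" \
  -d '{
        "model": "your-model-name",
        "messages": [{"role": "user", "content": "say hello"}],
        "temperature": 0.5
      }'
```
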
@@ -26,7 +26,7 @@ export ACSH_VERSION=0.5.0
 unset _finish_modellist
 declare -A _finish_modellist
 # LM-Studio models
-_finish_modellist['lmstudio: darkidol-llama-3.1-8b-instruct-1.3-uncensored_gguf:2']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "http://localhost:1234/v1/chat/completions", "model": "darkidol-llama-3.1-8b-instruct-1.3-uncensored_gguf:2", "provider": "lmstudio" }'
+_finish_modellist['lmstudio: darkidol-llama-3.1-8b-instruct-1.3-uncensored_gguf:2']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "http://plato.lan:1234/v1/chat/completions", "model": "darkidol-llama-3.1-8b-instruct-1.3-uncensored_gguf:2", "provider": "lmstudio" }'
 # Ollama model
 _finish_modellist['ollama: codellama']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "http://localhost:11434/api/chat", "model": "codellama", "provider": "ollama" }'
 
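
Each `_finish_modellist` value is a small JSON document, so the endpoint and model for one entry can be read back out with `jq`. A sketch under the assumption that `jq` is installed and that the associative array above has already been declared in the current shell; the key used is the Ollama entry from this hunk.

```bash
# Sketch: extract fields from one _finish_modellist entry.
# Assumes jq is available and the array is declared in this shell.
entry="${_finish_modellist['ollama: codellama']}"
endpoint=$(printf '%s' "$entry" | jq -r '.endpoint')
model=$(printf '%s' "$entry" | jq -r '.model')
echo "model=$model endpoint=$endpoint"
```
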
@@ -246,7 +246,7 @@ log_request() {
 
 openai_completion() {
 local content status_code response_body default_user_input user_input api_key payload endpoint timeout attempt max_attempts
-endpoint=${ACSH_ENDPOINT:-"http://localhost:1234/v1/chat/completions"}
+endpoint=${ACSH_ENDPOINT:-"http://plato.lan:1234/v1/chat/completions"}
 timeout=${ACSH_TIMEOUT:-30}
 default_user_input="Write two to six most likely commands given the provided information"
 user_input=${*:-$default_user_input}
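
The `${ACSH_ENDPOINT:-…}` form means the environment variable wins and the literal URL is only a fallback, so requests go wherever `ACSH_ENDPOINT` points. The following is a hedged sketch of issuing such a request with the same fallback and timeout pattern; it is not the project's actual implementation, and the payload is just the standard OpenAI chat format with the model name from this diff.

```bash
# Sketch only: resolve endpoint/timeout the same way as above, then POST.
endpoint=${ACSH_ENDPOINT:-"http://plato.lan:1234/v1/chat/completions"}
timeout=${ACSH_TIMEOUT:-30}
payload='{"model":"darkidol-llama-3.1-8b-instruct-1.3-uncensored_gguf:2","messages":[{"role":"user","content":"Write two to six most likely commands given the provided information"}]}'
curl -s --max-time "$timeout" -X POST "$endpoint" \
  -H "Content-Type: application/json" \
  -d "$payload"
```
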
@@ -560,7 +560,7 @@ build_config() {
 provider: lmstudio
 model: darkidol-llama-3.1-8b-instruct-1.3-uncensored_gguf:2
 temperature: 0.0
-endpoint: http://localhost:1234/v1/chat/completions
+endpoint: http://plato.lan:1234/v1/chat/completions
 api_prompt_cost: 0.000000
 api_completion_cost: 0.000000
 
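
The block written by `build_config` is plain `key: value` text, so individual settings can be read back with standard tools. A small sketch, assuming the config is stored in a file such as `~/.finish/config`; that path is a guess for illustration, not taken from the diff.

```bash
# Sketch: read one key from a "key: value" style config file.
# ~/.finish/config is an assumed path, used here only as an example.
config_file="$HOME/.finish/config"
endpoint=$(sed -n 's/^endpoint:[[:space:]]*//p' "$config_file")
echo "configured endpoint: $endpoint"
```
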
@@ -9,7 +9,7 @@ setup() {
 
 # Configure for local LM-Studio
 finish config set provider lmstudio
-finish config set endpoint http://localhost:1234/v1/chat/completions
+finish config set endpoint http://plato.lan:1234/v1/chat/completions
 finish config set model darkidol-llama-3.1-8b-instruct-1.3-uncensored_gguf:2
 }
 
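
The `setup()` function above reads like a test fixture that points the suite at a local LM-Studio instance. Below is a hedged sketch of how a Bats-style test could lean on such a fixture; the test name, the `finish config` invocation, and the assertion are illustrative assumptions, not code from the repository.

```bash
#!/usr/bin/env bats
# Illustrative only: a Bats test built around a setup() fixture like the
# one in this diff. Test name and assertions are assumptions.

setup() {
  finish config set provider lmstudio
  finish config set endpoint http://plato.lan:1234/v1/chat/completions
  finish config set model darkidol-llama-3.1-8b-instruct-1.3-uncensored_gguf:2
}

@test "config reports the endpoint selected in setup()" {
  run finish config
  [ "$status" -eq 0 ]
  [[ "$output" == *"plato.lan:1234"* ]]
}
```
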