#!/usr/bin/env bats
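
# Integration tests for finish.sh: install it from the main branch, point it
# at a local LM Studio endpoint, and exercise the basic CLI surface.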

setup() {
    # Install finish.sh and run the tests against the main branch
    wget -qO- https://git.appmodel.nl/Tour/finish/raw/branch/main/docs/install.sh | bash -s -- main

    # Source bashrc so that finish is available in the current session
    source ~/.bashrc

    # Configure for a local LM Studio instance
    finish config set provider lmstudio
    finish config set endpoint http://localhost:1234/v1/chat/completions
    finish config set model darkidol-llama-3.1-8b-instruct-1.3-uncensored_gguf:2
}

teardown() {
    # Remove the finish.sh installation
    finish remove -y
}

@test "which finish returns something" {
    run which finish
    [ "$status" -eq 0 ]
    [ -n "$output" ]
}

@test "finish returns a string containing finish.sh (case insensitive)" {
    run finish
    [ "$status" -eq 0 ]
    [[ "$output" =~ [Ff]inish\.sh ]]
}

@test "finish config should have lmstudio provider" {
    run finish config
    [ "$status" -eq 0 ]
    [[ "$output" =~ lmstudio ]]
}
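
# The command test below calls the configured endpoint, so it assumes a local
# LM Studio server is reachable at http://localhost:1234 with the model loaded.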

@test "finish command 'ls # show largest files' should return something" {
    run finish command "ls # show largest files"
    [ "$status" -eq 0 ]
    [ -n "$output" ]
}