#!/usr/bin/env bash
set -euo pipefail

# =========================================================================
# Integration test: `finish` CLI against a locally running Ollama server.
# Exits 0 with a SKIP message when prerequisites are missing (no Ollama,
# no pulled model, no jq); exits 1 on any assertion failure.
# =========================================================================

# ----- tiny test framework -----
pass() { printf "✔ %s\n" "$1"; }
fail() { printf "✘ %s\n" "$1"; exit 1; }

# assert_ok <code> <msg> — fail with <msg> unless <code> is 0.
assert_ok() { if [[ "$1" -ne 0 ]]; then fail "$2"; fi }

# assert_nonempty <value> <msg> — fail with <msg> if <value> is empty.
assert_nonempty() { if [[ -z "$1" ]]; then fail "$2"; fi }

# assert_contains <haystack> <regex> <msg> — fail unless <haystack> matches.
assert_contains() { if [[ ! "$1" =~ $2 ]]; then fail "$3"; fi }

# Ensure output does not contain any of the common error phrases.
assert_no_common_errors() {
  local output="$1"
  local -a patterns=(
    "ACSH_ACTIVE_API_KEY not set"
    "Bad Request"
    "Unauthorized"
    "Too Many Requests"
    "Internal Server Error"
    "Unknown Error"
    "Failed to parse completions"
    "SyntaxError"
    "ERROR:"
  )
  local pat
  for pat in "${patterns[@]}"; do
    if grep -qE "$pat" <<<"$output"; then
      printf "Test failed: output contains error pattern '%s'\nFull output follows:\n%s\n" "$pat" "$output"
      exit 1
    fi
  done
}

# run_captured <cmd...> — run a command, capturing stdout+stderr in $out and
# the exit status in $code WITHOUT tripping `set -e` on failure.
# (The naive `out=$(cmd); code=$?` aborts the whole script on a non-zero
#  status before $? can ever be read, making the assert's failure path
#  unreachable.)
run_captured() {
  code=0
  out=$("$@" 2>&1) || code=$?
}
# --------------------------------

echo "=== OLLAMA TEST: SETUP ==="

# Always run from the repo root for predictable paths
SCRIPT_DIR="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR/.."

# Install finish (adds to PATH and shell rc files)
bash ./docs/install.sh main

# Load the updated shell rc so `finish` is available on PATH in this session
if [[ -f ~/.bashrc ]]; then
  # shellcheck disable=SC1090
  source ~/.bashrc || true
fi

# ---- Check prerequisites for local Ollama ----
# jq is needed below to parse the model list; with `pipefail` a missing jq
# would otherwise surface as a confusing pipeline failure instead of a SKIP.
if ! command -v jq >/dev/null 2>&1; then
  echo "SKIP: jq is required to parse the Ollama model list. Install jq and re-run."
  exit 0
fi

OLLAMA_URL=${OLLAMA_URL:-"http://localhost:11434"}
echo "Checking Ollama at $OLLAMA_URL ..."
if ! curl -fsS --max-time 2 "$OLLAMA_URL/api/version" >/dev/null 2>&1; then
  echo "SKIP: Ollama is not reachable at $OLLAMA_URL. Start it with: 'ollama serve' and ensure a model is pulled (e.g., 'ollama pull llama3')."
  exit 0
fi

# Try to discover an installed model; prefer the first listed.
# The `|| echo ""` guard keeps a curl/jq failure from killing the script
# under `set -e`; an empty MODEL falls through to the SKIP branch.
MODEL=$(curl -fsS "$OLLAMA_URL/api/tags" | jq -r '.models[0].name // empty' || echo "")
if [[ -z "$MODEL" ]]; then
  echo "SKIP: No local Ollama models found. Pull one first, e.g.: 'ollama pull llama3:latest'"
  exit 0
fi
echo "Using Ollama model: $MODEL"

# -------------------------------- TESTS --------------------------------

# 1) configure finish to use local Ollama
finish config set provider ollama
finish config set endpoint "$OLLAMA_URL/api/chat"
finish config set model "$MODEL"

# 2) finish should be resolvable on PATH (command -v is the portable check)
run_captured command -v finish
assert_ok "$code" "command -v finish should exit 0"
assert_nonempty "$out" "command -v finish returned empty output"
pass "finish resolves to a path"

# 3) finish output should contain 'finish.sh'
run_captured finish
assert_ok "$code" "finish should exit 0"
assert_contains "$out" "finish\.sh" "finish output does not contain finish.sh"
pass "finish outputs reference to finish.sh"

# 4) config should mention ollama
run_captured finish config
assert_ok "$code" "finish config should exit 0"
assert_contains "$out" "ollama" "finish config missing ollama provider"
pass "finish config contains ollama"

# 5) finish command should run and return output
run_captured finish command "ls # show largest files"
assert_ok "$code" "finish command did not exit 0"
assert_nonempty "$out" "finish command returned empty output"
assert_no_common_errors "$out"
pass "finish command executed and returned output (ollama)"

# ------------------------------- CLEANUP --------------------------------
echo "=== OLLAMA TEST: CLEANUP ==="
finish remove -y || true
echo "Ollama test passed."