# Makefile for running the Vet Clinic Chat Assistant locally with Ollama.
#
# Typical workflow:
#   make ollama-start   # launch the local Ollama server
#   make run            # pull the model, then start the Go server
#   make test           # run the Go test suite

# Chat model used by both `ollama-pull` and `run`.
# Override from the command line: `make run MODEL=llama3`
MODEL := qwen3:latest

.PHONY: run ollama-start ollama-stop ollama-pull ollama-status

# Start Ollama server (if not already running).
# NOTE: backgrounded with `&`; each recipe line gets its own shell, so the
# server is left running after make exits.
ollama-start:
	ollama serve &
	@echo "Ollama server started."

# Stop Ollama server. `|| true` keeps the target from failing when no
# server process is found.
ollama-stop:
	pkill -f "ollama serve" || true
	@echo "Ollama server stopped."

# Pull the chat model (default: qwen3:latest).
ollama-pull:
	ollama pull $(MODEL)

# Show locally available Ollama models.
ollama-status:
	ollama list

# Run the Go server. Assumes the Ollama server is already running
# (the `ollama-pull` prerequisite also requires it to be up).
# NOTE(review): the base URL points at Ollama's native /api/chat endpoint,
# not the OpenAI-compatible /v1 — confirm this matches what the Go client
# expects.
run: ollama-pull
	OPENAI_API_KEY=ollama OPENAI_BASE_URL=http://localhost:11434/api/chat OPENAI_MODEL=$(MODEL) go run .

# Run tests.
.PHONY: test
test:
	go test ./...