// vetrag/openrouter_integration_test.go
package main
import (
"context"
"encoding/json"
"net/http"
"net/http/httptest"
"strings"
"testing"
)
// TestLLMClient_OpenRouterStyle_ExtractKeywords verifies that a successful
// OpenAI/OpenRouter-style chat-completions response is parsed into the
// expected map of translate/keyword/animal values.
func TestLLMClient_OpenRouterStyle_ExtractKeywords(t *testing.T) {
	// Snapshot the global config and restore it when the test finishes.
	saved := appConfig
	defer func() { appConfig = saved }()
	appConfig.LLM.ExtractKeywordsPrompt = "Dummy {{.Message}}" // minimal template

	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		switch {
		case r.URL.Path != "/v1/chat/completions":
			w.WriteHeader(http.StatusNotFound)
		case r.Header.Get("Authorization") == "":
			// The client is expected to send an Authorization header.
			w.WriteHeader(http.StatusUnauthorized)
		default:
			w.Header().Set("Content-Type", "application/json")
			payload := map[string]interface{}{
				"choices": []map[string]interface{}{
					{
						"message": map[string]interface{}{
							"role":    "assistant",
							"content": `{"translate":"dog has diarrhea","keyword":["diarrhea","digestive"],"animal":"dog"}`,
						},
					},
				},
			}
			json.NewEncoder(w).Encode(payload)
		}
	}))
	defer srv.Close()

	client := NewLLMClient("test-key", srv.URL+"/v1/chat/completions", "meta-llama/test")
	res, err := client.ExtractKeywords(context.Background(), "kutya hasmenés")
	if err != nil {
		te(t, "unexpected error: %v", err)
	}
	if res["translate"] != "dog has diarrhea" {
		te(t, "translate mismatch: %v", res["translate"])
	}
	kw, ok := res["keyword"].([]interface{})
	if !ok || len(kw) != 2 || kw[0] != "diarrhea" {
		te(t, "keyword list mismatch: %#v", res["keyword"])
	}
	if res["animal"] != "dog" {
		te(t, "animal mismatch: %v", res["animal"])
	}
}
// TestLLMClient_OpenRouterStyle_Error verifies that an OpenAI/OpenRouter-style
// error payload (HTTP 429) surfaces the server-provided message in the
// returned error.
func TestLLMClient_OpenRouterStyle_Error(t *testing.T) {
	// Snapshot the global config and restore it when the test finishes.
	saved := appConfig
	defer func() { appConfig = saved }()
	appConfig.LLM.ExtractKeywordsPrompt = "Dummy {{.Message}}"

	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		w.WriteHeader(http.StatusTooManyRequests)
		body := map[string]interface{}{
			"error": map[string]interface{}{
				"message": "Rate limit",
				"type":    "rate_limit",
			},
		}
		json.NewEncoder(w).Encode(body)
	}))
	defer srv.Close()

	client := NewLLMClient("test-key", srv.URL+"/v1/chat/completions", "meta-llama/test")
	if _, err := client.ExtractKeywords(context.Background(), "test"); err == nil || !contains(err.Error(), "Rate limit") {
		te(t, "expected rate limit error, got: %v", err)
	}
}
// --- helpers ---
// contains reports whether needle occurs within haystack.
func contains(haystack, needle string) bool {
	return strings.Contains(haystack, needle)
}
// te fails the current test immediately with the formatted message, marking
// itself as a helper so the failure is attributed to the caller's line.
func te(t *testing.T, format string, args ...interface{}) {
	t.Helper()
	t.Fatalf(format, args...)
}