// Tests for OpenAI/OpenRouter-style LLM client response parsing.

package main
import (
	"context"
	"encoding/json"
	"net/http"
	"net/http/httptest"
	"strings"
	"testing"
)
// Test OpenAI/OpenRouter style success response parsing
|
|
func TestLLMClient_OpenRouterStyle_ExtractKeywords(t *testing.T) {
|
|
// Save and restore original config
|
|
orig := appConfig
|
|
defer func() { appConfig = orig }()
|
|
|
|
appConfig.LLM.ExtractKeywordsPrompt = "Dummy {{.Message}}" // simple template
|
|
|
|
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
|
// Format the response exactly as the OpenAI API would
|
|
w.Header().Set("Content-Type", "application/json")
|
|
w.WriteHeader(http.StatusOK)
|
|
resp := map[string]interface{}{
|
|
"choices": []map[string]interface{}{
|
|
{
|
|
"message": map[string]interface{}{
|
|
"role": "assistant",
|
|
"content": `{"translate":"dog has diarrhea","keyword":["diarrhea","digestive"],"animal":"dog"}`,
|
|
},
|
|
"index": 0,
|
|
},
|
|
},
|
|
"id": "test-id",
|
|
"object": "chat.completion",
|
|
"created": 1717585613,
|
|
"model": "meta-llama/test",
|
|
"usage": map[string]interface{}{
|
|
"prompt_tokens": 50,
|
|
"completion_tokens": 20,
|
|
"total_tokens": 70,
|
|
},
|
|
}
|
|
json.NewEncoder(w).Encode(resp)
|
|
}))
|
|
defer ts.Close()
|
|
|
|
// Pass the server URL directly (not adding /v1 as that causes issues)
|
|
llm := NewOpenAIClient("test-key", ts.URL, "meta-llama/test", nil)
|
|
res, err := llm.ExtractKeywords(context.Background(), "kutya hasmenés")
|
|
if err != nil {
|
|
te(t, "unexpected error: %v", err)
|
|
}
|
|
if res["translate"] != "dog has diarrhea" {
|
|
te(t, "translate mismatch: %v", res["translate"])
|
|
}
|
|
kw, ok := res["keyword"].([]interface{})
|
|
if !ok || len(kw) != 2 || kw[0] != "diarrhea" {
|
|
te(t, "keyword list mismatch: %#v", res["keyword"])
|
|
}
|
|
if res["animal"] != "dog" {
|
|
te(t, "animal mismatch: %v", res["animal"])
|
|
}
|
|
}
// Test OpenAI/OpenRouter style error response handling
|
|
func TestLLMClient_OpenRouterStyle_Error(t *testing.T) {
|
|
orig := appConfig
|
|
defer func() { appConfig = orig }()
|
|
appConfig.LLM.ExtractKeywordsPrompt = "Dummy {{.Message}}"
|
|
|
|
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
|
// Simulate a rate limit error response from OpenAI API
|
|
w.Header().Set("Content-Type", "application/json")
|
|
w.WriteHeader(http.StatusTooManyRequests)
|
|
json.NewEncoder(w).Encode(map[string]interface{}{
|
|
"error": map[string]interface{}{
|
|
"message": "Rate limit exceeded, please try again in 20ms",
|
|
"type": "rate_limit_exceeded",
|
|
"param": nil,
|
|
"code": "rate_limit_exceeded",
|
|
},
|
|
})
|
|
}))
|
|
defer ts.Close()
|
|
|
|
// Use the same URL structure as the success test
|
|
llm := NewOpenAIClient("test-key", ts.URL, "meta-llama/test", nil)
|
|
_, err := llm.ExtractKeywords(context.Background(), "test")
|
|
if err == nil || !contains(err.Error(), "Rate limit") {
|
|
te(t, "expected rate limit error, got: %v", err)
|
|
}
|
|
}
// --- helpers ---
|
|
func contains(haystack, needle string) bool { return strings.Contains(haystack, needle) }
|
|
func te(t *testing.T, format string, args ...interface{}) { t.Helper(); t.Fatalf(format, args...) }
|