325 lines
9.9 KiB
Go
325 lines
9.9 KiB
Go
package main
|
|
|
|
import (
|
|
"bytes"
|
|
"context"
|
|
"encoding/json"
|
|
"fmt"
|
|
"io"
|
|
"net/http"
|
|
"strings"
|
|
"time"
|
|
|
|
"github.com/sirupsen/logrus"
|
|
)
|
|
|
|
// --- OpenAIClient implementation ---
|
|
|
|
// OpenAIClient talks to an OpenAI-compatible chat-completion endpoint
// (api.openai.com, OpenRouter, or any "/v1/"-style API) and is used for
// keyword extraction, disambiguation, embeddings, and translation.
type OpenAIClient struct {
	// APIKey is sent as a Bearer token when non-empty.
	APIKey string
	// BaseURL overrides the default OpenAI endpoint when non-empty.
	BaseURL string
	// Model is the model identifier placed in every request body.
	Model string
	// Repo provides chat persistence; stored but not used by the
	// methods visible in this file — TODO confirm usage elsewhere.
	Repo ChatRepositoryAPI
}
|
|
|
|
func NewOpenAIClient(apiKey, baseURL, model string, repo ChatRepositoryAPI) *OpenAIClient {
|
|
return &OpenAIClient{APIKey: apiKey, BaseURL: baseURL, Model: model, Repo: repo}
|
|
}
|
|
|
|
func (llm *OpenAIClient) ExtractKeywords(ctx context.Context, message string) (map[string]interface{}, error) {
|
|
_, parsed, err := llm.ExtractKeywordsRaw(ctx, message)
|
|
return parsed, err
|
|
}
|
|
|
|
func (llm *OpenAIClient) ExtractKeywordsRaw(ctx context.Context, message string) (string, map[string]interface{}, error) {
|
|
prompt, err := renderPrompt(appConfig.LLM.ExtractKeywordsPrompt, map[string]string{"Message": message})
|
|
if err != nil {
|
|
logrus.WithError(err).Error("[CONFIG] Failed to render ExtractKeywords prompt")
|
|
return "", nil, err
|
|
}
|
|
logrus.WithField("prompt", prompt).Info("[LLM] ExtractKeywords prompt")
|
|
|
|
// Use the utility function instead of inline format definition
|
|
format := GetExtractKeywordsFormat()
|
|
|
|
resp, err := llm.openAICompletion(ctx, prompt, format)
|
|
logrus.WithFields(logrus.Fields{"response": resp, "err": err}).Info("[LLM] ExtractKeywords response")
|
|
if err != nil {
|
|
return resp, nil, err
|
|
}
|
|
var result map[string]interface{}
|
|
if err := json.Unmarshal([]byte(resp), &result); err != nil {
|
|
return resp, nil, err
|
|
}
|
|
return resp, result, nil
|
|
}
|
|
|
|
func (llm *OpenAIClient) DisambiguateBestMatch(ctx context.Context, message string, candidates []Visit) (string, error) {
|
|
_, vr, err := llm.DisambiguateBestMatchRaw(ctx, message, candidates)
|
|
return vr, err
|
|
}
|
|
|
|
func (llm *OpenAIClient) DisambiguateBestMatchRaw(ctx context.Context, message string, candidates []Visit) (string, string, error) {
|
|
// Use the utility function instead of inline format definition
|
|
format := GetDisambiguateFormat()
|
|
|
|
entries, _ := json.Marshal(candidates)
|
|
prompt, err := renderPrompt(appConfig.LLM.DisambiguatePrompt, map[string]string{"Entries": string(entries), "Message": message})
|
|
if err != nil {
|
|
logrus.WithError(err).Error("[CONFIG] Failed to render Disambiguate prompt")
|
|
return "", "", err
|
|
}
|
|
logrus.WithField("prompt", prompt).Info("[LLM] DisambiguateBestMatch prompt")
|
|
resp, err := llm.openAICompletion(ctx, prompt, format)
|
|
logrus.WithFields(logrus.Fields{"response": resp, "err": err}).Info("[LLM] DisambiguateBestMatch response")
|
|
if err != nil {
|
|
return resp, "", err
|
|
}
|
|
var parsed map[string]string
|
|
if err := json.Unmarshal([]byte(resp), &parsed); err != nil {
|
|
return resp, "", fmt.Errorf("failed to unmarshal disambiguation response: %w", err)
|
|
}
|
|
visitReason := strings.TrimSpace(parsed["visitReason"])
|
|
if visitReason == "" {
|
|
return resp, "", fmt.Errorf("visitReason not found in response")
|
|
}
|
|
return resp, visitReason, nil
|
|
}
|
|
|
|
func (llm *OpenAIClient) openAICompletion(ctx context.Context, prompt string, format map[string]interface{}) (string, error) {
|
|
apiURL := llm.BaseURL
|
|
if apiURL == "" {
|
|
apiURL = "https://api.openai.com/v1/chat/completions"
|
|
}
|
|
|
|
isOpenAIStyle := strings.Contains(apiURL, "openrouter.ai") || strings.Contains(apiURL, "/v1/")
|
|
|
|
// Helper to stringify the expected JSON schema for instructions
|
|
schemaDesc := func() string {
|
|
b, _ := json.MarshalIndent(format, "", " ")
|
|
return string(b)
|
|
}
|
|
|
|
truncate := func(s string, n int) string {
|
|
if len(s) <= n {
|
|
return s
|
|
}
|
|
return s[:n] + "...<truncated>"
|
|
}
|
|
|
|
buildBody := func() map[string]interface{} {
|
|
if isOpenAIStyle {
|
|
return map[string]interface{}{
|
|
"model": llm.Model,
|
|
"messages": []map[string]string{
|
|
{"role": "system", "content": "You are a strict JSON generator. ONLY output valid JSON matching this schema: " + schemaDesc() + " Do not add explanations."},
|
|
{"role": "user", "content": prompt},
|
|
},
|
|
"response_format": map[string]interface{}{"type": "json_object"},
|
|
}
|
|
}
|
|
// This should never be reached in OpenAI client but keeping for safety
|
|
return map[string]interface{}{
|
|
"model": llm.Model,
|
|
"messages": []map[string]string{{"role": "user", "content": prompt}},
|
|
"stream": false,
|
|
"format": format,
|
|
}
|
|
}
|
|
|
|
body := buildBody()
|
|
|
|
// Enhanced logging similar to the unified client
|
|
jsonBody, _ := json.Marshal(body)
|
|
bodySize := len(jsonBody)
|
|
logrus.WithFields(logrus.Fields{
|
|
"event": "llm_request",
|
|
"api_url": apiURL,
|
|
"model": llm.Model,
|
|
"is_openai_style": isOpenAIStyle,
|
|
"prompt_len": len(prompt),
|
|
"body_size": bodySize,
|
|
}).Info("[LLM] sending request")
|
|
|
|
req, _ := http.NewRequestWithContext(ctx, http.MethodPost, apiURL, bytes.NewBuffer(jsonBody))
|
|
if llm.APIKey != "" {
|
|
req.Header.Set("Authorization", "Bearer "+llm.APIKey)
|
|
}
|
|
req.Header.Set("Content-Type", "application/json")
|
|
req.Header.Set("Accept", "application/json")
|
|
if strings.Contains(apiURL, "openrouter.ai") {
|
|
req.Header.Set("Referer", "https://github.com/")
|
|
req.Header.Set("X-Title", "vetrag-app")
|
|
}
|
|
|
|
start := time.Now()
|
|
client := &http.Client{}
|
|
resp, err := client.Do(req)
|
|
dur := time.Since(start)
|
|
|
|
if err != nil {
|
|
logrus.WithFields(logrus.Fields{
|
|
"event": "llm_response",
|
|
"status": 0,
|
|
"latency_ms": dur.Milliseconds(),
|
|
"error": err,
|
|
}).Error("[LLM] request failed")
|
|
return "", err
|
|
}
|
|
|
|
defer resp.Body.Close()
|
|
raw, err := io.ReadAll(resp.Body)
|
|
if err != nil {
|
|
return "", err
|
|
}
|
|
|
|
logrus.WithFields(logrus.Fields{
|
|
"event": "llm_raw_response",
|
|
"status": resp.StatusCode,
|
|
"latency_ms": dur.Milliseconds(),
|
|
"raw_trunc": truncate(string(raw), 600),
|
|
"raw_len": len(raw),
|
|
}).Debug("[LLM] raw response body")
|
|
|
|
parseVariant := "unknown"
|
|
|
|
// Attempt OpenAI/OpenRouter style parse first
|
|
var openAI struct {
|
|
Choices []struct {
|
|
Message struct {
|
|
Content string `json:"content"`
|
|
} `json:"message"`
|
|
} `json:"choices"`
|
|
Error *struct {
|
|
Message string `json:"message"`
|
|
Type string `json:"type"`
|
|
} `json:"error"`
|
|
}
|
|
if err := json.Unmarshal(raw, &openAI); err == nil {
|
|
if openAI.Error != nil || resp.StatusCode >= 400 {
|
|
parseVariant = "openai"
|
|
var msg string
|
|
if openAI.Error != nil {
|
|
msg = openAI.Error.Message
|
|
} else {
|
|
msg = string(raw)
|
|
}
|
|
logrus.WithFields(logrus.Fields{
|
|
"event": "llm_response",
|
|
"status": resp.StatusCode,
|
|
"latency_ms": dur.Milliseconds(),
|
|
"parse_variant": parseVariant,
|
|
"error": msg,
|
|
}).Error("[LLM] provider error")
|
|
return "", fmt.Errorf("provider error: %s", msg)
|
|
}
|
|
if len(openAI.Choices) > 0 && openAI.Choices[0].Message.Content != "" {
|
|
parseVariant = "openai"
|
|
content := openAI.Choices[0].Message.Content
|
|
logrus.WithFields(logrus.Fields{
|
|
"event": "llm_response",
|
|
"status": resp.StatusCode,
|
|
"latency_ms": dur.Milliseconds(),
|
|
"parse_variant": parseVariant,
|
|
"content_len": len(content),
|
|
"content_snip": truncate(content, 300),
|
|
}).Info("[LLM] parsed response")
|
|
return content, nil
|
|
}
|
|
}
|
|
|
|
// As a fallback, attempt Ollama format parse
|
|
var ollama struct {
|
|
Message struct {
|
|
Content string `json:"content"`
|
|
} `json:"message"`
|
|
Error string `json:"error"`
|
|
}
|
|
if err := json.Unmarshal(raw, &ollama); err == nil && ollama.Message.Content != "" {
|
|
parseVariant = "ollama"
|
|
content := ollama.Message.Content
|
|
logrus.WithFields(logrus.Fields{
|
|
"event": "llm_response",
|
|
"status": resp.StatusCode,
|
|
"latency_ms": dur.Milliseconds(),
|
|
"parse_variant": parseVariant,
|
|
"content_len": len(content),
|
|
"content_snip": truncate(content, 300),
|
|
}).Info("[LLM] parsed response")
|
|
return content, nil
|
|
}
|
|
|
|
logrus.WithFields(logrus.Fields{
|
|
"event": "llm_response",
|
|
"status": resp.StatusCode,
|
|
"latency_ms": dur.Milliseconds(),
|
|
"parse_variant": parseVariant,
|
|
"raw_snip": truncate(string(raw), 300),
|
|
}).Error("[LLM] unrecognized response format")
|
|
|
|
return "", fmt.Errorf("unrecognized LLM response format: %.200s", string(raw))
|
|
}
|
|
|
|
func (llm *OpenAIClient) GetEmbeddings(ctx context.Context, input string) ([]float64, error) {
|
|
apiURL := llm.BaseURL
|
|
if apiURL == "" {
|
|
apiURL = "https://api.openai.com/v1/embeddings"
|
|
}
|
|
body := map[string]interface{}{
|
|
"model": llm.Model,
|
|
"input": input,
|
|
}
|
|
jsonBody, _ := json.Marshal(body)
|
|
req, _ := http.NewRequestWithContext(ctx, http.MethodPost, apiURL, bytes.NewBuffer(jsonBody))
|
|
if llm.APIKey != "" {
|
|
req.Header.Set("Authorization", "Bearer "+llm.APIKey)
|
|
}
|
|
req.Header.Set("Content-Type", "application/json")
|
|
req.Header.Set("Accept", "application/json")
|
|
if strings.Contains(apiURL, "openrouter.ai") {
|
|
req.Header.Set("Referer", "https://github.com/")
|
|
req.Header.Set("X-Title", "vetrag-app")
|
|
}
|
|
client := &http.Client{}
|
|
resp, err := client.Do(req)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
defer resp.Body.Close()
|
|
raw, err := io.ReadAll(resp.Body)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
var openAI struct {
|
|
Data []struct {
|
|
Embedding []float64 `json:"embedding"`
|
|
} `json:"data"`
|
|
Error *struct {
|
|
Message string `json:"message"`
|
|
} `json:"error"`
|
|
}
|
|
if err := json.Unmarshal(raw, &openAI); err == nil && len(openAI.Data) > 0 {
|
|
return openAI.Data[0].Embedding, nil
|
|
}
|
|
if openAI.Error != nil {
|
|
return nil, fmt.Errorf("embedding error: %s", openAI.Error.Message)
|
|
}
|
|
return nil, fmt.Errorf("unrecognized embedding response: %.200s", string(raw))
|
|
}
|
|
|
|
func (llm *OpenAIClient) TranslateToEnglish(ctx context.Context, message string) (string, error) {
|
|
prompt, err := renderPrompt(appConfig.LLM.TranslatePrompt, map[string]string{"Message": message})
|
|
if err != nil {
|
|
logrus.WithError(err).Error("[CONFIG] Failed to render Translate prompt")
|
|
return "", err
|
|
}
|
|
logrus.WithField("prompt", prompt).Info("[LLM] TranslateToEnglish prompt")
|
|
|
|
resp, err := llm.openAICompletion(ctx, prompt, nil)
|
|
logrus.WithFields(logrus.Fields{"response": resp, "err": err}).Info("[LLM] TranslateToEnglish response")
|
|
if err != nil {
|
|
return resp, err
|
|
}
|
|
return strings.TrimSpace(resp), nil
|
|
}
|