package llm

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"time"
)

// defaultGeminiBaseURL is the public Generative Language API models endpoint.
const defaultGeminiBaseURL = "https://generativelanguage.googleapis.com/v1beta/models"

// GeminiProvider calls the Google Gemini generateContent REST API.
type GeminiProvider struct {
	apiKey  string
	baseURL string
	client  *http.Client
}

// NewGeminiProvider returns a provider that authenticates with apiKey.
// An empty baseURL falls back to the public Gemini endpoint.
func NewGeminiProvider(apiKey, baseURL string) *GeminiProvider {
	if baseURL == "" {
		baseURL = defaultGeminiBaseURL
	}
	return &GeminiProvider{
		apiKey:  apiKey,
		baseURL: baseURL,
		// A client-level timeout guards against a hung endpoint even when
		// the caller's ctx carries no deadline.
		client: &http.Client{Timeout: 60 * time.Second},
	}
}

// Name identifies this provider.
func (p *GeminiProvider) Name() string { return "gemini" }

// GenerateReply sends systemPrompt and userPrompt to the given Gemini model
// and returns the text of the first candidate's first part. It returns an
// error on transport failure, a non-200 response, or an empty candidate list.
func (p *GeminiProvider) GenerateReply(ctx context.Context, model string, systemPrompt, userPrompt string) (string, error) {
	url := fmt.Sprintf("%s/%s:generateContent", p.baseURL, model)

	reqBody := map[string]interface{}{
		"systemInstruction": map[string]interface{}{
			"parts": []map[string]interface{}{
				{"text": systemPrompt},
			},
		},
		"contents": []map[string]interface{}{
			{
				"role": "user",
				"parts": []map[string]interface{}{
					{"text": userPrompt},
				},
			},
		},
	}
	bs, err := json.Marshal(reqBody)
	if err != nil {
		return "", fmt.Errorf("encoding gemini request: %w", err)
	}

	req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(bs))
	if err != nil {
		return "", err
	}
	req.Header.Set("Content-Type", "application/json")
	// Send the key as a header rather than a ?key= query parameter so it
	// cannot leak into access logs or error messages containing the URL.
	req.Header.Set("x-goog-api-key", p.apiKey)

	resp, err := p.client.Do(req)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		body, _ := io.ReadAll(resp.Body) // best-effort: include body in the error
		return "", fmt.Errorf("gemini error %d: %s", resp.StatusCode, string(body))
	}

	var result struct {
		Candidates []struct {
			Content struct {
				Parts []struct {
					Text string `json:"text"`
				} `json:"parts"`
			} `json:"content"`
		} `json:"candidates"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
		return "", err
	}
	if len(result.Candidates) == 0 || len(result.Candidates[0].Content.Parts) == 0 {
		return "", fmt.Errorf("gemini returned empty content")
	}
	return result.Candidates[0].Content.Parts[0].Text, nil
}