213 lines
7.2 KiB
Go
213 lines
7.2 KiB
Go
package service
|
|
|
|
import (
|
|
"context"
|
|
"fmt"
|
|
"log"
|
|
"os"
|
|
"time"
|
|
|
|
"github.com/sony/gobreaker/v2"
|
|
"github.com/zs/InsightReply/internal/service/llm"
|
|
)
|
|
|
|
// AIService routes LLM requests across multiple configured providers,
// guarding each provider with its own circuit breaker and falling back to
// environment-configured defaults when a request does not specify a
// provider/model.
type AIService struct {
	providers map[string]llm.Provider                      // configured providers keyed by name ("openai", "anthropic", "deepseek", "gemini")
	breakers  map[string]*gobreaker.CircuitBreaker[string] // one breaker per provider; same keys as providers

	defaultProvider string // used when a caller does not specify a provider (LLM_PROVIDER env, falls back to "openai")
	defaultModel    string // used when a caller does not specify a model (LLM_MODEL env, falls back to "gpt-4o-mini")
}
|
|
|
|
func NewAIService() *AIService {
|
|
s := &AIService{
|
|
providers: make(map[string]llm.Provider),
|
|
breakers: make(map[string]*gobreaker.CircuitBreaker[string]),
|
|
}
|
|
|
|
// 1. Initialize Providers based on ENV
|
|
if key := os.Getenv("OPENAI_API_KEY"); key != "" {
|
|
s.providers["openai"] = llm.NewOpenAIProvider(key, os.Getenv("OPENAI_BASE_URL"), "openai")
|
|
}
|
|
if key := os.Getenv("ANTHROPIC_API_KEY"); key != "" {
|
|
s.providers["anthropic"] = llm.NewAnthropicProvider(key, os.Getenv("ANTHROPIC_BASE_URL"))
|
|
}
|
|
if key := os.Getenv("DEEPSEEK_API_KEY"); key != "" {
|
|
baseURL := os.Getenv("DEEPSEEK_BASE_URL")
|
|
if baseURL == "" {
|
|
baseURL = "https://api.deepseek.com/v1" // Add v1 as expected by OpenAI SDK compatibility
|
|
}
|
|
s.providers["deepseek"] = llm.NewOpenAIProvider(key, baseURL, "deepseek")
|
|
}
|
|
if key := os.Getenv("GEMINI_API_KEY"); key != "" {
|
|
s.providers["gemini"] = llm.NewGeminiProvider(key, os.Getenv("GEMINI_BASE_URL"))
|
|
}
|
|
|
|
// 2. Initialize Circuit Breakers for each provider
|
|
for name := range s.providers {
|
|
st := gobreaker.Settings{
|
|
Name: name + "_cb",
|
|
MaxRequests: 3, // Requests allowed in half-open state
|
|
Interval: 10 * time.Minute, // Cyclic period for closed state counters
|
|
Timeout: 60 * time.Second, // Open state duration
|
|
ReadyToTrip: func(counts gobreaker.Counts) bool {
|
|
failureRatio := float64(counts.TotalFailures) / float64(counts.Requests)
|
|
return counts.Requests >= 5 && failureRatio >= 0.6 // Trip if 60% fail after 5 reqs
|
|
},
|
|
}
|
|
s.breakers[name] = gobreaker.NewCircuitBreaker[string](st)
|
|
}
|
|
|
|
s.defaultProvider = os.Getenv("LLM_PROVIDER")
|
|
if s.defaultProvider == "" {
|
|
s.defaultProvider = "openai"
|
|
}
|
|
s.defaultModel = os.Getenv("LLM_MODEL")
|
|
if s.defaultModel == "" {
|
|
s.defaultModel = "gpt-4o-mini"
|
|
}
|
|
|
|
return s
|
|
}
|
|
|
|
func (s *AIService) TestConnection(ctx context.Context) (string, error) {
|
|
if len(s.providers) == 0 {
|
|
return "", fmt.Errorf("no LLM providers configured")
|
|
}
|
|
return "Ready (Multi-LLM configured)", nil
|
|
}
|
|
|
|
// GenerateReply dynamically routes to the preferred LLM and uses a fallback chain if it fails.
|
|
func (s *AIService) GenerateReply(ctx context.Context, tweetContent, productContext, userIdentity string, preferredProvider, preferredModel string) (string, error) {
|
|
systemPrompt := "You are a professional X (Twitter) ghostwriter. You MUST respond with valid JSON."
|
|
userPrompt := fmt.Sprintf(`
|
|
You are a social media expert.
|
|
User Identity: %s
|
|
%s
|
|
Target Tweet: "%s"
|
|
|
|
Generate 3 high-quality, distinct replies for X (Twitter) using different strategic angles.
|
|
Suggested angles depending on context: Contrarian, Analytical, Supportive, Data-driven, Founder's Experience, Quote Tweet.
|
|
IMPORTANT: If "Available User Custom Strategies" are provided above, you MUST prioritize using those custom strategy angles for your replies.
|
|
IMPORTANT: If a specific "IMMITATE STYLE" instruction is provided in the Identity or Context, you MUST perfectly clone that linguistic tone.
|
|
|
|
Keep each reply natural, engaging, and under 280 characters. No hashtags unless highly relevant.
|
|
|
|
Respond ONLY with a JSON array in the exact following format, without any markdown formatting wrappers (like markdown code blocks):
|
|
[
|
|
{"strategy": "Name of Strategy 1", "content": "Reply content 1"},
|
|
{"strategy": "Name of Strategy 2", "content": "Reply content 2"},
|
|
{"strategy": "Name of Strategy 3", "content": "Reply content 3"}
|
|
]
|
|
`, userIdentity, productContext, tweetContent)
|
|
|
|
targetProvider := preferredProvider
|
|
if targetProvider == "" {
|
|
targetProvider = s.defaultProvider
|
|
}
|
|
targetModel := preferredModel
|
|
if targetModel == "" {
|
|
targetModel = s.defaultModel
|
|
}
|
|
|
|
// Fallback chain (as designed in IMPLEMENTATION_PLAN: current -> Anthropic -> OpenAI -> Gemini -> DeepSeek)
|
|
fallbackChain := []string{targetProvider, "anthropic", "openai", "gemini", "deepseek"}
|
|
|
|
for _, pName := range fallbackChain {
|
|
provider, ok := s.providers[pName]
|
|
if !ok {
|
|
log.Printf("Provider %s bypassed (not configured)", pName)
|
|
continue
|
|
}
|
|
breaker, ok := s.breakers[pName]
|
|
if !ok {
|
|
continue // Should never happen
|
|
}
|
|
|
|
// Use the target model only on the initially requested provider. For fallbacks, use a safe default model.
|
|
modelToUse := targetModel
|
|
if pName != targetProvider {
|
|
modelToUse = getDefaultModelFor(pName)
|
|
}
|
|
|
|
log.Printf("Routing request to LLM Provider: %s (Model: %s)", pName, modelToUse)
|
|
|
|
// Execute through circuit breaker
|
|
reply, err := breaker.Execute(func() (string, error) {
|
|
// Add a simple 30s timeout per call
|
|
callCtx, cancel := context.WithTimeout(ctx, 30*time.Second)
|
|
defer cancel()
|
|
return provider.GenerateReply(callCtx, modelToUse, systemPrompt, userPrompt)
|
|
})
|
|
|
|
if err == nil {
|
|
return reply, nil // Success
|
|
}
|
|
|
|
log.Printf("Provider %s failed: %v. Attempting next in fallback chain...", pName, err)
|
|
}
|
|
|
|
return "", fmt.Errorf("all providers failed to generate reply")
|
|
}
|
|
|
|
// ExtractStyle consumes a viral AI reply and uses the LLM to reverse-engineer its linguistic fingerprint
|
|
func (s *AIService) ExtractStyle(ctx context.Context, viralReplyContent string) (string, error) {
|
|
systemPrompt := "You are a master linguistic analyst and copywriter."
|
|
userPrompt := fmt.Sprintf(`
|
|
Analyze the following highly successful social media reply:
|
|
"%s"
|
|
|
|
Extract the core stylistic elements that made it successful. Focus on:
|
|
1. Tone (e.g., witty, provocative, deadpan, empathetic)
|
|
2. Sentence structure (e.g., short punchy sentences, questions, bullet points)
|
|
3. Key jargon or vocabulary patterns
|
|
|
|
Provide ONLY a concise, 2-3 sentence description of the style profile that another AI should imitate in the future.
|
|
No conversational filler, just the exact instruction string to append to future system prompts.
|
|
`, viralReplyContent)
|
|
|
|
// Route through our Multi-LLM fallback logic
|
|
// Try OpenAI first, fallback to Anthropic
|
|
providers := []string{"openai", "anthropic", "gemini", "deepseek"}
|
|
|
|
for _, pName := range providers {
|
|
pConf, exists := s.providers[pName]
|
|
cb, cbExists := s.breakers[pName]
|
|
|
|
if !exists || !cbExists {
|
|
continue
|
|
}
|
|
|
|
styleDesc, err := cb.Execute(func() (string, error) {
|
|
// Use a default model for style extraction, as it's not user-facing and can be optimized for cost/speed
|
|
modelToUse := getDefaultModelFor(pName)
|
|
if modelToUse == "" { // Fallback if getDefaultModelFor doesn't have an entry
|
|
modelToUse = "gpt-4o-mini" // A safe default
|
|
}
|
|
return pConf.GenerateReply(ctx, modelToUse, systemPrompt, userPrompt)
|
|
})
|
|
|
|
if err == nil && styleDesc != "" {
|
|
return styleDesc, nil
|
|
}
|
|
log.Printf("Provider %s failed to extract style: %v. Attempting next...", pName, err)
|
|
}
|
|
|
|
return "", fmt.Errorf("failed to extract style from any provider")
|
|
}
|
|
|
|
// getDefaultModelFor returns the cost-effective default model name for the
// given provider. Unknown providers yield the empty string.
func getDefaultModelFor(provider string) string {
	defaults := map[string]string{
		"openai":    "gpt-4o-mini",
		"anthropic": "claude-3-5-haiku-latest",
		"deepseek":  "deepseek-chat",
		"gemini":    "gemini-2.5-flash",
	}
	return defaults[provider] // missing keys return ""
}
|