feat: 部署初版测试
Some checks failed
Extension Build & Release / build (push) Failing after 1m5s
Backend Deploy (Go + Docker) / deploy (push) Failing after 1m40s
Web Console Deploy (Vue 3 + Vite) / deploy (push) Has been cancelled

This commit is contained in:
zs
2026-03-02 21:25:21 +08:00
parent db3abb3174
commit 8cf6cb944b
97 changed files with 10250 additions and 209 deletions

View File

@@ -3,57 +3,210 @@ package service
import (
"context"
"fmt"
"log"
"os"
"time"
"github.com/sashabaranov/go-openai"
"github.com/sony/gobreaker/v2"
"github.com/zs/InsightReply/internal/service/llm"
)
// AIService routes reply-generation requests across multiple LLM providers,
// guarding each provider with its own circuit breaker.
type AIService struct {
	// NOTE(review): nothing in the new multi-provider constructor assigns this
	// field — it looks like a leftover from the single-provider refactor;
	// confirm no remaining caller uses it and remove.
	client *openai.Client
	// providers maps a provider key ("openai", "anthropic", "deepseek",
	// "gemini") to its configured adapter.
	providers map[string]llm.Provider
	// breakers holds one circuit breaker per configured provider,
	// keyed identically to providers.
	breakers map[string]*gobreaker.CircuitBreaker[string]
	// defaultProvider / defaultModel are the route used when a caller does not
	// express a preference (populated from LLM_PROVIDER / LLM_MODEL env vars).
	defaultProvider string
	defaultModel    string
}
func NewAIService(apiKey string) *AIService {
return &AIService{
client: openai.NewClient(apiKey),
func NewAIService() *AIService {
s := &AIService{
providers: make(map[string]llm.Provider),
breakers: make(map[string]*gobreaker.CircuitBreaker[string]),
}
// 1. Initialize Providers based on ENV
if key := os.Getenv("OPENAI_API_KEY"); key != "" {
s.providers["openai"] = llm.NewOpenAIProvider(key, os.Getenv("OPENAI_BASE_URL"), "openai")
}
if key := os.Getenv("ANTHROPIC_API_KEY"); key != "" {
s.providers["anthropic"] = llm.NewAnthropicProvider(key, os.Getenv("ANTHROPIC_BASE_URL"))
}
if key := os.Getenv("DEEPSEEK_API_KEY"); key != "" {
baseURL := os.Getenv("DEEPSEEK_BASE_URL")
if baseURL == "" {
baseURL = "https://api.deepseek.com/v1" // Add v1 as expected by OpenAI SDK compatibility
}
s.providers["deepseek"] = llm.NewOpenAIProvider(key, baseURL, "deepseek")
}
if key := os.Getenv("GEMINI_API_KEY"); key != "" {
s.providers["gemini"] = llm.NewGeminiProvider(key, os.Getenv("GEMINI_BASE_URL"))
}
// 2. Initialize Circuit Breakers for each provider
for name := range s.providers {
st := gobreaker.Settings{
Name: name + "_cb",
MaxRequests: 3, // Requests allowed in half-open state
Interval: 10 * time.Minute, // Cyclic period for closed state counters
Timeout: 60 * time.Second, // Open state duration
ReadyToTrip: func(counts gobreaker.Counts) bool {
failureRatio := float64(counts.TotalFailures) / float64(counts.Requests)
return counts.Requests >= 5 && failureRatio >= 0.6 // Trip if 60% fail after 5 reqs
},
}
s.breakers[name] = gobreaker.NewCircuitBreaker[string](st)
}
s.defaultProvider = os.Getenv("LLM_PROVIDER")
if s.defaultProvider == "" {
s.defaultProvider = "openai"
}
s.defaultModel = os.Getenv("LLM_MODEL")
if s.defaultModel == "" {
s.defaultModel = "gpt-4o-mini"
}
return s
}
// TestConnection reports whether at least one LLM provider was configured at
// startup; it performs no network call.
//
// Fix: the collapsed diff left the old body's unconditional
// `return "Ready", nil` above the new check, which made the
// "no providers" branch unreachable. The stray early return is removed.
func (s *AIService) TestConnection(ctx context.Context) (string, error) {
	if len(s.providers) == 0 {
		return "", fmt.Errorf("no LLM providers configured")
	}
	return "Ready (Multi-LLM configured)", nil
}
func (s *AIService) GenerateReply(ctx context.Context, tweetContent string, strategy string, userIdentity string) (string, error) {
prompt := fmt.Sprintf(`
// GenerateReply dynamically routes to the preferred LLM and uses a fallback chain if it fails.
func (s *AIService) GenerateReply(ctx context.Context, tweetContent, productContext, userIdentity string, preferredProvider, preferredModel string) (string, error) {
systemPrompt := "You are a professional X (Twitter) ghostwriter. You MUST respond with valid JSON."
userPrompt := fmt.Sprintf(`
You are a social media expert.
User Identity: %s
%s
Target Tweet: "%s"
Strategy: %s
Generate a high-quality reply for X (Twitter).
Keep it natural, engaging, and under 280 characters.
Do not use quotes around the reply.
`, userIdentity, tweetContent, strategy)
Generate 3 high-quality, distinct replies for X (Twitter) using different strategic angles.
Suggested angles depending on context: Contrarian, Analytical, Supportive, Data-driven, Founder's Experience, Quote Tweet.
IMPORTANT: If "Available User Custom Strategies" are provided above, you MUST prioritize using those custom strategy angles for your replies.
IMPORTANT: If a specific "IMMITATE STYLE" instruction is provided in the Identity or Context, you MUST perfectly clone that linguistic tone.
Keep each reply natural, engaging, and under 280 characters. No hashtags unless highly relevant.
Respond ONLY with a JSON array in the exact following format, without any markdown formatting wrappers (like markdown code blocks):
[
{"strategy": "Name of Strategy 1", "content": "Reply content 1"},
{"strategy": "Name of Strategy 2", "content": "Reply content 2"},
{"strategy": "Name of Strategy 3", "content": "Reply content 3"}
]
`, userIdentity, productContext, tweetContent)
resp, err := s.client.CreateChatCompletion(
ctx,
openai.ChatCompletionRequest{
Model: openai.GPT4oMini,
Messages: []openai.ChatCompletionMessage{
{
Role: openai.ChatMessageRoleSystem,
Content: "You are a professional X (Twitter) ghostwriter.",
},
{
Role: openai.ChatMessageRoleUser,
Content: prompt,
},
},
},
)
if err != nil {
return "", fmt.Errorf("failed to generate reply: %w", err)
targetProvider := preferredProvider
if targetProvider == "" {
targetProvider = s.defaultProvider
}
targetModel := preferredModel
if targetModel == "" {
targetModel = s.defaultModel
}
return resp.Choices[0].Message.Content, nil
// Fallback chain (as designed in IMPLEMENTATION_PLAN: current -> Anthropic -> OpenAI -> Gemini -> DeepSeek)
fallbackChain := []string{targetProvider, "anthropic", "openai", "gemini", "deepseek"}
for _, pName := range fallbackChain {
provider, ok := s.providers[pName]
if !ok {
log.Printf("Provider %s bypassed (not configured)", pName)
continue
}
breaker, ok := s.breakers[pName]
if !ok {
continue // Should never happen
}
// Use the target model only on the initially requested provider. For fallbacks, use a safe default model.
modelToUse := targetModel
if pName != targetProvider {
modelToUse = getDefaultModelFor(pName)
}
log.Printf("Routing request to LLM Provider: %s (Model: %s)", pName, modelToUse)
// Execute through circuit breaker
reply, err := breaker.Execute(func() (string, error) {
// Add a simple 30s timeout per call
callCtx, cancel := context.WithTimeout(ctx, 30*time.Second)
defer cancel()
return provider.GenerateReply(callCtx, modelToUse, systemPrompt, userPrompt)
})
if err == nil {
return reply, nil // Success
}
log.Printf("Provider %s failed: %v. Attempting next in fallback chain...", pName, err)
}
return "", fmt.Errorf("all providers failed to generate reply")
}
// ExtractStyle consumes a viral AI reply and uses the LLM to reverse-engineer
// its linguistic fingerprint, returning a short instruction string suitable
// for appending to future system prompts.
//
// Providers are tried in a fixed order (openai -> anthropic -> gemini ->
// deepseek), each through its circuit breaker with the provider's cheap
// default model. Consistency fix: each call is now bounded to 30s via a
// derived context, matching GenerateReply, instead of running on the raw ctx
// with no deadline.
func (s *AIService) ExtractStyle(ctx context.Context, viralReplyContent string) (string, error) {
	systemPrompt := "You are a master linguistic analyst and copywriter."
	userPrompt := fmt.Sprintf(`
Analyze the following highly successful social media reply:
"%s"
Extract the core stylistic elements that made it successful. Focus on:
1. Tone (e.g., witty, provocative, deadpan, empathetic)
2. Sentence structure (e.g., short punchy sentences, questions, bullet points)
3. Key jargon or vocabulary patterns
Provide ONLY a concise, 2-3 sentence description of the style profile that another AI should imitate in the future.
No conversational filler, just the exact instruction string to append to future system prompts.
`, viralReplyContent)

	// Route through our Multi-LLM fallback logic.
	// Try OpenAI first, fallback to Anthropic, then the rest.
	providers := []string{"openai", "anthropic", "gemini", "deepseek"}
	for _, pName := range providers {
		provider, exists := s.providers[pName]
		breaker, breakerExists := s.breakers[pName]
		if !exists || !breakerExists {
			continue
		}
		// Use a default model for style extraction, as it's not user-facing
		// and can be optimized for cost/speed.
		modelToUse := getDefaultModelFor(pName)
		if modelToUse == "" { // Defensive: every name above has a default today.
			modelToUse = "gpt-4o-mini" // A safe default
		}
		styleDesc, err := breaker.Execute(func() (string, error) {
			// Bound each provider call to 30s, mirroring GenerateReply.
			callCtx, cancel := context.WithTimeout(ctx, 30*time.Second)
			defer cancel()
			return provider.GenerateReply(callCtx, modelToUse, systemPrompt, userPrompt)
		})
		if err == nil && styleDesc != "" {
			return styleDesc, nil
		}
		log.Printf("Provider %s failed to extract style: %v. Attempting next...", pName, err)
	}
	return "", fmt.Errorf("failed to extract style from any provider")
}
// getDefaultModelFor reports the safe, cost-effective default model name for
// the given provider key. Unknown provider keys yield the empty string.
func getDefaultModelFor(provider string) string {
	model := ""
	switch provider {
	case "openai":
		model = "gpt-4o-mini"
	case "anthropic":
		model = "claude-3-5-haiku-latest"
	case "deepseek":
		model = "deepseek-chat"
	case "gemini":
		model = "gemini-2.5-flash"
	}
	return model
}