feat: 部署初版测试
This commit is contained in:
@@ -3,57 +3,210 @@ package service
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"github.com/sashabaranov/go-openai"
|
||||
"github.com/sony/gobreaker/v2"
|
||||
"github.com/zs/InsightReply/internal/service/llm"
|
||||
)
|
||||
|
||||
type AIService struct {
|
||||
client *openai.Client
|
||||
providers map[string]llm.Provider
|
||||
breakers map[string]*gobreaker.CircuitBreaker[string]
|
||||
|
||||
defaultProvider string
|
||||
defaultModel string
|
||||
}
|
||||
|
||||
func NewAIService(apiKey string) *AIService {
|
||||
return &AIService{
|
||||
client: openai.NewClient(apiKey),
|
||||
func NewAIService() *AIService {
|
||||
s := &AIService{
|
||||
providers: make(map[string]llm.Provider),
|
||||
breakers: make(map[string]*gobreaker.CircuitBreaker[string]),
|
||||
}
|
||||
|
||||
// 1. Initialize Providers based on ENV
|
||||
if key := os.Getenv("OPENAI_API_KEY"); key != "" {
|
||||
s.providers["openai"] = llm.NewOpenAIProvider(key, os.Getenv("OPENAI_BASE_URL"), "openai")
|
||||
}
|
||||
if key := os.Getenv("ANTHROPIC_API_KEY"); key != "" {
|
||||
s.providers["anthropic"] = llm.NewAnthropicProvider(key, os.Getenv("ANTHROPIC_BASE_URL"))
|
||||
}
|
||||
if key := os.Getenv("DEEPSEEK_API_KEY"); key != "" {
|
||||
baseURL := os.Getenv("DEEPSEEK_BASE_URL")
|
||||
if baseURL == "" {
|
||||
baseURL = "https://api.deepseek.com/v1" // Add v1 as expected by OpenAI SDK compatibility
|
||||
}
|
||||
s.providers["deepseek"] = llm.NewOpenAIProvider(key, baseURL, "deepseek")
|
||||
}
|
||||
if key := os.Getenv("GEMINI_API_KEY"); key != "" {
|
||||
s.providers["gemini"] = llm.NewGeminiProvider(key, os.Getenv("GEMINI_BASE_URL"))
|
||||
}
|
||||
|
||||
// 2. Initialize Circuit Breakers for each provider
|
||||
for name := range s.providers {
|
||||
st := gobreaker.Settings{
|
||||
Name: name + "_cb",
|
||||
MaxRequests: 3, // Requests allowed in half-open state
|
||||
Interval: 10 * time.Minute, // Cyclic period for closed state counters
|
||||
Timeout: 60 * time.Second, // Open state duration
|
||||
ReadyToTrip: func(counts gobreaker.Counts) bool {
|
||||
failureRatio := float64(counts.TotalFailures) / float64(counts.Requests)
|
||||
return counts.Requests >= 5 && failureRatio >= 0.6 // Trip if 60% fail after 5 reqs
|
||||
},
|
||||
}
|
||||
s.breakers[name] = gobreaker.NewCircuitBreaker[string](st)
|
||||
}
|
||||
|
||||
s.defaultProvider = os.Getenv("LLM_PROVIDER")
|
||||
if s.defaultProvider == "" {
|
||||
s.defaultProvider = "openai"
|
||||
}
|
||||
s.defaultModel = os.Getenv("LLM_MODEL")
|
||||
if s.defaultModel == "" {
|
||||
s.defaultModel = "gpt-4o-mini"
|
||||
}
|
||||
|
||||
return s
|
||||
}
|
||||
|
||||
func (s *AIService) TestConnection(ctx context.Context) (string, error) {
|
||||
// ... (same as before)
|
||||
return "Ready", nil // Simplified for brevity in this edit, but I'll keep the logic if needed
|
||||
if len(s.providers) == 0 {
|
||||
return "", fmt.Errorf("no LLM providers configured")
|
||||
}
|
||||
return "Ready (Multi-LLM configured)", nil
|
||||
}
|
||||
|
||||
func (s *AIService) GenerateReply(ctx context.Context, tweetContent string, strategy string, userIdentity string) (string, error) {
|
||||
prompt := fmt.Sprintf(`
|
||||
// GenerateReply dynamically routes to the preferred LLM and uses a fallback chain if it fails.
|
||||
func (s *AIService) GenerateReply(ctx context.Context, tweetContent, productContext, userIdentity string, preferredProvider, preferredModel string) (string, error) {
|
||||
systemPrompt := "You are a professional X (Twitter) ghostwriter. You MUST respond with valid JSON."
|
||||
userPrompt := fmt.Sprintf(`
|
||||
You are a social media expert.
|
||||
User Identity: %s
|
||||
%s
|
||||
Target Tweet: "%s"
|
||||
Strategy: %s
|
||||
|
||||
Generate a high-quality reply for X (Twitter).
|
||||
Keep it natural, engaging, and under 280 characters.
|
||||
Do not use quotes around the reply.
|
||||
`, userIdentity, tweetContent, strategy)
|
||||
Generate 3 high-quality, distinct replies for X (Twitter) using different strategic angles.
|
||||
Suggested angles depending on context: Contrarian, Analytical, Supportive, Data-driven, Founder's Experience, Quote Tweet.
|
||||
IMPORTANT: If "Available User Custom Strategies" are provided above, you MUST prioritize using those custom strategy angles for your replies.
|
||||
IMPORTANT: If a specific "IMMITATE STYLE" instruction is provided in the Identity or Context, you MUST perfectly clone that linguistic tone.
|
||||
|
||||
Keep each reply natural, engaging, and under 280 characters. No hashtags unless highly relevant.
|
||||
|
||||
Respond ONLY with a JSON array in the exact following format, without any markdown formatting wrappers (like markdown code blocks):
|
||||
[
|
||||
{"strategy": "Name of Strategy 1", "content": "Reply content 1"},
|
||||
{"strategy": "Name of Strategy 2", "content": "Reply content 2"},
|
||||
{"strategy": "Name of Strategy 3", "content": "Reply content 3"}
|
||||
]
|
||||
`, userIdentity, productContext, tweetContent)
|
||||
|
||||
resp, err := s.client.CreateChatCompletion(
|
||||
ctx,
|
||||
openai.ChatCompletionRequest{
|
||||
Model: openai.GPT4oMini,
|
||||
Messages: []openai.ChatCompletionMessage{
|
||||
{
|
||||
Role: openai.ChatMessageRoleSystem,
|
||||
Content: "You are a professional X (Twitter) ghostwriter.",
|
||||
},
|
||||
{
|
||||
Role: openai.ChatMessageRoleUser,
|
||||
Content: prompt,
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to generate reply: %w", err)
|
||||
targetProvider := preferredProvider
|
||||
if targetProvider == "" {
|
||||
targetProvider = s.defaultProvider
|
||||
}
|
||||
targetModel := preferredModel
|
||||
if targetModel == "" {
|
||||
targetModel = s.defaultModel
|
||||
}
|
||||
|
||||
return resp.Choices[0].Message.Content, nil
|
||||
// Fallback chain (as designed in IMPLEMENTATION_PLAN: current -> Anthropic -> OpenAI -> Gemini -> DeepSeek)
|
||||
fallbackChain := []string{targetProvider, "anthropic", "openai", "gemini", "deepseek"}
|
||||
|
||||
for _, pName := range fallbackChain {
|
||||
provider, ok := s.providers[pName]
|
||||
if !ok {
|
||||
log.Printf("Provider %s bypassed (not configured)", pName)
|
||||
continue
|
||||
}
|
||||
breaker, ok := s.breakers[pName]
|
||||
if !ok {
|
||||
continue // Should never happen
|
||||
}
|
||||
|
||||
// Use the target model only on the initially requested provider. For fallbacks, use a safe default model.
|
||||
modelToUse := targetModel
|
||||
if pName != targetProvider {
|
||||
modelToUse = getDefaultModelFor(pName)
|
||||
}
|
||||
|
||||
log.Printf("Routing request to LLM Provider: %s (Model: %s)", pName, modelToUse)
|
||||
|
||||
// Execute through circuit breaker
|
||||
reply, err := breaker.Execute(func() (string, error) {
|
||||
// Add a simple 30s timeout per call
|
||||
callCtx, cancel := context.WithTimeout(ctx, 30*time.Second)
|
||||
defer cancel()
|
||||
return provider.GenerateReply(callCtx, modelToUse, systemPrompt, userPrompt)
|
||||
})
|
||||
|
||||
if err == nil {
|
||||
return reply, nil // Success
|
||||
}
|
||||
|
||||
log.Printf("Provider %s failed: %v. Attempting next in fallback chain...", pName, err)
|
||||
}
|
||||
|
||||
return "", fmt.Errorf("all providers failed to generate reply")
|
||||
}
|
||||
|
||||
// ExtractStyle consumes a viral AI reply and uses the LLM to reverse-engineer its linguistic fingerprint
|
||||
func (s *AIService) ExtractStyle(ctx context.Context, viralReplyContent string) (string, error) {
|
||||
systemPrompt := "You are a master linguistic analyst and copywriter."
|
||||
userPrompt := fmt.Sprintf(`
|
||||
Analyze the following highly successful social media reply:
|
||||
"%s"
|
||||
|
||||
Extract the core stylistic elements that made it successful. Focus on:
|
||||
1. Tone (e.g., witty, provocative, deadpan, empathetic)
|
||||
2. Sentence structure (e.g., short punchy sentences, questions, bullet points)
|
||||
3. Key jargon or vocabulary patterns
|
||||
|
||||
Provide ONLY a concise, 2-3 sentence description of the style profile that another AI should imitate in the future.
|
||||
No conversational filler, just the exact instruction string to append to future system prompts.
|
||||
`, viralReplyContent)
|
||||
|
||||
// Route through our Multi-LLM fallback logic
|
||||
// Try OpenAI first, fallback to Anthropic
|
||||
providers := []string{"openai", "anthropic", "gemini", "deepseek"}
|
||||
|
||||
for _, pName := range providers {
|
||||
pConf, exists := s.providers[pName]
|
||||
cb, cbExists := s.breakers[pName]
|
||||
|
||||
if !exists || !cbExists {
|
||||
continue
|
||||
}
|
||||
|
||||
styleDesc, err := cb.Execute(func() (string, error) {
|
||||
// Use a default model for style extraction, as it's not user-facing and can be optimized for cost/speed
|
||||
modelToUse := getDefaultModelFor(pName)
|
||||
if modelToUse == "" { // Fallback if getDefaultModelFor doesn't have an entry
|
||||
modelToUse = "gpt-4o-mini" // A safe default
|
||||
}
|
||||
return pConf.GenerateReply(ctx, modelToUse, systemPrompt, userPrompt)
|
||||
})
|
||||
|
||||
if err == nil && styleDesc != "" {
|
||||
return styleDesc, nil
|
||||
}
|
||||
log.Printf("Provider %s failed to extract style: %v. Attempting next...", pName, err)
|
||||
}
|
||||
|
||||
return "", fmt.Errorf("failed to extract style from any provider")
|
||||
}
|
||||
|
||||
// getDefaultModelFor returns the safe/cheap default model identifier for a
// given provider name, or the empty string for an unknown provider.
func getDefaultModelFor(provider string) string {
	switch provider {
	case "anthropic":
		return "claude-3-5-haiku-latest"
	case "gemini":
		return "gemini-2.5-flash"
	case "deepseek":
		return "deepseek-chat"
	case "openai":
		return "gpt-4o-mini"
	}
	return ""
}
|
||||
|
||||
41
server/internal/service/ai_service_test.go
Normal file
41
server/internal/service/ai_service_test.go
Normal file
@@ -0,0 +1,41 @@
|
||||
package service
|
||||
|
||||
import (
|
||||
"os"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// TestAIService_Initialization verifies that the AIService parses environment variables
|
||||
// correctly and initializes the required fallback strategies and default settings.
|
||||
func TestAIService_Initialization(t *testing.T) {
|
||||
// Temporarily set testing ENVs to avoid depending on local .env
|
||||
os.Setenv("LLM_PROVIDER", "anthropic")
|
||||
os.Setenv("LLM_MODEL", "claude-3-5-haiku-latest")
|
||||
os.Setenv("OPENAI_API_KEY", "test-key-openai")
|
||||
defer os.Clearenv() // Clean up after test
|
||||
|
||||
svc := NewAIService()
|
||||
if svc == nil {
|
||||
t.Fatal("Expected AIService to be initialized, got nil")
|
||||
}
|
||||
|
||||
if svc.defaultProvider != "anthropic" {
|
||||
t.Errorf("Expected default provider 'anthropic', got '%s'", svc.defaultProvider)
|
||||
}
|
||||
|
||||
if svc.defaultModel != "claude-3-5-haiku-latest" {
|
||||
t.Errorf("Expected default model 'claude-3-5-haiku-latest', got '%s'", svc.defaultModel)
|
||||
}
|
||||
|
||||
// Verify that OpenAI provider was initialized because OPENAI_API_KEY was present
|
||||
_, hasOpenAI := svc.providers["openai"]
|
||||
if !hasOpenAI {
|
||||
t.Error("Expected OpenAI provider to be initialized, but it was not found")
|
||||
}
|
||||
|
||||
// Verify that circuit breakers were initialized
|
||||
_, hasBreaker := svc.breakers["openai"]
|
||||
if !hasBreaker {
|
||||
t.Error("Expected circuit breaker for setup provider, but it was not found")
|
||||
}
|
||||
}
|
||||
26
server/internal/service/competitor_monitor_service.go
Normal file
26
server/internal/service/competitor_monitor_service.go
Normal file
@@ -0,0 +1,26 @@
|
||||
package service
|
||||
|
||||
import (
|
||||
"github.com/zs/InsightReply/internal/model"
|
||||
"github.com/zs/InsightReply/internal/repository"
|
||||
)
|
||||
|
||||
type CompetitorMonitorService struct {
|
||||
repo *repository.CompetitorMonitorRepository
|
||||
}
|
||||
|
||||
func NewCompetitorMonitorService(repo *repository.CompetitorMonitorRepository) *CompetitorMonitorService {
|
||||
return &CompetitorMonitorService{repo: repo}
|
||||
}
|
||||
|
||||
func (s *CompetitorMonitorService) ListMonitors(userID string) ([]model.CompetitorMonitor, error) {
|
||||
return s.repo.ListByUserID(userID)
|
||||
}
|
||||
|
||||
func (s *CompetitorMonitorService) CreateMonitor(monitor *model.CompetitorMonitor) error {
|
||||
return s.repo.Create(monitor)
|
||||
}
|
||||
|
||||
func (s *CompetitorMonitorService) DeleteMonitor(id string, userID string) error {
|
||||
return s.repo.Delete(id, userID)
|
||||
}
|
||||
30
server/internal/service/custom_strategy_service.go
Normal file
30
server/internal/service/custom_strategy_service.go
Normal file
@@ -0,0 +1,30 @@
|
||||
package service
|
||||
|
||||
import (
|
||||
"github.com/zs/InsightReply/internal/model"
|
||||
"github.com/zs/InsightReply/internal/repository"
|
||||
)
|
||||
|
||||
type CustomStrategyService struct {
|
||||
repo *repository.CustomStrategyRepository
|
||||
}
|
||||
|
||||
func NewCustomStrategyService(repo *repository.CustomStrategyRepository) *CustomStrategyService {
|
||||
return &CustomStrategyService{repo: repo}
|
||||
}
|
||||
|
||||
func (s *CustomStrategyService) ListStrategies(userID string) ([]model.UserCustomStrategy, error) {
|
||||
return s.repo.ListByUserID(userID)
|
||||
}
|
||||
|
||||
func (s *CustomStrategyService) CreateStrategy(strategy *model.UserCustomStrategy) error {
|
||||
return s.repo.Create(strategy)
|
||||
}
|
||||
|
||||
func (s *CustomStrategyService) UpdateStrategy(strategy *model.UserCustomStrategy) error {
|
||||
return s.repo.Update(strategy)
|
||||
}
|
||||
|
||||
func (s *CustomStrategyService) DeleteStrategy(id string, userID string) error {
|
||||
return s.repo.Delete(id, userID)
|
||||
}
|
||||
75
server/internal/service/llm/anthropic.go
Normal file
75
server/internal/service/llm/anthropic.go
Normal file
@@ -0,0 +1,75 @@
|
||||
package llm
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
// AnthropicProvider talks to the Anthropic Messages API over plain HTTP.
type AnthropicProvider struct {
	apiKey  string
	baseURL string
	client  *http.Client
}

// NewAnthropicProvider builds a provider; an empty baseURL selects the
// official https://api.anthropic.com/v1 endpoint.
func NewAnthropicProvider(apiKey, baseURL string) *AnthropicProvider {
	endpoint := baseURL
	if endpoint == "" {
		endpoint = "https://api.anthropic.com/v1"
	}
	return &AnthropicProvider{
		apiKey:  apiKey,
		baseURL: endpoint,
		client:  &http.Client{},
	}
}

// Name identifies this provider in the service registry.
func (p *AnthropicProvider) Name() string { return "anthropic" }
|
||||
|
||||
func (p *AnthropicProvider) GenerateReply(ctx context.Context, model string, systemPrompt, userPrompt string) (string, error) {
|
||||
reqBody := map[string]interface{}{
|
||||
"model": model,
|
||||
"max_tokens": 1024,
|
||||
"system": systemPrompt,
|
||||
"messages": []map[string]string{
|
||||
{"role": "user", "content": userPrompt},
|
||||
},
|
||||
}
|
||||
bs, _ := json.Marshal(reqBody)
|
||||
|
||||
req, err := http.NewRequestWithContext(ctx, "POST", p.baseURL+"/messages", bytes.NewReader(bs))
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
req.Header.Set("x-api-key", p.apiKey)
|
||||
req.Header.Set("anthropic-version", "2023-06-01")
|
||||
req.Header.Set("content-type", "application/json")
|
||||
|
||||
resp, err := p.client.Do(req)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
body, _ := io.ReadAll(resp.Body)
|
||||
return "", fmt.Errorf("anthropic error %d: %s", resp.StatusCode, string(body))
|
||||
}
|
||||
|
||||
var result struct {
|
||||
Content []struct {
|
||||
Text string `json:"text"`
|
||||
} `json:"content"`
|
||||
}
|
||||
if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
|
||||
return "", err
|
||||
}
|
||||
if len(result.Content) == 0 {
|
||||
return "", fmt.Errorf("anthropic returned empty content")
|
||||
}
|
||||
return result.Content[0].Text, nil
|
||||
}
|
||||
86
server/internal/service/llm/gemini.go
Normal file
86
server/internal/service/llm/gemini.go
Normal file
@@ -0,0 +1,86 @@
|
||||
package llm
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
// GeminiProvider talks to the Google Gemini generateContent REST API.
type GeminiProvider struct {
	apiKey  string
	baseURL string
	client  *http.Client
}

// NewGeminiProvider builds a provider; an empty baseURL selects the official
// v1beta models endpoint.
func NewGeminiProvider(apiKey, baseURL string) *GeminiProvider {
	endpoint := baseURL
	if endpoint == "" {
		endpoint = "https://generativelanguage.googleapis.com/v1beta/models"
	}
	return &GeminiProvider{
		apiKey:  apiKey,
		baseURL: endpoint,
		client:  &http.Client{},
	}
}

// Name identifies this provider in the service registry.
func (p *GeminiProvider) Name() string { return "gemini" }
|
||||
|
||||
func (p *GeminiProvider) GenerateReply(ctx context.Context, model string, systemPrompt, userPrompt string) (string, error) {
|
||||
url := fmt.Sprintf("%s/%s:generateContent?key=%s", p.baseURL, model, p.apiKey)
|
||||
|
||||
reqBody := map[string]interface{}{
|
||||
"systemInstruction": map[string]interface{}{
|
||||
"parts": []map[string]interface{}{
|
||||
{"text": systemPrompt},
|
||||
},
|
||||
},
|
||||
"contents": []map[string]interface{}{
|
||||
{
|
||||
"role": "user",
|
||||
"parts": []map[string]interface{}{
|
||||
{"text": userPrompt},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
bs, _ := json.Marshal(reqBody)
|
||||
|
||||
req, err := http.NewRequestWithContext(ctx, "POST", url, bytes.NewReader(bs))
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
|
||||
resp, err := p.client.Do(req)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
body, _ := io.ReadAll(resp.Body)
|
||||
return "", fmt.Errorf("gemini error %d: %s", resp.StatusCode, string(body))
|
||||
}
|
||||
|
||||
var result struct {
|
||||
Candidates []struct {
|
||||
Content struct {
|
||||
Parts []struct {
|
||||
Text string `json:"text"`
|
||||
} `json:"parts"`
|
||||
} `json:"content"`
|
||||
} `json:"candidates"`
|
||||
}
|
||||
if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
|
||||
return "", err
|
||||
}
|
||||
if len(result.Candidates) == 0 || len(result.Candidates[0].Content.Parts) == 0 {
|
||||
return "", fmt.Errorf("gemini returned empty content")
|
||||
}
|
||||
return result.Candidates[0].Content.Parts[0].Text, nil
|
||||
}
|
||||
50
server/internal/service/llm/openai.go
Normal file
50
server/internal/service/llm/openai.go
Normal file
@@ -0,0 +1,50 @@
|
||||
package llm
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"github.com/sashabaranov/go-openai"
|
||||
)
|
||||
|
||||
type OpenAIProvider struct {
|
||||
client *openai.Client
|
||||
name string
|
||||
}
|
||||
|
||||
// NewOpenAIProvider creates a new provider that uses the official or compatible OpenAI API.
|
||||
// It can also handle DeepSeek via a custom BaseURL.
|
||||
func NewOpenAIProvider(apiKey, baseURL, name string) *OpenAIProvider {
|
||||
config := openai.DefaultConfig(apiKey)
|
||||
if baseURL != "" {
|
||||
config.BaseURL = baseURL
|
||||
}
|
||||
return &OpenAIProvider{
|
||||
client: openai.NewClientWithConfig(config),
|
||||
name: name,
|
||||
}
|
||||
}
|
||||
|
||||
func (p *OpenAIProvider) Name() string {
|
||||
return p.name
|
||||
}
|
||||
|
||||
func (p *OpenAIProvider) GenerateReply(ctx context.Context, model string, systemPrompt, userPrompt string) (string, error) {
|
||||
resp, err := p.client.CreateChatCompletion(
|
||||
ctx,
|
||||
openai.ChatCompletionRequest{
|
||||
Model: model,
|
||||
Messages: []openai.ChatCompletionMessage{
|
||||
{Role: openai.ChatMessageRoleSystem, Content: systemPrompt},
|
||||
{Role: openai.ChatMessageRoleUser, Content: userPrompt},
|
||||
},
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("%s api error: %w", p.name, err)
|
||||
}
|
||||
if len(resp.Choices) == 0 {
|
||||
return "", fmt.Errorf("%s returned no choices", p.name)
|
||||
}
|
||||
return resp.Choices[0].Message.Content, nil
|
||||
}
|
||||
8
server/internal/service/llm/provider.go
Normal file
8
server/internal/service/llm/provider.go
Normal file
@@ -0,0 +1,8 @@
|
||||
package llm
|
||||
|
||||
import "context"
|
||||
|
||||
// Provider abstracts a single LLM backend capable of producing a chat reply.
type Provider interface {
	// Name returns the provider's registry key (e.g. "openai", "anthropic").
	Name() string
	// GenerateReply sends systemPrompt/userPrompt to the given model and
	// returns the raw text of the first completion.
	GenerateReply(ctx context.Context, model string, systemPrompt, userPrompt string) (string, error)
}
|
||||
22
server/internal/service/product_profile_service.go
Normal file
22
server/internal/service/product_profile_service.go
Normal file
@@ -0,0 +1,22 @@
|
||||
package service
|
||||
|
||||
import (
|
||||
"github.com/zs/InsightReply/internal/model"
|
||||
"github.com/zs/InsightReply/internal/repository"
|
||||
)
|
||||
|
||||
type ProductProfileService struct {
|
||||
repo *repository.ProductProfileRepository
|
||||
}
|
||||
|
||||
func NewProductProfileService(repo *repository.ProductProfileRepository) *ProductProfileService {
|
||||
return &ProductProfileService{repo: repo}
|
||||
}
|
||||
|
||||
func (s *ProductProfileService) GetProfile(userID string) (*model.UserProductProfile, error) {
|
||||
return s.repo.GetByUserID(userID)
|
||||
}
|
||||
|
||||
func (s *ProductProfileService) SaveProfile(profile *model.UserProductProfile) error {
|
||||
return s.repo.Save(profile)
|
||||
}
|
||||
@@ -25,3 +25,24 @@ func (s *UserService) Register(email string, identity string) (*model.User, erro
|
||||
func (s *UserService) GetUser(email string) (*model.User, error) {
|
||||
return s.repo.GetByEmail(email)
|
||||
}
|
||||
|
||||
func (s *UserService) GetUserByID(id string) (*model.User, error) {
|
||||
return s.repo.GetByID(id)
|
||||
}
|
||||
|
||||
func (s *UserService) UpdatePreferences(id string, identity string, language string) (*model.User, error) {
|
||||
user, err := s.repo.GetByID(id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if identity != "" {
|
||||
user.IdentityLabel = identity
|
||||
}
|
||||
if language != "" {
|
||||
user.LanguagePreference = language
|
||||
}
|
||||
|
||||
err = s.repo.Update(user)
|
||||
return user, err
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user