feat: 部署初版测试
Some checks failed
Extension Build & Release / build (push) Failing after 1m5s
Backend Deploy (Go + Docker) / deploy (push) Failing after 1m40s
Web Console Deploy (Vue 3 + Vite) / deploy (push) Has been cancelled

This commit is contained in:
zs
2026-03-02 21:25:21 +08:00
parent db3abb3174
commit 8cf6cb944b
97 changed files with 10250 additions and 209 deletions

View File

@@ -0,0 +1,75 @@
package llm
import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"time"
)
// AnthropicProvider calls the Anthropic Messages API over plain HTTP.
type AnthropicProvider struct {
	apiKey  string       // sent as the x-api-key request header
	baseURL string       // API root, e.g. https://api.anthropic.com/v1
	client  *http.Client // shared client reused for all requests
}
// NewAnthropicProvider constructs a provider for the Anthropic Messages API.
// When baseURL is empty the official endpoint is used, so a compatible proxy
// can be substituted by passing its URL.
func NewAnthropicProvider(apiKey, baseURL string) *AnthropicProvider {
	if baseURL == "" {
		baseURL = "https://api.anthropic.com/v1"
	}
	return &AnthropicProvider{
		apiKey:  apiKey,
		baseURL: baseURL,
		// A client-level timeout guards against connections that hang when a
		// caller supplies a context without its own deadline; per-request
		// contexts can still cancel earlier.
		client: &http.Client{Timeout: 60 * time.Second},
	}
}
// Name reports the stable identifier of this provider implementation.
func (p *AnthropicProvider) Name() string {
	const providerName = "anthropic"
	return providerName
}
// GenerateReply sends a single-turn chat request to the Anthropic Messages
// API and returns the text of the first content block of the reply. ctx
// governs cancellation and deadlines for the HTTP round trip.
func (p *AnthropicProvider) GenerateReply(ctx context.Context, model string, systemPrompt, userPrompt string) (string, error) {
	reqBody := map[string]interface{}{
		"model":      model,
		"max_tokens": 1024,
		"system":     systemPrompt,
		"messages": []map[string]string{
			{"role": "user", "content": userPrompt},
		},
	}
	// Was `bs, _ := json.Marshal(...)` — never silently drop an error, even
	// one that is unlikely for a map of plain strings.
	bs, err := json.Marshal(reqBody)
	if err != nil {
		return "", fmt.Errorf("marshal anthropic request: %w", err)
	}
	req, err := http.NewRequestWithContext(ctx, http.MethodPost, p.baseURL+"/messages", bytes.NewReader(bs))
	if err != nil {
		return "", err
	}
	req.Header.Set("x-api-key", p.apiKey)
	req.Header.Set("anthropic-version", "2023-06-01")
	req.Header.Set("content-type", "application/json")
	resp, err := p.client.Do(req)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		// Best-effort read so the server's error message reaches the caller.
		body, _ := io.ReadAll(resp.Body)
		return "", fmt.Errorf("anthropic error %d: %s", resp.StatusCode, string(body))
	}
	// Decode only the fields we need from the response envelope.
	var result struct {
		Content []struct {
			Text string `json:"text"`
		} `json:"content"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
		return "", err
	}
	if len(result.Content) == 0 {
		return "", fmt.Errorf("anthropic returned empty content")
	}
	return result.Content[0].Text, nil
}

View File

@@ -0,0 +1,86 @@
package llm
import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"time"
)
// GeminiProvider calls the Google Gemini generateContent API over plain HTTP.
type GeminiProvider struct {
	apiKey  string       // Gemini API key used to authenticate requests
	baseURL string       // model collection root, e.g. .../v1beta/models
	client  *http.Client // shared client reused for all requests
}
// NewGeminiProvider constructs a provider for the Gemini generateContent API.
// When baseURL is empty the official Google endpoint is used.
func NewGeminiProvider(apiKey, baseURL string) *GeminiProvider {
	if baseURL == "" {
		baseURL = "https://generativelanguage.googleapis.com/v1beta/models"
	}
	return &GeminiProvider{
		apiKey:  apiKey,
		baseURL: baseURL,
		// A client-level timeout guards against connections that hang when a
		// caller supplies a context without its own deadline.
		client: &http.Client{Timeout: 60 * time.Second},
	}
}
// Name reports the stable identifier of this provider implementation.
func (p *GeminiProvider) Name() string {
	const providerName = "gemini"
	return providerName
}
// GenerateReply sends a single-turn generateContent request to the Gemini API
// and returns the text of the first part of the first candidate. ctx governs
// cancellation and deadlines for the HTTP round trip.
func (p *GeminiProvider) GenerateReply(ctx context.Context, model string, systemPrompt, userPrompt string) (string, error) {
	url := fmt.Sprintf("%s/%s:generateContent", p.baseURL, model)
	reqBody := map[string]interface{}{
		"systemInstruction": map[string]interface{}{
			"parts": []map[string]interface{}{
				{"text": systemPrompt},
			},
		},
		"contents": []map[string]interface{}{
			{
				"role": "user",
				"parts": []map[string]interface{}{
					{"text": userPrompt},
				},
			},
		},
	}
	// Was `bs, _ := json.Marshal(...)` — never silently drop an error.
	bs, err := json.Marshal(reqBody)
	if err != nil {
		return "", fmt.Errorf("marshal gemini request: %w", err)
	}
	req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(bs))
	if err != nil {
		return "", err
	}
	// The key used to be appended to the URL as ?key=...; send it in the
	// x-goog-api-key header instead so it cannot leak into access logs,
	// proxies, or error messages that include the URL.
	req.Header.Set("x-goog-api-key", p.apiKey)
	req.Header.Set("Content-Type", "application/json")
	resp, err := p.client.Do(req)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		// Best-effort read so the server's error message reaches the caller.
		body, _ := io.ReadAll(resp.Body)
		return "", fmt.Errorf("gemini error %d: %s", resp.StatusCode, string(body))
	}
	// Decode only the fields we need from the response envelope.
	var result struct {
		Candidates []struct {
			Content struct {
				Parts []struct {
					Text string `json:"text"`
				} `json:"parts"`
			} `json:"content"`
		} `json:"candidates"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
		return "", err
	}
	if len(result.Candidates) == 0 || len(result.Candidates[0].Content.Parts) == 0 {
		return "", fmt.Errorf("gemini returned empty content")
	}
	return result.Candidates[0].Content.Parts[0].Text, nil
}

View File

@@ -0,0 +1,50 @@
package llm
import (
"context"
"fmt"
"github.com/sashabaranov/go-openai"
)
// OpenAIProvider calls an OpenAI-compatible chat completion API through the
// go-openai client. It also serves other compatible vendors (e.g. DeepSeek)
// via a custom base URL.
type OpenAIProvider struct {
	client *openai.Client // configured go-openai client
	name   string         // label returned by Name() and used in error messages
}
// NewOpenAIProvider creates a provider backed by the official OpenAI API or
// any compatible service (such as DeepSeek) reached via a custom baseURL.
// An empty baseURL keeps the library's default endpoint; name labels this
// provider in Name() and in error messages.
func NewOpenAIProvider(apiKey, baseURL, name string) *OpenAIProvider {
	cfg := openai.DefaultConfig(apiKey)
	if baseURL != "" {
		cfg.BaseURL = baseURL
	}
	return &OpenAIProvider{
		name:   name,
		client: openai.NewClientWithConfig(cfg),
	}
}
// Name reports the label this provider was constructed with.
func (p *OpenAIProvider) Name() string {
	name := p.name
	return name
}
// GenerateReply performs a one-shot chat completion: a system prompt plus a
// single user message. It returns the content of the first choice, wrapping
// any API error with this provider's name.
func (p *OpenAIProvider) GenerateReply(ctx context.Context, model string, systemPrompt, userPrompt string) (string, error) {
	request := openai.ChatCompletionRequest{
		Model: model,
		Messages: []openai.ChatCompletionMessage{
			{Role: openai.ChatMessageRoleSystem, Content: systemPrompt},
			{Role: openai.ChatMessageRoleUser, Content: userPrompt},
		},
	}
	resp, err := p.client.CreateChatCompletion(ctx, request)
	if err != nil {
		return "", fmt.Errorf("%s api error: %w", p.name, err)
	}
	if len(resp.Choices) == 0 {
		return "", fmt.Errorf("%s returned no choices", p.name)
	}
	return resp.Choices[0].Message.Content, nil
}

View File

@@ -0,0 +1,8 @@
package llm
import "context"
// Provider abstracts a chat-style LLM backend. Each implementation wraps one
// vendor API behind a uniform single-turn generation call.
type Provider interface {
	// Name returns a short stable identifier for the backend
	// (e.g. "anthropic", "gemini").
	Name() string
	// GenerateReply produces a reply to userPrompt under systemPrompt using
	// the given model, honoring ctx for cancellation and deadlines.
	GenerateReply(ctx context.Context, model string, systemPrompt, userPrompt string) (string, error)
}