commit 5cfdc92556 (parent 8b4b4b4181)
Author: Hua
Date: 2025-02-18 16:53:34 +08:00

21 changed files with 3139 additions and 0 deletions

services/ai/ai.go (new file, +84 lines)
@@ -0,0 +1,84 @@
package ai

import (
    "code-review/services"
    "code-review/services/types"
    "fmt"
    "log"
    "strings"
)

// AI implements the CodeReviewer and AIPool interfaces.
type AI struct {
    model     string
    systemMsg string
    client    AIClient
}

// NewAI creates a new AI instance.
func NewAI(model, systemMsg string, client AIClient) *AI {
    return &AI{
        model:     model,
        systemMsg: systemMsg,
        client:    client,
    }
}

// GetAI implements the AIPool interface.
func (a *AI) GetAI() services.CodeReviewer {
    return a
}

// Review implements the CodeReviewer interface.
func (a *AI) Review(changes *types.CodeChanges) (*types.ReviewResult, error) {
    log.Printf("AI starting code review: model=%s", a.model)

    // Build the review prompt.
    prompt := a.buildPrompt(changes)

    // Call the AI API.
    response, err := a.client.Chat(a.systemMsg, prompt)
    if err != nil {
        log.Printf("AI API call failed: model=%s, error=%v", a.model, err)
        return nil, fmt.Errorf("AI API call failed: %w", err)
    }

    // Parse the review result.
    result, err := a.parseResponse(response)
    if err != nil {
        log.Printf("failed to parse AI response: model=%s, error=%v", a.model, err)
        return nil, fmt.Errorf("failed to parse AI response: %w", err)
    }

    log.Printf("AI code review finished: model=%s", a.model)
    return result, nil
}

// buildPrompt builds the review prompt from the changed files.
func (a *AI) buildPrompt(changes *types.CodeChanges) string {
    var prompt strings.Builder
    prompt.WriteString("Please review the following code changes:\n\n")
    for _, file := range changes.Files {
        prompt.WriteString(fmt.Sprintf("File: %s\n", file.Path))
        prompt.WriteString("diff:\n")
        prompt.WriteString(file.Content)
        prompt.WriteString("\n---\n\n")
    }
    return prompt.String()
}

// parseResponse wraps the raw AI response in a ReviewResult.
// For now the whole response is attached as a single global comment.
func (a *AI) parseResponse(response string) (*types.ReviewResult, error) {
    return &types.ReviewResult{
        Comments: []types.Comment{
            {
                Path:    "global",
                Content: response,
            },
        },
        Summary: "code review finished",
    }, nil
}
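For reference, a minimal usage sketch of this file (not part of the commit). It assumes the reviewer is built with NewAI and a stub AIClient; types.FileChange is an assumed element type for CodeChanges.Files, since the diff only shows that each element has Path and Content fields.

package ai

import (
    "fmt"
    "log"

    "code-review/services/types"
)

// mockClient is a hypothetical stub that satisfies AIClient; illustration only.
type mockClient struct{ reply string }

func (m *mockClient) Chat(systemMsg, prompt string) (string, error) {
    return m.reply, nil
}

func ExampleAI_Review() {
    reviewer := NewAI("my-model", "You are a strict code reviewer.", &mockClient{reply: "LGTM"})
    result, err := reviewer.Review(&types.CodeChanges{
        // types.FileChange is assumed; only Path and Content appear in this diff.
        Files: []types.FileChange{
            {Path: "main.go", Content: "diff --git a/main.go b/main.go ..."},
        },
    })
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(result.Summary)          // "code review finished"
    fmt.Println(result.Comments[0].Path) // "global"
}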

services/ai/client.go (new file, +184 lines)
@@ -0,0 +1,184 @@
package ai

import (
    "bytes"
    "encoding/json"
    "fmt"
    "io"
    "log"
    "net/http"
    "regexp"
    "strings"
)

// AIClient defines the interface for talking to an AI service.
type AIClient interface {
    Chat(systemMsg, prompt string) (string, error)
}

// Client implements the AIClient interface.
type Client struct {
    apiBase string
    apiKey  string
    model   string
    aiType  string
    // stream is never set by NewClient, so requests are always non-streaming.
    stream      bool
    temperature float64
    client      *http.Client
}

// NewClient creates a new AI client.
func NewClient(apiBase, apiKey, model, aiType string, temperature float64) *Client {
    return &Client{
        apiBase:     apiBase,
        apiKey:      apiKey,
        model:       model,
        aiType:      aiType,
        temperature: temperature,
        client:      &http.Client{},
    }
}

// Chat sends a chat request to the configured AI service.
func (c *Client) Chat(systemMsg, prompt string) (string, error) {
    // Dispatch to the backend selected by aiType.
    var response string
    var err error
    if c.aiType == "ollama" {
        response, err = c.ollamaChat(systemMsg, prompt)
    } else {
        response, err = c.openAIChat(systemMsg, prompt)
    }
    if err != nil {
        log.Printf("AI chat request failed: error=%v", err)
        return "", fmt.Errorf("AI chat request failed: %w", err)
    }
    return response, nil
}

// ollamaChat sends a request to the Ollama generate API.
func (c *Client) ollamaChat(systemMsg, prompt string) (string, error) {
    // Combine the system prompt and the user prompt into one string.
    fullPrompt := fmt.Sprintf("%s\n\n%s", systemMsg, prompt)

    reqBody := map[string]interface{}{
        "model":  c.model,
        "prompt": fullPrompt,
        "stream": c.stream,
        // Ollama reads sampling parameters from the "options" object;
        // a top-level "temperature" field is ignored.
        "options": map[string]interface{}{
            "temperature": c.temperature,
        },
    }
    jsonData, err := json.Marshal(reqBody)
    if err != nil {
        return "", fmt.Errorf("failed to marshal request: %w", err)
    }

    req, err := http.NewRequest("POST", c.apiBase+"/api/generate", bytes.NewBuffer(jsonData))
    if err != nil {
        return "", fmt.Errorf("failed to create request: %w", err)
    }
    req.Header.Set("Content-Type", "application/json")

    resp, err := c.client.Do(req)
    if err != nil {
        return "", fmt.Errorf("failed to send request: %w", err)
    }
    defer resp.Body.Close()

    if resp.StatusCode != http.StatusOK {
        body, _ := io.ReadAll(resp.Body)
        return "", fmt.Errorf("API request failed: status=%d, body=%s", resp.StatusCode, string(body))
    }

    var result struct {
        Response string `json:"response"`
        Done     bool   `json:"done"`
    }
    // Read the body once so it can still be logged if JSON parsing fails.
    body, err := io.ReadAll(resp.Body)
    if err != nil {
        return "", fmt.Errorf("failed to read response: %w", err)
    }
    if err := json.Unmarshal(body, &result); err != nil {
        log.Printf("response body: %s", string(body))
        return "", fmt.Errorf("failed to parse response: %w", err)
    }
    if !result.Done {
        return "", fmt.Errorf("AI response not complete")
    }

    // Strip any <think>...</think> reasoning block from the model output.
    pattern := "(?s)<think>(.*?)</think>"
    reg := regexp.MustCompile(pattern)
    cleaned := reg.ReplaceAllString(result.Response, "")
    return strings.TrimSpace(cleaned), nil
}

// openAIChat sends a request to an OpenAI-compatible chat completions API.
func (c *Client) openAIChat(systemMsg, prompt string) (string, error) {
    reqBody := map[string]interface{}{
        "model": c.model,
        "messages": []map[string]string{
            {
                "role":    "system",
                "content": systemMsg,
            },
            {
                "role":    "user",
                "content": prompt,
            },
        },
        "stream":      false,
        "temperature": c.temperature,
    }
    jsonData, err := json.Marshal(reqBody)
    if err != nil {
        return "", fmt.Errorf("failed to marshal request: %w", err)
    }

    req, err := http.NewRequest("POST", c.apiBase+"/v1/chat/completions", bytes.NewBuffer(jsonData))
    if err != nil {
        return "", fmt.Errorf("failed to create request: %w", err)
    }
    req.Header.Set("Content-Type", "application/json")
    req.Header.Set("Authorization", "Bearer "+c.apiKey)

    resp, err := c.client.Do(req)
    if err != nil {
        return "", fmt.Errorf("failed to send request: %w", err)
    }
    defer resp.Body.Close()

    if resp.StatusCode != http.StatusOK {
        body, _ := io.ReadAll(resp.Body)
        return "", fmt.Errorf("API request failed: status=%d, body=%s", resp.StatusCode, string(body))
    }

    var result struct {
        Choices []struct {
            Message struct {
                Content string `json:"content"`
            } `json:"message"`
        } `json:"choices"`
    }
    if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
        return "", fmt.Errorf("failed to parse response: %w", err)
    }
    if len(result.Choices) == 0 {
        return "", fmt.Errorf("AI response is empty")
    }

    // Strip any <think>...</think> reasoning block from the model output.
    pattern := "(?s)<think>(.*?)</think>"
    reg := regexp.MustCompile(pattern)
    cleaned := reg.ReplaceAllString(result.Choices[0].Message.Content, "")
    return strings.TrimSpace(cleaned), nil
}
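For reference, a minimal sketch of calling the client directly (not part of the commit). The base URLs, API key, and model names are placeholders; the import path is inferred from the file layout. An aiType of "ollama" routes Chat through ollamaChat, any other value through openAIChat.

package main

import (
    "fmt"
    "log"

    "code-review/services/ai"
)

func main() {
    // Placeholder endpoint and model; a local Ollama server is assumed to be listening here.
    ollama := ai.NewClient("http://localhost:11434", "", "llama3", "ollama", 0.2)

    // Placeholder OpenAI-compatible endpoint; any aiType other than "ollama" takes this path.
    openai := ai.NewClient("https://api.openai.com", "sk-placeholder", "gpt-4o-mini", "openai", 0.2)

    for _, c := range []*ai.Client{ollama, openai} {
        reply, err := c.Chat("You are a code reviewer.", "Please review: fmt.Println(\"hello\")")
        if err != nil {
            log.Printf("chat failed: %v", err)
            continue
        }
        fmt.Println(reply)
    }
}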