Mirror of https://gitee.com/blackfox/geekai.git (synced 2025-12-06 08:48:26 +08:00)
Support DeepSeek native reasoning models

commit cd31333d0c (parent f080425ee6)
@@ -3,6 +3,7 @@
 ## v4.2.2

 - Improvement: when the image captcha feature is enabled, first check whether a captcha API service is configured, to prevent the bug where nobody can log in after enabling it.
+- Improvement: support the native DeepSeek reasoning model API; a chat API KEY may now be set to a full API path, e.g. https://api.geekai.pro/v1/chat/completions
 - New feature: the chat page supports voice playback (TTS) of AI output.
 - New feature: support Google account login.
@@ -14,15 +14,15 @@ type ApiRequest struct {
 	MaxTokens           int  `json:"max_tokens,omitempty"`
 	MaxCompletionTokens int  `json:"max_completion_tokens,omitempty"` // compatible with the GPT O1 model
 	Stream              bool `json:"stream,omitempty"`
-	Messages            []interface{} `json:"messages,omitempty"`
+	Messages            []any `json:"messages,omitempty"`
 	Tools               []Tool `json:"tools,omitempty"`
-	Functions      []interface{} `json:"functions,omitempty"`       // compatible with relay platforms
-	ResponseFormat interface{}   `json:"response_format,omitempty"` // response format
+	Functions      []any `json:"functions,omitempty"`       // compatible with relay platforms
+	ResponseFormat any   `json:"response_format,omitempty"` // response format

 	ToolChoice string `json:"tool_choice,omitempty"`

-	Input      map[string]interface{} `json:"input,omitempty"`      // compatible with Alibaba Tongyi Qianwen
-	Parameters map[string]interface{} `json:"parameters,omitempty"` // compatible with Alibaba Tongyi Qianwen
+	Input      map[string]any `json:"input,omitempty"`      // compatible with Alibaba Tongyi Qianwen
+	Parameters map[string]any `json:"parameters,omitempty"` // compatible with Alibaba Tongyi Qianwen
 }

 type Message struct {
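Most of this commit is the mechanical `interface{}` → `any` rename. Since Go 1.18 the builtin package declares `type any = interface{}`, a pure alias, so these edits are cosmetic and change no behavior. A minimal sketch confirming the two spellings are interchangeable:

```go
package main

import "fmt"

func main() {
	// Since Go 1.18: type any = interface{} (an alias, not a new type),
	// so values flow freely between the two spellings.
	var a any = "hello"
	var b interface{} = a
	fmt.Println(a == b) // true
}
```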
@@ -43,7 +43,8 @@ type ChoiceItem struct {
 type Delta struct {
 	Role string `json:"role"`
 	Name string `json:"name"`
-	Content interface{} `json:"content"`
+	Content          any    `json:"content"`
+	ReasoningContent string `json:"reasoning_content,omitempty"`
 	ToolCalls    []ToolCall `json:"tool_calls,omitempty"`
 	FunctionCall struct {
 		Name string `json:"name,omitempty"`
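The new ReasoningContent field is where a DeepSeek reasoning model's `reasoning_content` from each streamed delta lands. A self-contained sketch of how such a chunk decodes, using simplified stand-ins for the real types in geekai/core/types and a made-up sample chunk:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Simplified copies of the response types; the real ones live in geekai/core/types.
type Delta struct {
	Content          any    `json:"content"`
	ReasoningContent string `json:"reasoning_content,omitempty"`
}

type Choice struct {
	Delta Delta `json:"delta"`
}

type Chunk struct {
	Choices []Choice `json:"choices"`
}

func main() {
	// A made-up chunk in the DeepSeek reasoner style: the model is still
	// "thinking", so reasoning_content is set and content is null.
	raw := `{"choices":[{"delta":{"content":null,"reasoning_content":"Let me check..."}}]}`
	var c Chunk
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		panic(err)
	}
	fmt.Println(c.Choices[0].Delta.ReasoningContent) // Let me check...
}
```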
@@ -16,7 +16,7 @@ type MKey interface {
 	string | int | uint
 }
 type MValue interface {
-	*WsClient | *ChatSession | context.CancelFunc | []interface{}
+	*WsClient | *ChatSession | context.CancelFunc | []any
 }
 type LMap[K MKey, T MValue] struct {
 	lock sync.RWMutex
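For context, LMap is the project's generic lock-guarded map; only its key constraint and lock field are visible in this diff. A minimal sketch of such a structure, assuming typical Put/Get accessors (method names and bodies are illustrative guesses, not the project's actual API):

```go
package main

import (
	"fmt"
	"sync"
)

// MKey matches the constraint shown in the diff.
type MKey interface {
	string | int | uint
}

// LMap sketch: a mutex-protected generic map. The real MValue constraint
// unions project-specific types; plain `any` keeps this sketch self-contained.
type LMap[K MKey, T any] struct {
	lock sync.RWMutex
	data map[K]T
}

func NewLMap[K MKey, T any]() *LMap[K, T] {
	return &LMap[K, T]{data: make(map[K]T)}
}

func (m *LMap[K, T]) Put(key K, value T) {
	m.lock.Lock()
	defer m.lock.Unlock()
	m.data[key] = value
}

func (m *LMap[K, T]) Get(key K) (T, bool) {
	m.lock.RLock()
	defer m.lock.RUnlock()
	v, ok := m.data[key]
	return v, ok
}

func main() {
	ctxs := NewLMap[string, []any]()
	ctxs.Put("chat-1", []any{"hello"})
	v, _ := ctxs.Get("chat-1")
	fmt.Println(v) // [hello]
}
```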
@@ -40,7 +40,7 @@ type ChatHandler struct {
 	uploadManager  *oss.UploaderManager
 	licenseService *service.LicenseService
 	ReqCancelFunc  *types.LMap[string, context.CancelFunc] // cancel functions for in-flight HttpClient requests
-	ChatContexts   *types.LMap[string, []interface{}]      // chat context map: [chatId] => []Message
+	ChatContexts   *types.LMap[string, []any]              // chat context map: [chatId] => []Message
 	userService    *service.UserService
 }
@@ -51,7 +51,7 @@ func NewChatHandler(app *core.AppServer, db *gorm.DB, redis *redis.Client, manag
 		uploadManager:  manager,
 		licenseService: licenseService,
 		ReqCancelFunc:  types.NewLMap[string, context.CancelFunc](),
-		ChatContexts:   types.NewLMap[string, []interface{}](),
+		ChatContexts:   types.NewLMap[string, []any](),
 		userService:    userService,
 	}
 }
@@ -348,8 +348,14 @@ func (h *ChatHandler) doRequest(ctx context.Context, req types.ApiRequest, sessi
 		return nil, err
 	}
 	logger.Debugf("chat request body: %+v", req)

-	apiURL := fmt.Sprintf("%s/v1/chat/completions", apiKey.ApiURL)
+	var apiURL string
+	p, _ := url.Parse(apiKey.ApiURL)
+	// if only a BASE_URL without a path is configured, append /v1/chat/completions
+	if p.Path == "" {
+		apiURL = fmt.Sprintf("%s/v1/chat/completions", apiKey.ApiURL)
+	} else {
+		apiURL = apiKey.ApiURL
+	}
 	// create the HttpClient request object
 	var client *http.Client
 	requestBody, err := json.Marshal(req)
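This base-URL-versus-full-path rule appears twice in the commit. A standalone sketch of it (the helper name resolveAPIURL is hypothetical), including one edge case worth knowing: url.Parse reports a trailing slash as a non-empty path, so a configured value like https://host/ would be used verbatim rather than getting the /v1/chat/completions suffix appended:

```go
package main

import (
	"fmt"
	"net/url"
)

// resolveAPIURL mirrors the commit's rule: a bare base URL (empty path) gets
// /v1/chat/completions appended; anything with a path is treated as a
// complete endpoint. Hypothetical helper, for illustration only.
func resolveAPIURL(configured string) string {
	p, err := url.Parse(configured)
	if err != nil || p.Path == "" {
		return fmt.Sprintf("%s/v1/chat/completions", configured)
	}
	return configured
}

func main() {
	fmt.Println(resolveAPIURL("https://api.geekai.pro"))
	// https://api.geekai.pro/v1/chat/completions
	fmt.Println(resolveAPIURL("https://api.geekai.pro/v1/chat/completions"))
	// https://api.geekai.pro/v1/chat/completions (used as-is)
	fmt.Println(resolveAPIURL("https://api.geekai.pro/"))
	// https://api.geekai.pro/ -- trailing slash counts as a path, so no
	// suffix is appended; configure the key without the slash.
}
```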
@@ -89,6 +89,7 @@ func (h *ChatHandler) sendOpenAiMessage(
 	var function model.Function
 	var toolCall = false
 	var arguments = make([]string, 0)
+	var reasoning = false

 	scanner := bufio.NewScanner(response.Body)
 	for scanner.Scan() {
@@ -104,7 +105,9 @@ func (h *ChatHandler) sendOpenAiMessage(
 		if len(responseBody.Choices) == 0 { // Fixed: tolerate the empty first line emitted by the Azure API
 			continue
 		}
-		if responseBody.Choices[0].Delta.Content == nil && responseBody.Choices[0].Delta.ToolCalls == nil {
+		if responseBody.Choices[0].Delta.Content == nil &&
+			responseBody.Choices[0].Delta.ToolCalls == nil &&
+			responseBody.Choices[0].Delta.ReasoningContent == "" {
 			continue
 		}
@@ -152,9 +155,25 @@
 		if responseBody.Choices[0].FinishReason != "" {
 			break // output finished or was interrupted
 		} else { // normal output
-			content := responseBody.Choices[0].Delta.Content
-			contents = append(contents, utils.InterfaceToString(content))
-			utils.SendChunkMsg(ws, content)
+			// handle the reasoning (thinking) output
+			if responseBody.Choices[0].Delta.ReasoningContent != "" {
+				reasoningContent := responseBody.Choices[0].Delta.ReasoningContent
+				if !reasoning {
+					reasoningContent = fmt.Sprintf("<think>%s", reasoningContent)
+					reasoning = true
+				}
+
+				utils.SendChunkMsg(ws, reasoningContent)
+				contents = append(contents, reasoningContent)
+			} else if responseBody.Choices[0].Delta.Content != "" {
+				finalContent := responseBody.Choices[0].Delta.Content
+				if reasoning {
+					finalContent = fmt.Sprintf("</think>%s", responseBody.Choices[0].Delta.Content)
+					reasoning = false
+				}
+				contents = append(contents, utils.InterfaceToString(finalContent))
+				utils.SendChunkMsg(ws, finalContent)
+			}
 		}
 	} // end for
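The reasoning flag above is a two-state machine: the first reasoning chunk opens a <think> tag, and the first answer chunk after reasoning closes it, so the client receives the chain of thought inline as <think>...</think> followed by the answer. A self-contained sketch of the same wrapping rule over a canned chunk sequence (types simplified):

```go
package main

import (
	"fmt"
	"strings"
)

// chunk is a simplified stand-in for one streamed delta.
type chunk struct {
	content   string
	reasoning string
}

// renderStream applies the commit's <think> wrapping rule to a chunk sequence.
func renderStream(chunks []chunk) string {
	var sb strings.Builder
	reasoning := false
	for _, c := range chunks {
		if c.reasoning != "" {
			if !reasoning {
				sb.WriteString("<think>") // first reasoning chunk opens the tag
				reasoning = true
			}
			sb.WriteString(c.reasoning)
		} else if c.content != "" {
			if reasoning {
				sb.WriteString("</think>") // first answer chunk closes it
				reasoning = false
			}
			sb.WriteString(c.content)
		}
	}
	return sb.String()
}

func main() {
	out := renderStream([]chunk{
		{reasoning: "The user asks 2+2. "},
		{reasoning: "That is 4."},
		{content: "2 + 2 = 4"},
	})
	fmt.Println(out) // <think>The user asks 2+2. That is 4.</think>2 + 2 = 4
}
```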
@@ -13,6 +13,7 @@ import (
 	"geekai/core/types"
 	"geekai/store/model"
 	"io"
+	"net/url"
 	"time"

 	"github.com/imroc/req/v3"
@@ -47,7 +48,7 @@ type OpenAIResponse struct {
 }

 func OpenAIRequest(db *gorm.DB, prompt string, modelId int) (string, error) {
-	messages := make([]interface{}, 1)
+	messages := make([]any, 1)
 	messages[0] = types.Message{
 		Role:    "user",
 		Content: prompt,
@@ -55,7 +56,7 @@ func OpenAIRequest(db *gorm.DB, prompt string, modelId int) (string, error) {
 	return SendOpenAIMessage(db, messages, modelId)
 }

-func SendOpenAIMessage(db *gorm.DB, messages []interface{}, modelId int) (string, error) {
+func SendOpenAIMessage(db *gorm.DB, messages []any, modelId int) (string, error) {
 	var chatModel model.ChatModel
 	db.Where("id", modelId).First(&chatModel)
 	if chatModel.Value == "" {
@@ -74,10 +75,17 @@ func SendOpenAIMessage(db *gorm.DB, messages []interface{}, modelId int) (string
 	var response OpenAIResponse
 	client := req.C()
 	if len(apiKey.ProxyURL) > 5 {
-		client.SetProxyURL(apiKey.ApiURL)
+		client.SetProxyURL(apiKey.ProxyURL)
 	}
-	apiURL := fmt.Sprintf("%s/v1/chat/completions", apiKey.ApiURL)
-	logger.Infof("Sending %s request, API KEY:%s, PROXY: %s, Model: %s", apiKey.ApiURL, apiURL, apiKey.ProxyURL, chatModel.Name)
+	var apiURL string
+	p, _ := url.Parse(apiKey.ApiURL)
+	// if only a BASE_URL without a path is configured, append /v1/chat/completions
+	if p.Path == "" {
+		apiURL = fmt.Sprintf("%s/v1/chat/completions", apiKey.ApiURL)
+	} else {
+		apiURL = apiKey.ApiURL
+	}
+	logger.Infof("Sending %s request, API KEY: %s, PROXY: %s, Model: %s", apiURL, apiKey.ApiURL, apiKey.ProxyURL, chatModel.Name)
 	r, err := client.R().SetHeader("Body-Type", "application/json").
 		SetHeader("Authorization", "Bearer "+apiKey.Value).
 		SetBody(types.ApiRequest{
@@ -265,7 +265,7 @@ const reGenerate = (prompt) => {
 // code block

 blockquote {
-  margin 0
+  margin 0 0 0.8rem 0
   background-color: var(--quote-bg-color);
   padding: 0.8rem 1.5rem;
   color: var(--quote-text-color);