Support DeepSeek native reasoning models
This commit is contained in: parent f080425ee6, commit cd31333d0c
@@ -3,6 +3,7 @@
 ## v4.2.2
 
 - Improvement: when enabling the image captcha feature, first check whether the captcha API service is configured, to prevent the bug where login becomes impossible after turning it on.
+- Improvement: support the native DeepSeek reasoning model API; the chat API key now accepts a full API path, e.g. https://api.geekai.pro/v1/chat/completions
 - New feature: the chat page supports voice playback (TTS) of the AI output.
 - New feature: support Google account login.
 
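The new changelog entry says a key can now be configured either as a bare base URL or as a full chat-completions path. A minimal sketch of that resolution behavior, not part of this commit (the helper name resolveChatURL is hypothetical; the real logic lives in the handler changes further down):

```go
package main

import (
	"fmt"
	"net/url"
)

// resolveChatURL is a hypothetical helper mirroring the behavior described above:
// a bare base URL gets the default chat-completions path appended, while a key
// configured with a full path is used verbatim.
func resolveChatURL(apiURL string) string {
	p, err := url.Parse(apiURL)
	if err != nil || p.Path == "" {
		return fmt.Sprintf("%s/v1/chat/completions", apiURL)
	}
	return apiURL
}

func main() {
	fmt.Println(resolveChatURL("https://api.geekai.pro"))                     // base URL: path appended
	fmt.Println(resolveChatURL("https://api.geekai.pro/v1/chat/completions")) // full path: used as-is
}
```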
@@ -9,20 +9,20 @@ package types
 
 // ApiRequest is the API request entity
 type ApiRequest struct {
 	Model               string  `json:"model,omitempty"`
 	Temperature         float32 `json:"temperature"`
 	MaxTokens           int     `json:"max_tokens,omitempty"`
 	MaxCompletionTokens int     `json:"max_completion_tokens,omitempty"` // compatibility with the GPT o1 models
 	Stream              bool    `json:"stream,omitempty"`
-	Messages            []interface{} `json:"messages,omitempty"`
+	Messages            []any   `json:"messages,omitempty"`
 	Tools               []Tool  `json:"tools,omitempty"`
-	Functions           []interface{} `json:"functions,omitempty"` // compatibility with relay platforms
+	Functions           []any   `json:"functions,omitempty"` // compatibility with relay platforms
-	ResponseFormat      interface{} `json:"response_format,omitempty"` // response format
+	ResponseFormat      any     `json:"response_format,omitempty"` // response format
 
 	ToolChoice string `json:"tool_choice,omitempty"`
 
-	Input      map[string]interface{} `json:"input,omitempty"`      // compatibility with Alibaba Tongyi Qianwen
+	Input      map[string]any `json:"input,omitempty"`      // compatibility with Alibaba Tongyi Qianwen
-	Parameters map[string]interface{} `json:"parameters,omitempty"` // compatibility with Alibaba Tongyi Qianwen
+	Parameters map[string]any `json:"parameters,omitempty"` // compatibility with Alibaba Tongyi Qianwen
 }
 
 type Message struct {
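For illustration only, a sketch of how a request built from these fields serializes with omitempty and []any messages. The struct is re-declared locally (trimmed to a few fields) so the snippet stands alone, and the model name is just an example:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed-down local copy of ApiRequest; the real type lives in geekai/core/types.
type apiRequest struct {
	Model       string  `json:"model,omitempty"`
	Temperature float32 `json:"temperature"`
	Stream      bool    `json:"stream,omitempty"`
	Messages    []any   `json:"messages,omitempty"`
}

func main() {
	req := apiRequest{
		Model:       "deepseek-reasoner", // illustrative model name
		Temperature: 1,
		Stream:      true,
		Messages: []any{
			map[string]any{"role": "user", "content": "Hello"},
		},
	}
	body, _ := json.Marshal(req)
	fmt.Println(string(body))
	// {"model":"deepseek-reasoner","temperature":1,"stream":true,"messages":[{"role":"user","content":"Hello"}]}
}
```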
@@ -41,11 +41,12 @@ type ChoiceItem struct {
 }
 
 type Delta struct {
 	Role    string `json:"role"`
 	Name    string `json:"name"`
-	Content interface{} `json:"content"`
+	Content          any    `json:"content"`
+	ReasoningContent string `json:"reasoning_content,omitempty"`
 	ToolCalls []ToolCall `json:"tool_calls,omitempty"`
 	FunctionCall struct {
 		Name      string `json:"name,omitempty"`
 		Arguments string `json:"arguments,omitempty"`
 	} `json:"function_call,omitempty"`
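The new ReasoningContent field picks up the reasoning_content that DeepSeek's reasoning models stream before the final answer. A standalone sketch of decoding one such chunk; the sample payload is illustrative, not captured from the API:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Minimal local mirror of the Delta fields used in this example.
type delta struct {
	Content          any    `json:"content"`
	ReasoningContent string `json:"reasoning_content,omitempty"`
}

type choice struct {
	Delta delta `json:"delta"`
}

type chunk struct {
	Choices []choice `json:"choices"`
}

func main() {
	// Illustrative streamed chunk: reasoning arrives in reasoning_content
	// while content stays null until the model starts the final answer.
	line := `{"choices":[{"delta":{"content":null,"reasoning_content":"Let me check the dates first."}}]}`
	var c chunk
	if err := json.Unmarshal([]byte(line), &c); err != nil {
		panic(err)
	}
	fmt.Println(c.Choices[0].Delta.ReasoningContent)
}
```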
@@ -16,7 +16,7 @@ type MKey interface {
 	string | int | uint
 }
 type MValue interface {
-	*WsClient | *ChatSession | context.CancelFunc | []interface{}
+	*WsClient | *ChatSession | context.CancelFunc | []any
 }
 type LMap[K MKey, T MValue] struct {
 	lock sync.RWMutex
@@ -40,7 +40,7 @@ type ChatHandler struct {
 	uploadManager  *oss.UploaderManager
 	licenseService *service.LicenseService
 	ReqCancelFunc  *types.LMap[string, context.CancelFunc] // cancel functions for in-flight HttpClient requests
-	ChatContexts   *types.LMap[string, []interface{}]      // chat context map: [chatId] => []Message
+	ChatContexts   *types.LMap[string, []any]              // chat context map: [chatId] => []Message
 	userService    *service.UserService
 }
 
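A rough sketch of the ChatContexts idea, not taken from this commit: each chat id maps to its running message history, held as []any so mixed message shapes fit. A plain map stands in for types.LMap, whose methods this diff does not show, and types.Message is replaced by a local stand-in:

```go
package main

import "fmt"

// Minimal stand-in for types.Message; the real type lives in geekai/core/types.
type message struct {
	Role    string
	Content string
}

func main() {
	// Each chatId keeps its own slice of messages, stored as []any.
	chatContexts := map[string][]any{}

	chatID := "chat-123" // hypothetical id
	chatContexts[chatID] = append(chatContexts[chatID],
		message{Role: "user", Content: "Hello"},
		message{Role: "assistant", Content: "Hi there"},
	)
	fmt.Println(len(chatContexts[chatID])) // 2
}
```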
@@ -51,7 +51,7 @@ func NewChatHandler(app *core.AppServer, db *gorm.DB, redis *redis.Client, manag
 		uploadManager:  manager,
 		licenseService: licenseService,
 		ReqCancelFunc:  types.NewLMap[string, context.CancelFunc](),
-		ChatContexts:   types.NewLMap[string, []interface{}](),
+		ChatContexts:   types.NewLMap[string, []any](),
 		userService:    userService,
 	}
 }
@@ -348,8 +348,14 @@ func (h *ChatHandler) doRequest(ctx context.Context, req types.ApiRequest, sessi
 		return nil, err
 	}
 	logger.Debugf("对话请求消息体:%+v", req)
-	apiURL := fmt.Sprintf("%s/v1/chat/completions", apiKey.ApiURL)
+	var apiURL string
+	p, _ := url.Parse(apiKey.ApiURL)
+	// if only a base URL without a path is configured, append /v1/chat/completions
+	if p.Path == "" {
+		apiURL = fmt.Sprintf("%s/v1/chat/completions", apiKey.ApiURL)
+	} else {
+		apiURL = apiKey.ApiURL
+	}
 	// create the HttpClient request object
 	var client *http.Client
 	requestBody, err := json.Marshal(req)
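The new branch keys off url.Parse: a bare base URL parses to an empty Path, while a fully qualified endpoint does not. A quick standalone check of that assumption:

```go
package main

import (
	"fmt"
	"net/url"
)

func main() {
	base, _ := url.Parse("https://api.geekai.pro")
	full, _ := url.Parse("https://api.geekai.pro/v1/chat/completions")
	fmt.Printf("%q\n", base.Path) // "" -> default path gets appended
	fmt.Printf("%q\n", full.Path) // "/v1/chat/completions" -> used verbatim
}
```

Note that a base URL written with a trailing slash parses to Path "/" and would therefore be used verbatim under this check.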
@@ -89,6 +89,7 @@ func (h *ChatHandler) sendOpenAiMessage(
 	var function model.Function
 	var toolCall = false
 	var arguments = make([]string, 0)
+	var reasoning = false
 
 	scanner := bufio.NewScanner(response.Body)
 	for scanner.Scan() {
@@ -104,7 +105,9 @@ func (h *ChatHandler) sendOpenAiMessage(
 		if len(responseBody.Choices) == 0 { // Fixed: the Azure API may emit an empty first line
 			continue
 		}
-		if responseBody.Choices[0].Delta.Content == nil && responseBody.Choices[0].Delta.ToolCalls == nil {
+		if responseBody.Choices[0].Delta.Content == nil &&
+			responseBody.Choices[0].Delta.ToolCalls == nil &&
+			responseBody.Choices[0].Delta.ReasoningContent == "" {
 			continue
 		}
 
@@ -152,9 +155,25 @@ func (h *ChatHandler) sendOpenAiMessage(
 		if responseBody.Choices[0].FinishReason != "" {
 			break // output finished or was interrupted
 		} else { // normal output
-			content := responseBody.Choices[0].Delta.Content
-			contents = append(contents, utils.InterfaceToString(content))
-			utils.SendChunkMsg(ws, content)
+			// handle the reasoning (thinking) stream
+			if responseBody.Choices[0].Delta.ReasoningContent != "" {
+				reasoningContent := responseBody.Choices[0].Delta.ReasoningContent
+				if !reasoning {
+					reasoningContent = fmt.Sprintf("<think>%s", reasoningContent)
+					reasoning = true
+				}
+
+				utils.SendChunkMsg(ws, reasoningContent)
+				contents = append(contents, reasoningContent)
+			} else if responseBody.Choices[0].Delta.Content != "" {
+				finalContent := responseBody.Choices[0].Delta.Content
+				if reasoning {
+					finalContent = fmt.Sprintf("</think>%s", responseBody.Choices[0].Delta.Content)
+					reasoning = false
+				}
+				contents = append(contents, utils.InterfaceToString(finalContent))
+				utils.SendChunkMsg(ws, finalContent)
+			}
 		}
 	} // end for
 
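The net effect on the stream the client receives: the first reasoning chunk is prefixed with <think>, and the first answer chunk after reasoning closes it with </think>. A standalone sketch of that wrapping state machine over an illustrative chunk sequence (not actual API output):

```go
package main

import (
	"fmt"
	"strings"
)

type part struct {
	reasoning string // reasoning_content of the chunk, if any
	content   string // content of the chunk, if any
}

func main() {
	// Illustrative chunk sequence; real chunks come from the streaming API.
	parts := []part{
		{reasoning: "Check the premise. "},
		{reasoning: "Looks consistent."},
		{content: "Yes, "},
		{content: "that holds."},
	}

	var out strings.Builder
	reasoning := false
	for _, p := range parts {
		if p.reasoning != "" {
			if !reasoning {
				out.WriteString("<think>") // open the thinking block once
				reasoning = true
			}
			out.WriteString(p.reasoning)
		} else if p.content != "" {
			if reasoning {
				out.WriteString("</think>") // close it before the final answer
				reasoning = false
			}
			out.WriteString(p.content)
		}
	}
	fmt.Println(out.String())
	// <think>Check the premise. Looks consistent.</think>Yes, that holds.
}
```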
@@ -13,6 +13,7 @@ import (
 	"geekai/core/types"
 	"geekai/store/model"
 	"io"
+	"net/url"
 	"time"
 
 	"github.com/imroc/req/v3"
@@ -47,7 +48,7 @@ type OpenAIResponse struct {
 }
 
 func OpenAIRequest(db *gorm.DB, prompt string, modelId int) (string, error) {
-	messages := make([]interface{}, 1)
+	messages := make([]any, 1)
 	messages[0] = types.Message{
 		Role:    "user",
 		Content: prompt,
@@ -55,7 +56,7 @@ func OpenAIRequest(db *gorm.DB, prompt string, modelId int) (string, error) {
 	return SendOpenAIMessage(db, messages, modelId)
 }
 
-func SendOpenAIMessage(db *gorm.DB, messages []interface{}, modelId int) (string, error) {
+func SendOpenAIMessage(db *gorm.DB, messages []any, modelId int) (string, error) {
 	var chatModel model.ChatModel
 	db.Where("id", modelId).First(&chatModel)
 	if chatModel.Value == "" {
@@ -74,10 +75,17 @@ func SendOpenAIMessage(db *gorm.DB, messages []interface{}, modelId int) (string
 	var response OpenAIResponse
 	client := req.C()
 	if len(apiKey.ProxyURL) > 5 {
-		client.SetProxyURL(apiKey.ApiURL)
+		client.SetProxyURL(apiKey.ProxyURL)
 	}
-	apiURL := fmt.Sprintf("%s/v1/chat/completions", apiKey.ApiURL)
-	logger.Infof("Sending %s request, API KEY:%s, PROXY: %s, Model: %s", apiKey.ApiURL, apiURL, apiKey.ProxyURL, chatModel.Name)
+	var apiURL string
+	p, _ := url.Parse(apiKey.ApiURL)
+	// if only a base URL without a path is configured, append /v1/chat/completions
+	if p.Path == "" {
+		apiURL = fmt.Sprintf("%s/v1/chat/completions", apiKey.ApiURL)
+	} else {
+		apiURL = apiKey.ApiURL
+	}
+	logger.Infof("Sending %s request, API KEY:%s, PROXY: %s, Model: %s", apiURL, apiKey.ApiURL, apiKey.ProxyURL, chatModel.Name)
 	r, err := client.R().SetHeader("Body-Type", "application/json").
 		SetHeader("Authorization", "Bearer "+apiKey.Value).
 		SetBody(types.ApiRequest{
@@ -265,7 +265,7 @@ const reGenerate = (prompt) => {
 // quote block
 
 blockquote {
-	margin 0
+	margin 0 0 0.8rem 0
 	background-color: var(--quote-bg-color);
 	padding: 0.8rem 1.5rem;
 	color: var(--quote-text-color);