Sfoglia il codice sorgente

Merge branch 'feature/deepseek_rag_2.0' into debug

kobe6258 1 settimana fa
parent
commit
49d8ead956

+ 1 - 0
controllers/llm/llm_http/request.go

@@ -24,6 +24,7 @@ type UserChatRecordReq struct {
 type GenerateContentReq struct {
 	WechatArticleId int    `json:"WechatArticleId" description:"公众号Id"`
 	Promote         string `json:"Promote" description:"提示词"`
+	LLMModel        string `json:"LLMModel"`
 }
 type SaveContentReq struct {
 	WechatArticleId int             `json:"WechatArticleId" description:"公众号Id"`

+ 1 - 0
controllers/llm/promote_controller.go

@@ -70,6 +70,7 @@ func (pCtrl *PromoteController) GenerateContent() {
 	res, err := facade.AIGCBaseOnPromote(facade.AIGC{
 		Promote:   gcReq.Promote,
 		ArticleId: gcReq.WechatArticleId,
+		LLMModel:  gcReq.LLMModel,
 	})
 	if err != nil {
 		br.Msg = err.Error()

+ 41 - 4
services/llm/facade/llm_service.go

@@ -49,6 +49,8 @@ func LLMKnowledgeBaseSearchDocs(search LLMKnowledgeSearch) (resp bus_response.Se
 
 // AIGCBaseOnPromote aigc 生成内容
 func AIGCBaseOnPromote(aigc AIGC) (resp bus_response.AIGCEtaResponse, err error) {
+	param := make(map[string]interface{})
+	param["LLM"] = aigc.LLMModel
 	mapping, queryErr := rag.GetArticleKbMapping(aigc.ArticleId)
 	if queryErr != nil && !errors.Is(queryErr, gorm.ErrRecordNotFound) {
 		utils.FileLog.Error("获取文章知识库信息失败,err: %v", queryErr)
@@ -85,7 +87,7 @@ func AIGCBaseOnPromote(aigc AIGC) (resp bus_response.AIGCEtaResponse, err error)
 				utils.FileLog.Error("打开文件失败,err:", err)
 				return
 			}
-			uploadResp, httpErr := llmService.UploadFileToTemplate([]*os.File{file}, nil)
+			uploadResp, httpErr := llmService.UploadFileToTemplate([]*os.File{file}, param)
 			if httpErr != nil {
 				utils.FileLog.Error("上传文件失败,err:", err.Error())
 				err = fmt.Errorf("上传文件失败,err:%v", httpErr)
@@ -111,9 +113,9 @@ func AIGCBaseOnPromote(aigc AIGC) (resp bus_response.AIGCEtaResponse, err error)
 			kbId = mapping.KbId
 		}
 		//知识库对话
-		response, httpErr := llmService.FileChat(aigc.Promote, kbId, nil)
+		response, httpErr := llmService.FileChat(aigc.Promote, kbId, aigc.LLMModel, nil)
 		if httpErr != nil {
-			utils.FileLog.Error("内容生成失败,err:", err.Error())
+			utils.FileLog.Error("内容生成失败,err:", httpErr.Error())
 			err = fmt.Errorf("内容生成失败,err:%v", httpErr)
 			return
 		}
@@ -129,7 +131,41 @@ func AIGCBaseOnPromote(aigc AIGC) (resp bus_response.AIGCEtaResponse, err error)
 			return
 		}
 		if gcResp.Code == 404 {
-			response, httpErr = llmService.FileChat(aigc.Promote, kbId, nil)
+			param["PrevId"] = kbId
+			article, fileErr := rag.GetArticleById(aigc.ArticleId)
+			if fileErr != nil {
+				// 找不到就处理失败
+				utils.FileLog.Error("公众号文章不存在")
+				err = fmt.Errorf("公众号文章不存在")
+				return
+			}
+			if article.TextContent == "" {
+				utils.FileLog.Error("暂不支持纯文本以外的内容生成")
+				err = fmt.Errorf("暂不支持纯文本以外的内容生成")
+				return
+			}
+			// 文章加入到知识库
+			path, fileErr := localService.CreateArticleFile(article)
+			if fileErr != nil {
+				utils.FileLog.Error("创建文章文件失败,err: %v", fileErr)
+				err = fmt.Errorf("创建文章文件失败,err: %v", fileErr)
+				return
+			}
+			defer func() {
+				_ = os.Remove(path)
+			}()
+			file, err = os.Open(path)
+			if err != nil {
+				utils.FileLog.Error("打开文件失败,err:", err)
+				return
+			}
+			_, httpErr = llmService.UploadFileToTemplate([]*os.File{file}, param)
+			if httpErr != nil {
+				utils.FileLog.Error("上传文件失败,err:", httpErr.Error())
+				err = fmt.Errorf("上传文件失败,err:%v", httpErr)
+				return
+			}
+			response, httpErr = llmService.FileChat(aigc.Promote, kbId, aigc.LLMModel, nil)
 			if httpErr != nil {
 				utils.FileLog.Error("内容生成失败,err:%v", httpErr.Error())
 				err = fmt.Errorf("内容生成失败,err:%v", httpErr)
@@ -164,6 +200,7 @@ type LLMKnowledgeSearch struct {
 type AIGC struct {
 	Promote   string
 	ArticleId int
+	LLMModel  string
 }
 
 func dealFileChatResp(response eta_llm_http.BaseResponse) (httpResponse bus_response.FileChatBaseResponse, err error) {

File diff suppressed because it is too large
+ 0 - 0
static/imgs/ai/article/【专题报告】关税来袭黑色怎么看.md


File diff suppressed because it is too large
+ 0 - 0
static/imgs/ai/article/【开源宏观】财政支出力度如何12月财政数据点评.md


File diff suppressed because it is too large
+ 0 - 0
static/imgs/ai/article/巴菲特2025股东信1000字精华版来了附全文.md


+ 21 - 5
utils/llm/eta_llm/eta_llm_client.go

@@ -22,6 +22,10 @@ var (
 	dsOnce sync.Once
 
 	etaLlmClient *ETALLMClient
+	modelRouter  = map[string]string{
+		"deepseek-r1:32b": "/dsr1",
+		"qwq:32b":         "/qwq",
+	}
 )
 
 const (
@@ -59,7 +63,7 @@ func GetInstance() llm.LLMService {
 		}
 		if etaLlmClient == nil {
 			etaLlmClient = &ETALLMClient{
-				LLMClient: llm.NewLLMClient(config.LlmAddress, 120),
+				LLMClient: llm.NewLLMClient(config.LlmAddress, 300),
 				LlmModel:  config.LlmModel,
 			}
 		}
@@ -140,7 +144,7 @@ func (ds *ETALLMClient) KnowledgeBaseChat(query string, KnowledgeBaseName string
 	return ds.DoStreamPost(KNOWLEDGE_BASE_CHAT_API, body)
 }
 
-func (ds *ETALLMClient) FileChat(query string, KnowledgeId string, history []json.RawMessage) (resp eta_llm_http.BaseResponse, err error) {
+func (ds *ETALLMClient) FileChat(query string, KnowledgeId string, llmModel string, history []json.RawMessage) (resp eta_llm_http.BaseResponse, err error) {
 	ChatHistory := make([]eta_llm_http.HistoryContent, 0)
 	for _, historyItemStr := range history {
 		var historyItem eta_llm_http.HistoryContentWeb
@@ -153,8 +157,14 @@ func (ds *ETALLMClient) FileChat(query string, KnowledgeId string, history []jso
 			Role:    historyItem.Role,
 		})
 	}
+	var model string
+	if llmModel != "" {
+		model = llmModel
+	} else {
+		model = ds.LlmModel
+	}
 	kbReq := eta_llm_http.DocumentChatRequest{
-		ModelName:      ds.LlmModel,
+		ModelName:      model,
 		Query:          query,
 		KnowledgeId:    KnowledgeId,
 		History:        ChatHistory,
@@ -171,7 +181,7 @@ func (ds *ETALLMClient) FileChat(query string, KnowledgeId string, history []jso
 		err = fmt.Errorf("内容生成失败,序列化请求参数失败,err:%v", err)
 		return
 	}
-	return ds.DoPost(DOCUMENT_CHAT_API, body)
+	return ds.DoPost(fmt.Sprintf("%s%s", modelRouter[model], DOCUMENT_CHAT_API), body)
 }
 
 func (ds *ETALLMClient) UploadFileToTemplate(files []*os.File, param map[string]interface{}) (data interface{}, err error) {
@@ -179,6 +189,12 @@ func (ds *ETALLMClient) UploadFileToTemplate(files []*os.File, param map[string]
 	if value, ok := param["PrevId"]; ok {
 		pervId = value.(string)
 	}
+	model := ds.LlmModel
+	if value, ok := param["LLM"]; ok {
+		if s, sOk := value.(string); sOk && s != "" {
+			model = s
+		}
+	}
 	docReq := eta_llm_http.UploadTempDocsRequest{
 		ChunkOverlap:   "150",
 		ChunkSize:      "750",
@@ -189,7 +205,7 @@ func (ds *ETALLMClient) UploadFileToTemplate(files []*os.File, param map[string]
 	if err != nil {
 		return
 	}
-	resp, err := ds.DoFile(UPLOAD_TEMP_DOCS_API, body, files)
+	resp, err := ds.DoFile(fmt.Sprintf("%s%s", modelRouter[model], UPLOAD_TEMP_DOCS_API), body, files)
+	if err != nil {
+		return
+	}
 	if !resp.Success {
 		err = errors.New(resp.Msg)
 		return

+ 1 - 1
utils/llm/llm_client.go

@@ -27,5 +27,5 @@ type LLMService interface {
 	DocumentChat(query string, KnowledgeId string, history []json.RawMessage, stream bool) (llmRes *http.Response, err error)
 	SearchKbDocs(query string, KnowledgeBaseName string) (data interface{}, err error)
 	UploadFileToTemplate(files []*os.File, param map[string]interface{}) (data interface{}, err error)
-	FileChat(query string, KnowledgeId string, history []json.RawMessage) (resp eta_llm_http.BaseResponse, err error)
+	FileChat(query string, KnowledgeId string, llmModel string, history []json.RawMessage) (resp eta_llm_http.BaseResponse, err error)
 }

Some files were not shown because too many files changed in this diff