@@ -11,6 +11,7 @@ import (
 	"fmt"
 	"io"
 	"net/http"
+	"strings"
 	"sync"
 )
@@ -37,7 +38,7 @@ func GetInstance() llm.LLMService {
 	dsOnce.Do(func() {
 		if etaLlmClient == nil {
 			etaLlmClient = &ETALLMClient{
-				LLMClient: llm.NewLLMClient(utils.LLM_SERVER, 10),
+				LLMClient: llm.NewLLMClient(utils.LLM_SERVER, 120),
 				LlmModel:  utils.LLM_MODEL,
 			}
 		}
@@ -45,18 +46,15 @@ func GetInstance() llm.LLMService {
 	return etaLlmClient
 }
 
-func (ds *ETALLMClient) KnowledgeBaseChat(query string, KnowledgeBaseName string, history []interface{}) (llmRes *http.Response, err error) {
+func (ds *ETALLMClient) KnowledgeBaseChat(query string, KnowledgeBaseName string, history []string) (llmRes *http.Response, err error) {
 	ChatHistory := make([]eta_llm_http.HistoryContent, 0)
-	ChatHistory = append(ChatHistory, eta_llm_http.HistoryContent{
-		Content: query,
-		Role:    "user",
-	})
-	for _, historyItem := range history {
-		historyItemMap := historyItem.(map[string]interface{})
-		ChatHistory = append(ChatHistory, eta_llm_http.HistoryContent{
-			Content: historyItemMap["content"].(string),
-			Role:    historyItemMap["role"].(string),
-		})
+	for _, historyItemStr := range history {
+		str := strings.Split(historyItemStr, "-")
+		historyItem := eta_llm_http.HistoryContent{
+			Role:    str[0],
+			Content: str[1],
+		}
+		ChatHistory = append(ChatHistory, historyItem)
 	}
 	kbReq := eta_llm_http.KbChatRequest{
 		Query: query,
@@ -72,7 +70,7 @@ func (ds *ETALLMClient) KnowledgeBaseChat(query string, KnowledgeBaseName string
 		PromptName:   DEFALUT_PROMPT_NAME,
 		ReturnDirect: false,
 	}
-
+	fmt.Printf("%v", kbReq.History)
 	body, err := json.Marshal(kbReq)
 	if err != nil {
 		return
@@ -149,10 +147,6 @@ func parseResponse(response *http.Response) (baseResp eta_llm_http.BaseResponse,
 	return
 }
 func ParseStreamResponse(response *http.Response) (contentChan chan string, errChan chan error, closeChan chan struct{}) {
-	defer func() {
-		_ = response.Body.Close()
-
-	}()
 	contentChan = make(chan string, 10)
 	errChan = make(chan error, 10)
 	closeChan = make(chan struct{})
@@ -167,8 +161,17 @@ func ParseStreamResponse(response *http.Response) (contentChan chan string, errC
 			if line == "" {
 				continue
 			}
+			// ignore "ping" keep-alive lines
+			if strings.HasPrefix(line, ": ping") {
+				continue
+			}
+			// strip the "data: " prefix
+			if strings.HasPrefix(line, "data: ") {
+				line = strings.TrimPrefix(line, "data: ")
+			}
 			var chunk eta_llm_http.ChunkResponse
 			if err := json.Unmarshal([]byte(line), &chunk); err != nil {
+				fmt.Println("line that failed to parse: " + line)
 				errChan <- fmt.Errorf("failed to parse JSON chunk: %w", err)
 				return
 			}
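
For context on the history change: KnowledgeBaseChat now takes history as []string, with each entry apparently encoding one turn as a "role-content" string that is split on "-". A minimal defensive sketch of that parsing, assuming the same "role-content" convention (the helper name is hypothetical, not part of the diff; SplitN keeps any further "-" inside the message body, and malformed entries are skipped instead of panicking on str[1]):

// buildChatHistory is an illustrative sketch, not the code in the diff above.
// It converts "role-content" strings into eta_llm_http.HistoryContent items.
func buildChatHistory(history []string) []eta_llm_http.HistoryContent {
	chatHistory := make([]eta_llm_http.HistoryContent, 0, len(history))
	for _, item := range history {
		// Split into at most two parts so a "-" inside the content is preserved.
		parts := strings.SplitN(item, "-", 2)
		if len(parts) != 2 {
			continue // entry without a "-" separator; skip rather than panic
		}
		chatHistory = append(chatHistory, eta_llm_http.HistoryContent{
			Role:    parts[0],
			Content: parts[1],
		})
	}
	return chatHistory
}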
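
The stream-parsing additions follow standard SSE framing: comment/keep-alive lines start with ":" (here ": ping") and payload lines carry a "data: " prefix before the JSON chunk. A standalone sketch of that normalization step, mirroring the checks added in ParseStreamResponse (the helper name is hypothetical):

// normalizeSSELine reports whether a raw SSE line carries a JSON payload,
// returning the line with its "data: " prefix removed when it does.
func normalizeSSELine(line string) (payload string, ok bool) {
	if line == "" || strings.HasPrefix(line, ": ping") {
		return "", false // blank or keep-alive line, nothing to decode
	}
	if strings.HasPrefix(line, "data: ") {
		line = strings.TrimPrefix(line, "data: ")
	}
	return line, true
}

The removal of the early defer response.Body.Close() fits this flow: the response body is presumably still being consumed by the goroutine that feeds contentChan after ParseStreamResponse returns, so a defer that closes it on return would end the stream prematurely.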