kobe6258 3 päivää sitten
vanhempi
commit
76dc820b11

+ 31 - 0
utils/llm/eta_llm/eta_llm_client.go

@@ -27,6 +27,7 @@ const (
 	DEFALUT_PROMPT_NAME            = "default"
 	CONTENT_TYPE_JSON              = "application/json"
 	KNOWLEDGE_BASE_CHAT_API        = "/chat/kb_chat"
+	DOCUMENT_CHAT_API              = "/chat/file_chat"
 	KNOWLEDGE_BASE_SEARCH_DOCS_API = "/knowledge_base/search_docs"
 )
 
@@ -34,6 +35,7 @@ type ETALLMClient struct {
 	*llm.LLMClient
 	LlmModel string
 }
+
 type LLMConfig struct {
 	LlmAddress string `json:"llm_server"`
 	LlmModel   string `json:"llm_model"`
@@ -62,6 +64,35 @@ func GetInstance() llm.LLMService {
 	return etaLlmClient
 }
 
+// DocumentChat sends query against an uploaded document via the
+// /chat/file_chat endpoint and returns the (optionally streaming)
+// HTTP response for the caller to consume.
+//
+// history items are encoded as "role-content" strings; they are split on
+// the first '-' only, so '-' inside the content is preserved. Malformed
+// items without a '-' are skipped instead of panicking.
+func (ds *ETALLMClient) DocumentChat(query string, KnowledgeId string, history []string, stream bool) (llmRes *http.Response, err error) {
+	chatHistory := make([]eta_llm_http.HistoryContent, 0, len(history))
+	for _, historyItemStr := range history {
+		// SplitN with limit 2: only the first '-' separates role from content.
+		parts := strings.SplitN(historyItemStr, "-", 2)
+		if len(parts) != 2 {
+			continue
+		}
+		chatHistory = append(chatHistory, eta_llm_http.HistoryContent{
+			Role:    parts[0],
+			Content: parts[1],
+		})
+	}
+	kbReq := eta_llm_http.DocumentChatRequest{
+		Query:          query,
+		KnowledgeId:    KnowledgeId,
+		History:        chatHistory,
+		TopK:           3,
+		ScoreThreshold: 0.5,
+		Stream:         stream,
+		ModelName:      ds.LlmModel,
+		Temperature:    0.7,
+		MaxTokens:      0, // 0 presumably lets the server apply its default limit — confirm with API docs
+		PromptName:     DEFALUT_PROMPT_NAME,
+	}
+	// NOTE(review): debug fmt.Printf of kbReq.History removed; if fmt is now
+	// unused in this file, drop the import as well.
+	body, err := json.Marshal(kbReq)
+	if err != nil {
+		return
+	}
+	return ds.DoStreamPost(DOCUMENT_CHAT_API, body)
+}
+
 func (ds *ETALLMClient) KnowledgeBaseChat(query string, KnowledgeBaseName string, history []string) (llmRes *http.Response, err error) {
 	ChatHistory := make([]eta_llm_http.HistoryContent, 0)
 	for _, historyItemStr := range history {

+ 12 - 1
utils/llm/eta_llm/eta_llm_http/request.go

@@ -14,7 +14,18 @@ type KbChatRequest struct {
 	PromptName     string           `json:"prompt_name"`
 	ReturnDirect   bool             `json:"return_direct"`
 }
-
+// DocumentChatRequest is the JSON payload sent to the /chat/file_chat
+// endpoint. Field tags mirror the server API; History carries the prior
+// conversation turns as role/content pairs.
+type DocumentChatRequest struct {
+	Query          string           `json:"query"`
+	KnowledgeId    string           `json:"knowledge_id"`
+	TopK           int              `json:"top_k"`
+	ScoreThreshold float32          `json:"score_threshold"`
+	History        []HistoryContent `json:"history"`
+	Stream         bool             `json:"stream"`
+	ModelName      string           `json:"model_name"`
+	Temperature    float32          `json:"temperature"`
+	MaxTokens      int              `json:"max_tokens"`
+	PromptName     string           `json:"prompt_name"`
+}
 type HistoryContent struct {
 	Content string `json:"content"`
 	Role    string `json:"role"`

+ 3 - 0
utils/llm/llm_client.go

@@ -21,5 +21,8 @@ func NewLLMClient(baseURL string, timeout time.Duration) *LLMClient {
 
 type LLMService interface {
 	KnowledgeBaseChat(query string, KnowledgeBaseName string, history []string) (llmRes *http.Response, err error)
+	DocumentChat(query string, KnowledgeId string, history []string, stream bool) (llmRes *http.Response, err error)
 	SearchKbDocs(query string, KnowledgeBaseName string) (data interface{}, err error)
+
+
 }

+ 0 - 2
utils/lock/distrubtLock.go

@@ -34,7 +34,6 @@ func AcquireLock(key string, expiration int, Holder string) bool {
 	if result == 1 {
 		return true
 	}
-	fmt.Printf("加锁失败:")
 	return false
 }
 
@@ -55,6 +54,5 @@ func ReleaseLock(key string, holder string) bool {
 	if result == 1 {
 		return true
 	}
-	fmt.Printf("解锁失败:")
 	return false
 }