package llm

import (
	"encoding/json"
	"eta/eta_api/utils/llm/eta_llm/eta_llm_http"
	"net/http"
	"os"
	"time"
)

// LLMClient is a thin HTTP client for the LLM service, holding the service
// base URL and the underlying *http.Client used for all requests.
type LLMClient struct {
	BaseURL    string
	HttpClient *http.Client
}

// NewLLMClient returns a client for the service at baseURL. The timeout is a
// time.Duration and is applied as-is to every request made through the
// underlying http.Client.
func NewLLMClient(baseURL string, timeout time.Duration) *LLMClient {
	return &LLMClient{
		BaseURL: baseURL,
		HttpClient: &http.Client{
			Timeout: timeout,
		},
	}
}
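
// A minimal usage sketch (the base URL and path below are assumptions for
// illustration, not values defined by this package):
//
//	client := NewLLMClient("http://127.0.0.1:7861", 120*time.Second)
//	resp, err := client.HttpClient.Get(client.BaseURL + "/health")
//	if err != nil {
//		// handle the error
//	}
//	defer resp.Body.Close()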

// LLMService defines the chat, knowledge-base search, and file-upload
// operations exposed by the LLM backend.
type LLMService interface {
	KnowledgeBaseChat(query string, knowledgeBaseName string, history []json.RawMessage) (llmRes *http.Response, err error)
	DocumentChat(query string, knowledgeId string, history []json.RawMessage, stream bool) (llmRes *http.Response, err error)
	SearchKbDocs(query string, knowledgeBaseName string) (data interface{}, err error)
	UploadFileToTemplate(files []*os.File, param map[string]interface{}) (data interface{}, err error)
	FileChat(query string, knowledgeId string, llmModel string, history []json.RawMessage) (resp eta_llm_http.BaseResponse, err error)
}
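
// A sketch of how a caller might depend on the interface rather than a
// concrete client (ChatHandler and its fields are hypothetical names, not
// part of this package):
//
//	type ChatHandler struct {
//		svc LLMService
//	}
//
//	func (h *ChatHandler) Ask(query, knowledgeBaseName string) (*http.Response, error) {
//		return h.svc.KnowledgeBaseChat(query, knowledgeBaseName, nil)
//	}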