Browse Source

合并冲突

kobe6258 1 week ago
parent
commit
f602086239

+ 140 - 0
controllers/kpler.go

@@ -0,0 +1,140 @@
+package controllers
+
+import (
+	"encoding/json"
+	"eta/eta_data_analysis/models"
+	"eta/eta_data_analysis/services/kpler"
+	"fmt"
+
+	"github.com/beego/beego/v2/server/web"
+)
+
// KplerController handles the HTTP endpoints that expose Kpler
// (ship-tracking / commodity-flow) data to callers.
type KplerController struct {
	web.Controller
}
+
+// 获取开普勒数据
+// @Title GetKplerData
+// @Description 获取开普勒数据
+// @Param body body models.KplerFlowDataLibReq true "请求参数"
+// @Success 200 {object} models.BaseResponse
+// @router /getFlowData [post]
+func (this *KplerController) GetFlowData() {
+    //获取入参
+	br := new(models.BaseResponse).Init()
+	defer func() {
+		if br.ErrMsg == "" {
+			br.IsSendEmail = false
+		}
+		this.Data["json"] = br
+		this.ServeJSON()
+	}()
+	var params models.KplerFlowDataLibReq
+	if e := json.Unmarshal(this.Ctx.Input.RequestBody, &params); e != nil {
+		br.Msg = "参数解析异常"
+		br.ErrMsg = fmt.Sprintf("参数解析失败, %v", e)
+		return
+	}
+	
+	if params.Granularity == "" {
+		br.Msg = "请选择频度"
+		return
+	}
+	if params.Split == "" {
+		br.Msg = "请选择拆分类型"
+		return
+	}
+	if params.Unit == "" {
+		br.Msg = "请选择单位"
+		return
+	}
+	if params.FlowDirection == "" {
+		br.Msg = "请选择流向"
+		return
+	}
+	
+	data, err := kpler.GetKplerData(params)
+	if err != nil {
+		br.Msg = "获取数据失败"
+		br.ErrMsg = fmt.Sprintf("获取数据失败, %v", err)
+		return
+	}
+	br.Data = data
+	br.Ret = 200
+	br.Success = true
+	br.Msg = "获取成功"
+}
+
// GetProductData returns the Kpler product reference list.
// @Title GetProductData
// @Description fetch Kpler product data
// @Param body body models.KplerProductLibReq true "request parameters"
// @Success 200 {object} models.BaseResponse
// @router /getProductData [post]
func (this *KplerController) GetProductData() {
	// Response envelope; serialized on every exit path by the deferred block.
	br := new(models.BaseResponse).Init()
	defer func() {
		// Suppress the alarm e-mail unless a low-level error was recorded.
		if br.ErrMsg == "" {
			br.IsSendEmail = false
		}
		this.Data["json"] = br
		this.ServeJSON()
	}()
	// Decode request parameters (all filter fields are plain strings).
	var params models.KplerProductLibReq
	if e := json.Unmarshal(this.Ctx.Input.RequestBody, &params); e != nil {
		br.Msg = "参数解析异常"
		br.ErrMsg = fmt.Sprintf("参数解析失败, %v", e)
		return
	}
	data, err := kpler.GetProducts(params)
	if err != nil {
		br.Msg = "获取数据失败"
		br.ErrMsg = fmt.Sprintf("获取数据失败, %v", err)
		return
	}
	br.Data = data
	br.Ret = 200
	br.Success = true
	br.Msg = "获取成功"
}
+
+// 获取区域数据
+// @Title GetZoneData
+// @Description 获取区域数据
+// @Param body body models.KplerZoneDataLibReq true "请求参数"
+// @Success 200 {object} models.BaseResponse
+// @router /getZoneData [post]
+func (this *KplerController) GetZoneData() {
+	//获取入参
+	br := new(models.BaseResponse).Init()
+	defer func() {
+		if br.ErrMsg == "" {
+			br.IsSendEmail = false
+		}
+		this.Data["json"] = br
+		this.ServeJSON()
+	}()
+	var params models.KplerZoneDataLibReq
+	if e := json.Unmarshal(this.Ctx.Input.RequestBody, &params); e != nil {
+		//br.Msg = "参数解析异常"
+		// br.ErrMsg = fmt.Sprintf("参数解析失败, %v", e)
+		// return
+	}
+	token, err := kpler.GetKplerAccessToken(false)
+	if err != nil {
+		br.Msg = "获取token失败"
+		br.ErrMsg = fmt.Sprintf("获取token失败, %v", err)
+		return
+	}
+	data, err := kpler.GetZonesByApi(token, params.AncestorName, params.DescendantType)
+	if err != nil {
+		br.Msg = "获取数据失败"
+		br.ErrMsg = fmt.Sprintf("获取数据失败, %v", err)
+		return
+	}
+	br.Data = data
+	br.Ret = 200
+	br.Success = true
+	br.Msg = "获取成功"
+}

+ 126 - 0
models/base_from_kpler.go

@@ -0,0 +1,126 @@
+package models
+
// BaseFromKpler is a single Kpler time-series observation.
type BaseFromKpler struct {
	Date          string  // observation date — format not shown here, TODO confirm with producer
	Value         float64 // observed value
	PeriodEndDate string  // end date of the period the value covers
}
+
// KplerFlowDataLibReq is the request payload for querying Kpler flow data.
//
// Upstream defaults when the boolean switches are left empty (from the
// original inline notes): withIntraCountry=false, withIntraRegion=true,
// withForecast=true, withFreightView=false, withProductEstimation=false.
type KplerFlowDataLibReq struct {
	Products              string `description:"产品名称"`
	FromZones             string `description:"来源区域ID,对应Location"`
	ToZones               string `description:"流向区域ID"`
	Split                 string `description:"拆分类型"`
	FlowDirection         string `description:"流向,对应periodicity:export/import"`
	Granularity           string `description:"粒度: daily/weekly/monthly/yearly"`
	Unit                  string `description:"单位"`
	FromCountries         string `description:"来源国家"`
	ToCountries           string `description:"流向国家"`
	FromInstallations     string `description:"来源安装"`
	ToInstallations       string `description:"流向安装"`
	OnlyRealized          string `description:"是否只查询已实现数据"`
	VesselTypes           string `description:"船型"`
	VesselTypesAlt        string `description:"船型"`
	WithIntraCountry      string `description:"是否查询同国数据"`
	WithIntraRegion       string `description:"是否查询同区域数据"`
	WithForecast          string `description:"是否查询预测数据"`
	WithFreightView       string `description:"是否查询运费数据"`
	WithProductEstimation string `description:"是否查询产品估算数据"`
	StartDate             string `description:"开始日期"`
	EndDate               string `description:"结束日期"`
}
+
// KplerFlowDataResp wraps the flow series returned to callers together with
// the underlying API query URL used to produce them.
type KplerFlowDataResp struct {
	ApiQueryUrl string
	List []KplerFlowData 
}

// KplerFlowData is one split item (e.g. a country or product) and its series.
type KplerFlowData struct {
	SplitItem string 
	IndexData []KplerBaseExcelData 
}

// KplerBaseExcelData is one (date, value) pair; both kept as raw strings.
type KplerBaseExcelData struct {
	DataTime string
	Value    string
}
+
// KplerProduct mirrors one row of the semicolon-separated products listing
// returned by the Kpler API. Column header, for reference:
// "Id (Product);Name;Type (Product);Family;Family Id;Group;Group Id;Product;Product Id;Grade;Grade Id;Density (Product);Density Unit;Energy Density;Energy Density Unit;Expansion Ratio"
type KplerProduct struct {
	Id string
	Name string
	Type string
	Family string
	FamilyId string
	Group string
	GroupId string
	Product string
	ProductId string
	Grade string
	GradeId string
	Density string
	DensityUnit string
	EnergyDensity string
	EnergyDensityUnit string
	ExpansionRatio string
}
+
// KplerZone mirrors one row of the zones listing returned by the Kpler API.
// Columns: Ancestor Id;Ancestor Name;Ancestor Type;Descendant Id;Descendant Name;Descendant Type
type KplerZone struct {
	AncestorId string
	AncestorName string
	AncestorType string
	DescendantId string
	DescendantName string
	DescendantType string
}

// KplerZoneDataLibReq filters the zone query by ancestor name and the
// descendant type to drill into.
type KplerZoneDataLibReq struct {
	Token string `description:"token"`
	AncestorName string `description:"祖先名称"`
	DescendantType string `description:"子类型"`
}
+
// HandleKplerExcelData is one Kpler index extracted from an excel workbook,
// in the shape expected by the edb-lib import endpoint.
type HandleKplerExcelData struct {
	ClassifyName       string `description:"指标目录"`
	ParentClassifyName string `description:"父级指标目录"`
	ClassifySort       int    `description:"指标目录排序号"`
	IndexName          string `description:"指标名称"`
	IndexCode          string `description:"指标编码"`
	Unit               string `description:"单位"`
	Sort               int    `description:"排序号"`
	Frequency          string `description:"频度"`
	ProductNames       string `description:"产品名称"`
	FromZoneNames      string `description:"区域名称"`
	ToZoneNames        string `description:"区域名称"`
	FlowDirection      string `description:"流向"`
	Granularity        string `description:"粒度"`
	Split              string `description:"拆分类型"`
	SplitName          string `description:"拆分类型名称"`
	ExcelQueryUrl      string `description:"Excel查询URL"`
	// ExcelDataMap maps a date string to its raw value string — presumably
	// keys follow utils.FormatDate; confirm against the producer.
	ExcelDataMap       map[string]string
}

// HandleKplerExcelDataReq is the batch payload sent to the edb-lib service.
type HandleKplerExcelDataReq struct {
	List         []*HandleKplerExcelData
	TerminalCode string `description:"编码"`
}
+
// KplerProductLibReq is the filter set for the Kpler products query. All
// fields are optional and passed through to the API as query parameters.
type KplerProductLibReq struct {
	AncestorFamilyIds    string `description:"祖先家族ID"`
	AncestorFamilyNames  string `description:"祖先家族名称"`
	AncestorGroupIds     string `description:"祖先组ID"`
	AncestorGroupNames   string `description:"祖先组名称"`
	AncestorProductIds   string `description:"祖先产品ID"`
	AncestorProductNames string `description:"祖先产品名称"`
	AncestorGradeIds     string `description:"祖先等级ID"`
	AncestorGradeNames   string `description:"祖先等级名称"`
	Products             string `description:"产品"`
	ProductIds           string `description:"产品ID"`
}

+ 74 - 0
models/kpler_request.go

@@ -0,0 +1,74 @@
+package models
+
// KplerExcelLocation is a location (zone or installation) with ID and name,
// as embedded in the JSON payload of an excel GETFLOWS formula.
type KplerExcelLocation struct {
	ID   string `json:"id"`
	Name string `json:"name"`
}

// KplerExcelProduct is a product with ID and name, as embedded in the JSON
// payload of an excel GETFLOWS formula.
type KplerExcelProduct struct {
	ID   string `json:"id"`
	Name string `json:"name"`
}
+
+// KplerFlowsRequest represents the request structure for Kpler flows API
+type KplerFlowsRequest struct {
+	Platform            string          `json:"platform"`
+	Origins            []KplerExcelLocation `json:"origins"`
+	Destinations       []KplerExcelLocation `json:"destinations"`
+	FromInstallations  []KplerExcelLocation `json:"fromInstallations"`
+	ToInstallations    []KplerExcelLocation `json:"toInstallations"`
+	FlowDirection      string          `json:"flowDirection"`
+	Products           []KplerExcelProduct  `json:"products"`
+	Unit               string          `json:"unit"`
+	IsProductEstimation bool           `json:"isProductEstimation"`
+	IsIntracountry     bool           `json:"isIntracountry"`
+	IsIntraRegion      bool           `json:"isIntraRegion"`
+	IsWithForecast     bool           `json:"isWithForecast"`
+	Granularity        string         `json:"granularity"`
+	VesselClassification string       `json:"vesselClassification"`
+	VesselsTypes        []string      `json:"vesselsTypes"`
+	Split               string        `json:"split"`
+	IsFreightView       bool          `json:"isFreightView"`
+	IsWithPeriodEndTime bool          `json:"isWithPeriodEndTime"`
+	Projection          string        `json:"projection"`
+	SelectedPreset      string        `json:"selectedPreset"`
+	StartDate           *string       `json:"startDate"`
+	EndDate             *string       `json:"endDate"`
+}
+
// ZoneInfo represents a geographical zone in the Kpler API.
type ZoneInfo struct {
	ID   string `json:"id"`
	Name string `json:"name"`
}

// ProductInfo represents a product in the Kpler API.
type ProductInfo struct {
	ID   string `json:"id"`
	Name string `json:"name"`
}

// KplerDataPoint is a single data point: period end date plus its value.
// NOTE(review): Value stays a raw string as read from the sheet; any numeric
// parsing happens downstream — confirm.
type KplerDataPoint struct {
	EndDate string  `json:"end_date"`
	Value   string `json:"val"`
}
+
// KplerExcelIndexData is one index (one data column) extracted from an excel
// workbook: its title, column header, series, and originating formula.
type KplerExcelIndexData struct {
	Title      string            // index title from the sheet's first row
	Name       string            // column header (split item name)
	DataPoints []KplerDataPoint  // (end date, value) series
	Request    string        // raw GETFLOWS formula text that produced the column
}

// KplerFlowsExcelRequest is an alias for KplerFlowsRequest.
type KplerFlowsExcelRequest = KplerFlowsRequest

// DataPoint pairs a raw cell value with its sheet row index.
type DataPoint struct {
	Value string
	Row   int
}

+ 25 - 0
routers/commentsRouter.go

@@ -7,10 +7,35 @@ import (
 
 
 func init() {
 func init() {
 
 
+<<<<<<< HEAD
     beego.GlobalControllerRouter["eta/eta_data_analysis/controllers:PdfController"] = append(beego.GlobalControllerRouter["eta/eta_data_analysis/controllers:PdfController"],
     beego.GlobalControllerRouter["eta/eta_data_analysis/controllers:PdfController"] = append(beego.GlobalControllerRouter["eta/eta_data_analysis/controllers:PdfController"],
         beego.ControllerComments{
         beego.ControllerComments{
             Method: "GeneratePdf",
             Method: "GeneratePdf",
             Router: `/generate_pdf`,
             Router: `/generate_pdf`,
+=======
+    beego.GlobalControllerRouter["eta/eta_data_analysis/controllers:KplerController"] = append(beego.GlobalControllerRouter["eta/eta_data_analysis/controllers:KplerController"],
+        beego.ControllerComments{
+            Method: "GetFlowData",
+            Router: `/getFlowData`,
+            AllowHTTPMethods: []string{"post"},
+            MethodParams: param.Make(),
+            Filters: nil,
+            Params: nil})
+
+    beego.GlobalControllerRouter["eta/eta_data_analysis/controllers:KplerController"] = append(beego.GlobalControllerRouter["eta/eta_data_analysis/controllers:KplerController"],
+        beego.ControllerComments{
+            Method: "GetProductData",
+            Router: `/getProductData`,
+            AllowHTTPMethods: []string{"post"},
+            MethodParams: param.Make(),
+            Filters: nil,
+            Params: nil})
+
+    beego.GlobalControllerRouter["eta/eta_data_analysis/controllers:KplerController"] = append(beego.GlobalControllerRouter["eta/eta_data_analysis/controllers:KplerController"],
+        beego.ControllerComments{
+            Method: "GetZoneData",
+            Router: `/getZoneData`,
+>>>>>>> master
             AllowHTTPMethods: []string{"post"},
             AllowHTTPMethods: []string{"post"},
             MethodParams: param.Make(),
             MethodParams: param.Make(),
             Filters: nil,
             Filters: nil,

+ 6 - 0
routers/router.go

@@ -24,9 +24,15 @@ func init() {
 				&controllers.RzdController{},
 				&controllers.RzdController{},
 			),
 			),
 		),
 		),
+<<<<<<< HEAD
 		web.NSNamespace("/pdf",
 		web.NSNamespace("/pdf",
 			web.NSInclude(
 			web.NSInclude(
 				&controllers.PdfController{},
 				&controllers.PdfController{},
+=======
+		web.NSNamespace("/kpler",
+			web.NSInclude(
+				&controllers.KplerController{},
+>>>>>>> master
 			),
 			),
 		),
 		),
 	)
 	)

+ 199 - 0
services/base_from_kpler.go

@@ -0,0 +1,199 @@
+package services
+
import (
	"context"
	"encoding/json"
	"fmt"
	"io/fs"
	"net/url"
	"os"
	"path/filepath"
	"strings"
	"syscall"
	"time"

	"eta/eta_data_analysis/models"
	"eta/eta_data_analysis/services/kpler"
	"eta/eta_data_analysis/utils"

	"github.com/patrickmn/go-cache"
)
+
+func KplerExcelDataWatch(cont context.Context) (err error) {
+	fmt.Println("kplerExcelWatch start")
+	utils.FileLog.Info("kplerExcelWatch start")
+	defer func() {
+		if err != nil {
+			fmt.Println("kplerExcelDataWatch Err:" + err.Error())
+			utils.FileLog.Info(fmt.Sprintf("kplerExcelDataWatch, Err: %s", err))
+		}
+	}()
+	cacheClient := utils.CacheClient
+	if cacheClient == nil {
+		 utils.CacheClient = cache.New(365*24*time.Hour, 365*24*time.Hour)
+	}
+	err = filepath.Walk(utils.KplerExcelFilePath, func(path string, info fs.FileInfo, err error) error {
+		if err != nil {
+			return err
+		}
+		if !info.IsDir() {
+			fileInfo, e := os.Stat(path)
+			if e != nil {
+				err = e
+				fmt.Println("os.Stat:", err.Error())
+				utils.FileLog.Info(fmt.Sprintf("os.Stat, Err: %s", err))
+				return err
+			}
+			winFileAttr := fileInfo.Sys().(*syscall.Win32FileAttributeData)
+			modifyTimeStr := utils.SecondToTime(winFileAttr.LastWriteTime.Nanoseconds() / 1e9).Format(utils.FormatDateTime)
+			fmt.Println("文件的修改时间modifyTimeStr:", modifyTimeStr)
+			existModifyTime, ok := cacheClient.Get(path)
+			fmt.Println("缓存里的时间existModifyTime:", existModifyTime)
+			if ok {
+				existModifyTimeStr := existModifyTime.(string)
+				if existModifyTimeStr != modifyTimeStr {
+					 err = GetKplerDataByExcel(path)
+				}
+			} else {
+				 err = GetKplerDataByExcel(path)
+			}
+			cacheClient.Delete(path)
+			cacheClient.Set(path, modifyTimeStr, 24*time.Hour)
+		}
+		return nil
+	})
+	return
+}
+
+// Main function for standalone testing
+func GetKplerDataByExcel(filePath string) (err error) {
+	//filePath = "services/kpler/crude.xlsx"
+	fmt.Println("Starting Kpler data processing...")
+
+	// Process the Excel data
+	indexData, err :=kpler.ProcessKplerData(filePath)
+	if err != nil {
+		fmt.Printf("Error processing Excel data: %v\n", err)
+		return
+	}
+    indexList := make([]*models.HandleKplerExcelData, 0)
+	// Print the processed data
+	for k, index := range indexData {
+		// 解析请求参数
+		if index.Request != "" {
+			flowsRequestItem, err := kpler.ParseSpecificKplerFormulaV2(index.Request)
+			if err != nil {
+				fmt.Printf("Error parsing formula: %v\n", err)
+				continue
+			}
+			
+			indexName := fmt.Sprintf("%s_%s", index.Title, index.Name)
+			unit := flowsRequestItem.Unit
+			sort := k
+			classifyName := ""
+			productNameSlice := flowsRequestItem.Products
+			productNames := ""
+			if len(productNameSlice) > 0 {
+				for _, productName := range productNameSlice {
+					if classifyName == "" {
+						classifyName = productName.Name
+					}
+					productNames += productName.Name + ","
+				}
+			}
+			
+			productNames = strings.TrimSuffix(productNames, ",")
+			fromZoneNameSlice := flowsRequestItem.Origins
+			fromZoneNames := ""
+			if len(fromZoneNameSlice) > 0 {
+				for _, fromZoneName := range fromZoneNameSlice {
+					fromZoneNames += fromZoneName.Name + ","
+				}
+			}
+			fromZoneNames = strings.TrimSuffix(fromZoneNames, ",")
+			toZoneNames := ""
+			toZoneNameSlice := flowsRequestItem.Destinations
+			if len(toZoneNameSlice) > 0 {
+				for _, toZoneName := range toZoneNameSlice {
+					toZoneNames += toZoneName.Name + ","
+				}
+			}
+			toZoneNames = strings.TrimSuffix(toZoneNames, ",")
+			flowDirection := flowsRequestItem.FlowDirection
+			granularity := flowsRequestItem.Granularity
+			split := flowsRequestItem.Split
+			excelDataMap := make(map[string]string)
+            if len(index.DataPoints) > 0 {
+                for _, dataPoint := range index.DataPoints {
+                    excelDataMap[dataPoint.EndDate] = dataPoint.Value
+                }
+            }
+			tmp := models.HandleKplerExcelData{
+				IndexName: indexName,
+				Unit: unit,
+				Sort: sort,
+				ClassifyName: classifyName,
+				ProductNames: productNames,
+				FromZoneNames: fromZoneNames,
+				ToZoneNames: toZoneNames,
+				FlowDirection: flowDirection,
+				Granularity: granularity,
+				Split: split,
+				SplitName: index.Name,
+				ExcelQueryUrl: index.Request,
+				ExcelDataMap: excelDataMap,
+			}
+			indexList = append(indexList, &tmp)
+		}
+	}
+
+	if len(indexList) > 0 {
+		params := make(map[string]interface{})
+		params["List"] = indexList
+		params["TerminalCode"] = ""
+		result, e := PostEdbLib(params, utils.LIB_ROUTE_KPLER_DATA)
+		if e != nil {
+			b, _ := json.Marshal(params)
+			utils.FileLog.Info(fmt.Sprintf("sheet :GetKplerDataByExcel PostEdbLib err: %s, params: %s", e.Error(), string(b)))
+			return
+		}
+		resp := new(models.BaseEdbLibResponse)
+		if e := json.Unmarshal(result, &resp); e != nil {
+			utils.FileLog.Info(fmt.Sprintf("sheet :GetKplerDataByExcel json.Unmarshal err: %s", e))
+			return
+		}
+		if resp.Ret != 200 {
+			utils.FileLog.Info(fmt.Sprintf("sheet :GetKplerDataByExcel Msg: %s, ErrMsg: %s", resp.Msg, resp.ErrMsg))
+			return
+		}
+	}
+	// 传递list给指标服务
+
+	fmt.Println("GetKplerDataByExcel completed successfully!")
+	return
+}
+
+// 定时调用python脚本刷新kpler
+func RefreshKplerByExcel(cont context.Context) (err error) {
+	if utils.KplerRefreshUrl == "" {
+		return
+	}
+    //查询utils.KplerExcelFilePath目录下所有excel文件
+	files, err := filepath.Glob(utils.KplerExcelFilePath + "/*.xlsx")
+	if err != nil {
+		utils.FileLog.Info("RefreshKplerByExcel Err:" + err.Error())
+		return
+	}
+	for _, file := range files {
+		fmt.Println("RefreshKplerByExcel file:" + file)
+		kplerRefreshUrl := fmt.Sprintf("%s/kpler/refresh?FilePath=%s", utils.KplerRefreshUrl, file)
+		body, er := HttpGet(kplerRefreshUrl)
+		if er != nil {
+			utils.FileLog.Info("RefreshKplerByExcel Err:" + er.Error())
+			return
+		}
+		utils.FileLog.Info("RefreshKplerByExcel Result:" + string(body))
+	}
+	
+	return
+}
+
+

BIN
services/kpler/Kpler crude flow (自动保存的).xlsx


+ 383 - 0
services/kpler/excel.go

@@ -0,0 +1,383 @@
+package kpler
+
+import (
+	"encoding/json"
+	"eta/eta_data_analysis/models"
+	"eta/eta_data_analysis/utils"
+	"fmt"
+	"net/url"
+	"regexp"
+	"strings"
+	"time"
+
+	"github.com/xuri/excelize/v2"
+)
+
// ExcelData is the structured content of one worksheet: the first row as
// headers plus all remaining rows.
type ExcelData struct {
	Headers     []string
	Rows        [][]string
	SheetName   string
}
+
+// ParseExcel reads and parses data from an Excel file
+func parseExcel(filePath string) (*ExcelData, error) {
+	// Open the Excel file
+	f, err := excelize.OpenFile(filePath)
+	if err != nil {
+		return nil, fmt.Errorf("error opening Excel file: %w", err)
+	}
+	defer f.Close()
+
+	// Get the first sheet by default
+	sheetName := f.GetSheetList()[0]
+	
+	// Get all rows from the sheet
+	rows, err := f.GetRows(sheetName)
+	if err != nil {
+		return nil, fmt.Errorf("error reading rows from sheet %s: %w", sheetName, err)
+	}
+
+	// Check if there's data
+	if len(rows) == 0 {
+		return nil, fmt.Errorf("no data found in sheet %s", sheetName)
+	}
+
+	// Create structured data
+	excelData := &ExcelData{
+		SheetName:    sheetName,
+		Headers:      rows[0],
+		Rows:         rows[1:],
+	}
+
+	return excelData, nil
+}
+
+// ScanSheetForFormulas scans an entire sheet for formulas
+func scanSheetForFormulas(filePath, sheetName string) (map[int]string, error) {
+	// Open the Excel file
+	f, err := excelize.OpenFile(filePath)
+	if err != nil {
+		return nil, fmt.Errorf("error opening Excel file: %w", err)
+	}
+	defer f.Close()
+
+	formulas := make(map[int]string)
+	
+	// Get sheet dimensions
+	dimension, err := f.GetSheetDimension(sheetName)
+	if err != nil {
+		return nil, fmt.Errorf("error getting sheet dimension: %w", err)
+	}
+	
+	// Parse dimension to get the range (e.g., "A1:K42")
+	parts := strings.Split(dimension, ":")
+	if len(parts) != 2 {
+		// Use a default range if dimension is not in expected format
+		parts = []string{"A1", "Z100"}
+	}
+	
+	// Extract the column letters and row numbers
+	startCol, startRow, err := excelize.CellNameToCoordinates(parts[0])
+	if err != nil {
+		startCol, startRow = 1, 1
+	}
+	
+	endCol, endRow, err := excelize.CellNameToCoordinates(parts[1])
+	if err != nil {
+		endCol, endRow = 26, 100 // Default to Z100
+	}
+	
+	// Scan cells for formulas
+	for row := startRow; row <= endRow; row++ {
+		for col := startCol; col <= endCol; col++ {
+			colName, err := excelize.ColumnNumberToName(col)
+			if err != nil {
+				continue
+			}
+			
+			cellCoord := fmt.Sprintf("%s%d", colName, row)
+			formula, err := f.GetCellFormula(sheetName, cellCoord)
+			
+			if err == nil && formula != "" {
+				// Store all formulas or only Kpler-related ones (adjust as needed)
+				//if strings.Contains(formula, "kpler") || strings.Contains(formula, "GetFlows") {
+				if strings.Contains(formula, "GETFLOWS") {
+					// fmt.Println("row: ", row)
+					// fmt.Println("col: ", col)
+					// fmt.Println("GetCellFormula: ", formula)
+					if _, ok := formulas[col-1]; !ok {
+						formulas[col-1] = formula
+					}
+				}
+			}
+		}
+	}
+	
+	return formulas, nil
+}
+
+// ProcessKplerData 解析excel获取指标对应的公式和数据
+func ProcessKplerData(filePath string) (indexData []models.KplerExcelIndexData, err error) {
+	defer func() {
+		if err != nil {
+			utils.FileLog.Info(fmt.Sprintf("ProcessKplerData error: %v\n", err))
+		}
+	}()
+
+	// Open the Excel file
+	f, err := excelize.OpenFile(filePath)
+	if err != nil {
+		return nil, fmt.Errorf("error opening Excel file: %w", err)
+	}
+	defer f.Close()
+
+	// Get the first sheet by default
+	data, err := parseExcel(filePath)
+	if err != nil {
+		return nil, fmt.Errorf("error parsing Excel file: %w", err)
+	}
+
+	// Look for Kpler formulas
+	formulas, err := scanSheetForFormulas(filePath, data.SheetName)
+	if err != nil {
+		return nil, fmt.Errorf("error scanning for formulas: %v", err)
+	}
+
+	fmt.Println("Found formulas:", formulas)
+
+	// Initialize maps to store column information
+	indexMap := make(map[int]*models.KplerExcelIndexData)    // Maps column to index data
+	dateColMap := make(map[int][]int)                        // Maps date column to its data columns
+	dateValues := make(map[int][]string)                     // Maps date column to its values
+
+	// First pass: identify data columns and their corresponding date columns
+	// Headers are in the third row (index 2)
+	if len(data.Rows) < 3 {
+		return nil, fmt.Errorf("Excel file does not have enough rows")
+	}
+    titles := data.Headers
+	titleMap := make(map[int]string)
+	for j, title := range titles {
+		titleMap[j] = title
+	}
+	headers := data.Rows[1] // Get headers from the second row
+	fmt.Println("Headers:", headers)
+
+	// First pass: find all date columns
+	var dateCols []int
+	for j, header := range headers {
+		if header == "Period End Date" {
+			dateCols = append(dateCols, j)
+			dateValues[j] = make([]string, 0)
+			dateColMap[j] = make([]int, 0)
+		}
+	}
+	fmt.Println("Date columns:", dateCols)
+
+	// Second pass: associate data columns with their nearest date column
+	for j, header := range headers {
+		if header == "" || header == "Date" || header == "Period End Date" {
+			continue
+		}
+
+		// Find the nearest date column after this data column
+		nearestDateCol := -1
+		for _, dateCol := range dateCols {
+			if dateCol > j {
+				nearestDateCol = dateCol
+				break
+			}
+		}
+
+		if nearestDateCol != -1 {
+			// This is a data column
+			indexMap[j] = &models.KplerExcelIndexData{
+				Name:       header,
+				DataPoints: make([]models.KplerDataPoint, 0),
+			}
+
+			// Associate this data column with its date column
+			dateColMap[nearestDateCol] = append(dateColMap[nearestDateCol], j)
+
+			// Process formula for this column if it exists
+			if formula, ok := formulas[j]; ok {
+				indexMap[j].Request = formula
+			} else {
+				// Look backwards for the formula
+				for k := j; k >= 0; k-- {
+					if formula, ok := formulas[k]; ok {
+						indexMap[j].Request = formula
+						break
+					}
+				}
+			}
+
+			// 获取标题
+			
+			title, ok := titleMap[j]
+			if ok && title != "" {
+				indexMap[j].Title = title
+			}else{
+				// Look backwards for the formula
+				for k := j; k >= 0; k-- {
+					title, ok := titleMap[k]
+					if ok && title != "" {
+						indexMap[j].Title = title
+						break
+					}
+				}
+			}
+		}
+	}
+
+	fmt.Println("Data columns mapping:", dateColMap)
+
+	// Create a map to store data values for each column
+	dataValues := make(map[int][]string)
+	for j := range indexMap {
+		dataValues[j] = make([]string, 0)
+	}
+
+	// First pass: collect all values
+	for i := 2; i < len(data.Rows); i++ {
+		row := data.Rows[i]
+		if len(row) == 0 {
+			continue
+		}
+
+		for j, cell := range row {
+			if cell == "" {
+				continue
+			}
+
+			// If this is a date column, store its values
+			if _, exists := dateValues[j]; exists {
+				// 对日期进行格式化
+				dateFormat := "01-02-06"
+				date, err := time.Parse(dateFormat, cell)
+				if err != nil {
+					fmt.Println("Error parsing date:", err)
+					continue
+				}
+				dateValues[j] = append(dateValues[j], date.Format(utils.FormatDate))
+				continue
+			}
+
+			// If this is a data column, store its values
+			if _, exists := indexMap[j]; exists {
+				dataValues[j] = append(dataValues[j], cell)
+			}
+		}
+	}
+
+	fmt.Println("Date values:", dateValues)
+	fmt.Println("Data values:", dataValues)
+
+	// Second pass: combine data and dates
+	for dateCol, dataCols := range dateColMap {
+		dates := dateValues[dateCol]
+		if len(dates) == 0 {
+			fmt.Printf("No dates found for date column %d\n", dateCol)
+			continue
+		}
+
+		fmt.Printf("Processing date column %d with data columns %v\n", dateCol, dataCols)
+
+		// Process each data column associated with this date column
+		for _, dataCol := range dataCols {
+			if idx, exists := indexMap[dataCol]; exists {
+				values := dataValues[dataCol]
+				
+				fmt.Printf("Column %d (%s): %d dates, %d values\n", dataCol, idx.Name, len(dates), len(values))
+
+				// Use the shorter length to avoid index out of range
+				length := len(dates)
+				if len(values) < length {
+					length = len(values)
+				}
+
+				// Combine data and dates
+				for i := 0; i < length; i++ {
+					idx.DataPoints = append(idx.DataPoints, models.KplerDataPoint{
+						EndDate: dates[i],
+						Value:   values[i],
+					})
+				}
+
+				fmt.Printf("Added %d data points for column %s\n", length, idx.Name)
+			}
+		}
+	}
+
+	// Convert map to slice
+	for _, index := range indexMap {
+		if len(index.DataPoints) > 0 {
+			indexData = append(indexData, *index)
+		}
+	}
+
+	return indexData, nil
+}
+
+
// ParseSpecificKplerFormulaV2 recovers the JSON request object embedded in an
// excel GETFLOWS cell formula. The payload may appear verbatim, URL-encoded,
// backslash-escaped, or chopped up by Excel string concatenation, so the
// function tries each decoding strategy in turn. The order of fallbacks is
// deliberate; do not reorder.
func ParseSpecificKplerFormulaV2(specificFormula string) (reqObj models.KplerFlowsRequest, err error) {
	// Strip Excel string-concatenation and entity/escape noise first.
	specificFormula = strings.ReplaceAll(specificFormula, `" & "`, "")
	specificFormula = strings.ReplaceAll(specificFormula, `"&"`, "")
	specificFormula = strings.ReplaceAll(specificFormula, `&amp;`, "")
	specificFormula = strings.ReplaceAll(specificFormula, `\"`, `"`)

	// Pull out the argument of the GETFLOWS call.
	re := regexp.MustCompile(`_xldudf_KPLER_GETFLOWS\((.*)\)`)
	matches := re.FindStringSubmatch(specificFormula)
	if len(matches) < 2 {
		// Fall back to the old formula format: anything in parentheses.
		re = regexp.MustCompile(`\((.*)\)`)
		matches = re.FindStringSubmatch(specificFormula)
		if len(matches) < 2 {
			err = fmt.Errorf("没有找到括号里的内容")
			return
		}
	}

	// The parameter string, minus any surrounding quotes.
	encodedParam := matches[1]
	encodedParam = strings.Trim(encodedParam, `"`)
	
	// Strategy 1: the payload is plain JSON.
	var jsonObj models.KplerFlowsRequest
	if err = json.Unmarshal([]byte(encodedParam), &jsonObj); err == nil {
		return jsonObj, nil
	}

	// Strategy 2: the payload is URL-encoded JSON.
	decodedStr, err := url.QueryUnescape(encodedParam)
	if err != nil {
		// Strategy 3: strip backslash escapes and re-parse.
		cleanStr := strings.ReplaceAll(encodedParam, `\`, "")
		if err = json.Unmarshal([]byte(cleanStr), &jsonObj); err != nil {
			// Strategy 4: also drop leftover concatenation artifacts.
			cleanStr = strings.ReplaceAll(cleanStr, `" "`, "")
			if err = json.Unmarshal([]byte(cleanStr), &jsonObj); err != nil {
				return reqObj, fmt.Errorf("error parsing formula: %v", err)
			}
		}
		return jsonObj, nil
	}

	// Remove surrounding quotes if present in the decoded string.
	decodedStr = strings.Trim(decodedStr, `"`)
	
	// Parse the decoded string, retrying once after cleanup.
	if err = json.Unmarshal([]byte(decodedStr), &jsonObj); err != nil {
		decodedStr = strings.ReplaceAll(decodedStr, `" "`, "")
		if err = json.Unmarshal([]byte(decodedStr), &jsonObj); err != nil {
			return reqObj, fmt.Errorf("error parsing decoded JSON: %v", err)
		}
	}

	return jsonObj, nil
}

+ 135 - 0
services/kpler/kpler.go

@@ -0,0 +1,135 @@
+package kpler
+
+import (
+	"eta/eta_data_analysis/models"
+	"eta/eta_data_analysis/services/alarm_msg"
+	"eta/eta_data_analysis/utils"
+	"fmt"
+	"time"
+
+	"github.com/patrickmn/go-cache"
+)
+
+func GetProducts(req models.KplerProductLibReq) (products []models.KplerProduct, err error) {
+	token, err := GetKplerAccessToken(false)
+	if err != nil {
+		return nil, err
+	}
+	products, err = GetProductsByApi(req, token)
+	if err != nil {
+		if err.Error() == "Unauthorized" {
+			token, err = GetKplerAccessToken(true)
+			if err != nil {
+				return 
+			}
+			products, err = GetProductsByApi(req, token)
+			if err != nil {
+				return 
+			}
+			return
+		}
+		return 
+	}
+	return 
+}
+//   token := ""
+func GetKplerData(req models.KplerFlowDataLibReq) (ret *models.KplerFlowDataResp, err error) {
+//   token := ""
+//   flowDirection := "import"
+//   granularity := "monthly"
+//   split := "Destination%20Countries"
+//   withIntraRegion := "true"
+//   startDate := "2024-01-01"
+//   endDate := "2025-06-30"
+//   unit := "kbd"
+//   products := "CPC%20Russia,Eastern%20Russia%20Crude,Western%20Russia%20Crude"
+//   fromZones := ""
+//   toZones := ""
+//   onlyRealized := "true"
+//   req = models.KplerFlowDataLibReq{
+// 		Granularity: granularity,
+// 		Split: split,
+// 		Unit: unit,
+// 		FlowDirection: flowDirection,
+// 		FromZones: fromZones,
+// 		ToZones: toZones,
+// 		OnlyRealized: onlyRealized,
+// 		WithIntraRegion: withIntraRegion,
+// 		StartDate: startDate,
+// 		EndDate: endDate,
+// 		Products: products,
+// 	}
+    token, err := GetKplerAccessToken(false)
+	if err != nil {
+		return nil, err
+	}
+	ret, err = GetKplerDataByApi(req, token)
+	if err != nil {
+		fmt.Println("GetKplerDataByApi error", err)
+		if err.Error() == "Unauthorized" {
+			token, err = GetKplerAccessToken(true)
+			if err != nil {
+				err = fmt.Errorf("获取开普勒API-AccessToken失败, %v", err)
+				return
+			}
+			ret, err = GetKplerDataByApi(req, token)
+			if err != nil {
+				fmt.Println("GetKplerDataByApi error", err)
+				return nil, err
+			}
+			return
+		}
+		return nil, err
+	}
+	return
+}
+
+// GetKplerAccessToken 获取登录凭证
+func GetKplerAccessToken(forceRefresh bool) (token string, err error) {
+	defer func() {
+		if err != nil {
+			go alarm_msg.SendAlarmMsg("获取开普勒的登录凭证失败,ERR:"+err.Error(), 3)
+		}
+	}()
+	redisKey := "kpler_access_token"
+	cacheClient := utils.CacheClient
+	tokenTmp, ok := cacheClient.Get(redisKey)
+	//如果从redis中accessToken 获取失败或者token为空了,再或者需要强制刷新了,那么重新获取accessToken
+	if !ok || forceRefresh {
+		token, err = refreshKplerAccessToken(cacheClient,redisKey)
+		return
+	}
+	fmt.Println("tokenTmp", tokenTmp)
+	if tokenTmp == nil {
+		token, err = refreshKplerAccessToken(cacheClient,redisKey)
+		return
+	}
+	token = tokenTmp.(string)
+	return
+}
+
+// refreshKplerAccessToken 强制刷新获取登录凭证
+func refreshKplerAccessToken(cacheClient *cache.Cache, redisKey string) (token string, err error) {
+	defer func() {
+		if err != nil {
+			go alarm_msg.SendAlarmMsg("获取开普勒的登录凭证失败;ERR:"+err.Error(), 3)
+		}
+	}()
+	token, tmpErr := login()
+	if tmpErr != nil {
+		err = tmpErr
+		return
+	}
+
+	expireTime := time.Now().Add(time.Hour * 24 * 30)
+
+	//token存入redis
+	//err = utils.Rc.Put(tokenRedisKey, token, time.Duration(expireTime.Unix()-600)*time.Second)
+	// 本来是要设置下600s的过期时间,但因为不是强制刷新token,就不获取了
+	cacheClient.Set(redisKey, token, time.Duration(expireTime.Unix())*time.Second)
+	if err != nil {
+		err = fmt.Errorf("获取开普勒的登录凭证成功;开普勒登录凭证存入redis失败,ERR:%s", err.Error())
+		return
+	}
+	return
+}

+ 440 - 0
services/kpler/liquid.go

@@ -0,0 +1,440 @@
+package kpler
+
import (
	"encoding/json"
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
	"sort"
	"strings"
	"time"

	"eta/eta_data_analysis/models"
	"eta/eta_data_analysis/utils"
)
+
+// 获取token登录凭证
+func login()(token string, err error){
+
+  url := "https://api.kpler.com/v1/login"
+  method := "POST"
+
+  payload := strings.NewReader(`{
+	"email": "` + utils.KplerApiAccount + `",
+	"password": "` + utils.KplerApiPassword + `"
+}`)
+
+  client := &http.Client {
+  }
+  req, err := http.NewRequest(method, url, payload)
+
+  if err != nil {
+    fmt.Println(err)
+    return
+  }
+  req.Header.Add("Content-Type", "application/json")
+
+  res, err := client.Do(req)
+  if err != nil {
+    fmt.Println(err)
+    return
+  }
+  defer res.Body.Close()
+
+  body, err := ioutil.ReadAll(res.Body)
+  if err != nil {
+    fmt.Println(err)
+    return
+  }
+  fmt.Println(string(body))
+  //bodyStr := `{"token":"lfl883KgRgwsBg_yuHjv05vr6voK2ac8ju47jiUoR8ccs","user":{"accounts":["coal","lpg","lng","oil","cpp","merge","liquids"]}}`
+  //解析body
+  var result map[string]interface{}
+  err = json.Unmarshal(body, &result)
+  if err != nil {
+    fmt.Println(err)
+    return
+  }
+  token = result["token"].(string)
+  return
+}
+
+// 分别获取group为:Clean Products;Crude/Co; DPP)的产品
+func GetProductsByApi(params models.KplerProductLibReq, token string) (data []models.KplerProduct, err error) {
+  uri := "https://api.kpler.com/v1/products"
+  ancestorFamilyIds := url.QueryEscape(params.AncestorFamilyIds)
+  ancestorFamilyNames := url.QueryEscape(params.AncestorFamilyNames)
+  ancestorGroupIds := url.QueryEscape(params.AncestorGroupIds)
+  ancestorGroupNames := url.QueryEscape(params.AncestorGroupNames)
+  ancestorProductIds := url.QueryEscape(params.AncestorProductIds)
+  ancestorProductNames := url.QueryEscape(params.AncestorProductNames)
+  ancestorGradeIds := url.QueryEscape(params.AncestorGradeIds)
+  ancestorGradeNames := url.QueryEscape(params.AncestorGradeNames)
+  products := params.Products
+  productIds := params.ProductIds
+  uri = fmt.Sprintf("%s?ancestorFamilyIds=%s&ancestorFamilyNames=%s&ancestorGroupIds=%s&ancestorGroupNames=%s&ancestorProductIds=%s&ancestorProductNames=%s&ancestorGradeIds=%s&ancestorGradeNames=%s&products=%s&productIds=%s", uri, ancestorFamilyIds, ancestorFamilyNames, ancestorGroupIds, ancestorGroupNames, ancestorProductIds, ancestorProductNames, ancestorGradeIds, ancestorGradeNames, products, productIds)
+    method := "GET"
+
+  client := &http.Client {
+  }
+  req, err := http.NewRequest(method, uri, nil)
+
+  if err != nil {
+    fmt.Println(err)
+    return
+  }
+  req.Header.Add("Content-Type", "application/json")
+  req.Header.Add("Authorization", token)
+  res, err := client.Do(req)
+  if err != nil {
+    fmt.Println(err)
+    return
+  }
+  defer res.Body.Close()
+
+  body, err := ioutil.ReadAll(res.Body)
+  if err != nil {
+    fmt.Println(err)
+    return
+  }
+  fmt.Println(string(body))
+  //bodystr := "Id (Product);Name;Type (Product);Family;Family Id;Group;Group Id;Product;Product Id;Grade;Grade Id;Density (Product);Density Unit;Energy Density;Energy Density Unit;Expansion Ratio
+// 2952;CPC Russia;grade;Dirty;1398;Crude/Co;1370;Crude;1368;CPC Russia;2952;805.0;kg/cm;26948.236;MJ/cm;1.0
+// 2953;CPC Kazakhstan;grade;Dirty;1398;Crude/Co;1370;Crude;1368;CPC Kazakhstan;2953;805.0;kg/cm;26948.236;MJ/cm;1.0
+// 1360;CPC;grade;Dirty;1398;Crude/Co;1370;Crude;1368;CPC;1360;805.0;kg/cm;26948.236;MJ/cm;1.0"
+// 解析body
+  var result map[string]interface{}
+  resErr := json.Unmarshal(body, &result)
+  if resErr == nil {
+    if result["message"] == "Unauthorized" {
+      fmt.Println("Unauthorized")
+      return
+    }
+    err = fmt.Errorf(result["message"].(string))
+    return
+  }
+  // 解析result
+  bodyStr := string(body)
+  lines := strings.Split(bodyStr, "\n")
+  for _, line := range lines {
+    fields := strings.Split(line, ";")
+    if len(fields) < 10 {
+      continue
+    }
+    data = append(data, models.KplerProduct{
+      Id: fields[0],
+      Name: fields[1],
+      Type: fields[2],
+      Family: fields[3],
+      FamilyId: fields[4],
+      Group: fields[5],
+      GroupId: fields[6],
+      Product: fields[7],
+      ProductId: fields[8],
+      Grade: fields[9],
+      GradeId: fields[10],
+      Density: fields[11],
+      DensityUnit: fields[12],
+      EnergyDensity: fields[13],
+      EnergyDensityUnit: fields[14],
+      ExpansionRatio: fields[15],
+    })
+  }
+  return
+}
+
+
+
+// 根据flowDirection 和 products 循环调用
+func GetKplerDataByApi(params models.KplerFlowDataLibReq, token string) (ret *models.KplerFlowDataResp, err error) {
+  flowDirection := params.FlowDirection
+  granularity := params.Granularity
+  products := url.QueryEscape(params.Products)
+  split := url.QueryEscape(params.Split)
+  startDate := params.StartDate
+  endDate := params.EndDate
+  unit := params.Unit
+  withIntraRegion := params.WithIntraRegion
+  fromZones := url.QueryEscape(params.FromZones)
+  toZones := url.QueryEscape(params.ToZones)
+  onlyRealized := params.OnlyRealized
+  withForecast := params.WithForecast
+  withProductEstimation := params.WithProductEstimation
+  // fromInstallations := req.FromInstallations
+  // toInstallations := req.ToInstallations
+  // fromCountries := req.FromCountries
+  // toCountries := req.ToCountries
+  // vesselTypes := req.VesselTypes
+  // vesselTypesAlt := req.VesselTypesAlt
+  // withIntraCountry := req.WithIntraCountry
+  // 
+  // withFreightView := req.WithFreightView
+
+  url := fmt.Sprintf("https://api.kpler.com/v1/flows?unit=%s&flowDirection=%s&granularity=%s&products=%s&split=%s&withIntraRegion=%s&startDate=%s&endDate=%s&fromZones=%s&toZones=%s&onlyRealized=%s&withForecast=%s&withProductEstimation=%s", unit, flowDirection, granularity, products, split, withIntraRegion, startDate, endDate, fromZones, toZones, onlyRealized, withForecast, withProductEstimation)
+  method := "GET"
+
+  client := &http.Client {
+  }
+  req, err := http.NewRequest(method, url, nil)
+
+  if err != nil {
+    fmt.Println(err)
+    return
+  }
+  req.Header.Add("Authorization", token)
+
+  res, err := client.Do(req)
+  if err != nil {
+    fmt.Println(err)
+    return
+  }
+  defer res.Body.Close()
+
+  body, err := ioutil.ReadAll(res.Body)
+  if err != nil {
+    fmt.Println(err)
+    return
+  }
+  fmt.Println(string(body))
+  //{"message":"Unauthorized"}
+  // 解析body
+  var result map[string]interface{}
+  resErr := json.Unmarshal(body, &result)
+  if resErr == nil {
+    if result["message"] == "Unauthorized" {
+      fmt.Println("Unauthorized")
+      return
+    }
+    err = fmt.Errorf(result["message"].(string))
+    return
+  }
+
+//   bodystr :=`Date;China;Period End Date
+// 2024-07;35763.15;2024-07-31
+// 2024-08;35386.42;2024-08-31
+// 2024-09;39657.10;2024-09-30
+// 2024-10;39909.08;2024-10-31
+// 2024-11;36541.03;2024-11-30
+// 2024-12;38551.49;2024-12-31
+// 2025-01;34607.56;2025-01-31
+// 2025-02;28280.53;2025-02-28
+// 2025-03;29965.73;2025-03-31
+// 2025-04;15157.51;2025-04-30
+// 2025-05;3795.25;2025-05-31
+// 2025-06;0;2025-06-30`
+
+  // 解析result
+  bodyStr := string(body)
+  lines := strings.Split(bodyStr, "\n")
+	
+	// 解析lines
+  splitNameMap := make(map[int]string)
+  splitDataMap := make(map[int][]models.KplerBaseExcelData)
+  endDateCol := 0
+	for row, line := range lines {
+		fields := strings.Split(line, ";")
+		if len(fields) < 3 {
+			continue
+		}
+    for col, field := range fields {
+      if col == 0 {
+        continue
+      }
+      // 处理表头
+      if row == 0 {
+         if field == "Period End Date" {
+          endDateCol = col
+         }else if field == "Date" {
+          continue
+         }else{
+          splitNameMap[col] = field
+         }
+      }else{
+        if col == endDateCol {
+          continue
+        }
+        date := fields[endDateCol]
+        value := fields[col]
+        splitDataMap[col] = append(splitDataMap[col], models.KplerBaseExcelData{
+          DataTime: date,
+          Value: value,
+        })
+      }
+    }
+	}
+  data := make([]models.KplerFlowData, 0)
+  for col, name := range splitNameMap {
+    data = append(data, models.KplerFlowData{
+      SplitItem: name,
+      IndexData: splitDataMap[col],
+    })
+  }
+  ret = &models.KplerFlowDataResp{
+    List: data,
+    ApiQueryUrl: url,
+  }
+  return
+}
+
+
+
+func GetZonesByApi(token string, ancestorName string, descendantType string) (data []models.KplerZone, err error) {
+  //url := "https://api.kpler.com/v1/zones"
+  url := fmt.Sprintf("https://api.kpler.com/v1/zones?ancestorName=%s&descendantType=%s", ancestorName, descendantType)
+
+  method := "GET"
+
+  client := &http.Client {
+  }
+  req, err := http.NewRequest(method, url, nil)
+
+  if err != nil {
+    fmt.Println(err)
+    return
+  }
+  req.Header.Add("Content-Type", "application/json")
+  req.Header.Add("Authorization", token)
+  res, err := client.Do(req)
+  if err != nil {
+    fmt.Println(err)
+    return
+  }
+  defer res.Body.Close()
+
+  body, err := ioutil.ReadAll(res.Body)
+  if err != nil {
+    fmt.Println(err)
+    return
+  }
+  fmt.Println(string(body))
+//   bodyStr := `Ancestor Id;Ancestor Name;Ancestor Type;Descendant Id;Descendant Name;Descendant Type
+// 87;Baltic Sea;gulf;1669;Kokkola;port
+// 87;Baltic Sea;gulf;1264;Stigsnaes;port
+// 87;Baltic Sea;gulf;110162;Uddevalla;port
+// 87;Baltic Sea;gulf;112012;Harnosand;port
+// 87;Baltic Sea;gulf;112945;Energihamnen;port
+// 87;Baltic Sea;gulf;112957;Falkenberg;port
+// 87;Baltic Sea;gulf;110567;Jakobstad;port
+// 87;Baltic Sea;gulf;112930;Sandefjord;port
+// 87;Baltic Sea;gulf;113141;Korsor;port
+// 87;Baltic Sea;gulf;3603;Inkoo;port
+// 87;Baltic Sea;gulf;112946;Skeppsbron;port
+// 87;Baltic Sea;gulf;112943;Vartahamnen;port
+// 87;Baltic Sea;gulf;112936;Solvesborg;port
+// 87;Baltic Sea;gulf;3388;Pori;port
+// 87;Baltic Sea;gulf;112944;Stadsgarden;port
+// 87;Baltic Sea;gulf;1697;Nacka;port
+// 87;Baltic Sea;gulf;107545;Grenaa;port
+// 87;Baltic Sea;gulf;107515;Wismar;port
+// 87;Baltic Sea;gulf;2604;Vysotsk;port
+// 87;Baltic Sea;gulf;112752;Stockholm;port
+// 87;Baltic Sea;gulf;113125;Monsteras;port
+// 87;Baltic Sea;gulf;113161;Hirtshals;port
+// 87;Baltic Sea;gulf;116132;Trelleborg;port
+// 87;Baltic Sea;gulf;1400;Lindø Industrial Park;port
+// 87;Baltic Sea;gulf;112013;Sandarne;port
+// 87;Baltic Sea;gulf;112011;Ornskoldsvik;port
+// 87;Baltic Sea;gulf;107089;Landskrona;port
+// 87;Baltic Sea;gulf;4689;Koping;port
+// 87;Baltic Sea;gulf;112745;Kaskinen;port
+// 87;Baltic Sea;gulf;112210;Vasteras;port
+// 87;Baltic Sea;gulf;112165;Kalmar;port
+// 87;Baltic Sea;gulf;112167;Paljassaare;port
+// 87;Baltic Sea;gulf;112152;Forby;port
+// 87;Baltic Sea;gulf;112194;Port of Koge;port
+// 87;Baltic Sea;gulf;112202;Lomonosov;port
+// 87;Baltic Sea;gulf;3423;Aarhus;port
+// 87;Baltic Sea;gulf;107591;Koloniya;port
+// 87;Baltic Sea;gulf;6812;Nyborg;port
+// 87;Baltic Sea;gulf;113842;Halden;port
+// 87;Baltic Sea;gulf;1027;Porvoo;port
+// 87;Baltic Sea;gulf;116201;Nykobing Falster;port
+// 87;Baltic Sea;gulf;116181;Ostrand;port
+// 87;Baltic Sea;gulf;113276;Karlsborg;port
+// 87;Baltic Sea;gulf;1651;Gdynia;port
+// 87;Baltic Sea;gulf;1102;Naantali;port
+// 87;Baltic Sea;gulf;112137;Drammen;port
+// 87;Baltic Sea;gulf;1165;Klaipeda;port
+// 87;Baltic Sea;gulf;6167;Hamina;port
+// 87;Baltic Sea;gulf;113292;Vastervik;port
+// 87;Baltic Sea;gulf;116242;Saetre;port
+// 87;Baltic Sea;gulf;116535;Frederikshavn;port
+// 87;Baltic Sea;gulf;1444;Aabenraa;port
+// 87;Baltic Sea;gulf;3725;Apatyth FSU;port
+// 87;Baltic Sea;gulf;1271;Primorsk;port
+// 87;Baltic Sea;gulf;1465;Karlshamn;port
+// 87;Baltic Sea;gulf;1399;Paldiski;port
+// 87;Baltic Sea;gulf;1684;Kemi;port
+// 87;Baltic Sea;gulf;1717;Vaasa;port
+// 87;Baltic Sea;gulf;110127;Nordjyllandsvaerket;port
+// 87;Baltic Sea;gulf;3467;Kiel;port
+// 87;Baltic Sea;gulf;4239;Kaliningrad;port
+// 87;Baltic Sea;gulf;3805;Loudden;port
+// 87;Baltic Sea;gulf;1404;Provestenen;port
+// 87;Baltic Sea;gulf;3403;Södertälje;port
+// 87;Baltic Sea;gulf;2002;Liepaja;port
+// 87;Baltic Sea;gulf;3389;Mussalo;port
+// 87;Baltic Sea;gulf;3407;Sundsvall;port
+// 87;Baltic Sea;gulf;3392;Halmstad;port
+// 87;Baltic Sea;gulf;2215;Raahe;port
+// 87;Baltic Sea;gulf;1334;Riga Harbour;port
+// 87;Baltic Sea;gulf;3381;Miiduranna;port
+// 87;Baltic Sea;gulf;1166;Gdansk;port
+// 87;Baltic Sea;gulf;107049;Oskarshamn;port
+// 87;Baltic Sea;gulf;3413;Holmsund;port
+// 87;Baltic Sea;gulf;3391;Rauma;port
+// 87;Baltic Sea;gulf;3393;Helsingborg;port
+// 87;Baltic Sea;gulf;3438;Sjursoya;port
+// 87;Baltic Sea;gulf;1553;Rostock;port
+// 87;Baltic Sea;gulf;1155;Sillamäe;port
+// 87;Baltic Sea;gulf;3664;Szczecin;port
+// 87;Baltic Sea;gulf;1362;Malmo;port
+// 87;Baltic Sea;gulf;1104;Nynashamn;port
+// 87;Baltic Sea;gulf;1158;Butinge;port
+// 87;Baltic Sea;gulf;1700;Oulu;port
+// 87;Baltic Sea;gulf;5454;Slagen;port
+// 87;Baltic Sea;gulf;1477;Norrkoping;port
+// 87;Baltic Sea;gulf;6722;Kunda Bay;port
+// 87;Baltic Sea;gulf;6761;Pitea;port
+// 87;Baltic Sea;gulf;1020;Swinoujscie Area;port
+// 87;Baltic Sea;gulf;3426;Aalborg;port
+// 87;Baltic Sea;gulf;105360;Visby;port
+// 87;Baltic Sea;gulf;3151;Gavle;port
+// 87;Baltic Sea;gulf;1445;Oxelosund;port
+// 87;Baltic Sea;gulf;3411;Ronnskar;port
+// 87;Baltic Sea;gulf;113011;Husum;port
+// 87;Baltic Sea;gulf;2008;Lulea;port
+// 87;Baltic Sea;gulf;107538;Varberg;port
+// 87;Baltic Sea;gulf;107537;Orrskar;port
+// 87;Baltic Sea;gulf;4690;Uusikaupunki Port;port
+// 87;Baltic Sea;gulf;110094;Studstrup;port
+// 87;Baltic Sea;gulf;6723;Helsinki;port
+// 87;Baltic Sea;gulf;1028;St Petersburg;port
+// 87;Baltic Sea;gulf;107467;Valko;port
+// 87;Baltic Sea;gulf;116671;Skoghall;port
+// 87;Baltic Sea;gulf;2464;Ventspils;port
+// 87;Baltic Sea;gulf;113860;Soby Havn;port
+// 87;Baltic Sea;gulf;3382;Kopli;port
+// 87;Baltic Sea;gulf;1156;Muuga Harbour;port
+// 87;Baltic Sea;gulf;2601;Ust Luga;port`
+
+  // 解析result
+  bodyStr := string(body)
+  lines := strings.Split(bodyStr, "\n")
+  for i, line := range lines {
+    if i == 0 {
+      continue
+    }
+    fields := strings.Split(line, ";")
+    if len(fields) < 6 {
+      continue
+    }
+    data = append(data, models.KplerZone{
+      AncestorId: fields[0],
+      AncestorName: fields[1],
+      AncestorType: fields[2],
+      DescendantId: fields[3],
+      DescendantName: fields[4],
+      DescendantType: fields[5],
+    })
+  }
+  return
+}

BIN
services/kpler/最新版kpler插件.xlsx


+ 8 - 0
services/task.go

@@ -154,6 +154,14 @@ func Task() {
 		c.Start()
 		c.Start()
 	}
 	}
 
 
+	if utils.KplerExcelOpen == "1" {
+		kplerExcelWatch := task.NewTask("kplerExcelWatch", "0 */2 * * * *", KplerExcelDataWatch)
+		task.AddTask("Kpler Excel数据检测", kplerExcelWatch)
+        
+		kplerExcelRefresh := task.NewTask("kplerExcelRefresh", "0 50 16,18 * * *", RefreshKplerByExcel)
+		task.AddTask("Kpler Excel数据刷新", kplerExcelRefresh)
+	}
+
 	task.StartTask()
 	task.StartTask()
 
 
 	fmt.Println("task end")
 	fmt.Println("task end")

+ 22 - 0
utils/config.go

@@ -3,9 +3,11 @@ package utils
 import (
 import (
 	"fmt"
 	"fmt"
 	"strconv"
 	"strconv"
+	"time"
 
 
 	beeLogger "github.com/beego/bee/v2/logger"
 	beeLogger "github.com/beego/bee/v2/logger"
 	"github.com/beego/beego/v2/server/web"
 	"github.com/beego/beego/v2/server/web"
+	"github.com/patrickmn/go-cache"
 )
 )
 
 
 var (
 var (
@@ -132,6 +134,7 @@ var (
 	ClarkSonsOpen     string //是否配置克拉克森数据源,1已配置
 	ClarkSonsOpen     string //是否配置克拉克森数据源,1已配置
 )
 )
 
 
// minio配置
// minio配置
var (
var (
	MinIoAccessKeyId     string
	MinIoAccessKeyId     string
@@ -172,6 +175,19 @@
	Upload_Audio_Dir string
	Upload_Audio_Dir string
	AccessKeyId      string
	AccessKeyId      string
	AccessKeySecret  string
	AccessKeySecret  string
+)
+
+// Kpler
+var (
+	KplerExcelFilePath string //excel文件地址
+	KplerExcelOpen     string //是否配置Kpler数据源,1已配置
+	KplerRefreshUrl    string //Kpler刷新url
+	KplerApiAccount    string //Kpler API调用方式的账号
+	KplerApiPassword   string //Kpler API调用方式的密码
+)
+
+var (
+	CacheClient *cache.Cache
 )
 )
 
 
 func init() {
 func init() {
@@ -365,6 +381,12 @@ func init() {
 		AccessKeyId = config["access_key_id"]
 		AccessKeyId = config["access_key_id"]
 		AccessKeySecret = config["access_key_secret"]
 		AccessKeySecret = config["access_key_secret"]
 	}
 	}
+	KplerExcelOpen = config["kpler_excel_open"]
+	KplerExcelFilePath = config["kpler_excel_file_path"]
+	KplerRefreshUrl = config["kpler_refresh_url"]
+	KplerApiAccount = config["kpler_api_account"]
+	KplerApiPassword = config["kpler_api_password"]
+	CacheClient = cache.New(365*24*time.Hour, 365*24*time.Hour)
 }
 }
 
 
 //修改接口文档
 //修改接口文档

+ 2 - 1
utils/constants.go

@@ -274,8 +274,9 @@ const (
 	GET_RZD_EDB_INFO_BY_INDEX_CODE                    = "/rzd/get/rzd/edb/info/by/code"                         // 根据指标code获取指标信息
 	GET_RZD_EDB_INFO_BY_INDEX_CODE                    = "/rzd/get/rzd/edb/info/by/code"                         // 根据指标code获取指标信息
 	UPDATE_RZD_EDB_DATA                               = "/rzd/update/rzd/edb/data"                              // 修改指标库指标数据
 	UPDATE_RZD_EDB_DATA                               = "/rzd/update/rzd/edb/data"                              // 修改指标库指标数据
 	LIB_ROUTE_CLARKSONS                               = "/clarksons/data"                                       // 克拉克森
 	LIB_ROUTE_CLARKSONS                               = "/clarksons/data"                                       // 克拉克森
+	LIB_ROUTE_KPLER_DATA                              = "/kpler/handle/excel_data"                             // Kpler(开普勒) Excel数据处理
 )
 )
 
 
 const (
 const (
 	APPNAME = "弘则-数据爬虫"
 	APPNAME = "弘则-数据爬虫"
-)
+)