Browse Source

Merge branch 'cygx_v2' of hongze/hongze_cygx into master

hongze 4 years ago
parent
commit
1dfec9f084
3 changed files with 243 additions and 48 deletions
  1. 1 0
      controllers/search.go
  2. 113 48
      services/elastic.go
  3. 129 0
      services/industry_map.go

+ 1 - 0
controllers/search.go

@@ -66,6 +66,7 @@ func (this *SearchController) SearchList() {
 	*/
 	//indexName := "article_list"
 	indexName := utils.IndexName
+	pageSize = 20
 	result, total, err := services.EsMultiMatchFunctionScoreQuery(indexName, keyWord, startSize, pageSize,user.UserId) //services.EsMatchFunctionScoreQuery(indexName, keyWord, startSize, pageSize) //services.EsMatchQuery(indexName, keyWord)
 	//result, total, err := services.SearchByKeyWordBack(indexName, keyWord, startSize, pageSize,user.UserId)
 	//result, total, err := services.EsMultiMatchFunctionScoreQueryFix(indexName, keyWord, startSize, pageSize)

+ 113 - 48
services/elastic.go

@@ -386,12 +386,10 @@ func EsMatchFunctionScoreQuery(indexName, keyWord string, startSize, pageSize in
 
 func EsMultiMatchFunctionScoreQuery(indexName, keyWord string, startSize, pageSize, userId int) (result []*models.SearchItem, total int64, err error) {
 	client, err := NewClient()
-	keyWordArr, err := GetIndustryMapNameSliceV2(keyWord)
-
+	keyWordArr, err := GetIndustryMapNameSliceV3(keyWord)
 	keyWordArr = RemoveDuplicatesAndEmpty(keyWordArr)
-	boolquery := elastic.NewBoolQuery()
-	matchArr := make([]elastic.Query, 0)
-
+	//artidArr := make([]elastic.Query, 0)
+	//matchArr := make([]elastic.Query, 0)
 	n := 0
 	keyWordLen := len(keyWordArr)
 	if keyWordLen <= 0 {
@@ -402,6 +400,16 @@ func EsMultiMatchFunctionScoreQuery(indexName, keyWord string, startSize, pageSi
 	keyWordWeight := GetWeight(keyWordLen)
 	for k, v := range keyWordArr {
 		if v != "" {
+			matchArr := make([]elastic.Query, 0)
+			boolquery := elastic.NewBoolQuery()
+			//weight := float64(keyWordWeight[k])
+			//multiMatch := elastic.NewMultiMatchQuery(v, "Title", "BodyText").Analyzer("ik_smart")
+			//bodyFunctionQuery := elastic.NewFunctionScoreQuery()
+			//bodyFunctionQuery.Query(multiMatch)
+			//bodyFunctions := elastic.NewWeightFactorFunction(weight)
+			//bodyFunctionQuery.AddScoreFunc(bodyFunctions)
+			//bodyFunctionQuery.BoostMode("replace")
+			//matchArr = append(matchArr, bodyFunctionQuery)
 			weight := float64(keyWordWeight[k])
 			multiMatch := elastic.NewMultiMatchQuery(v, "Title", "BodyText").Analyzer("ik_smart")
 			bodyFunctionQuery := elastic.NewFunctionScoreQuery()
@@ -410,54 +418,111 @@ func EsMultiMatchFunctionScoreQuery(indexName, keyWord string, startSize, pageSi
 			bodyFunctionQuery.AddScoreFunc(bodyFunctions)
 			bodyFunctionQuery.BoostMode("replace")
 			matchArr = append(matchArr, bodyFunctionQuery)
-		}
-		n++
-	}
-	boolquery.Should(matchArr...)
-	highlight := elastic.NewHighlight()
-	highlight = highlight.Fields(elastic.NewHighlighterField("Title"), elastic.NewHighlighterField("BodyText"))
-	highlight = highlight.PreTags("<font color='red'>").PostTags("</font>")
-	request := client.Search(indexName).Highlight(highlight).From(startSize).Size(pageSize).Query(boolquery)
-	searchByMatch, err := request.Do(context.Background())
-	if searchByMatch != nil {
-		if searchByMatch.Hits != nil {
-			for _, v := range searchByMatch.Hits.Hits {
-				articleJson, err := v.Source.MarshalJSON()
-				if err != nil {
-					return nil, 0, err
-				}
-				article := new(models.CygxArticle)
-				err = json.Unmarshal(articleJson, &article)
-				if err != nil {
-					return nil, 0, err
-				}
-				searchItem := new(models.SearchItem)
-				searchItem.ArticleId, _ = strconv.Atoi(v.Id)
-				if len(v.Highlight["BodyText"]) > 0 {
-					searchItem.Body = v.Highlight["BodyText"]
-				} else {
-					bodyRune := []rune(article.BodyText)
-					bodyRuneLen := len(bodyRune)
-					if bodyRuneLen > 100 {
-						bodyRuneLen = 100
+			boolquery.Should(matchArr...)
+			highlight := elastic.NewHighlight()
+			highlight = highlight.Fields(elastic.NewHighlighterField("Title"), elastic.NewHighlighterField("BodyText"))
+			highlight = highlight.PreTags("<font color='red'>").PostTags("</font>")
+			request := client.Search(indexName).Highlight(highlight).From(startSize).Size(pageSize).Query(boolquery)
+			searchByMatch, err := request.Do(context.Background())
+			if err != nil {
+				return nil, 0, err
+			}
+			if searchByMatch != nil {
+				if searchByMatch.Hits != nil {
+					for _, v := range searchByMatch.Hits.Hits {
+						var isAppend bool
+						articleJson, err := v.Source.MarshalJSON()
+						if err != nil {
+							return nil, 0, err
+						}
+						article := new(models.CygxArticle)
+						err = json.Unmarshal(articleJson, &article)
+						if err != nil {
+							return nil, 0, err
+						}
+						searchItem := new(models.SearchItem)
+						searchItem.ArticleId, _ = strconv.Atoi(v.Id)
+						if len(v.Highlight["BodyText"]) > 0 {
+							searchItem.Body = v.Highlight["BodyText"]
+						} else {
+							bodyRune := []rune(article.BodyText)
+							bodyRuneLen := len(bodyRune)
+							if bodyRuneLen > 100 {
+								bodyRuneLen = 100
+							}
+							body := string(bodyRune[:bodyRuneLen])
+							searchItem.Body = []string{body}
+						}
+						var title string
+						if len(v.Highlight["Title"]) > 0 {
+							title = v.Highlight["Title"][0]
+						} else {
+							title = article.Title
+						}
+						searchItem.Title = title
+						searchItem.PublishDate = article.PublishDate
+						for _,v_result := range result{
+							if v_result.ArticleId == searchItem.ArticleId{
+								isAppend = true
+							}
+						}
+						if !isAppend {
+							result = append(result, searchItem)
+						}
 					}
-					body := string(bodyRune[:bodyRuneLen])
-					searchItem.Body = []string{body}
-				}
-				var title string
-				if len(v.Highlight["Title"]) > 0 {
-					title = v.Highlight["Title"][0]
-				} else {
-					title = article.Title
 				}
-				searchItem.Title = title
-				searchItem.PublishDate = article.PublishDate
-
-				result = append(result, searchItem)
+				//total += searchByMatch.Hits.TotalHits.Value
 			}
 		}
-		total = searchByMatch.Hits.TotalHits.Value
+		n++
 	}
+	total = int64(len(result))
+	//fmt.Println(result)
+	//boolquery.Should(matchArr...)
+	//highlight := elastic.NewHighlight()
+	//highlight = highlight.Fields(elastic.NewHighlighterField("Title"), elastic.NewHighlighterField("BodyText"))
+	//highlight = highlight.PreTags("<font color='red'>").PostTags("</font>")
+	//request := client.Search(indexName).Highlight(highlight).From(startSize).Size(pageSize).Query(boolquery)
+	//searchByMatch, err := request.Do(context.Background())
+	//if searchByMatch != nil {
+	//	if searchByMatch.Hits != nil {
+	//		for _, v := range searchByMatch.Hits.Hits {
+	//			articleJson, err := v.Source.MarshalJSON()
+	//			if err != nil {
+	//				return nil, 0, err
+	//			}
+	//			article := new(models.CygxArticle)
+	//			err = json.Unmarshal(articleJson, &article)
+	//			if err != nil {
+	//				return nil, 0, err
+	//			}
+	//			searchItem := new(models.SearchItem)
+	//			searchItem.ArticleId, _ = strconv.Atoi(v.Id)
+	//			if len(v.Highlight["BodyText"]) > 0 {
+	//				searchItem.Body = v.Highlight["BodyText"]
+	//			} else {
+	//				bodyRune := []rune(article.BodyText)
+	//				bodyRuneLen := len(bodyRune)
+	//				if bodyRuneLen > 100 {
+	//					bodyRuneLen = 100
+	//				}
+	//				body := string(bodyRune[:bodyRuneLen])
+	//				searchItem.Body = []string{body}
+	//			}
+	//			var title string
+	//			if len(v.Highlight["Title"]) > 0 {
+	//				title = v.Highlight["Title"][0]
+	//			} else {
+	//				title = article.Title
+	//			}
+	//			searchItem.Title = title
+	//			searchItem.PublishDate = article.PublishDate
+	//
+	//			result = append(result, searchItem)
+	//		}
+	//	}
+	//	total = searchByMatch.Hits.TotalHits.Value
+	//}
 	return
 }
 

+ 129 - 0
services/industry_map.go

@@ -443,6 +443,135 @@ func GetIndustryMapNameSliceV2(industryName string) (nameSlice []string, err err
 	return
 }
 
+// GetIndustryMapNameSliceV3 returns a slice of industry-map node names related
+// to industryName (v3; changed 2021-04-20 17:23:31 — unlike V2 it preserves
+// order and does not shuffle). The input name itself is always the first
+// element. On lookup failure the partial slice and the error are returned.
+func GetIndustryMapNameSliceV3(industryName string) (nameSlice []string, err error) {
+	nameSlice = append(nameSlice, industryName)
+	tree, err := GetIndustryTree()
+	if err != nil {
+		fmt.Println("获取树失败")
+		return
+	}
+
+	//fmt.Println(tree)
+
+	// Set of industry-map ids already collected — used as a dedup guard below.
+	hasIdMap := make(map[int]string)
+	itemList, err := models.GetFirstCygxIndustryListByName(industryName)
+	if err != nil {
+		fmt.Println("获取数据失败,", err)
+		return
+	}
+	// No matching nodes for this name: return just the input name.
+	if len(itemList) <= 0 {
+		return
+	}
+
+	industryMapList := make(map[int][]*models.CygxIndustryMapItems)
+	// TODO: this looks suspect — original author's note: if an industry (rather
+	// than a leaf) name is passed in and the logic above only matched the first
+	// node, this grouping may misbehave or panic downstream. Verify.
+	for _, item := range itemList {
+		industryMapList[item.Level] = append(industryMapList[item.Level], item)
+	}
+
+	// Collect the level keys of the matched nodes and sort them ascending,
+	// so the shallowest matched level comes first.
+	var sortIndustryList []int
+	for k, _ := range industryMapList {
+		sortIndustryList = append(sortIndustryList, k)
+	}
+	sort.Ints(sortIndustryList)
+
+	// Nodes at the shallowest matched level.
+	list := industryMapList[sortIndustryList[0]]
+
+	// Special case: the match is a first-level node (ParentId <= 2 appears to
+	// mark the root levels — confirm against the tree builder).
+	if list[0].ParentId <= 2 {
+		// If a deeper level was also matched, prefer that level instead.
+		if len(sortIndustryList) > 1 {
+			list = industryMapList[sortIndustryList[1]]
+		}
+	}
+	//fmt.Println(list)
+	//return
+
+	// Related nodes grouped by priority bucket: 0 = the matched node(s) itself,
+	// 1 = parents/children, 2 = siblings. Buckets are flattened in key order below.
+	otherChildMapSlice := map[int][]*models.CygxIndustryMapItems{}
+
+	// Multiple matched nodes: include each node and its direct parent.
+	if len(list) > 1 {
+		for _, item := range list {
+			hasIdMap[item.IndustryMapId] = ""
+			// Include the matched node itself.
+			otherChildMapSlice[0] = append(otherChildMapSlice[0], item)
+
+			// Walk up the tree to collect ancestors.
+			var tmpParentSlice []*models.CygxIndustryMapItems
+			tmpParentSlice, _ = parentTreeToSlice(tree, item, 0, tmpParentSlice, hasIdMap)
+			// Direct parent — presumably the last element of the ancestor
+			// slice returned by parentTreeToSlice; panics if that slice is
+			// empty (TODO confirm parentTreeToSlice never returns empty here).
+			parentItem := tmpParentSlice[len(tmpParentSlice)-1]
+			if _, ok := hasIdMap[parentItem.IndustryMapId]; ok == false {
+				hasIdMap[parentItem.IndustryMapId] = ""
+				otherChildMapSlice[1] = append(otherChildMapSlice[1], parentItem)
+			}
+		}
+	} else {
+		// Exactly one matched node.
+		item := list[0]
+		hasIdMap[item.IndustryMapId] = ""
+		// Include the matched node itself.
+		otherChildMapSlice[0] = append(otherChildMapSlice[0], item)
+		childTree := getChildTree(tree, item)
+
+		// Leaf node: add its parent and its siblings.
+		if len(childTree.Children) == 0 {
+			// Walk up the tree to collect ancestors.
+			var tmpParentSlice []*models.CygxIndustryMapItems
+			tmpParentSlice, _ = parentTreeToSlice(tree, item, 0, tmpParentSlice, hasIdMap)
+			// Direct parent (same empty-slice caveat as above).
+			parentItem := tmpParentSlice[len(tmpParentSlice)-1]
+
+			if _, ok := hasIdMap[parentItem.IndustryMapId]; ok == false {
+				hasIdMap[parentItem.IndustryMapId] = ""
+				otherChildMapSlice[1] = append(otherChildMapSlice[1], parentItem)
+			}
+
+			// Sibling nodes go into bucket 2.
+			siblingTreeToSliceV2(parentItem, item, hasIdMap, 2, otherChildMapSlice)
+		} else {
+			// Non-leaf node: add its direct children instead.
+			otherChildMapSlice[1] = append(otherChildMapSlice[1], childTree.Children...)
+		}
+	}
+
+	//return
+	var tmpSlice []*models.CygxIndustryMapItems
+
+	// Sort the bucket keys ascending so results are emitted in priority order
+	// (self first, then parents/children, then siblings) — this is the
+	// "no shuffling" guarantee of V3.
+	var sortList []int
+	for k, _ := range otherChildMapSlice {
+		sortList = append(sortList, k)
+	}
+	sort.Ints(sortList)
+
+	// Flatten the buckets in key order into one ordered slice.
+	for _, v := range sortList {
+		tmpChildSlice := otherChildMapSlice[v]
+		//randSlice(tmpChildSlice)
+		tmpSlice = append(tmpSlice, tmpChildSlice...)
+		//fmt.Println(k,"=====")
+		//for _,tmpV := range otherChildMapSlice[v]{
+		//	fmt.Println(tmpV.IndustryMapName)
+		//}
+	}
+	// Project the collected nodes to their names.
+	for _, v := range tmpSlice {
+		//fmt.Println("k===",k,"=======v=======",v)
+		nameSlice = append(nameSlice, v.IndustryMapName)
+	}
+
+	//fmt.Println(nameSlice)
+	//fmt.Println(strings.Join(nameSlice,","))
+	//utils.FileLog.Info("allNodes:%s",strings.Join(nameSlice,","))
+	return
+}
+
 //获取兄弟级树
 func siblingTreeToSliceV2(rootNode *models.CygxIndustryMapItems, nowNode *models.CygxIndustryMapItems, hasIdMap map[int]string, depth int, otherChildMapSlice map[int][]*models.CygxIndustryMapItems) (maxDepth int) {
 	if rootNode.Children != nil {