exchange_crawler.go

package controllers

import (
	"encoding/json"
	"eta/eta_index_lib/models"
	"fmt"

	"github.com/rdlucklib/rdluck_tools/http"
)
// ExchangeCrawler is the exchange crawler controller.
type ExchangeCrawler struct {
	BaseAuthController
}
// GetOrAdd refreshes exchange data.
// @Title Refresh data
// @Description Refresh data endpoint
// @Param request body models.RefreshExchangeoReq true "type json string"
// @Success 200 {object} models.EdbClassify
// @router /refresh [post]
func (this *ExchangeCrawler) GetOrAdd() {
	br := new(models.BaseResponse).Init()
	defer func() {
		this.Data["json"] = br
		this.ServeJSON()
	}()
	var req models.RefreshExchangeoReq
	err := json.Unmarshal(this.Ctx.Input.RequestBody, &req)
	if err != nil {
		br.Msg = "参数解析异常!"
		br.ErrMsg = "参数解析失败,Err:" + err.Error()
		return
	}
	if req.Url == "" {
		br.Msg = "交易所链接错误"
		br.IsSendEmail = false
		return
	}
	// Fetch the raw page content from the exchange URL.
	body, e := http.Get(req.Url)
	if e != nil {
		// Report the failure in the response instead of returning an empty result.
		fmt.Println("err:", e)
		br.Msg = "获取数据失败"
		br.ErrMsg = "请求交易所链接失败,Err:" + e.Error()
		return
	}
	br.Ret = 200
	br.Msg = "获取成功"
	br.Success = true
	br.Data = body
}
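
The handler above reads a JSON body, fetches the given exchange URL, and returns the raw page content inside the standard BaseResponse envelope. The sketch below shows one way a client might call the endpoint. It is a minimal illustration, not part of the project: the route prefix and port are assumptions (only the /refresh suffix comes from the @router annotation), the JSON field name Url is inferred from the handler's check on req.Url, and whatever authentication BaseAuthController enforces is ignored here.

// Hypothetical caller sketch; route prefix, port, and payload shape are assumptions.
package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// The handler unmarshals the body into models.RefreshExchangeoReq,
	// so a JSON object carrying a Url field is assumed here.
	payload := bytes.NewBufferString(`{"Url": "https://example.com/exchange/quotes"}`)

	// "http://localhost:8300/edbapi/exchange" is a placeholder mount point.
	resp, err := http.Post("http://localhost:8300/edbapi/exchange/refresh", "application/json", payload)
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()

	// The response follows models.BaseResponse: Ret/Msg/Success plus the raw page in Data.
	body, _ := io.ReadAll(resp.Body)
	fmt.Println(string(body))
}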