exchange_crawler.go 1.1 KB

1234567891011121314151617181920212223242526272829303132333435363738394041424344454647484950515253545556
  1. package controllers
  2. import (
  3. "encoding/json"
  4. "eta/eta_index_lib/models"
  5. "fmt"
  6. "github.com/rdlucklib/rdluck_tools/http"
  7. )
// ExchangeCrawler is the exchange (交易所) crawler controller.
// It embeds BaseAuthController — presumably requests are authenticated
// by the base controller before reaching handlers; verify in that type.
type ExchangeCrawler struct {
	BaseAuthController
}
  12. // @Title 刷新数据
  13. // @Description 刷新数据接口
  14. // @Param request body models.AddEdbClassifyReq true "type json string"
  15. // @Success 200 {object} models.EdbClassify
  16. // @router /refresh [post]
  17. func (this *ExchangeCrawler) GetOrAdd() {
  18. br := new(models.BaseResponse).Init()
  19. defer func() {
  20. this.Data["json"] = br
  21. this.ServeJSON()
  22. }()
  23. var req models.RefreshExchangeoReq
  24. err := json.Unmarshal(this.Ctx.Input.RequestBody, &req)
  25. if err != nil {
  26. br.Msg = "参数解析异常!"
  27. br.ErrMsg = "参数解析失败,Err:" + err.Error()
  28. return
  29. }
  30. if req.Url == "" {
  31. br.Msg = "交易所链接错误"
  32. br.IsSendEmail = false
  33. return
  34. }
  35. body, e := http.Get(req.Url)
  36. if e != nil {
  37. err = e
  38. fmt.Println("err:", err)
  39. return
  40. }
  41. var resp models.JSONData
  42. err = json.Unmarshal(body, &resp)
  43. if err != nil {
  44. fmt.Println(err)
  45. return
  46. }
  47. br.Ret = 200
  48. br.Msg = "获取成功"
  49. br.Success = true
  50. br.Data = resp
  51. }