Skip to content

Commit

Permalink
fix: getting server info returns duplicated server addresses
Browse files Browse the repository at this point in the history
  • Loading branch information
YangKian committed Nov 9, 2023
1 parent 7f7d441 commit ab44bf3
Showing 1 changed file with 13 additions and 16 deletions.
29 changes: 13 additions & 16 deletions collector/collector.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ package collector

import (
"fmt"
"slices"
"os"
"sync"
"sync/atomic"
"time"
Expand Down Expand Up @@ -72,7 +72,7 @@ func (h *HStreamCollector) getServerInfo() {
util.Logger().Info("start get server info loop.", zap.String("duration", h.serverUpdateDuration.String()))

for range ticker.C {
urls, err := h.client.GetServerInfo()
urls, err := h.client.GetServerInfo(false)
if err != nil {
util.Logger().Error("get server info return error", zap.String("error", err.Error()))
continue
Expand Down Expand Up @@ -107,7 +107,7 @@ func NewHStreamCollector(serverUrl string, caPath string, token string, duration

client.SetLogLevel(zap.WarnLevel)

urls, err := client.GetServerInfo()
urls, err := client.GetServerInfo(false)
if err != nil {
return nil, errors.WithMessage(err, "Get server info error")
}
Expand Down Expand Up @@ -162,6 +162,7 @@ func (h *HStreamCollector) Collect(ch chan<- prometheus.Metric) {
metrics := h.getScrapedMetrics()
h.lock.RLock()
wg.Add(len(h.TargetUrls))
util.Logger().Debug("Start scrape targets", zap.String("urls", fmt.Sprintf("%v", h.TargetUrls)))
for _, u := range h.TargetUrls {
go func(url string) {
defer wg.Done()
Expand All @@ -170,6 +171,7 @@ func (h *HStreamCollector) Collect(ch chan<- prometheus.Metric) {
}
h.lock.RUnlock()
wg.Wait()
util.Logger().Debug("=============== scrape done ======================")
}

func (h *HStreamCollector) execute(metrics []scraper.Metrics, target string, ch chan<- prometheus.Metric) {
Expand All @@ -193,20 +195,15 @@ func (h *HStreamCollector) execute(metrics []scraper.Metrics, target string, ch
ch <- prometheus.MustNewConstMetric(scrapeFailedDesc, prometheus.CounterValue, float64(totalFailedScrap.Load()), target)

if faild != 0 {
util.Logger().Info("Scrape target failed, remove the url", zap.String("url", target))
info, err := h.client.GetServerInfo(true)
if err != nil {
util.Logger().Error("Can't get cluster server info, exit exporter", zap.String("error", err.Error()))
os.Exit(1)
}
h.lock.Lock()
defer h.lock.Unlock()

idx := slices.Index(h.TargetUrls, target)
if idx == -1 {
util.Logger().Warn("Try to remove url from url list, but not found",
zap.String("url", target), zap.String("url list", fmt.Sprintf("%v", h.TargetUrls)))
return
} else if h.TargetUrls[idx] != target {
util.Logger().Fatal("url should equal to TargetUrl[idx]",
zap.String("url", target), zap.Int("index", idx), zap.String("TargetUrl[idx]", h.TargetUrls[idx]))
} else {
h.TargetUrls = append(h.TargetUrls[:idx], h.TargetUrls[idx+1:]...)
}
h.TargetUrls = info
util.Logger().Info("Scrape target failed, update the url list", zap.String("target", target),
zap.String("urls", fmt.Sprintf("%v", h.TargetUrls)))
}
}

0 comments on commit ab44bf3

Please sign in to comment.