Add better crawler metrics

This commit is contained in:
James Mills
2021-02-02 12:07:53 +10:00
parent 865e5b5750
commit 4970b16d61
2 changed files with 18 additions and 20 deletions

View File

@@ -48,6 +48,8 @@ func (c *crawler) loop() {
log.Debugf("found %s", link)
metrics.Counter("crawler", "crawled").Inc()
url := NewURL(link)
url.CrawledAt = time.Now()
@@ -60,6 +62,8 @@ func (c *crawler) loop() {
} else {
	if err := c.db.SetURL(hash, url); err != nil {
		log.WithError(err).Error("error recording url %s", link)
	} else {
		metrics.Counter("crawler", "scraped").Inc()
	}
}
}
}

View File

@@ -190,17 +190,22 @@ func (s *Server) setupMetrics() {
	return float64(s.db.LenTokens())
},
)
-// feed cache sources
-metrics.NewGauge(
-	"cache", "sources",
-	"Number of feed sources being fetched by the global feed cache",
-)
-// feed cache size
-metrics.NewGauge(
-	"cache", "feeds",
-	"Number of unique feeds in the global feed cache",
-)
+metrics.NewGaugeFunc(
+	"db", "urls",
+	"Number of database /urls keys",
+	func() float64 {
+		return float64(s.db.URLCount())
+	},
+)
+// Crawler stats
+metrics.NewCounter(
+	"crawler", "crawled",
+	"Number of links crawled by the crawler",
+)
+metrics.NewCounter(
+	"crawler", "scraped",
+	"Number of links scraped by the crawler",
+)
// server info
@@ -216,17 +221,6 @@ func (s *Server) setupMetrics() {
"commit": spyda.Commit, "commit": spyda.Commit,
}).Set(1) }).Set(1)
// old avatars
metrics.NewCounter(
"media", "old_avatar",
"Count of old Avtar (PNG) conversions",
)
// old media
metrics.NewCounter(
"media", "old_media",
"Count of old Media (PNG) served",
)
s.AddRoute("GET", "/metrics", metrics.Handler()) s.AddRoute("GET", "/metrics", metrics.Handler())
} }