Add better crawler metrics

James Mills
2021-02-02 12:07:53 +10:00
parent 865e5b5750
commit 4970b16d61
2 changed files with 18 additions and 20 deletions


@@ -48,6 +48,8 @@ func (c *crawler) loop() {
 			log.Debugf("found %s", link)
+			metrics.Counter("crawler", "crawled").Inc()
 			url := NewURL(link)
 			url.CrawledAt = time.Now()
@@ -60,6 +62,8 @@ func (c *crawler) loop() {
 			} else {
 				if err := c.db.SetURL(hash, url); err != nil {
 					log.WithError(err).Error("error recording url %s", link)
+				} else {
+					metrics.Counter("crawler", "scraped").Inc()
 				}
 			}
 		}


@@ -190,17 +190,22 @@ func (s *Server) setupMetrics() {
 			return float64(s.db.LenTokens())
 		},
 	)
-	// feed cache sources
-	metrics.NewGauge(
-		"cache", "sources",
-		"Number of feed sources being fetched by the global feed cache",
+	metrics.NewGaugeFunc(
+		"db", "urls",
+		"Number of database /urls keys",
+		func() float64 {
+			return float64(s.db.URLCount())
+		},
 	)
-	// feed cache size
-	metrics.NewGauge(
-		"cache", "feeds",
-		"Number of unique feeds in the global feed cache",
+	// Crawler stats
+	metrics.NewCounter(
+		"crawler", "crawled",
+		"Number of links crawled by the crawler",
 	)
+	metrics.NewCounter(
+		"crawler", "scraped",
+		"Number of links scraped by the crawler",
+	)
 	// server info
@@ -216,17 +221,6 @@ func (s *Server) setupMetrics() {
"commit": spyda.Commit,
}).Set(1)
// old avatars
metrics.NewCounter(
"media", "old_avatar",
"Count of old Avtar (PNG) conversions",
)
// old media
metrics.NewCounter(
"media", "old_media",
"Count of old Media (PNG) served",
)
s.AddRoute("GET", "/metrics", metrics.Handler())
}
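
Both files depend on the project's internal metrics helper, which this commit does not touch: setupMetrics registers metrics under a (subsystem, name) pair with NewCounter and NewGaugeFunc, the crawler loop looks a registered counter back up with Counter(...).Inc(), and Handler() serves everything on GET /metrics. Purely as a hypothetical sketch, assuming a thin wrapper over prometheus/client_golang (the real package's backing library, registry handling, and internals are not shown in this diff), that API could look roughly like:

// Hypothetical metrics wrapper matching the call sites above; the actual
// package in this repository may be implemented differently.
package metrics

import (
	"fmt"
	"net/http"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promhttp"
)

var (
	registry = prometheus.NewRegistry()
	counters = map[string]prometheus.Counter{}
)

func key(subsystem, name string) string { return fmt.Sprintf("%s_%s", subsystem, name) }

// NewCounter registers a counter under subsystem/name with the given help text.
func NewCounter(subsystem, name, help string) prometheus.Counter {
	c := prometheus.NewCounter(prometheus.CounterOpts{Subsystem: subsystem, Name: name, Help: help})
	registry.MustRegister(c)
	counters[key(subsystem, name)] = c
	return c
}

// NewGaugeFunc registers a gauge whose value is recomputed by fn on every scrape.
func NewGaugeFunc(subsystem, name, help string, fn func() float64) prometheus.GaugeFunc {
	g := prometheus.NewGaugeFunc(prometheus.GaugeOpts{Subsystem: subsystem, Name: name, Help: help}, fn)
	registry.MustRegister(g)
	return g
}

// Counter returns a previously registered counter so call sites can Inc() it.
func Counter(subsystem, name string) prometheus.Counter {
	return counters[key(subsystem, name)]
}

// Handler exposes the registry, suitable for mounting on GET /metrics.
func Handler() http.Handler {
	return promhttp.HandlerFor(registry, promhttp.HandlerOpts{})
}

Under that assumption, the two NewCounter calls added in setupMetrics would expose crawler_crawled and crawler_scraped, and the metrics.Counter("crawler", "crawled").Inc() / metrics.Counter("crawler", "scraped").Inc() calls in the crawler loop increment them as links are found and successfully recorded.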