package internal

import (
	log "github.com/sirupsen/logrus"
)

// Crawler accepts URLs and crawls them in the background.
type Crawler interface {
	Start()
	Crawl(url string) error
}

type crawler struct {
	conf    *Config
	q       chan string
	indexer Indexer
}

// NewCrawler returns a Crawler that feeds scraped entries into the given
// Indexer. The queue channel is unbuffered, so Crawl blocks until the
// worker started by Start picks each URL up.
func NewCrawler(conf *Config, indexer Indexer) (Crawler, error) {
	return &crawler{
		conf:    conf,
		q:       make(chan string),
		indexer: indexer,
	}, nil
}

// loop drains the queue: for each URL it extracts the page's links, then
// scrapes and indexes every link found. Errors are logged and skipped so
// one bad page does not stall the crawl.
func (c *crawler) loop() {
	for url := range c.q {
		log.Debugf("crawling %s", url)

		links, err := GetLinks(url)
		if err != nil {
			log.WithError(err).Errorf("error crawling %s", url)
			continue
		}

		// GetLinks is assumed to return a slice of link URLs; the original
		// ranged over values directly, which only works for a channel or map.
		for _, link := range links {
			log.Debugf("found %s", link)

			entry, err := Scrape(c.conf, link)
			if err != nil {
				log.WithError(err).Errorf("error scraping %s", link)
				continue
			}

			if err := c.indexer.Index(entry); err != nil {
				log.WithError(err).Errorf("error indexing %s", link)
			}
		}
	}
}

// Crawl enqueues a URL for crawling. It blocks until the background worker
// receives the URL, since the queue is unbuffered.
func (c *crawler) Crawl(url string) error {
	c.q <- url
	return nil
}

// Start launches the crawl loop in a background goroutine.
func (c *crawler) Start() {
	go c.loop()
}
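
// exampleRun is a hypothetical wiring sketch, not part of the original
// file: it assumes Config, Indexer, GetLinks, and Scrape are defined
// elsewhere in this package, as the code above implies, and that the seed
// URL is illustrative only.
func exampleRun(conf *Config, idx Indexer) error {
	c, err := NewCrawler(conf, idx)
	if err != nil {
		return err
	}
	// Start the worker before enqueuing: Crawl blocks on the unbuffered
	// queue until loop is ready to receive.
	c.Start()
	return c.Crawl("https://example.com/")
}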