Record and skip URLs the crawler has seen before

This commit is contained in:
James Mills
2021-02-01 23:46:26 +10:00
parent b398a3a709
commit 8a1161cf77
7 changed files with 126 additions and 36 deletions

View File

@@ -351,18 +351,6 @@ func NewServer(bind string, options ...Option) (*Server, error) {
return nil, fmt.Errorf("error validating config: %w", err)
}
indexer, err := NewIndexer(config)
if err != nil {
log.WithError(err).Error("error creating indexer")
return nil, err
}
crawler, err := NewCrawler(config, indexer)
if err != nil {
log.WithError(err).Error("error creating crawler")
return nil, err
}
db, err := NewStore(config.Store)
if err != nil {
log.WithError(err).Error("error creating store")
@@ -380,6 +368,18 @@ func NewServer(bind string, options ...Option) (*Server, error) {
return nil, err
}
indexer, err := NewIndexer(config)
if err != nil {
log.WithError(err).Error("error creating indexer")
return nil, err
}
crawler, err := NewCrawler(config, db, indexer)
if err != nil {
log.WithError(err).Error("error creating crawler")
return nil, err
}
router := NewRouter()
am := auth.NewManager(auth.NewOptions("/login", "/register"))