Add working Crawler, Scraper and Indexer
This commit is contained in:
42
internal/indexer.go
Normal file
42
internal/indexer.go
Normal file
@@ -0,0 +1,42 @@
|
||||
package internal
|
||||
|
||||
import (
	"fmt"
	"path/filepath"

	"github.com/apex/log"
	"github.com/blevesearch/bleve/v2"
)
|
||||
|
||||
// Indexer persists crawled entries into a search index so they can be
// queried later.
type Indexer interface {
	// Index adds (or updates) a single entry in the index.
	Index(entry *Entry) error
}
|
||||
|
||||
// indexer is the default Indexer implementation, backed by an on-disk
// bleve full-text index.
type indexer struct {
	idx bleve.Index // underlying bleve index handle
}
|
||||
|
||||
func NewIndexer(conf *Config) (Indexer, error) {
|
||||
var (
|
||||
idx bleve.Index
|
||||
err error
|
||||
)
|
||||
|
||||
fn := filepath.Join(conf.Data, "spyda.bleve")
|
||||
|
||||
if FileExists(fn) {
|
||||
idx, err = bleve.Open(fn)
|
||||
} else {
|
||||
mapping := bleve.NewIndexMapping()
|
||||
idx, err = bleve.New(fn, mapping)
|
||||
}
|
||||
if err != nil {
|
||||
log.WithError(err).Error("error creating indexer")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &indexer{idx: idx}, nil
|
||||
}
|
||||
|
||||
func (i *indexer) Index(entry *Entry) error {
|
||||
return i.idx.Index(entry.Hash(), entry)
|
||||
}
|
||||
Reference in New Issue
Block a user