Record and skip URLs the crawler has seen before
@@ -11,6 +11,7 @@ var (
 	ErrInvalidStore   = errors.New("error: invalid store")
 	ErrUserNotFound   = errors.New("error: user not found")
 	ErrTokenNotFound  = errors.New("error: token not found")
+	ErrURLNotFound    = errors.New("error: url not found")
 	ErrInvalidSession = errors.New("error: invalid session")
 )
 
@@ -27,6 +28,13 @@ type Store interface {
 	SearchUsers(prefix string) []string
 	GetAllUsers() ([]*User, error)
 
+	DelURL(hash string) error
+	HasURL(hash string) bool
+	GetURL(hash string) (*URL, error)
+	SetURL(hash string, url *URL) error
+	URLCount() int64
+	ForEachURL(f func(*URL) error) error
+
 	GetSession(sid string) (*session.Session, error)
 	SetSession(sid string, sess *session.Session) error
 	HasSession(sid string) bool
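The new Store methods are what let the crawler record a URL the first time it encounters it and skip it on later passes. Below is a minimal, hypothetical sketch of that check-then-record flow; the URLStore subset, the URL struct fields, and the SHA-1-based hashURL helper are assumptions for illustration, since the commit does not show how hashes are derived or how the crawler calls the store.

// Hypothetical usage sketch, not part of this commit.
package crawler

import (
	"crypto/sha1"
	"fmt"
)

// URL stands in for the store package's URL type; its real fields
// are not shown in this diff.
type URL struct {
	Addr string
}

// URLStore is the subset of the Store interface above that this
// sketch needs.
type URLStore interface {
	HasURL(hash string) bool
	SetURL(hash string, url *URL) error
}

// hashURL derives a store key from a raw url. A SHA-1 hex digest is
// assumed here; the commit does not show the actual hashing scheme.
func hashURL(rawurl string) string {
	return fmt.Sprintf("%x", sha1.Sum([]byte(rawurl)))
}

// maybeCrawl skips urls the store has already seen and records new
// ones before fetching them.
func maybeCrawl(s URLStore, rawurl string) error {
	hash := hashURL(rawurl)
	if s.HasURL(hash) {
		return nil // seen before: skip
	}
	if err := s.SetURL(hash, &URL{Addr: rawurl}); err != nil {
		return err
	}
	// ... fetch and parse rawurl here ...
	return nil
}

Keying the store by a fixed-length hash rather than the raw URL keeps keys uniform regardless of URL length, and calling HasURL before SetURL is what makes re-crawls of already-seen pages cheap.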