diff options
author | Frédéric Guillot <fred@miniflux.net> | 2018-01-19 18:43:27 -0800 |
---|---|---|
committer | Frédéric Guillot <fred@miniflux.net> | 2018-01-20 13:25:20 -0800 |
commit | 3b62f904d6d3fdad168d3b212ff7c465c01b50f4 (patch) | |
tree | 51c9654701e676f1a1e758025cf291a1276819cd /reader/processor | |
parent | 09785df07f4b954698aa97be39c8cd9fdf7959fc (diff) |
Do not crawl existing entry URLs
Diffstat (limited to 'reader/processor')
-rw-r--r-- | reader/processor/processor.go | 19 |
1 file changed, 13 insertions, 6 deletions
diff --git a/reader/processor/processor.go b/reader/processor/processor.go index 33aa8ed..ca04603 100644 --- a/reader/processor/processor.go +++ b/reader/processor/processor.go @@ -10,10 +10,13 @@ import ( "github.com/miniflux/miniflux/reader/rewrite" "github.com/miniflux/miniflux/reader/sanitizer" "github.com/miniflux/miniflux/reader/scraper" + "github.com/miniflux/miniflux/storage" ) // FeedProcessor handles the processing of feed contents. type FeedProcessor struct { + userID int64 + store *storage.Storage feed *model.Feed scraperRules string rewriteRules string @@ -39,11 +42,15 @@ func (f *FeedProcessor) WithRewriteRules(rules string) { func (f *FeedProcessor) Process() { for _, entry := range f.feed.Entries { if f.crawler { - content, err := scraper.Fetch(entry.URL, f.scraperRules) - if err != nil { - logger.Error("[FeedProcessor] %v", err) + if f.store.EntryURLExists(f.userID, entry.URL) { + logger.Debug(`[FeedProcessor] Do not crawl existing entry URL: "%s"`, entry.URL) } else { - entry.Content = content + content, err := scraper.Fetch(entry.URL, f.scraperRules) + if err != nil { + logger.Error("[FeedProcessor] %v", err) + } else { + entry.Content = content + } } } @@ -53,6 +60,6 @@ func (f *FeedProcessor) Process() { } // NewFeedProcessor returns a new FeedProcessor. -func NewFeedProcessor(feed *model.Feed) *FeedProcessor { - return &FeedProcessor{feed: feed, crawler: false} +func NewFeedProcessor(userID int64, store *storage.Storage, feed *model.Feed) *FeedProcessor { + return &FeedProcessor{userID: userID, store: store, feed: feed, crawler: false} } |