author    Frédéric Guillot <fred@miniflux.net>  2018-01-19 18:43:27 -0800
committer Frédéric Guillot <fred@miniflux.net>  2018-01-20 13:25:20 -0800
commit    3b62f904d6d3fdad168d3b212ff7c465c01b50f4 (patch)
tree      51c9654701e676f1a1e758025cf291a1276819cd /reader
parent    09785df07f4b954698aa97be39c8cd9fdf7959fc (diff)
Do not crawl existing entry URLs
Diffstat (limited to 'reader')
-rw-r--r--  reader/feed/handler.go         |  4 ++--
-rw-r--r--  reader/processor/processor.go  | 19 +++++++++++++------
2 files changed, 15 insertions(+), 8 deletions(-)
diff --git a/reader/feed/handler.go b/reader/feed/handler.go
index 8b5658e..c1f42ae 100644
--- a/reader/feed/handler.go
+++ b/reader/feed/handler.go
@@ -70,7 +70,7 @@ func (h *Handler) CreateFeed(userID, categoryID int64, url string, crawler bool)
 		return nil, err
 	}
 
-	feedProcessor := processor.NewFeedProcessor(subscription)
+	feedProcessor := processor.NewFeedProcessor(userID, h.store, subscription)
 	feedProcessor.WithCrawler(crawler)
 	feedProcessor.Process()
 
@@ -162,7 +162,7 @@ func (h *Handler) RefreshFeed(userID, feedID int64) error {
 		return err
 	}
 
-	feedProcessor := processor.NewFeedProcessor(subscription)
+	feedProcessor := processor.NewFeedProcessor(userID, h.store, subscription)
 	feedProcessor.WithScraperRules(originalFeed.ScraperRules)
 	feedProcessor.WithRewriteRules(originalFeed.RewriteRules)
 	feedProcessor.WithCrawler(originalFeed.Crawler)
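Both call sites now hand the user ID and the storage handle to the processor so it can consult the database before crawling. A minimal sketch of the new signature from a caller's point of view (the helper name and import paths are assumptions for illustration, not part of this commit):

	package example

	import (
		"github.com/miniflux/miniflux/model"
		"github.com/miniflux/miniflux/reader/processor"
		"github.com/miniflux/miniflux/storage"
	)

	// refreshWithCrawler is a hypothetical helper showing how the updated
	// constructor is wired; only NewFeedProcessor, WithCrawler and Process
	// appear in this commit.
	func refreshWithCrawler(store *storage.Storage, userID int64, feed *model.Feed) {
		feedProcessor := processor.NewFeedProcessor(userID, store, feed)
		feedProcessor.WithCrawler(true)
		feedProcessor.Process()
	}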
diff --git a/reader/processor/processor.go b/reader/processor/processor.go
index 33aa8ed..ca04603 100644
--- a/reader/processor/processor.go
+++ b/reader/processor/processor.go
@@ -10,10 +10,13 @@ import (
 	"github.com/miniflux/miniflux/reader/rewrite"
 	"github.com/miniflux/miniflux/reader/sanitizer"
 	"github.com/miniflux/miniflux/reader/scraper"
+	"github.com/miniflux/miniflux/storage"
 )
 
 // FeedProcessor handles the processing of feed contents.
 type FeedProcessor struct {
+	userID       int64
+	store        *storage.Storage
 	feed         *model.Feed
 	scraperRules string
 	rewriteRules string
@@ -39,11 +42,15 @@ func (f *FeedProcessor) WithRewriteRules(rules string) {
 
 func (f *FeedProcessor) Process() {
 	for _, entry := range f.feed.Entries {
 		if f.crawler {
-			content, err := scraper.Fetch(entry.URL, f.scraperRules)
-			if err != nil {
-				logger.Error("[FeedProcessor] %v", err)
+			if f.store.EntryURLExists(f.userID, entry.URL) {
+				logger.Debug(`[FeedProcessor] Do not crawl existing entry URL: "%s"`, entry.URL)
 			} else {
-				entry.Content = content
+				content, err := scraper.Fetch(entry.URL, f.scraperRules)
+				if err != nil {
+					logger.Error("[FeedProcessor] %v", err)
+				} else {
+					entry.Content = content
+				}
 			}
 		}
@@ -53,6 +60,6 @@ func (f *FeedProcessor) Process() {
 }
 
 // NewFeedProcessor returns a new FeedProcessor.
-func NewFeedProcessor(feed *model.Feed) *FeedProcessor {
-	return &FeedProcessor{feed: feed, crawler: false}
+func NewFeedProcessor(userID int64, store *storage.Storage, feed *model.Feed) *FeedProcessor {
+	return &FeedProcessor{userID: userID, store: store, feed: feed, crawler: false}
 }
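EntryURLExists is called by the new code but not defined in this diff; it lives in the storage package. Assuming Storage wraps a *sql.DB in a db field, a plausible shape for it is a per-user lookup like the sketch below (the table and column names are guesses, not taken from this commit):

	// EntryURLExists (hypothetical sketch): reports whether the user already
	// has an entry stored for the given URL. The real implementation is in
	// the storage package and is not shown in this commit.
	func (s *Storage) EntryURLExists(userID int64, entryURL string) bool {
		var result int
		query := `SELECT count(*) FROM entries WHERE user_id=$1 AND url=$2`
		if err := s.db.QueryRow(query, userID, entryURL).Scan(&result); err != nil {
			return false
		}
		return result >= 1
	}

With this guard in place, a refresh only pays the scraper.Fetch network cost for URLs that are new to the user; entries already in the database keep their stored content.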