From 3b62f904d6d3fdad168d3b212ff7c465c01b50f4 Mon Sep 17 00:00:00 2001
From: Frédéric Guillot
Date: Fri, 19 Jan 2018 18:43:27 -0800
Subject: Do not crawl existing entry URLs

---
 reader/feed/handler.go | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

(limited to 'reader/feed/handler.go')

diff --git a/reader/feed/handler.go b/reader/feed/handler.go
index 8b5658e..c1f42ae 100644
--- a/reader/feed/handler.go
+++ b/reader/feed/handler.go
@@ -70,7 +70,7 @@ func (h *Handler) CreateFeed(userID, categoryID int64, url string, crawler bool)
 		return nil, err
 	}
 
-	feedProcessor := processor.NewFeedProcessor(subscription)
+	feedProcessor := processor.NewFeedProcessor(userID, h.store, subscription)
 	feedProcessor.WithCrawler(crawler)
 	feedProcessor.Process()
 
@@ -162,7 +162,7 @@ func (h *Handler) RefreshFeed(userID, feedID int64) error {
 		return err
 	}
 
-	feedProcessor := processor.NewFeedProcessor(subscription)
+	feedProcessor := processor.NewFeedProcessor(userID, h.store, subscription)
 	feedProcessor.WithScraperRules(originalFeed.ScraperRules)
 	feedProcessor.WithRewriteRules(originalFeed.RewriteRules)
 	feedProcessor.WithCrawler(originalFeed.Crawler)
-- 
cgit v1.2.3
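
For context, here is a minimal, self-contained sketch of what the new constructor signature enables. It is not the actual processor code from this repository: the Store interface, the EntryURLExists method name, the InMemoryStore type, and the fetchFullContent helper are illustrative assumptions. The idea is that, by receiving the user ID and a storage handle, the processor can skip crawling entry URLs that already exist for that user, which is what the commit subject describes.

```go
package main

import "fmt"

// Entry and Feed are simplified stand-ins for the real model types.
type Entry struct {
	URL     string
	Content string
}

type Feed struct {
	Entries []*Entry
}

// Store abstracts the storage layer; the real storage API may differ,
// and EntryURLExists is an assumed method name for the existence check.
type Store interface {
	EntryURLExists(userID int64, url string) bool
}

// FeedProcessor mirrors the constructor change in the diff: it now carries
// the user ID and a store handle in addition to the feed itself.
type FeedProcessor struct {
	userID  int64
	store   Store
	feed    *Feed
	crawler bool
}

// NewFeedProcessor matches the call sites updated by this patch.
func NewFeedProcessor(userID int64, store Store, feed *Feed) *FeedProcessor {
	return &FeedProcessor{userID: userID, store: store, feed: feed}
}

// WithCrawler toggles full-content crawling.
func (p *FeedProcessor) WithCrawler(enabled bool) {
	p.crawler = enabled
}

// Process crawls only entries whose URL is not already known for this user.
func (p *FeedProcessor) Process() {
	for _, entry := range p.feed.Entries {
		if p.crawler && !p.store.EntryURLExists(p.userID, entry.URL) {
			entry.Content = fetchFullContent(entry.URL)
		}
	}
}

// fetchFullContent is a placeholder for the real scraper call.
func fetchFullContent(url string) string {
	return fmt.Sprintf("downloaded content of %s", url)
}

// InMemoryStore is a toy implementation used only for this example.
type InMemoryStore struct {
	known map[string]bool
}

func (s *InMemoryStore) EntryURLExists(userID int64, url string) bool {
	return s.known[fmt.Sprintf("%d:%s", userID, url)]
}

func main() {
	store := &InMemoryStore{known: map[string]bool{"1:https://example.org/old": true}}
	feed := &Feed{Entries: []*Entry{
		{URL: "https://example.org/old"},
		{URL: "https://example.org/new"},
	}}

	p := NewFeedProcessor(1, store, feed)
	p.WithCrawler(true)
	p.Process()

	for _, e := range feed.Entries {
		fmt.Printf("%s -> %q\n", e.URL, e.Content)
	}
}
```

Passing the user ID and store into the processor, rather than filtering in the handler, keeps the "already crawled?" decision next to the crawl itself, so both CreateFeed and RefreshFeed get the same behaviour from the one change.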