about summary refs log tree commit diff homepage
path: root/reader/feed
diff options
context:
space:
mode:
authorGravatar Frédéric Guillot <fred@miniflux.net>2017-12-12 19:19:36 -0800
committerGravatar Frédéric Guillot <fred@miniflux.net>2017-12-12 19:19:36 -0800
commitef097f02fe76572d2b1887c28da3f2bd83a993a0 (patch)
tree285dd219fab64119728e03b421ab230392ac5e3e /reader/feed
parent33445e5b681bbdffaf0925ed020ecdcc49687f15 (diff)
Add the possibility to enable crawler for feeds
Diffstat (limited to 'reader/feed')
-rw-r--r--reader/feed/handler.go5
1 file changed, 4 insertions, 1 deletion
diff --git a/reader/feed/handler.go b/reader/feed/handler.go
index 7a98613..6b94627 100644
--- a/reader/feed/handler.go
+++ b/reader/feed/handler.go
@@ -33,7 +33,7 @@ type Handler struct {
}
// CreateFeed fetch, parse and store a new feed.
-func (h *Handler) CreateFeed(userID, categoryID int64, url string) (*model.Feed, error) {
+func (h *Handler) CreateFeed(userID, categoryID int64, url string, crawler bool) (*model.Feed, error) {
defer helper.ExecutionTime(time.Now(), fmt.Sprintf("[Handler:CreateFeed] feedUrl=%s", url))
if !h.store.CategoryExists(userID, categoryID) {
@@ -65,6 +65,7 @@ func (h *Handler) CreateFeed(userID, categoryID int64, url string) (*model.Feed,
}
feedProcessor := processor.NewFeedProcessor(subscription)
+ feedProcessor.WithCrawler(crawler)
feedProcessor.Process()
subscription.Category = &model.Category{ID: categoryID}
@@ -72,6 +73,7 @@ func (h *Handler) CreateFeed(userID, categoryID int64, url string) (*model.Feed,
subscription.LastModifiedHeader = response.LastModified
subscription.FeedURL = response.EffectiveURL
subscription.UserID = userID
+ subscription.Crawler = crawler
err = h.store.CreateFeed(subscription)
if err != nil {
@@ -143,6 +145,7 @@ func (h *Handler) RefreshFeed(userID, feedID int64) error {
feedProcessor := processor.NewFeedProcessor(subscription)
feedProcessor.WithScraperRules(originalFeed.ScraperRules)
feedProcessor.WithRewriteRules(originalFeed.RewriteRules)
+ feedProcessor.WithCrawler(originalFeed.Crawler)
feedProcessor.Process()
originalFeed.EtagHeader = response.ETag