about summary refs log tree commit diff homepage
path: root/reader/processor
diff options
context:
space:
mode:
author Frédéric Guillot <fred@miniflux.net> 2018-10-14 22:33:19 -0700
committer Frédéric Guillot <fred@miniflux.net> 2018-10-14 22:33:19 -0700
commit b8f874a37d5ce57fb139e857b5cbd2276da46714 (patch)
tree b3754ad08399590d2a2cc2ad780792c534ecc431 /reader/processor
parent 234b3710d4d3bc3b5cb5e56feaf74ceabfb9ef41 (diff)
Simplify feed entries filtering
- Rename processor package to filter
- Remove boilerplate code
Diffstat (limited to 'reader/processor')
-rw-r--r-- reader/processor/doc.go       | 10
-rw-r--r-- reader/processor/processor.go | 71
2 files changed, 0 insertions, 81 deletions
diff --git a/reader/processor/doc.go b/reader/processor/doc.go
deleted file mode 100644
index f0e7fd4..0000000
--- a/reader/processor/doc.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Copyright 2018 Frédéric Guillot. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-/*
-
-Package processor handles the logic to manipulate feed contents.
-
-*/
-package processor // import "miniflux.app/reader/processor"
diff --git a/reader/processor/processor.go b/reader/processor/processor.go
deleted file mode 100644
index f57e6cd..0000000
--- a/reader/processor/processor.go
+++ /dev/null
@@ -1,71 +0,0 @@
-// Copyright 2017 Frédéric Guillot. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-package processor // import "miniflux.app/reader/processor"
-
-import (
- "miniflux.app/logger"
- "miniflux.app/model"
- "miniflux.app/reader/rewrite"
- "miniflux.app/reader/sanitizer"
- "miniflux.app/reader/scraper"
- "miniflux.app/storage"
-)
-
-// FeedProcessor handles the processing of feed contents.
-type FeedProcessor struct {
- userID int64
- store *storage.Storage
- feed *model.Feed
- scraperRules string
- rewriteRules string
- crawler bool
- userAgent string
-}
-
-// WithCrawler enables the crawler.
-func (f *FeedProcessor) WithCrawler(value bool) {
- f.crawler = value
-}
-
-// WithScraperRules adds scraper rules to the processing.
-func (f *FeedProcessor) WithScraperRules(rules string) {
- f.scraperRules = rules
-}
-
-// WithUserAgent sets the User-Agent header for fetching article content.
-func (f *FeedProcessor) WithUserAgent(userAgent string) {
- f.userAgent = userAgent
-}
-
-// WithRewriteRules adds rewrite rules to the processing.
-func (f *FeedProcessor) WithRewriteRules(rules string) {
- f.rewriteRules = rules
-}
-
-// Process applies rewrite and scraper rules.
-func (f *FeedProcessor) Process() {
- for _, entry := range f.feed.Entries {
- if f.crawler {
- if f.store.EntryURLExists(f.userID, entry.URL) {
- logger.Debug(`[FeedProcessor] Do not crawl existing entry URL: "%s"`, entry.URL)
- } else {
- content, err := scraper.Fetch(entry.URL, f.scraperRules, f.userAgent)
- if err != nil {
- logger.Error("[FeedProcessor] %v", err)
- } else {
- entry.Content = content
- }
- }
- }
-
- entry.Content = rewrite.Rewriter(entry.URL, entry.Content, f.rewriteRules)
- entry.Content = sanitizer.Sanitize(entry.URL, entry.Content)
- }
-}
-
-// NewFeedProcessor returns a new FeedProcessor.
-func NewFeedProcessor(userID int64, store *storage.Storage, feed *model.Feed) *FeedProcessor {
- return &FeedProcessor{userID: userID, store: store, feed: feed, crawler: false}
-}