author     Frédéric Guillot <fred@miniflux.net>    2017-12-12 19:19:36 -0800
committer  Frédéric Guillot <fred@miniflux.net>    2017-12-12 19:19:36 -0800
commit     ef097f02fe76572d2b1887c28da3f2bd83a993a0
tree       285dd219fab64119728e03b421ab230392ac5e3e /reader/scraper
parent     33445e5b681bbdffaf0925ed020ecdcc49687f15
Add the possibility to enable crawler for feeds
Diffstat (limited to 'reader/scraper')
-rw-r--r--    reader/scraper/scraper.go    5
1 file changed, 2 insertions(+), 3 deletions(-)
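The diff below is limited to reader/scraper; the per-feed crawler switch that the commit message mentions is wired up elsewhere in this commit. As a rough sketch of how such a switch could drive the scraper (a hypothetical helper: the Feed/Entry fields and the crawlEntry name are assumptions, not taken from this diff; only scraper.Fetch comes from the code shown here):

// Hypothetical sketch: a feed processor consulting a per-feed
// "crawler" flag before scraping the full article content.
// The Feed and Entry fields below are assumed names, not part of this diff.
package processor

import (
	"log"

	"github.com/miniflux/miniflux2/reader/scraper"
)

type Feed struct {
	Crawler      bool   // user opted in to full-content crawling
	ScraperRules string // optional site-specific scraper rules
}

type Entry struct {
	URL     string
	Content string
}

// crawlEntry replaces the feed-provided excerpt with scraped content
// when crawling is enabled for the feed.
func crawlEntry(feed *Feed, entry *Entry) {
	if !feed.Crawler {
		return
	}

	content, err := scraper.Fetch(entry.URL, feed.ScraperRules)
	if err != nil {
		log.Printf("[Crawler] unable to fetch %q: %v", entry.URL, err)
		return
	}

	entry.Content = content
}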
diff --git a/reader/scraper/scraper.go b/reader/scraper/scraper.go
index b79a088..e799ad0 100644
--- a/reader/scraper/scraper.go
+++ b/reader/scraper/scraper.go
@@ -13,7 +13,6 @@ import (
 	"github.com/PuerkitoBio/goquery"
 	"github.com/miniflux/miniflux2/http"
 	"github.com/miniflux/miniflux2/reader/readability"
-	"github.com/miniflux/miniflux2/reader/sanitizer"
 	"github.com/miniflux/miniflux2/url"
 )
 
@@ -34,11 +33,11 @@ func Fetch(websiteURL, rules string) (string, error) {
 		return "", err
 	}
 
-	var content string
 	if rules == "" {
 		rules = getPredefinedScraperRules(websiteURL)
 	}
 
+	var content string
 	if rules != "" {
 		log.Printf(`[Scraper] Using rules "%s" for "%s"`, rules, websiteURL)
 		content, err = scrapContent(page, rules)
@@ -51,7 +50,7 @@ func Fetch(websiteURL, rules string) (string, error) {
 		return "", err
 	}
 
-	return sanitizer.Sanitize(websiteURL, content), nil
+	return content, nil
 }
 
 func scrapContent(page io.Reader, rules string) (string, error) {
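With the sanitizer import and the sanitizer.Sanitize call removed, Fetch now returns the scraped content as-is, so sanitizing becomes the caller's responsibility. A minimal sketch of what a call site could look like after this patch (the wrapper function and its name are assumptions; only scraper.Fetch and sanitizer.Sanitize come from the code above):

// Hypothetical call-site sketch: apply the sanitizer where the scraped
// content is consumed, since Fetch no longer calls it internally.
package processor

import (
	"github.com/miniflux/miniflux2/reader/sanitizer"
	"github.com/miniflux/miniflux2/reader/scraper"
)

// scrapeAndSanitize fetches the page content and runs it through the
// same sanitizer call that this patch removed from Fetch.
func scrapeAndSanitize(websiteURL, rules string) (string, error) {
	content, err := scraper.Fetch(websiteURL, rules)
	if err != nil {
		return "", err
	}

	return sanitizer.Sanitize(websiteURL, content), nil
}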