From ef097f02fe76572d2b1887c28da3f2bd83a993a0 Mon Sep 17 00:00:00 2001
From: Frédéric Guillot
Date: Tue, 12 Dec 2017 19:19:36 -0800
Subject: Add the possibility to enable crawler for feeds

---
 reader/scraper/scraper.go | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/reader/scraper/scraper.go b/reader/scraper/scraper.go
index b79a088..e799ad0 100644
--- a/reader/scraper/scraper.go
+++ b/reader/scraper/scraper.go
@@ -13,7 +13,6 @@ import (
 	"github.com/PuerkitoBio/goquery"
 	"github.com/miniflux/miniflux2/http"
 	"github.com/miniflux/miniflux2/reader/readability"
-	"github.com/miniflux/miniflux2/reader/sanitizer"
 	"github.com/miniflux/miniflux2/url"
 )
 
@@ -34,11 +33,11 @@ func Fetch(websiteURL, rules string) (string, error) {
 		return "", err
 	}
 
-	var content string
 	if rules == "" {
 		rules = getPredefinedScraperRules(websiteURL)
 	}
 
+	var content string
 	if rules != "" {
 		log.Printf(`[Scraper] Using rules "%s" for "%s"`, rules, websiteURL)
 		content, err = scrapContent(page, rules)
@@ -51,7 +50,7 @@ func Fetch(websiteURL, rules string) (string, error) {
 		return "", err
 	}
 
-	return sanitizer.Sanitize(websiteURL, content), nil
+	return content, nil
 }
 
 func scrapContent(page io.Reader, rules string) (string, error) {
--
cgit v1.2.3
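
Note on this change: Fetch() no longer sanitizes the scraped content itself;
the sanitizer import and the sanitizer.Sanitize() call are removed, so
sanitization becomes the caller's responsibility. A minimal sketch of the
resulting call pattern, where the surrounding caller code (the entry variable
and error handling) is an assumption for illustration, not part of this patch:

	// Hypothetical caller: fetch raw content, then sanitize it explicitly.
	content, err := scraper.Fetch(websiteURL, rules)
	if err != nil {
		return err
	}
	// sanitizer.Sanitize was previously applied inside Fetch; after this
	// patch the caller must invoke it before storing or rendering content.
	entry.Content = sanitizer.Sanitize(websiteURL, content)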