author     Patrick <pmarschik@users.noreply.github.com>  2018-09-20 03:19:24 +0200
committer  Frédéric Guillot <fred@miniflux.net>          2018-09-19 18:19:24 -0700
commit     2538eea1776e1d03d33465ad2001512caca93937 (patch)
tree       021c417d5f31606fe50d7945872c4cda6cdc2060 /storage
parent     1d335390c2ac7c0feeb94fce89eefdae9a1e7c17 (diff)
Add the possibility to override the default user agent for each feed
Diffstat (limited to 'storage')
-rw-r--r--  storage/entry_query_builder.go   3
-rw-r--r--  storage/feed.go                 18
2 files changed, 13 insertions, 8 deletions
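Every query below reads or writes a new user_agent column on the feeds table, scanned into feed.UserAgent / entry.Feed.UserAgent. The matching model change lives outside storage/ and is therefore not part of this diff; the following is only a minimal sketch of what it presumably looks like (the json tag is an assumption):

package model

// Feed is trimmed down to the field this commit relies on; all other fields
// are omitted. An empty UserAgent means the default user agent is used.
type Feed struct {
    // ... existing fields (ID, FeedURL, Crawler, ScraperRules, ...) ...
    UserAgent string `json:"user_agent"`
}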
diff --git a/storage/entry_query_builder.go b/storage/entry_query_builder.go
index 18cf735..192f515 100644
--- a/storage/entry_query_builder.go
+++ b/storage/entry_query_builder.go
@@ -192,7 +192,7 @@ func (e *EntryQueryBuilder) GetEntries() (model.Entries, error) {
e.id, e.user_id, e.feed_id, e.hash, e.published_at at time zone u.timezone, e.title,
e.url, e.comments_url, e.author, e.content, e.status, e.starred,
f.title as feed_title, f.feed_url, f.site_url, f.checked_at,
- f.category_id, c.title as category_title, f.scraper_rules, f.rewrite_rules, f.crawler,
+ f.category_id, c.title as category_title, f.scraper_rules, f.rewrite_rules, f.crawler, f.user_agent,
fi.icon_id,
u.timezone
FROM entries e
@@ -247,6 +247,7 @@ func (e *EntryQueryBuilder) GetEntries() (model.Entries, error) {
&entry.Feed.ScraperRules,
&entry.Feed.RewriteRules,
&entry.Feed.Crawler,
+ &entry.Feed.UserAgent,
&iconID,
&tz,
)
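With f.user_agent now selected into entry.Feed.UserAgent, the fetching code can honour the override when downloading content. The actual HTTP client is not touched by this diff; the snippet below is only a rough sketch of how the value could be applied to a request, and defaultUserAgent / applyUserAgent are invented names:

package client

import "net/http"

// defaultUserAgent is an assumed fallback; Miniflux defines its own default elsewhere.
const defaultUserAgent = "Mozilla/5.0 (compatible; Miniflux)"

// applyUserAgent sets the per-feed override on the outgoing request and falls
// back to the default when the feed has no user_agent configured.
func applyUserAgent(req *http.Request, feedUserAgent string) {
    if feedUserAgent == "" {
        feedUserAgent = defaultUserAgent
    }
    req.Header.Set("User-Agent", feedUserAgent)
}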
diff --git a/storage/feed.go b/storage/feed.go
index be312ac..c60c11c 100644
--- a/storage/feed.go
+++ b/storage/feed.go
@@ -66,7 +66,7 @@ func (s *Storage) Feeds(userID int64) (model.Feeds, error) {
f.id, f.feed_url, f.site_url, f.title, f.etag_header, f.last_modified_header,
f.user_id, f.checked_at at time zone u.timezone,
f.parsing_error_count, f.parsing_error_msg,
- f.scraper_rules, f.rewrite_rules, f.crawler,
+ f.scraper_rules, f.rewrite_rules, f.crawler, f.user_agent,
f.username, f.password,
f.category_id, c.title as category_title,
fi.icon_id,
@@ -104,6 +104,7 @@ func (s *Storage) Feeds(userID int64) (model.Feeds, error) {
&feed.ScraperRules,
&feed.RewriteRules,
&feed.Crawler,
+ &feed.UserAgent,
&feed.Username,
&feed.Password,
&feed.Category.ID,
@@ -141,7 +142,7 @@ func (s *Storage) FeedByID(userID, feedID int64) (*model.Feed, error) {
f.id, f.feed_url, f.site_url, f.title, f.etag_header, f.last_modified_header,
f.user_id, f.checked_at at time zone u.timezone,
f.parsing_error_count, f.parsing_error_msg,
- f.scraper_rules, f.rewrite_rules, f.crawler,
+ f.scraper_rules, f.rewrite_rules, f.crawler, f.user_agent,
f.username, f.password,
f.category_id, c.title as category_title,
fi.icon_id,
@@ -166,6 +167,7 @@ func (s *Storage) FeedByID(userID, feedID int64) (*model.Feed, error) {
&feed.ScraperRules,
&feed.RewriteRules,
&feed.Crawler,
+ &feed.UserAgent,
&feed.Username,
&feed.Password,
&feed.Category.ID,
@@ -194,8 +196,8 @@ func (s *Storage) CreateFeed(feed *model.Feed) error {
defer timer.ExecutionTime(time.Now(), fmt.Sprintf("[Storage:CreateFeed] feedURL=%s", feed.FeedURL))
sql := `
INSERT INTO feeds
- (feed_url, site_url, title, category_id, user_id, etag_header, last_modified_header, crawler, username, password)
- VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
+ (feed_url, site_url, title, category_id, user_id, etag_header, last_modified_header, crawler, user_agent, username, password)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
RETURNING id
`
@@ -209,6 +211,7 @@ func (s *Storage) CreateFeed(feed *model.Feed) error {
feed.EtagHeader,
feed.LastModifiedHeader,
feed.Crawler,
+ feed.UserAgent,
feed.Username,
feed.Password,
).Scan(&feed.ID)
@@ -234,9 +237,9 @@ func (s *Storage) UpdateFeed(feed *model.Feed) (err error) {
query := `UPDATE feeds SET
feed_url=$1, site_url=$2, title=$3, category_id=$4, etag_header=$5, last_modified_header=$6, checked_at=$7,
- parsing_error_msg=$8, parsing_error_count=$9, scraper_rules=$10, rewrite_rules=$11, crawler=$12,
- username=$13, password=$14
- WHERE id=$15 AND user_id=$16`
+ parsing_error_msg=$8, parsing_error_count=$9, scraper_rules=$10, rewrite_rules=$11, crawler=$12, user_agent=$13,
+ username=$14, password=$15
+ WHERE id=$16 AND user_id=$17`
_, err = s.db.Exec(query,
feed.FeedURL,
@@ -251,6 +254,7 @@ func (s *Storage) UpdateFeed(feed *model.Feed) (err error) {
feed.ScraperRules,
feed.RewriteRules,
feed.Crawler,
+ feed.UserAgent,
feed.Username,
feed.Password,
feed.ID,
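Callers of the storage layer persist the override simply by setting the field before calling CreateFeed or UpdateFeed. A usage sketch, under the assumption that model.Feed exposes UserID and Category fields matching the columns referenced above (import paths and error handling trimmed):

// createFeedWithCustomUserAgent shows the new field in use; field names other
// than UserAgent are assumptions based on the columns used in
// CreateFeed/UpdateFeed above.
func createFeedWithCustomUserAgent(store *storage.Storage, userID, categoryID int64) error {
    feed := &model.Feed{
        UserID:    userID,
        FeedURL:   "https://example.org/feed.xml",
        SiteURL:   "https://example.org/",
        Title:     "Example",
        Category:  &model.Category{ID: categoryID},
        UserAgent: "MyCustomBot/1.0", // per-feed override; empty keeps the default
    }
    return store.CreateFeed(feed)
}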