path: root/reader/processor/processor.go
author Frédéric Guillot <fred@miniflux.net> 2019-02-28 20:43:33 -0800
committer Frédéric Guillot <fred@miniflux.net> 2019-02-28 20:43:33 -0800
commit f3fc8b70721524867a3dac7d2bcb548b830c282a (patch)
tree d82d6a29e09a5fc66df5f873e9a250b9267de9c7 /reader/processor/processor.go
parent 1634e267b8fe33d2f74454fb43c471d224642a89 (diff)
Use feed ID instead of user ID to check entry URL presence
Diffstat (limited to 'reader/processor/processor.go')
-rw-r--r--	reader/processor/processor.go	2
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/reader/processor/processor.go b/reader/processor/processor.go
index faceccc4..5fab4427 100644
--- a/reader/processor/processor.go
+++ b/reader/processor/processor.go
@@ -17,7 +17,7 @@ import (
 func ProcessFeedEntries(store *storage.Storage, feed *model.Feed) {
 	for _, entry := range feed.Entries {
 		if feed.Crawler {
-			if !store.EntryURLExists(feed.UserID, entry.URL) {
+			if !store.EntryURLExists(feed.ID, entry.URL) {
 				content, err := scraper.Fetch(entry.URL, feed.ScraperRules, feed.UserAgent)
 				if err != nil {
 					logger.Error(`[Filter] Unable to crawl this entry: %q => %v`, entry.URL, err)
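
For context, the sketch below shows what a feed-scoped EntryURLExists lookup on the storage side could look like. It is illustrative only: the entries table, its feed_id/url columns, and the Storage struct shape are assumptions, not taken from this diff or from the Miniflux storage package.

// Illustrative sketch only: a feed-scoped duplicate check as the storage side
// of this change could implement it. Table and column names are assumptions.
package storage

import "database/sql"

// Storage wraps the database handle (assumed shape).
type Storage struct {
	db *sql.DB
}

// EntryURLExists reports whether an entry with the given URL is already stored
// for the given feed. Scoping by feed ID instead of user ID means the same URL
// appearing in two different feeds of one user is still crawled for each feed.
func (s *Storage) EntryURLExists(feedID int64, entryURL string) bool {
	var count int
	query := `SELECT count(*) FROM entries WHERE feed_id=$1 AND url=$2`
	if err := s.db.QueryRow(query, feedID, entryURL).Scan(&count); err != nil {
		return false
	}
	return count > 0
}

Narrowing the check to the feed being refreshed matches the ProcessFeedEntries loop above, which iterates over a single feed's entries when deciding whether to crawl.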