Fetch entries and list them on demand (#28)

* Fetch entries and list them on demand

* Fix bugs

* Bug

* Fix ordering
Janos Dobronszki
2020-12-06 08:58:58 +01:00
committed by GitHub
parent 9a24c74081
commit a4c54cd5c3
5 changed files with 185 additions and 52 deletions


@@ -25,45 +25,52 @@ func (e *Feeds) fetchAll() {
 		return
 	}
 	for _, feed := range fs {
-		log.Infof("Fetching address %v", feed.Url)
-		fd, err := rss.Fetch(feed.Url)
+		err = e.fetch(feed.Url)
 		if err != nil {
 			log.Errorf("Error fetching address %v: %v", feed.Url, err)
 			continue
 		}
-		domain := getDomain(feed.Url)
-		for _, item := range fd.Items {
-			id := fmt.Sprintf("%x", md5.Sum([]byte(item.ID)))
-			err = e.entries.Save(feeds.Entry{
-				Id:      id,
-				Url:     item.Link,
-				Title:   item.Title,
-				Domain:  domain,
-				Content: item.Summary,
-				Date:    item.Date.Unix(),
-			})
-			if err != nil {
-				log.Errorf("Error saving item: %v", err)
-			}
-			// @todo make this optional
-			_, err := e.postsService.Save(context.TODO(), &posts.SaveRequest{
-				Id:        id,
-				Title:     item.Title,
-				Content:   item.Content,
-				Timestamp: item.Date.Unix(),
-				Metadata: map[string]string{
-					"domain": domain,
-					"link":   item.Link,
-				},
-			})
-			if err != nil {
-				log.Errorf("Error saving post: %v", err)
-			}
-			log.Errorf("Error saving post: %v", err)
-		}
 	}
 }
 
+func (e *Feeds) fetch(url string) error {
+	log.Infof("Fetching address %v", url)
+	fd, err := rss.Fetch(url)
+	if err != nil {
+		return fmt.Errorf("Error fetching address %v: %v", url, err)
+	}
+	domain := getDomain(url)
+	for _, item := range fd.Items {
+		id := fmt.Sprintf("%x", md5.Sum([]byte(item.ID)))
+		err = e.entries.Save(feeds.Entry{
+			Id:      id,
+			Url:     item.Link,
+			Title:   item.Title,
+			Domain:  domain,
+			Content: item.Summary,
+			Date:    item.Date.Unix(),
+		})
+		if err != nil {
+			return fmt.Errorf("Error saving item: %v", err)
+		}
+		// @todo make this optional
+		_, err := e.postsService.Save(context.TODO(), &posts.SaveRequest{
+			Id:        id,
+			Title:     item.Title,
+			Content:   item.Content,
+			Timestamp: item.Date.Unix(),
+			Metadata: map[string]string{
+				"domain": domain,
+				"link":   item.Link,
+			},
+		})
+		if err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
 func getDomain(address string) string {
 	uri, _ := url.Parse(address)
 	return uri.Host

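Note on the ID scheme above: an entry's Id is just the hex-encoded md5 of the RSS item's ID, so re-fetching the same feed should produce the same IDs and update existing records in place rather than duplicate them. A minimal standalone sketch of that derivation (the entryID helper name is made up for illustration, not part of this commit):

package main

import (
	"crypto/md5"
	"fmt"
)

// entryID mirrors the ID derivation used in fetch(): the hex-encoded
// md5 sum of the RSS item's ID. Hypothetical helper, for illustration only.
func entryID(itemID string) string {
	return fmt.Sprintf("%x", md5.Sum([]byte(itemID)))
}

func main() {
	// The same item always hashes to the same ID, which is what lets a
	// re-fetch overwrite an entry instead of inserting a duplicate.
	fmt.Println(entryID("https://example.com/feed/item-1"))
}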

@@ -19,6 +19,7 @@ type Feeds struct {
 	feedsIdIndex     model.Index
 	feedsNameIndex   model.Index
 	entriesDateIndex model.Index
+	entriesURLIndex  model.Index
 }
 
 func NewFeeds(postsService posts.PostsService) *Feeds {
@@ -32,6 +33,10 @@ func NewFeeds(postsService posts.PostsService) *Feeds {
 	dateIndex := model.ByEquality("date")
 	dateIndex.Order.Type = model.OrderTypeDesc
+	entriesURLIndex := model.ByEquality("url")
+	entriesURLIndex.Order.Type = model.OrderTypeDesc
+	entriesURLIndex.Order.FieldName = "date"
+
 	f := &Feeds{
 		feeds: model.New(
 			store.DefaultStore,
@@ -45,7 +50,7 @@ func NewFeeds(postsService posts.PostsService) *Feeds {
 		entries: model.New(
 			store.DefaultStore,
 			"entries",
-			model.Indexes(dateIndex),
+			model.Indexes(dateIndex, entriesURLIndex),
 			&model.ModelOptions{
 				Debug: false,
 			},
@@ -54,6 +59,7 @@ func NewFeeds(postsService posts.PostsService) *Feeds {
 		feedsIdIndex:     idIndex,
 		feedsNameIndex:   nameIndex,
 		entriesDateIndex: dateIndex,
+		entriesURLIndex:  entriesURLIndex,
 	}
 
 	go f.crawl()
@@ -76,3 +82,12 @@ func (e *Feeds) New(ctx context.Context, req *feeds.NewRequest, rsp *feeds.NewRe
 	})
 	return nil
 }
+
+func (e *Feeds) Entries(ctx context.Context, req *feeds.EntriesRequest, rsp *feeds.EntriesResponse) error {
+	log.Info("Received Feeds.New request")
+	err := e.fetch(req.Url)
+	if err != nil {
+		return err
+	}
+	return e.entries.List(e.entriesURLIndex.ToQuery(req.Url), &rsp.Entries)
+}
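For reference, a minimal sketch of how a client might call the new on-demand Entries endpoint. The import paths, the "feeds" service name, and the NewFeedsService constructor follow micro's usual proto codegen conventions and are assumptions here, not part of this commit:

package main

import (
	"context"
	"fmt"

	"github.com/micro/micro/v3/service"
	feeds "github.com/micro/services/feeds/proto"
)

func main() {
	// Assumed generated client for the feeds proto; service name and
	// import path are guesses based on micro's conventions.
	srv := service.New(service.Name("feeds-client"))
	fs := feeds.NewFeedsService("feeds", srv.Client())

	// Entries fetches the feed on demand, then lists the stored entries
	// for that URL via the new url index (ordered by date, descending).
	rsp, err := fs.Entries(context.TODO(), &feeds.EntriesRequest{
		Url: "https://news.ycombinator.com/rss",
	})
	if err != nil {
		fmt.Println("error calling feeds.Entries:", err)
		return
	}
	for _, entry := range rsp.Entries {
		fmt.Println(entry.Title, entry.Url)
	}
}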