EZTV and Torznab support
EZTV is fully supported, as far as I know, and is available at /feed/eztv. Torznab is supported for display output (the t=caps route is still missing) and is available at /feed/torznab.
Parent: 84aab574f1
Commit: c1270e549a
3 changed files with 387 additions and 56 deletions
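For reference, a minimal sketch of how a client could exercise the two new endpoints once a build of this revision is running. Only the /feed/eztv and /feed/torznab paths come from this commit; the base URL and port are assumptions about a local instance.

package main

import (
    "fmt"
    "io"
    "net/http"
)

func main() {
    base := "http://localhost:9999" // assumed address of a local nyaa instance
    for _, path := range []string{"/feed/eztv", "/feed/torznab"} {
        resp, err := http.Get(base + path)
        if err != nil {
            fmt.Println(path, "request failed:", err)
            continue
        }
        body, _ := io.ReadAll(resp.Body)
        resp.Body.Close()
        // Both handlers answer with RSS 2.0 XML, extended with ezrss
        // (/feed/eztv) or torznab (/feed/torznab) elements.
        fmt.Printf("%s -> %s, %d bytes\n", path, resp.Status, len(body))
    }
}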
@@ -57,6 +57,8 @@ func init() {
 	Router.HandleFunc("/activities", ActivityListHandler).Name("activity_list")
 	Router.HandleFunc("/feed", RSSHandler).Name("feed")
 	Router.HandleFunc("/feed/{page}", RSSHandler).Name("feed_page")
+	Router.HandleFunc("/feed/torznab", RSSTorznabHandler).Name("feed_torznab")
+	Router.HandleFunc("/feed/eztv", RSSEztvHandler).Name("feed_eztv")
 
 	// !!! This line need to have the same download location as the one define in config.TorrentStorageLink !!!
 	Router.Handle("/download/{hash}", wrapHandler(downloadTorrentHandler)).Name("torrent_download")
@@ -1,13 +1,16 @@
 package router
 
 import (
+	"errors"
 	"html"
 	"net/http"
 	"strconv"
 	"time"
 
 	"github.com/NyaaPantsu/nyaa/config"
+	"github.com/NyaaPantsu/nyaa/model"
 	userService "github.com/NyaaPantsu/nyaa/service/user"
+	"github.com/NyaaPantsu/nyaa/util/feeds"
 	"github.com/NyaaPantsu/nyaa/util/search"
 	"github.com/gorilla/feeds"
 	"github.com/gorilla/mux"
@@ -16,80 +19,33 @@ import (
 // RSSHandler : Controller for displaying rss feed, accepting common search arguments
 func RSSHandler(w http.ResponseWriter, r *http.Request) {
 	defer r.Body.Close()
-	vars := mux.Vars(r)
-	page := vars["page"]
-	userID := vars["id"]
 
-	offset := r.URL.Query().Get("offset")
-	var err error
-	pagenum := 1
-	if page == "" && offset != "" {
-		page = offset
-	}
-	if page != "" {
-		pagenum, err = strconv.Atoi(html.EscapeString(page))
-		if err != nil {
-			http.Error(w, err.Error(), http.StatusBadRequest)
-			return
-		}
-		if pagenum <= 0 {
-			NotFoundHandler(w, r)
-			return
-		}
-	}
-
-	if userID != "" {
-		userIDnum, err := strconv.Atoi(html.EscapeString(userID))
-		// Should we have a feed for anonymous uploads?
-		if err != nil || userIDnum == 0 {
-			http.Error(w, err.Error(), http.StatusBadRequest)
-			return
-		}
-
-		_, _, err = userService.RetrieveUserForAdmin(userID)
-		if err != nil {
-			http.Error(w, "", http.StatusNotFound)
-			return
-		}
-
-		// Set the user ID on the request, so that SearchByQuery finds it.
-		query := r.URL.Query()
-		query.Set("userID", userID)
-		r.URL.RawQuery = query.Encode()
-	}
+	// We only get the basic variable for rss based on search param
+	torrents, createdAsTime, title, err := getTorrentList(r)
 
-	_, torrents, err := search.SearchByQueryNoCount(r, pagenum)
 	if err != nil {
-		http.Error(w, err.Error(), http.StatusInternalServerError)
+		http.Error(w, err.Error(), http.StatusBadRequest)
 		return
 	}
-	createdAsTime := time.Now()
 
-	if len(torrents) > 0 {
-		createdAsTime = torrents[0].Date
-	}
-	title := "Nyaa Pantsu"
-	if config.IsSukebei() {
-		title = "Sukebei Pantsu"
-	}
-	feed := &feeds.RssFeed{
+	feed := &nyaafeeds.RssFeed{
 		Title:   title,
 		Link:    config.WebAddress() + "/",
 		PubDate: createdAsTime.String(),
 	}
-	feed.Items = make([]*feeds.RssItem, len(torrents))
+	feed.Items = make([]*nyaafeeds.RssItem, len(torrents))
 
 	for i, torrent := range torrents {
 		torrentJSON := torrent.ToJSON()
-		feed.Items[i] = &feeds.RssItem{
+		feed.Items[i] = &nyaafeeds.RssItem{
 			Title:       torrentJSON.Name,
 			Link:        config.WebAddress() + "/view/" + strconv.FormatUint(uint64(torrentJSON.ID), 10),
 			Description: string(torrentJSON.Description),
 			Author:      config.WebAddress() + "/view/" + strconv.FormatUint(uint64(torrentJSON.ID), 10),
 			PubDate:     torrent.Date.String(),
-			Guid:        config.WebAddress() + "/download/" + torrentJSON.Hash,
-			Enclosure: &feeds.RssEnclosure{
-				Url:    config.WebAddress() + "/download/" + torrentJSON.Hash,
+			GUID:        config.WebAddress() + "/download/" + torrentJSON.Hash,
+			Enclosure: &nyaafeeds.RssEnclosure{
+				URL:    config.WebAddress() + "/download/" + torrentJSON.Hash,
				Length: strconv.FormatUint(uint64(torrentJSON.Filesize), 10),
 				Type:   "application/x-bittorrent",
 			},
@@ -107,3 +63,177 @@ func RSSHandler(w http.ResponseWriter, r *http.Request) {
 		http.Error(w, writeErr.Error(), http.StatusInternalServerError)
 	}
 }
+
+// RSSEztvHandler : Controller for displaying rss feed, accepting common search arguments
+func RSSEztvHandler(w http.ResponseWriter, r *http.Request) {
+	defer r.Body.Close()
+
+	// We only get the basic variable for rss based on search param
+	torrents, createdAsTime, title, err := getTorrentList(r)
+
+	if err != nil {
+		http.Error(w, err.Error(), http.StatusBadRequest)
+		return
+	}
+
+	feed := &nyaafeeds.RssFeed{
+		Title:   title,
+		Link:    config.WebAddress() + "/",
+		PubDate: createdAsTime.String(),
+	}
+	feed.Items = make([]*nyaafeeds.RssItem, len(torrents))
+
+	for i, torrent := range torrents {
+		torrentJSON := torrent.ToJSON()
+		feed.Items[i] = &nyaafeeds.RssItem{
+			Title: torrentJSON.Name,
+			Link:  config.WebAddress() + "/download/" + torrentJSON.Hash,
+			Category: &nyaafeeds.RssCategory{
+				Domain: config.WebAddress() + "/search?c=" + torrentJSON.Category + "_" + torrentJSON.SubCategory,
+			},
+			Description: string(torrentJSON.Description),
+			Comments:    config.WebAddress() + "/view/" + strconv.FormatUint(uint64(torrentJSON.ID), 10),
+			PubDate:     torrent.Date.String(),
+			GUID:        config.WebAddress() + "/view/" + strconv.FormatUint(uint64(torrentJSON.ID), 10),
+			Enclosure: &nyaafeeds.RssEnclosure{
+				URL:    config.WebAddress() + "/download/" + torrentJSON.Hash,
+				Length: strconv.FormatUint(uint64(torrentJSON.Filesize), 10),
+				Type:   "application/x-bittorrent",
+			},
+			Torrent: &nyaafeeds.RssTorrent{
+				Xmlns:         "http://xmlns.ezrss.it/0.1/",
+				FileName:      torrentJSON.Name + ".torrent",
+				ContentLength: strconv.FormatUint(uint64(torrentJSON.Filesize), 10),
+				InfoHash:      torrentJSON.Hash,
+				MagnetURI:     string(torrentJSON.Magnet),
+			},
+		}
+	}
+	// allow cross domain AJAX requests
+	w.Header().Set("Access-Control-Allow-Origin", "*")
+	rss, rssErr := feeds.ToXML(feed)
+	if rssErr != nil {
+		http.Error(w, rssErr.Error(), http.StatusInternalServerError)
+	}
+
+	_, writeErr := w.Write([]byte(rss))
+	if writeErr != nil {
+		http.Error(w, writeErr.Error(), http.StatusInternalServerError)
+	}
+}
+
+// RSSEztvHandler : Controller for displaying rss feed, accepting common search arguments
+func RSSTorznabHandler(w http.ResponseWriter, r *http.Request) {
+	defer r.Body.Close()
+
+	// We only get the basic variable for rss based on search param
+	torrents, createdAsTime, title, err := getTorrentList(r)
+
+	if err != nil {
+		http.Error(w, err.Error(), http.StatusBadRequest)
+		return
+	}
+
+	feed := &nyaafeeds.RssFeed{
+		Title:   title,
+		Link:    config.WebAddress() + "/",
+		PubDate: createdAsTime.String(),
+	}
+	feed.Items = make([]*nyaafeeds.RssItem, len(torrents))
+
+	for i, torrent := range torrents {
+		torrentJSON := torrent.ToJSON()
+		feed.Items[i] = &nyaafeeds.RssItem{
+			Title: torrentJSON.Name,
+			Link:  config.WebAddress() + "/download/" + torrentJSON.Hash,
+			Category: &nyaafeeds.RssCategory{
+				Domain: config.WebAddress() + "/search?c=" + torrentJSON.Category + "_" + torrentJSON.SubCategory,
+			},
+			Description: string(torrentJSON.Description),
+			Comments:    config.WebAddress() + "/view/" + strconv.FormatUint(uint64(torrentJSON.ID), 10),
+			PubDate:     torrent.Date.String(),
+			GUID:        config.WebAddress() + "/view/" + strconv.FormatUint(uint64(torrentJSON.ID), 10),
+			Enclosure: &nyaafeeds.RssEnclosure{
+				URL:    config.WebAddress() + "/download/" + torrentJSON.Hash,
+				Length: strconv.FormatUint(uint64(torrentJSON.Filesize), 10),
+				Type:   "application/x-bittorrent",
+			},
+			Torznab: &nyaafeeds.RssTorznab{
+				Xmlns:     "http://torznab.com/schemas/2015/feed",
+				Size:      strconv.FormatUint(uint64(torrentJSON.Filesize), 10),
+				Files:     strconv.Itoa(len(torrentJSON.FileList)),
+				Grabs:     strconv.Itoa(torrentJSON.Downloads),
+				Seeders:   strconv.Itoa(int(torrentJSON.Seeders)),
+				Leechers:  strconv.Itoa(int(torrentJSON.Leechers)),
+				Infohash:  torrentJSON.Hash,
+				MagnetURL: string(torrentJSON.Magnet),
+			},
+		}
+	}
+	// allow cross domain AJAX requests
+	w.Header().Set("Access-Control-Allow-Origin", "*")
+	rss, rssErr := feeds.ToXML(feed)
+	if rssErr != nil {
+		http.Error(w, rssErr.Error(), http.StatusInternalServerError)
+	}
+
+	_, writeErr := w.Write([]byte(rss))
+	if writeErr != nil {
+		http.Error(w, writeErr.Error(), http.StatusInternalServerError)
+	}
+}
+
+func getTorrentList(r *http.Request) (torrents []model.Torrent, createdAsTime time.Time, title string, err error) {
+	vars := mux.Vars(r)
+	page := vars["page"]
+	userID := vars["id"]
+
+	offset := r.URL.Query().Get("offset")
+	pagenum := 1
+	if page == "" && offset != "" {
+		page = offset
+	}
+	if page != "" {
+		pagenum, err = strconv.Atoi(html.EscapeString(page))
+		if err != nil {
+			return
+		}
+		if pagenum <= 0 {
+			err = errors.New("Page number is invalid")
+			return
+		}
+	}
+
+	if userID != "" {
+		userIDnum := 0
+		userIDnum, err = strconv.Atoi(html.EscapeString(userID))
+		// Should we have a feed for anonymous uploads?
+		if err != nil || userIDnum == 0 {
+			return
+		}
+
+		_, _, err = userService.RetrieveUserForAdmin(userID)
+		if err != nil {
+			return
+		}
+		// Set the user ID on the request, so that SearchByQuery finds it.
+		query := r.URL.Query()
+		query.Set("userID", userID)
+		r.URL.RawQuery = query.Encode()
+	}
+
+	_, torrents, err = search.SearchByQueryNoCount(r, pagenum)
+
+	createdAsTime = time.Now()
+
+	if len(torrents) > 0 {
+		createdAsTime = torrents[0].Date
+	}
+
+	title = "Nyaa Pantsu"
+	if config.IsSukebei() {
+		title = "Sukebei Pantsu"
+	}
+
+	return
+}
util/feeds/rss.go (new file, 199 lines)
@@ -0,0 +1,199 @@
package nyaafeeds

// rss support
// validation done according to spec here:
// http://cyber.law.harvard.edu/rss/rss.html

import (
	"encoding/xml"
	"fmt"
	"strconv"
	"time"

	"github.com/gorilla/feeds"
)

// private wrapper around the RssFeed which gives us the <rss>..</rss> xml
type rssFeedXML struct {
	XMLName xml.Name `xml:"rss"`
	Version string   `xml:"version,attr"`
	Channel *RssFeed
}

type RssImage struct {
	XMLName xml.Name `xml:"image"`
	URL     string   `xml:"url"`
	Title   string   `xml:"title"`
	Link    string   `xml:"link"`
	Width   int      `xml:"width,omitempty"`
	Height  int      `xml:"height,omitempty"`
}

type RssTextInput struct {
	XMLName     xml.Name `xml:"textInput"`
	Title       string   `xml:"title"`
	Description string   `xml:"description"`
	Name        string   `xml:"name"`
	Link        string   `xml:"link"`
}

type RssFeed struct {
	XMLName        xml.Name `xml:"channel"`
	Title          string   `xml:"title"`       // required
	Link           string   `xml:"link"`        // required
	Description    string   `xml:"description"` // required
	Language       string   `xml:"language,omitempty"`
	Copyright      string   `xml:"copyright,omitempty"`
	ManagingEditor string   `xml:"managingEditor,omitempty"` // Author used
	WebMaster      string   `xml:"webMaster,omitempty"`
	PubDate        string   `xml:"pubDate,omitempty"`       // created or updated
	LastBuildDate  string   `xml:"lastBuildDate,omitempty"` // updated used
	Category       string   `xml:"category,omitempty"`
	Generator      string   `xml:"generator,omitempty"`
	Docs           string   `xml:"docs,omitempty"`
	Cloud          string   `xml:"cloud,omitempty"`
	TTL            int      `xml:"ttl,omitempty"`
	Rating         string   `xml:"rating,omitempty"`
	SkipHours      string   `xml:"skipHours,omitempty"`
	SkipDays       string   `xml:"skipDays,omitempty"`
	Image          *RssImage
	TextInput      *RssTextInput
	Items          []*RssItem
}

type RssItem struct {
	XMLName     xml.Name     `xml:"item"`
	Title       string       `xml:"title"`       // required
	Link        string       `xml:"link"`        // required
	Description string       `xml:"description"` // required
	Author      string       `xml:"author,omitempty"`
	Category    *RssCategory `xml:"category,omitempty"`
	Comments    string       `xml:"comments,omitempty"`
	Enclosure   *RssEnclosure
	GUID        string      `xml:"guid,omitempty"`    // Id used
	PubDate     string      `xml:"pubDate,omitempty"` // created or updated
	Source      string      `xml:"source,omitempty"`
	Torrent     *RssTorrent `xml:"torrent,omitempty"`
	Torznab     *RssTorznab `xml:"torznab,omitempty"`
}

type RssTorrent struct {
	XMLName       xml.Name `xml:"torrent"`
	Xmlns         string   `xml:"xmlns,attr"`
	FileName      string   `xml:"fileName,omitempty"`
	ContentLength string   `xml:"contentLength,omitempty"`
	InfoHash      string   `xml:"infoHash,omitempty"`
	MagnetURI     string   `xml:"magnetUri,omitempty"`
}

type RssTorznab struct {
	XMLName              xml.Name `xml:"torznab"`
	Xmlns                string   `xml:"xmlns,attr"`
	Type                 string   `xml:"type,omitempty"`
	Size                 string   `xml:"size,omitempty"`
	Files                string   `xml:"files,omitempty"`
	Grabs                string   `xml:"grabs,omitempty"`
	Tvdbid               string   `xml:"tvdbid,omitempty"`
	Rageid               string   `xml:"rageid,omitempty"`
	Tvmazeid             string   `xml:"tvmazeid,omitempty"`
	Imdb                 string   `xml:"imdb,omitempty"`
	BannerURL            string   `xml:"bannerurl,omitempty"`
	Infohash             string   `xml:"infohash,omitempty"`
	MagnetURL            string   `xml:"magneturl,omitempty"`
	Seeders              string   `xml:"seeders,omitempty"`
	Leechers             string   `xml:"leechers,omitempty"`
	Peers                string   `xml:"peers,omitempty"`
	SeedType             string   `xml:"seedtype,omitempty"`
	MinimumRatio         string   `xml:"minimumratio,omitempty"`
	MinimumSeedTime      string   `xml:"minimumseedtime,omitempty"`
	DownloadVolumeFactor string   `xml:"downloadvolumefactor,omitempty"`
	UploadVolumeFactor   string   `xml:"uploadvolumefactor,omitempty"`
}

// RssCategory is a category for rss item
type RssCategory struct {
	XMLName xml.Name `xml:"category"`
	Domain  string   `xml:"domain"`
}

type RssEnclosure struct {
	//RSS 2.0 <enclosure url="http://example.com/file.mp3" length="123456789" type="audio/mpeg" />
	XMLName xml.Name `xml:"enclosure"`
	URL     string   `xml:"url,attr"`
	Length  string   `xml:"length,attr"`
	Type    string   `xml:"type,attr"`
}

type Rss struct {
	*feeds.Feed
}

// create a new RssItem with a generic Item struct's data
func newRssItem(i *feeds.Item) *RssItem {
	item := &RssItem{
		Title:       i.Title,
		Link:        i.Link.Href,
		Description: i.Description,
		GUID:        i.Id,
		PubDate:     anyTimeFormat(time.RFC1123Z, i.Created, i.Updated),
	}

	intLength, err := strconv.ParseInt(i.Link.Length, 10, 64)

	if err == nil && (intLength > 0 || i.Link.Type != "") {
		item.Enclosure = &RssEnclosure{URL: i.Link.Href, Type: i.Link.Type, Length: i.Link.Length}
	}
	if i.Author != nil {
		item.Author = i.Author.Name
	}
	return item
}

// returns the first non-zero time formatted as a string or ""
func anyTimeFormat(format string, times ...time.Time) string {
	for _, t := range times {
		if !t.IsZero() {
			return t.Format(format)
		}
	}
	return ""
}

// RssFeed : create a new RssFeed with a generic Feed struct's data
func (r *Rss) RssFeed() *RssFeed {
	pub := anyTimeFormat(time.RFC1123Z, r.Created, r.Updated)
	build := anyTimeFormat(time.RFC1123Z, r.Updated)
	author := ""
	if r.Author != nil {
		author = r.Author.Email
		if len(r.Author.Name) > 0 {
			author = fmt.Sprintf("%s (%s)", r.Author.Email, r.Author.Name)
		}
	}

	channel := &RssFeed{
		Title:          r.Title,
		Link:           r.Link.Href,
		Description:    r.Description,
		ManagingEditor: author,
		PubDate:        pub,
		LastBuildDate:  build,
		Copyright:      r.Copyright,
	}
	for _, i := range r.Items {
		channel.Items = append(channel.Items, newRssItem(i))
	}
	return channel
}

// FeedXml : return an XML-Ready object for an Rss object
func (r *Rss) FeedXml() interface{} {
	// only generate version 2.0 feeds for now
	return r.RssFeed().FeedXml()
}

// FeedXml : return an XML-ready object for an RssFeed object
func (r *RssFeed) FeedXml() interface{} {
	return &rssFeedXML{Version: "2.0", Channel: r}
}
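To show how the pieces above fit together, here is a hedged, standalone sketch of the serialization path the new handlers use: a *nyaafeeds.RssFeed implements FeedXml(), so gorilla/feeds' ToXML can render it, torznab extension included. All field values below are invented placeholders; only the types and the ToXML call mirror this commit.

package main

import (
    "fmt"
    "time"

    nyaafeeds "github.com/NyaaPantsu/nyaa/util/feeds"
    "github.com/gorilla/feeds"
)

func main() {
    // One item carrying the torznab extension, shaped like what RSSTorznabHandler builds.
    feed := &nyaafeeds.RssFeed{
        Title:   "Nyaa Pantsu",
        Link:    "https://example.org/",
        PubDate: time.Now().String(),
        Items: []*nyaafeeds.RssItem{{
            Title:       "Example release",
            Link:        "https://example.org/download/0123456789abcdef",
            Description: "placeholder item",
            GUID:        "https://example.org/view/1",
            Enclosure: &nyaafeeds.RssEnclosure{
                URL:    "https://example.org/download/0123456789abcdef",
                Length: "123456789",
                Type:   "application/x-bittorrent",
            },
            Torznab: &nyaafeeds.RssTorznab{
                Xmlns:    "http://torznab.com/schemas/2015/feed",
                Seeders:  "12",
                Leechers: "3",
                Infohash: "0123456789abcdef",
            },
        }},
    }

    // *RssFeed satisfies gorilla/feeds' XmlFeed interface via FeedXml(),
    // which is how the handlers turn the struct into the response body.
    rss, err := feeds.ToXML(feed)
    if err != nil {
        panic(err)
    }
    fmt.Println(rss)
}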