diff --git a/common/search.go b/common/search.go
index c9679e79..eace727f 100644
--- a/common/search.go
+++ b/common/search.go
@@ -19,6 +19,9 @@ const (
 	Date
 	Downloads
 	Size
+	Seeders
+	Leechers
+	Completed
 )
 
 type Category struct {
@@ -44,5 +47,6 @@ type SearchParam struct {
 	Page    int
 	UserID  uint
 	Max     uint
+	NotNull string
 	Query   string
 }
diff --git a/main.go b/main.go
index 815b7579..8c92a00c 100644
--- a/main.go
+++ b/main.go
@@ -84,6 +84,7 @@ func RunScraper(conf *config.Config) {
 	signals.RegisterCloser(scraper)
 	// run udp scraper worker
 	for workers > 0 {
+		log.Infof("starting up worker %d", workers)
 		go scraper.RunWorker(pc)
 		workers--
 	}
diff --git a/router/modpanel.go b/router/modpanel.go
index d810282a..834c28d1 100644
--- a/router/modpanel.go
+++ b/router/modpanel.go
@@ -3,9 +3,7 @@ package router
 import (
 	"fmt"
 	"html"
-	"html/template"
 	"net/http"
-	"path/filepath"
 	"strconv"
 
 	"github.com/ewhal/nyaa/db"
@@ -23,23 +21,6 @@ import (
 	"github.com/gorilla/mux"
 )
 
-var panelIndex, panelTorrentList, panelUserList, panelCommentList, panelTorrentEd, panelTorrentReportList *template.Template
-
-func init() {
-	panelTorrentList = template.Must(template.New("torrentlist").Funcs(FuncMap).ParseFiles(filepath.Join(TemplateDir, "admin_index.html"), filepath.Join(TemplateDir, "admin/torrentlist.html")))
-	panelTorrentList = template.Must(panelTorrentList.ParseGlob(filepath.Join(TemplateDir, "_*.html")))
-	panelUserList = template.Must(template.New("userlist").Funcs(FuncMap).ParseFiles(filepath.Join(TemplateDir, "admin_index.html"), filepath.Join(TemplateDir, "admin/userlist.html")))
-	panelUserList = template.Must(panelUserList.ParseGlob(filepath.Join(TemplateDir, "_*.html")))
-	panelCommentList = template.Must(template.New("commentlist").Funcs(FuncMap).ParseFiles(filepath.Join(TemplateDir, "admin_index.html"), filepath.Join(TemplateDir, "admin/commentlist.html")))
-	panelCommentList = template.Must(panelCommentList.ParseGlob(filepath.Join(TemplateDir, "_*.html")))
-	panelIndex = template.Must(template.New("indexPanel").Funcs(FuncMap).ParseFiles(filepath.Join(TemplateDir, "admin_index.html"), filepath.Join(TemplateDir, "admin/panelindex.html")))
-	panelIndex = template.Must(panelIndex.ParseGlob(filepath.Join(TemplateDir, "_*.html")))
-	panelTorrentEd = template.Must(template.New("torrent_ed").Funcs(FuncMap).ParseFiles(filepath.Join(TemplateDir, "admin_index.html"), filepath.Join(TemplateDir, "admin/paneltorrentedit.html")))
-	panelTorrentEd = template.Must(panelTorrentEd.ParseGlob(filepath.Join(TemplateDir, "_*.html")))
-	panelTorrentReportList = template.Must(template.New("torrent_report").Funcs(FuncMap).ParseFiles(filepath.Join(TemplateDir, "admin_index.html"), filepath.Join(TemplateDir, "admin/torrent_report.html")))
-	panelTorrentReportList = template.Must(panelTorrentReportList.ParseGlob(filepath.Join(TemplateDir, "_*.html")))
-}
-
 func IndexModPanel(w http.ResponseWriter, r *http.Request) {
 	currentUser := GetUser(r)
 	if userPermission.HasAdmin(currentUser) {
@@ -217,19 +198,19 @@ func TorrentPostEditModPanel(w http.ResponseWriter, r *http.Request) {
 	err := form.NewErrors()
 	infos := form.NewInfos()
 	torrent, _ := torrentService.GetTorrentById(id)
-	if (torrent.ID > 0) {
+	if torrent.ID > 0 {
 		errUp := uploadForm.ExtractEditInfo(r)
 		if errUp != nil {
 			err["errors"] = append(err["errors"], "Failed to update torrent!")
 		}
-		if (len(err) == 0) {
+		if len(err) == 0 {
 			// update some (but not all!) values
-			torrent.Name = uploadForm.Name
-			torrent.Category = uploadForm.CategoryID
+			torrent.Name = uploadForm.Name
+			torrent.Category = uploadForm.CategoryID
 			torrent.SubCategory = uploadForm.SubCategoryID
-			torrent.Status = uploadForm.Status
+			torrent.Status = uploadForm.Status
 			torrent.Description = uploadForm.Description
-			torrent.Uploader = nil // GORM will create a new user otherwise (wtf?!)
+			torrent.Uploader = nil // GORM will create a new user otherwise (wtf?!)
 			db.ORM.Save(&torrent)
 			infos["infos"] = append(infos["infos"], "Torrent details updated.")
 		}
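A note on the common/search.go hunk above: the sort keys are consecutive iota constants, so the three new members must be appended after `Size` to keep the integer values behind the existing sort options stable. A minimal sketch of the block after this patch, assuming the `SortBy` type name and inferring `ID` and `Name` from the `common.ID`/`common.Name` references in util/search/search.go further down:

```go
// Sketch (assumed names): appending new members preserves the
// values of the existing constants, which back the numeric sort
// options already handled in util/search/search.go.
type SortBy uint8

const (
	ID SortBy = iota // default sort ("torrent_id")
	Name             // "1" -> torrent_name
	Date             // "2" -> date
	Downloads        // "3" -> downloads
	Size             // "4" -> filesize
	Seeders          // "5" -> seeders (new)
	Leechers         // "6" -> leechers (new)
	Completed        // "7" -> completed (new)
)
```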
diff --git a/router/template.go b/router/template.go
index edf9ac20..26314ea4 100644
--- a/router/template.go
+++ b/router/template.go
@@ -9,15 +9,18 @@ var TemplateDir = "templates"
 
 var homeTemplate, searchTemplate, faqTemplate, uploadTemplate, viewTemplate, viewRegisterTemplate, viewLoginTemplate, viewRegisterSuccessTemplate, viewVerifySuccessTemplate, viewProfileTemplate, viewProfileEditTemplate, viewUserDeleteTemplate, notFoundTemplate *template.Template
 
+var panelIndex, panelTorrentList, panelUserList, panelCommentList, panelTorrentEd, panelTorrentReportList *template.Template
+
 type templateLoader struct {
-	templ **template.Template
-	file  string
-	name  string
+	templ     **template.Template
+	file      string
+	indexFile string
+	name      string
 }
 
 // ReloadTemplates reloads templates on runtime
 func ReloadTemplates() {
-	templs := []templateLoader{
+	pubTempls := []templateLoader{
 		templateLoader{
 			templ: &homeTemplate,
 			name:  "home",
@@ -46,37 +49,37 @@ func ReloadTemplates() {
 		templateLoader{
 			templ: &viewRegisterTemplate,
 			name:  "user_register",
-			file:  "user/register.html",
+			file:  filepath.Join("user", "register.html"),
 		},
 		templateLoader{
 			templ: &viewRegisterSuccessTemplate,
 			name:  "user_register_success",
-			file:  "user/signup_success.html",
+			file:  filepath.Join("user", "signup_success.html"),
 		},
 		templateLoader{
 			templ: &viewVerifySuccessTemplate,
 			name:  "user_verify_success",
-			file:  "user/verify_success.html",
+			file:  filepath.Join("user", "verify_success.html"),
 		},
 		templateLoader{
 			templ: &viewLoginTemplate,
 			name:  "user_login",
-			file:  "user/login.html",
+			file:  filepath.Join("user", "login.html"),
 		},
 		templateLoader{
 			templ: &viewProfileTemplate,
 			name:  "user_profile",
-			file:  "user/profile.html",
+			file:  filepath.Join("user", "profile.html"),
 		},
 		templateLoader{
 			templ: &viewProfileEditTemplate,
 			name:  "user_profile",
-			file:  "user/profile_edit.html",
+			file:  filepath.Join("user", "profile_edit.html"),
 		},
 		templateLoader{
 			templ: &viewUserDeleteTemplate,
 			name:  "user_delete",
-			file:  "user/delete_success.html",
+			file:  filepath.Join("user", "delete_success.html"),
 		},
 		templateLoader{
 			templ: &notFoundTemplate,
@@ -84,10 +87,56 @@ func ReloadTemplates() {
 			file:  "404.html",
 		},
 	}
-	for _, templ := range templs {
-		t := template.Must(template.New(templ.name).Funcs(FuncMap).ParseFiles(filepath.Join(TemplateDir, "index.html"), filepath.Join(TemplateDir, templ.file)))
-		t = template.Must(t.ParseGlob(filepath.Join(TemplateDir, "_*.html")))
+	for idx := range pubTempls {
+		pubTempls[idx].indexFile = filepath.Join(TemplateDir, "index.html")
+	}
+	modTempls := []templateLoader{
+		templateLoader{
+			templ: &panelTorrentList,
+			name:  "torrentlist",
+			file:  filepath.Join("admin", "torrentlist.html"),
+		},
+		templateLoader{
+			templ: &panelUserList,
+			name:  "userlist",
+			file:  filepath.Join("admin", "userlist.html"),
+		},
+		templateLoader{
+			templ: &panelCommentList,
+			name:  "commentlist",
+			file:  filepath.Join("admin", "commentlist.html"),
+		},
+		templateLoader{
+			templ: &panelIndex,
+			name:  "indexPanel",
+			file:  filepath.Join("admin", "panelindex.html"),
+		},
+		templateLoader{
+			templ: &panelTorrentEd,
+			name:  "torrent_ed",
+			file:  filepath.Join("admin", "paneltorrentedit.html"),
+		},
+		templateLoader{
+			templ: &panelTorrentReportList,
+			name:  "torrent_report",
+			file:  filepath.Join("admin", "torrent_report.html"),
+		},
+	}
+
+	for idx := range modTempls {
+		modTempls[idx].indexFile = filepath.Join(TemplateDir, "admin_index.html")
+	}
+
+	templs := make([]templateLoader, 0, len(modTempls)+len(pubTempls))
+
+	templs = append(templs, pubTempls...)
+	templs = append(templs, modTempls...)
+
+	for _, templ := range templs {
+		t := template.Must(template.New(templ.name).Funcs(FuncMap).ParseFiles(templ.indexFile, filepath.Join(TemplateDir, templ.file)))
+		t = template.Must(t.ParseGlob(filepath.Join(TemplateDir, "_*.html")))
 		*templ.templ = t
 	}
+
 }
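The router/template.go change above absorbs the mod-panel templates that router/modpanel.go used to parse in `init()`: every page now goes through one loader, and the new `indexFile` field picks the layout (index.html for public pages, admin_index.html for the panel). A self-contained sketch of the pattern with hypothetical file names (base.html, page.html), assuming a templates/ directory that contains them plus at least one `_*.html` partial, since `ParseGlob` errors on an empty match; the real loader also chains `.Funcs(FuncMap)`:

```go
package main

import (
	"html/template"
	"os"
	"path/filepath"
)

// Mirrors the templateLoader in router/template.go: each page is
// parsed together with its layout (indexFile) and all shared
// _*.html partials; the double pointer lets ReloadTemplates swap
// the parsed template in place at runtime.
type templateLoader struct {
	templ     **template.Template
	name      string
	file      string
	indexFile string
}

var pageTemplate *template.Template // stands in for homeTemplate etc.

func load(dir string, l templateLoader) {
	t := template.Must(template.New(l.name).ParseFiles(l.indexFile, filepath.Join(dir, l.file)))
	t = template.Must(t.ParseGlob(filepath.Join(dir, "_*.html")))
	*l.templ = t
}

func main() {
	dir := "templates"
	load(dir, templateLoader{
		templ:     &pageTemplate,
		name:      "page",
		file:      "page.html",                     // hypothetical page
		indexFile: filepath.Join(dir, "base.html"), // hypothetical layout
	})
	// Templates parsed via ParseFiles are addressed by base filename.
	if err := pageTemplate.ExecuteTemplate(os.Stdout, "base.html", nil); err != nil {
		panic(err)
	}
}
```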
diff --git a/service/scraper/bucket.go b/service/scraper/bucket.go
index da301528..d9c85372 100644
--- a/service/scraper/bucket.go
+++ b/service/scraper/bucket.go
@@ -28,15 +28,42 @@ func (b *Bucket) NewTransaction(swarms []model.Torrent) (t *Transaction) {
 	t = &Transaction{
 		TransactionID: id,
 		bucket:        b,
-		swarms:        swarms,
+		swarms:        make([]model.Torrent, len(swarms)),
 		state:         stateSendID,
 	}
+	copy(t.swarms, swarms)
 	b.transactions[id] = t
 	b.access.Unlock()
 	return
 }
 
+func (b *Bucket) ForEachTransaction(v func(uint32, *Transaction)) {
+
+	clone := make(map[uint32]*Transaction)
+
+	b.access.Lock()
+
+	for k := range b.transactions {
+		clone[k] = b.transactions[k]
+	}
+
+	b.access.Unlock()
+
+	for k := range clone {
+		v(k, clone[k])
+	}
+}
+
+func (b *Bucket) Forget(tid uint32) {
+	b.access.Lock()
+	_, ok := b.transactions[tid]
+	if ok {
+		delete(b.transactions, tid)
+	}
+	b.access.Unlock()
+}
+
 func (b *Bucket) VisitTransaction(tid uint32, v func(*Transaction)) {
 	b.access.Lock()
 	t, ok := b.transactions[tid]
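Why `ForEachTransaction` clones the map: `sync.Mutex` is not reentrant, so a visitor that re-acquires the bucket lock, as `removeStale` does below via `Forget`, would deadlock if the iteration still held it. Snapshotting under the lock and visiting the snapshot lock-free avoids that, at the cost of possibly visiting a transaction that was forgotten in the meantime. The `copy(t.swarms, swarms)` change has a similar motive: the caller (`Scrape`) reuses its scrape buffer, so each transaction needs its own slice. A minimal, runnable sketch of the snapshot-then-visit pattern (simplified types, not the repo's API):

```go
package main

import (
	"fmt"
	"sync"
)

// Snapshot-then-visit, as in Bucket.ForEachTransaction: clone the
// map while holding the mutex, then run the visitor on the clone so
// it may safely re-lock the same mutex (e.g. to delete entries).
type bucket struct {
	mu    sync.Mutex
	items map[uint32]string
}

func (b *bucket) forEach(v func(uint32, string)) {
	b.mu.Lock()
	clone := make(map[uint32]string, len(b.items))
	for k, x := range b.items {
		clone[k] = x
	}
	b.mu.Unlock()
	for k, x := range clone { // visitor runs without the lock held
		v(k, x)
	}
}

func (b *bucket) forget(k uint32) {
	b.mu.Lock()
	delete(b.items, k) // delete on a missing key is a no-op
	b.mu.Unlock()
}

func main() {
	b := &bucket{items: map[uint32]string{1: "stale", 2: "live"}}
	b.forEach(func(k uint32, x string) {
		if x == "stale" {
			b.forget(k) // safe: forEach released the lock already
		}
	})
	fmt.Println(b.items) // map[2:live]
}
```

Since `delete` on an absent key is already a no-op, the `ok` check in `Forget` is strictly optional; it only documents intent.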
diff --git a/service/scraper/scraper.go b/service/scraper/scraper.go
index 8077f41b..7d4b8e61 100644
--- a/service/scraper/scraper.go
+++ b/service/scraper/scraper.go
@@ -13,15 +13,20 @@ import (
 // MTU yes this is the ipv6 mtu
 const MTU = 1500
 
+// max number of scrapes per packet
+const ScrapesPerPacket = 74
+
 // bittorrent scraper
 type Scraper struct {
-	done      chan int
-	sendQueue chan *SendEvent
-	recvQueue chan *RecvEvent
-	errQueue  chan error
-	trackers  map[string]*Bucket
-	ticker    *time.Ticker
-	interval  time.Duration
+	done             chan int
+	sendQueue        chan *SendEvent
+	recvQueue        chan *RecvEvent
+	errQueue         chan error
+	trackers         map[string]*Bucket
+	ticker           *time.Ticker
+	cleanup          *time.Ticker
+	interval         time.Duration
+	PacketsPerSecond uint
 }
 
 func New(conf *config.ScraperConfig) (sc *Scraper, err error) {
@@ -33,7 +38,13 @@ func New(conf *config.ScraperConfig) (sc *Scraper, err error) {
 		trackers:  make(map[string]*Bucket),
 		ticker:    time.NewTicker(time.Second),
 		interval:  time.Second * time.Duration(conf.IntervalSeconds),
+		cleanup:   time.NewTicker(time.Second),
 	}
+
+	if sc.PacketsPerSecond == 0 {
+		sc.PacketsPerSecond = 10
+	}
+
 	for idx := range conf.Trackers {
 		err = sc.AddTracker(&conf.Trackers[idx])
 		if err != nil {
@@ -144,20 +155,37 @@ func (sc *Scraper) RunWorker(pc net.PacketConn) (err error) {
 
 func (sc *Scraper) Run() {
 	for {
-		<-sc.ticker.C
-		sc.Scrape()
+		select {
+		case <-sc.ticker.C:
+			sc.Scrape(sc.PacketsPerSecond)
+			break
+		case <-sc.cleanup.C:
+			sc.removeStale()
+			break
+		}
 	}
 }
 
-func (sc *Scraper) Scrape() {
+func (sc *Scraper) removeStale() {
+
+	for k := range sc.trackers {
+		sc.trackers[k].ForEachTransaction(func(tid uint32, t *Transaction) {
+			if t == nil || t.IsTimedOut() {
+				sc.trackers[k].Forget(tid)
+			}
+		})
+	}
+}
+
+func (sc *Scraper) Scrape(packets uint) {
 	now := time.Now().Add(0 - sc.interval)
 
-	rows, err := db.ORM.Raw("SELECT torrent_id, torrent_hash FROM torrents WHERE last_scrape IS NULL OR last_scrape < ? ORDER BY torrent_id DESC LIMIT 700", now).Rows()
+	rows, err := db.ORM.Raw("SELECT torrent_id, torrent_hash FROM torrents WHERE last_scrape IS NULL OR last_scrape < ? ORDER BY torrent_id DESC LIMIT ?", now, packets*ScrapesPerPacket).Rows()
 	if err == nil {
 		counter := 0
-		var scrape [70]model.Torrent
+		var scrape [ScrapesPerPacket]model.Torrent
 		for rows.Next() {
-			idx := counter % 70
+			idx := counter % ScrapesPerPacket
 			rows.Scan(&scrape[idx].ID, &scrape[idx].Hash)
 			counter++
 			if idx == 0 {
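`ScrapesPerPacket = 74` is presumably the BEP 15 framing limit: a UDP scrape request is a 16-byte header (64-bit connection ID, 32-bit action, 32-bit transaction ID) followed by 20 bytes per info-hash, and (1500 − 16) / 20 rounds down to 74, which fills the `MTU` constant at the top of the file. The SQL `LIMIT` then becomes `packets * ScrapesPerPacket` (740 rows per tick at the default 10 packets per second) instead of the old hard-coded 700, which also didn't divide evenly by the previous batch size of 70. A sketch of the arithmetic; the packet builder is illustrative, not the repo's sender:

```go
package main

import (
	"encoding/binary"
	"fmt"
)

// Sketch of the BEP 15 scrape framing that presumably motivates
// ScrapesPerPacket = 74: header is connection_id(8) + action(4) +
// transaction_id(4) = 16 bytes, then 20 bytes per info-hash.
const (
	MTU              = 1500
	headerLen        = 16
	hashLen          = 20
	ScrapesPerPacket = (MTU - headerLen) / hashLen // 74
)

func buildScrape(connID uint64, tid uint32, hashes [][hashLen]byte) []byte {
	buf := make([]byte, headerLen+len(hashes)*hashLen)
	binary.BigEndian.PutUint64(buf[0:], connID)
	binary.BigEndian.PutUint32(buf[8:], 2) // action 2 = scrape
	binary.BigEndian.PutUint32(buf[12:], tid)
	for i, h := range hashes {
		copy(buf[headerLen+i*hashLen:], h[:])
	}
	return buf
}

func main() {
	hashes := make([][hashLen]byte, ScrapesPerPacket)
	pkt := buildScrape(1, 7, hashes) // dummy connection/transaction IDs
	fmt.Printf("%d scrapes -> %d bytes (MTU %d)\n", len(hashes), len(pkt), MTU)
	// 74 scrapes -> 1496 bytes (MTU 1500)
}
```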
diff --git a/service/scraper/transaction.go b/service/scraper/transaction.go
index 90ebcf77..7b7de953 100644
--- a/service/scraper/transaction.go
+++ b/service/scraper/transaction.go
@@ -11,6 +11,9 @@ import (
 	"github.com/ewhal/nyaa/util/log"
 )
 
+// TransactionTimeout 30 second timeout for transactions
+const TransactionTimeout = time.Second * 30
+
 const stateSendID = 0
 const stateRecvID = 1
 const stateTransact = 2
@@ -27,13 +30,12 @@ type Transaction struct {
 	bucket   *Bucket
 	state    uint8
 	swarms   []model.Torrent
+	lastData time.Time
 }
 
 // Done marks this transaction as done and removes it from parent
 func (t *Transaction) Done() {
-	t.bucket.access.Lock()
-	delete(t.bucket.transactions, t.TransactionID)
-	t.bucket.access.Unlock()
+	t.bucket.Forget(t.TransactionID)
 }
 
 func (t *Transaction) handleScrapeReply(data []byte) {
@@ -95,6 +97,7 @@ func (t *Transaction) SendEvent(to net.Addr) (ev *SendEvent) {
 		binary.BigEndian.PutUint32(ev.Data[12:], t.TransactionID)
 		t.state = stateRecvID
 	}
+	t.lastData = time.Now()
 	return
 }
 
@@ -104,7 +107,7 @@ func (t *Transaction) handleError(msg string) {
 
 // handle data for transaction
 func (t *Transaction) GotData(data []byte) (done bool) {
-
+	t.lastData = time.Now()
 	if len(data) > 4 {
 		cmd := binary.BigEndian.Uint32(data)
 		switch cmd {
@@ -132,3 +135,8 @@ func (t *Transaction) GotData(data []byte) (done bool) {
 	}
 	return
 }
+
+func (t *Transaction) IsTimedOut() bool {
+	return t.lastData.Add(TransactionTimeout).Before(time.Now())
+
+}
diff --git a/util/search/search.go b/util/search/search.go
index a2e7ec47..ba66540f 100644
--- a/util/search/search.go
+++ b/util/search/search.go
@@ -40,7 +40,7 @@ func searchByQuery(r *http.Request, pagenum int, countAll bool) (
 	search.Page = pagenum
 	search.Query = r.URL.Query().Get("q")
 	userID, _ := strconv.Atoi(r.URL.Query().Get("userID"))
-	search.UserID = uint(userID)
+	search.UserID = uint(userID)
 
 	switch s := r.URL.Query().Get("s"); s {
 	case "1":
@@ -75,22 +75,36 @@ func searchByQuery(r *http.Request, pagenum int, countAll bool) (
 	case "1":
 		search.Sort = common.Name
 		orderBy += "torrent_name"
+		break
 	case "2":
 		search.Sort = common.Date
 		orderBy += "date"
+		break
 	case "3":
 		search.Sort = common.Downloads
 		orderBy += "downloads"
+		break
 	case "4":
 		search.Sort = common.Size
 		orderBy += "filesize"
+		break
 	case "5":
+		search.Sort = common.Seeders
 		orderBy += "seeders"
+		search.NotNull += "seeders IS NOT NULL "
+		break
 	case "6":
+		search.Sort = common.Leechers
 		orderBy += "leechers"
+		search.NotNull += "leechers IS NOT NULL "
+		break
 	case "7":
+		search.Sort = common.Completed
 		orderBy += "completed"
+		search.NotNull += "completed IS NOT NULL "
+		break
 	default:
+		search.Sort = common.ID
 		orderBy += "torrent_id"
 	}
 
@@ -129,7 +143,9 @@ func searchByQuery(r *http.Request, pagenum int, countAll bool) (
 		}
 		parameters.Params = append(parameters.Params, strconv.Itoa(int(search.Status)+1))
 	}
-
+	if len(search.NotNull) > 0 {
+		conditions = append(conditions, search.NotNull)
+	}
 	searchQuerySplit := strings.Fields(search.Query)
 	for i, word := range searchQuerySplit {
 		firstRune, _ := utf8.DecodeRuneInString(word)
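On the util/search/search.go hunks: `seeders`, `leechers`, and `completed` can be NULL for torrents the scraper has never reached, presumably letting never-scraped rows dominate the ordering, so sorting on those columns now also records an `IS NOT NULL` filter in the new `SearchParam.NotNull` field, which is appended to `conditions` near the end. How `conditions` is ultimately joined lies outside the hunk; the sketch below assumes a plain " AND " join purely to show where the new clause lands (note the trailing space in the patch's literals, which keeps bare concatenation safe as well):

```go
package main

import (
	"fmt"
	"strings"
)

// Assumed join: searchByQuery accumulates WHERE fragments in
// `conditions`; an " AND " join is a guess for illustration only.
func buildWhere(conditions []string) string {
	if len(conditions) == 0 {
		return ""
	}
	return "WHERE " + strings.Join(conditions, " AND ")
}

func main() {
	var notNull, orderBy string

	// Mirrors the new case "5" branch: sort by seeders and filter
	// out rows whose seeders column was never populated.
	orderBy += "seeders"
	notNull += "seeders IS NOT NULL "

	conditions := []string{"category = ?"} // hypothetical earlier filter
	if len(notNull) > 0 {
		conditions = append(conditions, notNull)
	}
	fmt.Println(buildWhere(conditions) + "ORDER BY " + orderBy)
	// Output: WHERE category = ? AND seeders IS NOT NULL ORDER BY seeders
}
```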