2017-10-06 17:06:14 +02:00
package torrentController
import (
2017-11-07 16:45:23 +01:00
"encoding/hex"
2017-10-06 17:06:14 +02:00
"strconv"
"strings"
"net/url"
"time"
"github.com/NyaaPantsu/nyaa/models/torrents"
"github.com/NyaaPantsu/nyaa/models"
2017-10-15 13:18:57 +02:00
"github.com/NyaaPantsu/nyaa/config"
2017-11-07 16:45:23 +01:00
"github.com/NyaaPantsu/nyaa/utils/log"
"github.com/NyaaPantsu/nyaa/utils/format"
2017-10-06 17:06:14 +02:00
"github.com/Stephen304/goscrape"
"github.com/gin-gonic/gin"
2017-11-07 16:45:23 +01:00
"github.com/anacrolix/dht"
"github.com/anacrolix/torrent"
2017-10-06 17:06:14 +02:00
)
2017-11-07 16:45:23 +01:00
// client is the shared BitTorrent client used to fetch torrent metadata.
// It is created lazily by initClient on first use (see ScrapeFiles).
var client *torrent.Client

// initClient creates the package-level torrent client, listening on :5977
// and bootstrapping its DHT from the public global bootstrap nodes.
// On failure the error is logged and returned; client is left unset.
func initClient() error {
	cfg := torrent.Config{
		DHTConfig: dht.ServerConfig{
			StartingNodes: dht.GlobalBootstrapAddrs,
		},
		ListenAddr: ":5977",
	}
	cl, err := torrent.NewClient(&cfg)
	if err != nil {
		log.Errorf("error creating client: %s", err)
		return err
	}
	client = cl
	return nil
}
2017-10-06 17:06:14 +02:00
// ViewHeadHandler : Controller for getting torrent stats
func GetStatsHandler ( c * gin . Context ) {
id , err := strconv . ParseInt ( c . Param ( "id" ) , 10 , 32 )
if err != nil {
return
}
2017-10-28 18:28:59 +02:00
2017-10-06 17:06:14 +02:00
torrent , err := torrents . FindRawByID ( uint ( id ) )
if err != nil {
return
}
2017-10-28 18:28:59 +02:00
var CurrentData models . Scrape
statsExists := ! ( models . ORM . Where ( "torrent_id = ?" , id ) . Find ( & CurrentData ) . RecordNotFound ( ) )
if statsExists {
//Stats already exist, we check if the torrent stats have been scraped already very recently and if so, we stop there to avoid abuse of the /stats/:id route
if ( CurrentData . Seeders == 0 && CurrentData . Leechers == 0 && CurrentData . Completed == 0 ) && time . Since ( CurrentData . LastScrape ) . Minutes ( ) <= config . Get ( ) . Scrape . MaxStatScrapingFrequencyUnknown {
//Unknown stats but has been scraped less than X minutes ago (X being the limit set in the config file)
return
}
if ( CurrentData . Seeders != 0 || CurrentData . Leechers != 0 || CurrentData . Completed != 0 ) && time . Since ( CurrentData . LastScrape ) . Minutes ( ) <= config . Get ( ) . Scrape . MaxStatScrapingFrequency {
//Known stats but has been scraped less than X minutes ago (X being the limit set in the config file)
return
}
}
2017-10-06 17:06:14 +02:00
var Trackers [ ] string
2017-11-07 16:45:23 +01:00
if len ( torrent . Trackers ) > 3 {
2017-10-16 04:55:38 +02:00
for _ , line := range strings . Split ( torrent . Trackers [ 3 : ] , "&tr=" ) {
tracker , error := url . QueryUnescape ( line )
2017-11-07 16:45:23 +01:00
if error == nil && strings . HasPrefix ( tracker , "udp" ) {
2017-10-16 04:55:38 +02:00
Trackers = append ( Trackers , tracker )
}
2017-11-07 16:45:23 +01:00
//Cannot scrape from http trackers only keep UDP ones
2017-10-06 17:06:14 +02:00
}
2017-10-15 13:18:57 +02:00
}
for _ , line := range config . Get ( ) . Torrents . Trackers . Default {
if ! contains ( Trackers , line ) {
Trackers = append ( Trackers , line )
}
}
2017-11-07 16:45:23 +01:00
err = ScrapeFiles ( format . InfoHashToMagnet ( strings . TrimSpace ( torrent . Hash ) , torrent . Name , Trackers ... ) , torrent )
if err != nil {
return
}
2017-10-06 17:06:14 +02:00
2017-11-07 16:45:23 +01:00
return
2017-10-07 01:39:38 +02:00
stats := goscrape . Single ( Trackers , [ ] string {
2017-10-06 17:06:14 +02:00
torrent . Hash ,
} ) [ 0 ]
2017-10-07 01:39:38 +02:00
//Single() returns an array which contain results for each torrent Hash it is fed, since we only feed him one we want to directly access the results
2017-10-06 17:06:14 +02:00
2017-10-07 01:39:38 +02:00
//If we put seeders on -1, the script instantly knows the fetching did not give any result, avoiding having to check all three stats below and in view.jet.html's javascript
if stats . Seeders == 0 && stats . Leechers == 0 && stats . Completed == 0 {
2017-10-06 17:06:14 +02:00
stats . Seeders = - 1
}
2017-10-15 07:31:05 +02:00
c . JSON ( 200 , gin . H {
"seeders" : stats . Seeders ,
"leechers" : stats . Leechers ,
"downloads" : stats . Completed ,
} )
2017-10-06 17:06:14 +02:00
2017-10-15 07:31:05 +02:00
if stats . Seeders == - 1 {
stats . Seeders = 0
}
2017-10-28 18:28:59 +02:00
if ! statsExists {
2017-10-15 07:31:05 +02:00
torrent . Scrape = torrent . Scrape . Create ( uint ( id ) , uint32 ( stats . Seeders ) , uint32 ( stats . Leechers ) , uint32 ( stats . Completed ) , time . Now ( ) )
//Create entry in the DB because none exist
} else {
//Entry in the DB already exists, simply update it
if ( CurrentData . Seeders == 0 && CurrentData . Leechers == 0 && CurrentData . Completed == 0 ) || ( stats . Seeders != 0 && stats . Leechers != 0 && stats . Completed != 0 ) {
2017-10-06 17:06:14 +02:00
torrent . Scrape = & models . Scrape { uint ( id ) , uint32 ( stats . Seeders ) , uint32 ( stats . Leechers ) , uint32 ( stats . Completed ) , time . Now ( ) }
2017-10-15 07:31:05 +02:00
} else {
torrent . Scrape = & models . Scrape { uint ( id ) , uint32 ( CurrentData . Seeders ) , uint32 ( CurrentData . Leechers ) , uint32 ( CurrentData . Completed ) , time . Now ( ) }
2017-10-06 17:06:14 +02:00
}
2017-10-15 07:31:05 +02:00
//Only overwrite stats if the old one are Unknown OR if the current ones are not unknown, preventing good stats from being turned into unknown own but allowing good stats to be updated to more reliable ones
torrent . Scrape . Update ( false )
2017-10-06 17:06:14 +02:00
}
return
}
2017-10-15 13:18:57 +02:00
2017-11-07 16:45:23 +01:00
// TStruct bundles the data gathered for a single torrent during a scrape:
// peer statistics, its tracker list, its file list and the magnet URI.
type TStruct struct {
	Peers    goscrape.Result
	Trackers []string
	//Files []metainfo.FileInfo
	Files  []torrent.File
	Magnet string
}
// ScrapeFiles resolves the given magnet link with the shared BitTorrent
// client, scrapes seed/leech/completed stats from the torrent's UDP trackers
// when possible, stores the stats and the file list on the given torrent row
// and persists it. The client is lazily initialised on first call.
func ScrapeFiles(magnet string, torrent models.Torrent) error {
	if client == nil {
		if err := initClient(); err != nil {
			return err
		}
	}
	t, err := client.AddMagnet(magnet)
	if err != nil {
		// Previously ignored: a rejected magnet would have nil-panicked on
		// t.GotInfo() below.
		log.Errorf("error adding magnet %s: %s", magnet, err)
		return err
	}
	// Blocks until the metadata has been fetched from the swarm.
	// NOTE(review): no timeout here — a dead swarm blocks this call forever.
	<-t.GotInfo()
	// NOTE(review): infoHash/dst are computed but never used — dead code
	// kept only so the "encoding/hex" import stays referenced; remove both
	// together in a follow-up.
	infoHash := t.InfoHash()
	dst := make([]byte, hex.EncodedLen(len(t.InfoHash())))
	hex.Encode(dst, infoHash[:])

	// Collect UDP trackers from the first announce tier, if there is one;
	// goscrape cannot scrape http trackers.
	var UDP []string
	if announce := t.Metainfo().AnnounceList; len(announce) > 0 {
		for _, tracker := range announce[0] {
			if strings.HasPrefix(tracker, "udp") {
				UDP = append(UDP, tracker)
			}
		}
	}
	if len(UDP) != 0 {
		udpscrape := goscrape.NewBulk(UDP)
		results := udpscrape.ScrapeBulk([]string{torrent.Hash})[0]
		// Btih "0" means the scrape returned no result for this hash.
		if results.Btih != "0" {
			torrent.Scrape = &models.Scrape{torrent.ID, uint32(results.Seeders), uint32(results.Leechers), uint32(results.Completed), time.Now()}
		}
	}
	torrent.FileList = []models.File{}
	for i, file := range t.Files() {
		log.Errorf("----- File %d / Path %s / Length %d", i, file.DisplayPath(), file.Length())
		torrent.FileList = append(torrent.FileList, models.File{uint(i), torrent.ID, file.DisplayPath(), file.Length()})
	}
	torrent.Update(true)
	t.Drop()
	return nil
}
2017-10-15 13:18:57 +02:00
// contains reports whether e is present in s.
func contains(s []string, e string) bool {
	for i := range s {
		if s[i] == e {
			return true
		}
	}
	return false
}