Merge branch 'master' of github.com:ewhal/nyaa
Commit ab9a57296f
5 changed files with 106 additions and 106 deletions
@@ -15,7 +15,7 @@ nav#mainmenu {
    position: fixed;
    color: white;
    width: 100%;
    z-index: 2;
    z-index: 3;
}
nav#mainmenu a {
    color: white;
@@ -28,3 +28,11 @@
    document.getElementById('page-next').href = pageString + next + query;
    document.getElementById('page-prev').href = pageString + prev + query;

// Used by spoiler tags
function toggleLayer(elem) {
    if (elem.classList.contains("hide"))
        elem.classList.remove("hide");
    else
        elem.classList.add("hide");
}
main.go (137 changed lines)
@@ -18,10 +18,19 @@ import (
    "time"
)

type SearchParam struct {
    Category string
    Order string
    Query string
    Max int
    Status string
    Sort string
}

var db *gorm.DB
var router *mux.Router
var debugLogger *log.Logger
var trackers = "&tr=udp://zer0day.to:1337/announce&tr=udp://tracker.leechers-paradise.org:6969&tr=udp://explodie.org:6969&tr=udp://tracker.opentrackr.org:1337&tr=udp://tracker.coppersurfer.tk:6969&tr=http://tracker.baka-sub.cf/announce"
var trackers = "&tr=udp://tracker.coppersurfer.tk:6969&tr=udp://zer0day.to:1337/announce&tr=udp://tracker.leechers-paradise.org:6969&tr=udp://explodie.org:6969&tr=udp://tracker.opentrackr.org:1337&tr=http://tracker.baka-sub.cf/announce"

func getDBHandle() *gorm.DB {
    dbInit, err := gorm.Open("sqlite3", "./nyaa.db")
@@ -108,38 +117,64 @@ func searchHandler(w http.ResponseWriter, r *http.Request) {
    page := vars["page"]

    // db params url
    maxPerPage, errConv := strconv.Atoi(r.URL.Query().Get("max"))
    if errConv != nil {
        maxPerPage = 50 // default Value maxPerPage
    }
    pagenum, _ := strconv.Atoi(html.EscapeString(page))
    if pagenum == 0 {
        pagenum = 1
    }
    searchQuery := r.URL.Query().Get("q")
    cat := r.URL.Query().Get("c")
    stat := r.URL.Query().Get("s")
    sort := r.URL.Query().Get("sort")
    order := r.URL.Query().Get("order")

    b := []TorrentsJson{}

    search_param, torrents, nbTorrents := searchByQuery( r, pagenum )

    catsSplit := strings.Split(cat, "_")
    for i, _ := range torrents {
        res := torrents[i].toJson()
        b = append(b, res)
    }

    navigationTorrents := Navigation{nbTorrents, search_param.Max, pagenum, "search_page"}
    searchForm := SearchForm{
        search_param.Query,
        search_param.Status,
        search_param.Category,
        search_param.Sort,
        search_param.Order,
    }
    htv := HomeTemplateVariables{b, getAllCategories(false), searchForm, navigationTorrents, r.URL, mux.CurrentRoute(r)}

    err := templates.ExecuteTemplate(w, "index.html", htv)
    if err != nil {
        http.Error(w, err.Error(), http.StatusInternalServerError)
    }
}

func searchByQuery(r *http.Request, pagenum int) (SearchParam, []Torrents, int) {
    maxPerPage, errConv := strconv.Atoi(r.URL.Query().Get("max"))
    if errConv != nil {
        maxPerPage = 50 // default Value maxPerPage
    }

    search_param := SearchParam{}
    search_param.Max = maxPerPage
    search_param.Query = r.URL.Query().Get("q")
    search_param.Category = r.URL.Query().Get("c")
    search_param.Status = r.URL.Query().Get("s")
    search_param.Sort = r.URL.Query().Get("sort")
    search_param.Order = r.URL.Query().Get("order")

    catsSplit := strings.Split(search_param.Category, "_")
    // need this to prevent out of index panics
    var searchCatId, searchSubCatId string
    if len(catsSplit) == 2 {
        searchCatId = html.EscapeString(catsSplit[0])
        searchSubCatId = html.EscapeString(catsSplit[1])
    }
    if sort == "" {
        sort = "torrent_id"
    if search_param.Sort == "" {
        search_param.Sort = "torrent_id"
    }
    if order == "" {
        order = "desc"
    if search_param.Order == "" {
        search_param.Order = "desc"
    }
    order_by := sort + " " + order

    nbTorrents := 0

    b := []TorrentsJson{}
    order_by := search_param.Sort + " " + search_param.Order

    parameters := WhereParams{}
    conditions := []string{}
@@ -151,11 +186,11 @@ func searchHandler(w http.ResponseWriter, r *http.Request) {
        conditions = append(conditions, "sub_category_id = ?")
        parameters.params = append(parameters.params, searchSubCatId)
    }
    if stat != "" {
    if search_param.Status != "" {
        conditions = append(conditions, "status_id = ?")
        parameters.params = append(parameters.params, stat)
        parameters.params = append(parameters.params, search_param.Status)
    }
    searchQuerySplit := strings.Split(searchQuery, " ")
    searchQuerySplit := strings.Split(search_param.Query, " ")
    for i, _ := range searchQuerySplit {
        conditions = append(conditions, "torrent_name LIKE ?")
        parameters.params = append(parameters.params, "%"+searchQuerySplit[i]+"%")
@@ -163,22 +198,10 @@ func searchHandler(w http.ResponseWriter, r *http.Request) {

    parameters.conditions = strings.Join(conditions[:], " AND ")
    log.Printf("SQL query is :: %s\n", parameters.conditions)
    torrents, nbTorrents := getTorrentsOrderBy(&parameters, order_by, maxPerPage, maxPerPage*(pagenum-1))

    for i, _ := range torrents {
        res := torrents[i].toJson()
        b = append(b, res)
    }

    navigationTorrents := Navigation{nbTorrents, maxPerPage, pagenum, "search_page"}
    searchForm := SearchForm{searchQuery, stat, cat, sort, order}
    htv := HomeTemplateVariables{b, getAllCategories(false), searchForm, navigationTorrents, r.URL, mux.CurrentRoute(r)}

    err := templates.ExecuteTemplate(w, "index.html", htv)
    if err != nil {
        http.Error(w, err.Error(), http.StatusInternalServerError)
    }
    torrents, n := getTorrentsOrderBy(&parameters, order_by, maxPerPage, maxPerPage*(pagenum-1))
    return search_param, torrents, n
}

func safe(s string) template.URL {
    return template.URL(s)
}
@@ -193,20 +216,10 @@ func faqHandler(w http.ResponseWriter, r *http.Request) {
}

func rssHandler(w http.ResponseWriter, r *http.Request) {
    //vars := mux.Vars(r)
    //category := vars["c"]

    // db params url
    //maxPerPage := 50 // default Value maxPerPage

    torrents := getFeeds()
    created := time.Now().String()
    _, torrents, _ := searchByQuery( r, 1 )
    created_as_time := time.Now()
    if len(torrents) > 0 {
        created = torrents[0].Timestamp
    }
    created_as_time, err := time.Parse("2006-01-02 15:04:05", created)
    if err == nil {

        created_as_time = time.Unix(torrents[0].Date, 0)
    }
    feed := &feeds.Feed{
        Title: "Nyaa Pantsu",
@@ -217,17 +230,16 @@ func rssHandler(w http.ResponseWriter, r *http.Request) {
    feed.Items = make([]*feeds.Item, len(torrents))

    for i, _ := range torrents {
        timestamp_as_time, err := time.Parse("2006-01-02 15:04:05", torrents[i].Timestamp)
        if err == nil {
            feed.Items[i] = &feeds.Item{
                // need a torrent view first
                //Id: URL + torrents[i].Hash,
                Title: torrents[i].Name,
                Link: &feeds.Link{Href: string(torrents[i].Magnet)},
                Description: "",
                Created: timestamp_as_time,
                Updated: timestamp_as_time,
            }
        timestamp_as_time := time.Unix(torrents[0].Date, 0)
        torrent_json := torrents[i].toJson()
        feed.Items[i] = &feeds.Item{
            // need a torrent view first
            //Id: URL + torrents[i].Hash,
            Title: torrents[i].Name,
            Link: &feeds.Link{Href: string(torrent_json.Magnet)},
            Description: "",
            Created: timestamp_as_time,
            Updated: timestamp_as_time,
        }
    }

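The feed and item values built in the loop above come from the feeds package (presumably github.com/gorilla/feeds, which matches the feeds.Feed, feeds.Item and feeds.Link usage in this file). A minimal stand-alone sketch of that API, with placeholder titles and URLs rather than the handler's real data:

package main

import (
    "fmt"
    "time"

    "github.com/gorilla/feeds"
)

func main() {
    now := time.Now()
    feed := &feeds.Feed{
        Title:       "Nyaa Pantsu",
        Link:        &feeds.Link{Href: "https://nyaa.pantsu.cat/"}, // placeholder feed URL
        Description: "Latest torrents",                             // placeholder
        Created:     now,
    }
    feed.Items = []*feeds.Item{
        {
            Title:   "Example torrent", // placeholder item
            Link:    &feeds.Link{Href: "magnet:?xt=urn:btih:0000000000000000000000000000000000000000"},
            Created: now,
            Updated: now,
        },
    }
    rss, err := feed.ToRss() // serialize the feed to RSS
    if err != nil {
        panic(err)
    }
    fmt.Println(rss)
}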
@@ -238,6 +250,7 @@ func rssHandler(w http.ResponseWriter, r *http.Request) {
        http.Error(w, err.Error(), http.StatusInternalServerError)
    }
}

func viewHandler(w http.ResponseWriter, r *http.Request) {
    var templates = template.Must(template.ParseFiles("templates/index.html", "templates/view.html"))
    templates.ParseGlob("templates/_*.html") // common
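Taken together, the main.go hunks above pull the URL-parameter handling out of searchHandler and into the new searchByQuery helper, which returns a SearchParam along with the matching torrents and their count; rssHandler now reuses the same helper instead of getFeeds. The following stand-alone sketch (not code from this commit; the struct mirrors the diff, while parseSearchParam and the example URL are made up for illustration) shows the parse-and-default pattern that searchByQuery centralizes:

package main

import (
    "fmt"
    "net/http"
    "net/http/httptest"
    "strconv"
)

// SearchParam mirrors the struct added in this commit.
type SearchParam struct {
    Category string
    Order    string
    Query    string
    Max      int
    Status   string
    Sort     string
}

// parseSearchParam reads q/c/s/sort/order/max once and applies the same
// defaults the diff uses (50 results per page, sorted by torrent_id desc).
func parseSearchParam(r *http.Request) SearchParam {
    q := r.URL.Query()
    max, err := strconv.Atoi(q.Get("max"))
    if err != nil {
        max = 50 // default value for maxPerPage
    }
    p := SearchParam{
        Query:    q.Get("q"),
        Category: q.Get("c"),
        Status:   q.Get("s"),
        Sort:     q.Get("sort"),
        Order:    q.Get("order"),
        Max:      max,
    }
    if p.Sort == "" {
        p.Sort = "torrent_id"
    }
    if p.Order == "" {
        p.Order = "desc"
    }
    return p
}

func main() {
    r := httptest.NewRequest("GET", "/search?q=nyaa&c=3_5&sort=timestamp", nil)
    p := parseSearchParam(r)
    fmt.Printf("%+v\n", p) // the ORDER BY clause becomes p.Sort + " " + p.Order
}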
models.go (29 changed lines)
@@ -10,14 +10,6 @@ import (
    "time"
)

type Feed struct {
    Id int
    Name string
    Hash string
    Magnet string
    Timestamp string
}

type Categories struct {
    Id int `gorm:"column:category_id"`
    Name string `gorm:"column:category_name"`
@@ -97,27 +89,6 @@ type WhereParams struct {
 *
 */

// don't need raw SQL once we get MySQL
func getFeeds() []Feed {
    var result []Feed
    rows, err := db.DB().
        Query(
            "SELECT `torrent_id` AS `id`, `torrent_name` AS `name`, `torrent_hash` AS `hash`, `timestamp` FROM `torrents` " +
                "ORDER BY `timestamp` desc LIMIT 50")
    if err == nil {
        for rows.Next() {
            item := Feed{}
            rows.Scan(&item.Id, &item.Name, &item.Hash, &item.Timestamp)
            magnet := "magnet:?xt=urn:btih:" + strings.TrimSpace(item.Hash) + "&dn=" + item.Name + trackers
            item.Magnet = magnet
            // memory hog
            result = append(result, item)
        }
        rows.Close()
    }
    return result
}

func getTorrentById(id string) (Torrents, error) {
    var torrent Torrents

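The getFeeds helper removed above fetched the newest torrents with raw SQL (note its "don't need raw SQL once we get MySQL" comment); after this commit the RSS feed goes through searchByQuery and GORM instead. Purely for illustration, a rough GORM equivalent of that query, assuming GORM v1 import paths and a cut-down stand-in for the repository's Torrents model:

package main

import (
    "fmt"

    "github.com/jinzhu/gorm"
    _ "github.com/jinzhu/gorm/dialects/sqlite" // assumed sqlite dialect import for gorm v1
)

// Torrents is a minimal stand-in for the repository's model.
type Torrents struct {
    Id        int    `gorm:"column:torrent_id"`
    Name      string `gorm:"column:torrent_name"`
    Hash      string `gorm:"column:torrent_hash"`
    Timestamp string `gorm:"column:timestamp"`
}

// latestTorrents fetches the 50 newest rows, roughly what getFeeds did by hand.
func latestTorrents(db *gorm.DB) ([]Torrents, error) {
    var torrents []Torrents
    err := db.Order("timestamp desc").Limit(50).Find(&torrents).Error
    return torrents, err
}

func main() {
    db, err := gorm.Open("sqlite3", "./nyaa.db") // same DSN as getDBHandle in main.go
    if err != nil {
        panic(err)
    }
    defer db.Close()

    torrents, err := latestTorrents(db)
    fmt.Println(len(torrents), err)
}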
@@ -1,28 +1,31 @@
{{define "title"}}FAQ{{end}}
{{define "content"}}
<div class="blockBody">
<h1>NOTICE: KEEP SEEDING YOU RETARD</h1>
<!-- marquees are the true purpose of HTML -->
<marquee><h2>NOTICE: KEEP SEEDING YOU RETARD</h2></marquee>

<h1>Official Nyaapocalypse FAQ</h1>
<br />

<h2>Links for the replacement/mirror</h2>
<a href="https://nyaa.pantsu.cat">Nyaa - nyaa.pantsu.cat</a>
<a href="https://nyaa.pantsu.cat">Nyaa - nyaa.pantsu.cat</a><br />
<a href="https://sukebei.pantsu.cat">Sukebei - sukebei.pantsu.cat</a>

<h2>What happened?</h2>
<ul>
<li>- nyaa.se and associated domains (such nyaatorrents.info) went offline on May 01, 2017.</li>
<li>- They were deactivated, so it's not a DDoS attack like usual.</li>
<li>- Future prospects for nyaa are not looking good.</li>
<li>- There is a recovery effort taking place.</li>
<li>nyaa.se and associated domains (such as nyaatorrents.info) went offline on May 01, 2017.</li>
<li>They were deactivated, so it's not a DDoS attack like usual.</li>
<li>Future prospects for nyaa are not looking good.</li>
<li>There is a recovery effort taking place.</li>
</ul>

<h2>Is everything Lost?</h2>
<h2>Is everything lost?</h2>
<p>In short, No.</p>

<h2>Are some things lost?</h2>
<p>We have a database of the torrents on nyaa up to April 5. That means that everything past April 5 is gone.
Sukebei, however is in worse shape. We only have sukebei databases up to 2016.</p>
<p>We have a database of the torrents on nyaa up to <s>April 5</s> May 1. That means almost nothing is lost.</p>
<p>Sukebei, however might be in worse shape. Currently we only have sukebei databases up to 2016,
but a newer database might be available for use.</p>

<h2>How are we recovering?</h2>
<p>The aforementioned databases are being hosted at nyaa.pantsu.cat and sukebei.pantsu.cat.
@@ -38,11 +41,16 @@
<p>Just use the <b>magnet link</b>. The magnet link will used by your BitTorrent client to look up the file on the
DHT network and it should download just fine.</p>
<p>If your magnet link starts with "magnet:magnet:", delete the first "magnet:". It should look like:
magnet:?xt=urn:btih:[hash]&dn=[name]&tr=[tracker]</p>
magnet:?xt=urn:btih:[hash]&dn=[name]&tr=[tracker]&tr=[...]</p>

<h1>How can I help?</h1>
<p>If you have website development expertise, you can join the #nyaapantsu IRC channel on irc.rizon.net. If you have any current databases, <b>UPLOAD THEM</b>.</p>
<h2>How can I help?</h2>
<p>If you have website development expertise, you can join the #nyaapantsu IRC channel on irc.rizon.net.
If you have any current databases, especially for sukebei, <b>UPLOAD THEM</b>.</p>

<h1>nyaa.pantsu.cat and sukebei.pantsu.cat do not host any files.</h1>
<h2>Your design sucks / I found a bug</h2>
<p><a href="https://github.com/ewhal/nyaa/issues">https://github.com/ewhal/nyaa/issues</a>.</p>

<br />
<h2>nyaa.pantsu.cat and sukebei.pantsu.cat do not host any files.</h2>
</div>
{{end}}
{{end}}
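The magnet-link tip in the FAQ above ("delete the first magnet:") is just stripping a duplicated URI scheme. A tiny stand-alone Go sketch of that normalization, with a made-up hash and tracker (not code from this commit):

package main

import (
    "fmt"
    "strings"
)

// normalizeMagnet drops a doubled "magnet:" prefix, as the FAQ advises.
func normalizeMagnet(link string) string {
    for strings.HasPrefix(link, "magnet:magnet:") {
        link = strings.TrimPrefix(link, "magnet:")
    }
    return link
}

func main() {
    broken := "magnet:magnet:?xt=urn:btih:0123456789abcdef0123456789abcdef01234567&dn=example&tr=udp://tracker.example:6969"
    fmt.Println(normalizeMagnet(broken))
    // prints: magnet:?xt=urn:btih:0123456789abcdef0123456789abcdef01234567&dn=example&tr=udp://tracker.example:6969
}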