Albirew/nyaa-pantsu

New config files (#854)

* New config files

As decided, config files are now parsed at runtime.
I went with YAML config files because they support comments.
There are two files:
* config/default_config.yml <= which shouldn't be edited unless we add a config parameter
* config/config.yml <= which is the user-defined config. This file shouldn't be committed.

Changed every call from config.XXX to config.Conf.XXX (see the new structure of the config in config/types.go).
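
For illustration, here is a minimal sketch of the new call pattern (this snippet is not part of the commit; the field names are taken from config/types.go, and config.Conf is populated by the config package's init()):

package main

import (
	"fmt"

	"github.com/NyaaPantsu/nyaa/config"
)

func main() {
	// Old code read package-level constants such as config.TorrentsPerPage;
	// call sites now go through the runtime-loaded config.Conf struct.
	fmt.Println("torrents per page:", config.Conf.Navigation.TorrentsPerPage)
	fmt.Println("elasticsearch index:", config.Conf.Search.ElasticsearchIndex)
}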

Of course, any config parameter set in config.yml overrides the corresponding value in default_config.yml. You don't have to put everything in it; just add what you want to override.
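
Below is a self-contained sketch of that override mechanism, assuming the two YAML files above (the actual implementation lives in the new config/parser.go and in the config package's init(); the tiny struct here is illustrative, not the real Config type):

package main

import (
	"fmt"
	"io/ioutil"
	"log"

	yaml "gopkg.in/yaml.v2"
)

// conf mirrors just a small subset of the real Config struct.
type conf struct {
	Host string `yaml:"host,omitempty"`
	Port int    `yaml:"port,omitempty"`
}

func main() {
	c := conf{}
	// Unmarshal the defaults first, then the user config into the same struct:
	// only the keys actually present in config.yml replace the default values.
	for _, path := range []string{"config/default_config.yml", "config/config.yml"} {
		data, err := ioutil.ReadFile(path)
		if err != nil {
			// a missing config.yml is not fatal, matching the commit's behaviour
			log.Printf("can't read file '%s'", path)
			continue
		}
		if err := yaml.Unmarshal(data, &c); err != nil {
			log.Printf("error: %v", err)
		}
	}
	fmt.Printf("effective config: %+v\n", c)
}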

* Fixing tests

Replaced conf.New with config.Conf

* Fixing calls to config.Conf / config.Config{} in test files

* Might have fixed the tests with this

Using Printf instead of Fatalf

* Renaming config.yml to an example file

* Forbid committing config.yml

* Should now be fixed

* Do not need this file anymore
This commit belongs to:
akuma06 2017-05-31 04:21:57 +02:00, reviewed by alucard0134
Parent ffbbacb84a
Commit 5376b9e271
52 changed files with 513 additions and 492 deletions

.gitignore

@ -15,6 +15,7 @@ templates/*.html.go
*.backup
tags
*.retry
config/config.yml
# emacs temp files
*\#*


@ -51,9 +51,9 @@ func (p *TorrentParam) FromRequest(r *http.Request) {
max, err := strconv.ParseUint(r.URL.Query().Get("max"), 10, 32)
if err != nil {
max = config.TorrentsPerPage
} else if max > config.MaxTorrentsPerPage {
max = config.MaxTorrentsPerPage
max = uint64(config.Conf.Navigation.TorrentsPerPage)
} else if max > uint64(config.Conf.Navigation.MaxTorrentsPerPage) {
max = uint64(config.Conf.Navigation.MaxTorrentsPerPage)
}
// FIXME 0 means no userId defined
@ -145,7 +145,7 @@ func (p *TorrentParam) Find(client *elastic.Client) (int64, []model.Torrent, err
query := elastic.NewSimpleQueryStringQuery(p.NameLike).
Field("name").
Analyzer(config.DefaultElasticsearchAnalyzer).
Analyzer(config.Conf.Search.ElasticsearchAnalyzer).
DefaultOperator("AND")
fsc := elastic.NewFetchSourceContext(true).
@ -153,9 +153,9 @@ func (p *TorrentParam) Find(client *elastic.Client) (int64, []model.Torrent, err
// TODO Find a better way to keep in sync with mapping in ansible
search := client.Search().
Index(config.DefaultElasticsearchIndex).
Index(config.Conf.Search.ElasticsearchIndex).
Query(query).
Type(config.DefaultElasticsearchType).
Type(config.Conf.Search.ElasticsearchType).
From(int((p.Offset-1)*p.Max)).
Size(int(p.Max)).
Sort(p.Sort.ToESField(), p.Order).
@ -205,7 +205,7 @@ func (p *TorrentParam) Find(client *elastic.Client) (int64, []model.Torrent, err
idsToString += "," + strconv.FormatUint(uint64(tid.Id), 10)
}
idsToString += "}"
db.ORM.Raw("SELECT * FROM " + config.TorrentsTableName +
db.ORM.Raw("SELECT * FROM " + config.Conf.Models.TorrentsTableName +
" JOIN unnest('" + idsToString + "'::int[]) " +
" WITH ORDINALITY t(torrent_id, ord) USING (torrent_id) ORDER BY t.ord").Find(&torrents)
}


@ -1,16 +0,0 @@
package config
// CacheConfig is config struct for caching strategy
type CacheConfig struct {
Dialect string
URL string
Size float64
}
// DefaultCacheSize : Size by default for the cache
const DefaultCacheSize = 1 << 10
// DefaultCacheConfig : Config by default for the cache
var DefaultCacheConfig = CacheConfig{
Dialect: "nop",
}


@ -1,81 +1,24 @@
package config
import (
"bufio"
"encoding/json"
"flag"
"fmt"
"io"
"io/ioutil"
"log"
"os"
yaml "gopkg.in/yaml.v2"
)
const (
// LastOldTorrentID is the highest torrent ID
// that was copied from the original Nyaa
LastOldTorrentID = 923000
// TorrentsTableName : Name of torrent table in DB
TorrentsTableName = "torrents"
// ReportsTableName : Name of torrent report table in DB
ReportsTableName = "torrent_reports"
// CommentsTableName : Name of comments table in DB
CommentsTableName = "comments"
// UploadsOldTableName : Name of uploads table in DB
UploadsOldTableName = "user_uploads_old"
// FilesTableName : Name of files table in DB
FilesTableName = "files"
// NotificationTableName : Name of notifications table in DB
NotificationTableName = "notifications"
// for sukebei:
//LastOldTorrentID = 2303945
//TorrentsTableName = "sukebei_torrents"
//ReportsTableName = "sukebei_torrent_reports"
//CommentsTableName = "sukebei_comments"
//UploadsOldTableName = "sukebei_user_uploads_old"
//FilesTableName = "sukebei_files"
)
// Conf : Modified configuration
var Conf *Config
var privateConf Config
// IsSukebei : Tells if we are on the sukebei website
func IsSukebei() bool {
return TorrentsTableName == "sukebei_torrents"
}
// Config : Configuration for DB, I2P, Fetcher, Go Server and Translation
type Config struct {
Host string `json:"host"`
Port int `json:"port"`
DBType string `json:"db_type"`
// DBParams will be directly passed to Gorm, and its internal
// structure depends on the dialect for each db type
DBParams string `json:"db_params"`
DBLogMode string `json:"db_logmode"`
// tracker scraper config (required)
Scrape ScraperConfig `json:"scraper"`
// cache config
Cache CacheConfig `json:"cache"`
// search config
Search SearchConfig `json:"search"`
// optional i2p configuration
I2P *I2PConfig `json:"i2p"`
// filesize fetcher config
MetainfoFetcher MetainfoFetcherConfig `json:"metainfo_fetcher"`
// internationalization config
I18n I18nConfig `json:"i18n"`
}
// Defaults : Configuration by default
var Defaults = Config{
Host: "localhost",
Port: 9999,
DBType: "sqlite3",
DBParams: "./nyaa.db?cache_size=50",
DBLogMode: "default",
Scrape: DefaultScraperConfig,
Cache: DefaultCacheConfig,
Search: DefaultSearchConfig,
I2P: nil,
MetainfoFetcher: DefaultMetainfoFetcherConfig,
I18n: DefaultI18nConfig,
return Conf.Models.TorrentsTableName == "sukebei_torrents"
}
var allowedDatabaseTypes = map[string]bool{
@ -91,22 +34,35 @@ var allowedDBLogModes = map[string]bool{
"silent": true,
}
// New : Construct a new config variable
func New() *Config {
cfg := &Config{}
*cfg = Defaults
return cfg
// Construct a new config variable
func init() {
getDefaultConfig()
privateConf = *DefaultConfig
Conf = &privateConf
overrideDefaults()
}
func overrideDefaults() {
path := "config/config.yml"
data, err := ioutil.ReadFile(path)
if err != nil {
log.Printf("can't read file '%s'", path)
}
err = yaml.Unmarshal(data, &Conf)
if err != nil {
log.Printf("error: %v", err)
}
}
// BindFlags returns a function which is to be used after
// flag.Parse to check and copy the flags' values to the Config instance.
func (config *Config) BindFlags() func() error {
confFile := flag.String("conf", "", "path to the configuration file")
dbType := flag.String("dbtype", Defaults.DBType, "database backend")
host := flag.String("host", Defaults.Host, "binding address of the server")
port := flag.Int("port", Defaults.Port, "port of the server")
dbParams := flag.String("dbparams", Defaults.DBParams, "parameters to open the database (see Gorm's doc)")
dbLogMode := flag.String("dblogmode", Defaults.DBLogMode, "database log verbosity (errors only by default)")
dbType := flag.String("dbtype", Conf.DBType, "database backend")
host := flag.String("host", Conf.Host, "binding address of the server")
port := flag.Int("port", Conf.Port, "port of the server")
dbParams := flag.String("dbparams", Conf.DBParams, "parameters to open the database (see Gorm's doc)")
dbLogMode := flag.String("dblogmode", Conf.DBLogMode, "database log verbosity (errors only by default)")
return func() error {
// You can override fields in the config file with flags.
@ -133,8 +89,12 @@ func (config *Config) HandleConfFileFlag(path string) error {
if err != nil {
return fmt.Errorf("can't read file '%s'", path)
}
err = config.Read(bufio.NewReader(file))
var b []byte
_, err = file.Read(b)
if err != nil {
return fmt.Errorf("failed to parse file '%s' (%s)", path, err)
}
err = yaml.Unmarshal(b, config)
if err != nil {
return fmt.Errorf("failed to parse file '%s' (%s)", path, err)
}
@ -172,11 +132,10 @@ func (config *Config) Write(output io.Writer) error {
// Pretty : Write config json in a file
func (config *Config) Pretty(output io.Writer) error {
data, err := json.MarshalIndent(config, "", "\t")
data, err := yaml.Marshal(config)
if err != nil {
return err
}
data = append(data, []byte("\n")...)
_, err = output.Write(data)
return err
}

config/config_example.yml (new file)

@ -0,0 +1,5 @@
# Configuration file for NyaaPantsu
# Please, do not change default_config, create your own config.yml
# You do not have to change every values, just set here what you want to edit
# For example, if you just want to change the port, just write:
#port: 9998

config/default_config.yml (new file)

@ -0,0 +1,164 @@
# Default Configuration file for NyaaPantsu
# PLEASE, do NOT change default_config.yml, create your own config.yml
# Host of server
host: localhost
# port of server
port: 9999
# database type
db_type: sqlite3
# DBParams will be directly passed to Gorm, and its internal structure depends on the dialect for each db type
db_params: ./nyaa.db?cache_size=50
# logmode for database
db_logmode: default
# Environment should be one of: DEVELOPMENT, TEST, PRODUCTION
environment: DEVELOPMENT
# WebAddress : url of the website
web_address: nyaa.pantsu.cat
# AuthTokenExpirationDay : Number of Days for token expiration when logged in
auth_token_expiration: 1000
# EnableSecureCSRF : Enable CSRF https mode : True if website support https, false otherwise (eg. testing locally: false)
enable_secure_csrf: true
# the default config for bittorrent scraping
scraper:
addr: :9999
workers: 4
interval: 3600
trackers:
- {url: "udp://tracker.coppersurfer.tk:6969/", name: coppersurfer.tk}
# Config by default for the cache
cache:
dialect: nop
# Size by default for the cache
size: 1024
search:
# default analyzer for ES
es_analyze: nyaapantsu_analyzer
# default search index for ES
es_index: nyaapantsu
# Name of the type in the es mapping
es_type: torrents
# Default configuration for i2p
#i2p:
# name:
# addr:
# keyfile:
# Default configuration for metainfofetcher
metainfo_fetcher:
queue_size: 10
timeout: 120
max_days: 90
base_fail_cooldown: 1800
max_fail_cooldown: 172800
wake_up_interval: 300
upload_rate_limiter: 1024
download_rate_limiter: 1024
fetch_new_torrents_only: true
i18n:
# Default configuration for translation directory
directory: translations
# Default configuration for language
default_language: en-us
torrents:
# TorrentStatus : Config of different status id for torrents
status: [true, true, true, true, true, true]
# TorrentSukebeiCategories : Config for Sukebei categories
sukebei_categories: {"1_": "art", "1_1": "art_anime", "1_2": "art_doujinshi", "1_3": "art_games", "1_4": "art_manga", "1_5": "art_pictures", "2_": "real_life", "2_1": "real_life_photobooks_and_pictures", "2_2": "real_life_videos"}
# TorrentCleanCategories : Config for Site categories
clean_categories: {"3_": "anime", "3_12": "anime_amv", "3_5": "anime_english_translated", "3_13": "anime_non_english_translated", "3_6": "anime_raw", "2_": "audio", "2_3": "audio_lossless", "2_4": "audio_lossy", "4_": "literature", "4_7": "literature_english_translated", "4_8": "literature_raw", "4_14": "literature_non_english_translated", "5_": "live_action", "5_9": "live_action_english_translated", "5_10": "live_action_idol_pv", "5_18": "live_action_non_english_translated", "5_11": "live_action_raw", "6_": "pictures", "6_15": "pictures_graphics", "6_16": "pictures_photos", "1_": "software", "1_1": "software_applications", "1_2": "software_games"}
# TorrentFileStorage : Path to default torrent storage location (eg /var/www/wherever/you/want)
filestorage:
# TorrentStorageLink : Url of torrent file download location (eg https://your.site/somewhere/%s.torrent)
storage_link:
# TorrentCacheLink : Url of torrent site cache
cache_link: http://anicache.com/torrent/%s.torrent
# UploadsDisabled : Disable uploads for everyone except below
uploads_disabled: false
# AdminsAreStillAllowedTo : Enable admin torrent upload even if UploadsDisabled is true
admins_are_still_allowed_to: true
# TrustedUsersAreStillAllowedTo : Enable trusted users torrent upload even if UploadsDisabled is true
trusted_users_are_still_allowed_to: true
trackers:
# Trackers : Default trackers supported
default:
- udp://tracker.doko.moe:6969
- udp://tracker.coppersurfer.tk:6969
- udp://tracker.zer0day.to:1337/announce
- udp://tracker.leechers-paradise.org:6969
- udp://explodie.org:6969
- udp://tracker.opentrackr.org:1337
- udp://tracker.internetwarriors.net:1337/announce
- http://mgtracker.org:6969/announce
- udp://ipv6.leechers-paradise.org:6969/announce
# NeededTrackers : Array indexes of Trackers for needed tracker in a torrent file
needed:
- 0
# TorrentOrder : Default sorting field for torrents
order: torrent_id
# TorrentSort : Default sorting order for torrents
sort: DESC
users:
default_notifications_settings: {"new_torrent": true, "new_torrent_email": false, "new_comment": true, "new_comment_email": false, "new_responses": false, "new_responses_email": false, "new_follower": false, "new_follower_email": false, "followed": false, "followed_email": false}
navigation:
torrents_per_page: 50
max_torrents_per_page: 300
log:
# AccessLogFilePath : Path to logs access
access_log_filepath: log/access
# AccessLogFileExtension : Extension for log file
access_log_fileextension: .txt
# AccessLogMaxSize : Size max for a log file in megabytes
access_log_max_size: 5
# AccessLogMaxBackups : Number of file for logs
access_log_max_backups: 7
# AccessLogMaxAge : Number of days that we keep logs
access_log_max_age: 30
# ErrorLogFilePath : Path to log errors
error_log_filepath: log/error
# ErrorLogFileExtension : Extension for log file
error_log_fileextension: .json
# ErrorLogMaxSize : Size max for a log file in megabytes
error_log_max_size: 10
# ErrorLogMaxBackups : Number of file for logs
error_log_max_backups: 7
# ErrorLogMaxAge : Number of days that we keep logs
error_log_max_age: 30
email:
# SendEmail : Enable Email
send_email: true
# EmailFrom : email address by default
from: donotrespond@nyaa.pantsu.cat
# EmailTestTo : when testing to who send email
test_to:
# EmailHost : Host of mail server
host: localhost
# EmailUsername : Username needed for the connection
username:
# EmailPassword : Password needed for the connection
password:
# EmailPort : Mail Server port
port: 465
# EmailTimeout : Timeout for waiting server response
timeout: 10000000000
models:
# LastOldTorrentID is the highest torrent ID that was copied from the original Nyaa
last_old_torrent_id: 923000
# TorrentsTableName : Name of torrent table in DB
torrents_table_name: torrents
# ReportsTableName : Name of torrent report table in DB
reports_table_name: torrent_reports
# CommentsTableName : Name of comments table in DB
comments_table_name: comments
# UploadsOldTableName : Name of uploads table in DB
uploads_old_table_name: user_uploads_old
# FilesTableName : Name of files table in DB
files_table_name: files
# NotificationTableName : Name of notifications table in DB
notifications_table_name: notifications
# for sukebei:
# LastOldTorrentID = 2303945
# TorrentsTableName = "sukebei_torrents"
# ReportsTableName = "sukebei_torrent_reports"
# CommentsTableName = "sukebei_comments"
# UploadsOldTableName = "sukebei_user_uploads_old"
# FilesTableName = "sukebei_files"


@ -1,25 +0,0 @@
package config
import "time"
// TODO: Perform email configuration at runtime
// Future hosts shouldn't have to rebuild the binary to update a setting
const (
// SendEmail : Enable Email
SendEmail = true
// EmailFrom : email address by default
EmailFrom = "donotrespond@nyaa.pantsu.cat"
// EmailTestTo : when testing to who send email
EmailTestTo = ""
// EmailHost : Host of mail server
EmailHost = "localhost"
// EmailUsername : Username needed for the connection
EmailUsername = ""
// EmailPassword : Password needed for the connection
EmailPassword = ""
// EmailPort : Mail Server port
EmailPort = 465
// EmailTimeout : Timeout for waiting server response
EmailTimeout = 10 * time.Second
)


@ -1,17 +0,0 @@
package config
// /!\ PLEASE DONT PULL THIS FILE UNLESS NEEDED CHANGES /!\
// TODO: Perform environment configuration at runtime
// Future hosts shouldn't have to rebuild the binary to update a setting
const (
// Environment should be one of: DEVELOPMENT, TEST, PRODUCTION
Environment = "DEVELOPMENT"
// WebAddress : url of the website
WebAddress = "nyaa.pantsu.cat"
// AuthTokenExpirationDay : Number of Days for token expiration when logged in
AuthTokenExpirationDay = 1000
// EnableSecureCSRF : Enable CSRF https mode : True if website support https, false otherwise (eg. testing locally: false)
EnableSecureCSRF = true
)


@ -1,13 +0,0 @@
package config
// I18nConfig : Config struct for translation
type I18nConfig struct {
TranslationsDirectory string `json:"translations_directory"`
DefaultLanguage string `json:"default_language"`
}
// DefaultI18nConfig : Default configuration for translation
var DefaultI18nConfig = I18nConfig{
TranslationsDirectory: "translations",
DefaultLanguage: "en-us",
}


@ -1,8 +0,0 @@
package config
// I2PConfig : Config struct for I2P
type I2PConfig struct {
Name string `json:"name"`
Addr string `json:"samaddr"`
Keyfile string `json:"keyfile"`
}


@ -1,24 +0,0 @@
package config
const (
// AccessLogFilePath : Path to logs access
AccessLogFilePath = "log/access"
// AccessLogFileExtension : Extension for log file
AccessLogFileExtension = ".txt"
// AccessLogMaxSize : Size max for a log file in megabytes
AccessLogMaxSize = 5
// AccessLogMaxBackups : Number of file for logs
AccessLogMaxBackups = 7
// AccessLogMaxAge : Number of days that we keep logs
AccessLogMaxAge = 30
// ErrorLogFilePath : Path to log errors
ErrorLogFilePath = "log/error"
// ErrorLogFileExtension : Extension for log file
ErrorLogFileExtension = ".json"
// ErrorLogMaxSize : Size max for a log file in megabytes
ErrorLogMaxSize = 10
// ErrorLogMaxBackups : Number of file for logs
ErrorLogMaxBackups = 7
// ErrorLogMaxAge : Number of days that we keep logs
ErrorLogMaxAge = 30
)


@ -1,30 +0,0 @@
package config
// MetainfoFetcherConfig : Config struct for metainfo fetcher
type MetainfoFetcherConfig struct {
QueueSize int `json:"queue_size"`
Timeout int `json:"timeout"`
MaxDays int `json:"max_days"`
BaseFailCooldown int `json:"base_fail_cooldown"`
MaxFailCooldown int `json:"max_fail_cooldown"`
WakeUpInterval int `json:"wake_up_interval"`
UploadRateLimiter int `json:"upload_rate_limiter"`
DownloadRateLimiter int `json:"download_rate_limiter"`
FetchNewTorrentsOnly bool `json:"fetch_new_torrents_only"`
}
// DefaultMetainfoFetcherConfig : Default configuration for metainfofetcher
var DefaultMetainfoFetcherConfig = MetainfoFetcherConfig{
QueueSize: 10,
Timeout: 120, // 2 min
MaxDays: 90,
BaseFailCooldown: 30 * 60, // in seconds, when failed torrents will be able to be fetched again.
MaxFailCooldown: 48 * 60 * 60,
WakeUpInterval: 300, // 5 min
UploadRateLimiter: 1024, // kbps
DownloadRateLimiter: 1024,
FetchNewTorrentsOnly: true, // Only fetch torrents newer than config.LastOldTorrentID
}


@ -1,11 +0,0 @@
package config
// TODO: Perform navigation configuration at runtime
// Future hosts shouldn't have to rebuild the binary to update a setting
const (
// TorrentsPerPage : Number of torrents per page
TorrentsPerPage = 50
// MaxTorrentsPerPage : maximum torrents per page
MaxTorrentsPerPage = 300
)

config/parser.go (new file)

@ -0,0 +1,25 @@
package config
import (
"io/ioutil"
"log"
yaml "gopkg.in/yaml.v2"
)
// DefaultConfig : Default configuration
var DefaultConfig *Config
func getDefaultConfig() *Config {
DefaultConfig = &Config{}
path := "config/default_config.yml"
data, err := ioutil.ReadFile(path)
if err != nil {
log.Printf("can't read file '%s'", path)
}
err = yaml.Unmarshal(data, DefaultConfig)
if err != nil {
log.Printf("error: %v", err)
}
return DefaultConfig
}


@ -1,31 +0,0 @@
package config
// ScrapeConfig : Config struct for Scraping
type ScrapeConfig struct {
URL string `json:"scrape_url"`
Name string `json:"name"`
IntervalSeconds int64 `json:"interval"`
}
// ScraperConfig : Config struct for Scraper
type ScraperConfig struct {
Addr string `json:"bind"`
NumWorkers int `json:"workers"`
IntervalSeconds int64 `json:"default_interval"`
Trackers []ScrapeConfig `json:"trackers"`
}
// DefaultScraperConfig is the default config for bittorrent scraping
var DefaultScraperConfig = ScraperConfig{
Addr: ":9999",
// TODO: query system?
NumWorkers: 4,
// every hour
IntervalSeconds: 60 * 60,
Trackers: []ScrapeConfig{
{
URL: "udp://tracker.coppersurfer.tk:6969/",
Name: "coppersurfer.tk",
},
},
}


@ -1,18 +0,0 @@
package config
// SearchConfig : Config struct for search
// Is it deprecated?
type SearchConfig struct {
}
// DefaultSearchConfig : Default config for search
var DefaultSearchConfig = SearchConfig{}
const (
// DefaultElasticsearchAnalyzer : default analyzer for ES
DefaultElasticsearchAnalyzer = "nyaapantsu_analyzer"
// DefaultElasticsearchIndex : default search index for ES
DefaultElasticsearchIndex = "nyaapantsu"
// DefaultElasticsearchType : Name of the type in the es mapping
DefaultElasticsearchType = "torrents"
)


@ -1,11 +0,0 @@
package config
// TODO: Perform sorting configuration at runtime
// Future hosts shouldn't have to rebuild the binary to update a setting
const (
// TorrentOrder : Default sorting field for torrents
TorrentOrder = "torrent_id"
// TorrentSort : Default sorting order for torrents
TorrentSort = "DESC"
)


@ -1,50 +0,0 @@
package config
// TorrentStatus : Config of different status id for torrents
var TorrentStatus = map[int]bool{
0: true,
1: true,
2: true,
3: true,
4: true,
}
// TorrentSukebeiCategories : Config for Sukebei categories
var TorrentSukebeiCategories = map[string]string{
"1_": "art",
"1_1": "art_anime",
"1_2": "art_doujinshi",
"1_3": "art_games",
"1_4": "art_manga",
"1_5": "art_pictures",
"2_": "real_life",
"2_1": "real_life_photobooks_and_pictures",
"2_2": "real_life_videos",
}
// TorrentCleanCategories : Config for Site categories
var TorrentCleanCategories = map[string]string{
"3_": "anime",
"3_12": "anime_amv",
"3_5": "anime_english_translated",
"3_13": "anime_non_english_translated",
"3_6": "anime_raw",
"2_": "audio",
"2_3": "audio_lossless",
"2_4": "audio_lossy",
"4_": "literature",
"4_7": "literature_english_translated",
"4_8": "literature_raw",
"4_14": "literature_non_english_translated",
"5_": "live_action",
"5_9": "live_action_english_translated",
"5_10": "live_action_idol_pv",
"5_18": "live_action_non_english_translated",
"5_11": "live_action_raw",
"6_": "pictures",
"6_15": "pictures_graphics",
"6_16": "pictures_photos",
"1_": "software",
"1_1": "software_applications",
"1_2": "software_games",
}


@ -1,20 +0,0 @@
package config
// TODO: Update FAQ template to use this variable
// Trackers : Default trackers supported
var Trackers = []string{
"udp://tracker.doko.moe:6969",
"udp://tracker.coppersurfer.tk:6969",
"udp://tracker.zer0day.to:1337/announce",
"udp://tracker.leechers-paradise.org:6969",
"udp://explodie.org:6969",
"udp://tracker.opentrackr.org:1337",
"udp://tracker.internetwarriors.net:1337/announce",
"http://mgtracker.org:6969/announce",
"udp://ipv6.leechers-paradise.org:6969/announce"}
// NeededTrackers : Array indexes of Trackers for needed tracker in a torrent file
var NeededTrackers = []int{
0,
}

config/types.go (new file)

@ -0,0 +1,167 @@
package config
// Config : Configuration for DB, I2P, Fetcher, Go Server and Translation
type Config struct {
Host string `json:"host" yaml:"host,omitempty"`
Port int `json:"port" yaml:"port,omitempty"`
DBType string `json:"db_type" yaml:"db_type,omitempty"`
Environment string `json:"environment" yaml:"environment,omitempty"`
WebAddress string `json:"web_address" yaml:"web_address,omitempty"`
AuthTokenExpirationDay int `json:"auth_token_expiration" yaml:"auth_token_expiration,omitempty"`
EnableSecureCSRF bool `json:"enable_secure_csrf" yaml:"enable_secure_csrf,omitempty"`
// DBParams will be directly passed to Gorm, and its internal
// structure depends on the dialect for each db type
DBParams string `json:"db_params" yaml:"db_params,omitempty"`
DBLogMode string `json:"db_logmode" yaml:"db_logmode,omitempty"`
// tracker scraper config (required)
Scrape ScraperConfig `json:"scraper" yaml:"scraper,flow,omitempty"`
// cache config
Cache CacheConfig `json:"cache" yaml:"cache,flow,omitempty"`
// search config
Search SearchConfig `json:"search" yaml:"search,flow,omitempty"`
// optional i2p configuration
I2P *I2PConfig `json:"i2p" yaml:"i2p,flow"`
// filesize fetcher config
MetainfoFetcher MetainfoFetcherConfig `json:"metainfo_fetcher" yaml:"metainfo_fetcher,flow,omitempty"`
// internationalization config
I18n I18nConfig `json:"i18n" yaml:"i18n,flow,omitempty"`
// torrents config
Torrents TorrentsConfig `yaml:"torrents,flow,omitempty"`
// user config
Users UsersConfig `yaml:"users,flow,omitempty"`
// navigation config
Navigation NavigationConfig `yaml:"navigation,flow,omitempty"`
// log config
Log LogConfig `yaml:"log,flow,omitempty"`
// email config
Email EmailConfig `yaml:"email,flow,omitempty"`
// models config
Models ModelsConfig `yaml:"models,flow,omitempty"`
}
// CacheConfig is config struct for caching strategy
type CacheConfig struct {
Dialect string `yaml:"dialect,omitempty"`
URL string `yaml:"url,omitempty"`
Size float64 `yaml:"size,omitempty"`
}
// I2PConfig : Config struct for I2P
type I2PConfig struct {
Name string `json:"name" yaml:"name,omitempty"`
Addr string `json:"samaddr" yaml:"addr,omitempty"`
Keyfile string `json:"keyfile" yaml:"keyfile,omitempty"`
}
// I18nConfig : Config struct for translation
type I18nConfig struct {
Directory string `json:"translations_directory" yaml:"directory,omitempty"`
DefaultLanguage string `json:"default_language" yaml:"default_language,omitempty"`
}
// ScrapeConfig : Config struct for Scraping
type ScrapeConfig struct {
URL string `json:"scrape_url" yaml:"url,omitempty"`
Name string `json:"name" yaml:"name,omitempty"`
IntervalSeconds int64 `json:"interval" yaml:"interval,omitempty"`
}
// ScraperConfig : Config struct for Scraper
type ScraperConfig struct {
Addr string `json:"bind" yaml:"addr,omitempty"`
NumWorkers int `json:"workers" yaml:"workers,omitempty"`
IntervalSeconds int64 `json:"default_interval" yaml:"default_interval,omitempty"`
Trackers []ScrapeConfig `json:"trackers" yaml:"trackers,omitempty"`
}
// TrackersConfig ; Config struct for Trackers
type TrackersConfig struct {
Default []string `yaml:"default,flow,omitempty"`
NeededTrackers []int `yaml:"needed,flow,omitempty"`
}
// TorrentsConfig : Config struct for Torrents
type TorrentsConfig struct {
Status []bool `yaml:"status,omitempty,omitempty"`
SukebeiCategories map[string]string `yaml:"sukebei_categories,omitempty"`
CleanCategories map[string]string `yaml:"clean_categories,omitempty"`
FileStorage string `yaml:"filestorage,omitempty"`
StorageLink string `yaml:"storage_link,omitempty"`
CacheLink string `yaml:"cache_link,omitempty"`
UploadsDisabled bool `yaml:"uploads_disabled,omitempty"`
AdminsAreStillAllowedTo bool `yaml:"admins_are_still_allowed_to,omitempty"`
TrustedUsersAreStillAllowedTo bool `yaml:"trusted_users_are_still_allowed_to,omitempty"`
Trackers TrackersConfig `yaml:"trackers,flow,omitempty"`
Order string `yaml:"order,omitempty"`
Sort string `yaml:"sort,omitempty"`
}
// UsersConfig : Config struct for Users
type UsersConfig struct {
DefaultUserSettings map[string]bool `yaml:"default_notifications_settings,flow,omitempty"`
}
// NavigationConfig : Config struct for Navigation
type NavigationConfig struct {
TorrentsPerPage int `yaml:"torrents_per_page,omitempty"`
MaxTorrentsPerPage int `yaml:"max_torrents_per_page,omitempty"`
}
// MetainfoFetcherConfig : Config struct for metainfo fetcher
type MetainfoFetcherConfig struct {
QueueSize int `json:"queue_size" yaml:"queue_size,omitempty"`
Timeout int `json:"timeout" yaml:"timeout,omitempty"`
MaxDays int `json:"max_days" yaml:"max_days,omitempty"`
BaseFailCooldown int `json:"base_fail_cooldown" yaml:"base_fail_cooldown,omitempty"`
MaxFailCooldown int `json:"max_fail_cooldown" yaml:"max_fail_cooldown,omitempty"`
WakeUpInterval int `json:"wake_up_interval" yaml:"wake_up_interval,omitempty"`
UploadRateLimiter int `json:"upload_rate_limiter" yaml:"upload_rate_limiter,omitempty"`
DownloadRateLimiter int `json:"download_rate_limiter" yaml:"download_rate_limiter,omitempty"`
FetchNewTorrentsOnly bool `json:"fetch_new_torrents_only" yaml:"fetch_new_torrents_only,omitempty"`
}
// LogConfig : Config struct for Logs
type LogConfig struct {
AccessLogFilePath string `yaml:"access_log_filepath,omitempty"`
AccessLogFileExtension string `yaml:"access_log_fileextension,omitempty"`
AccessLogMaxSize int `yaml:"access_log_max_size,omitempty"`
AccessLogMaxBackups int `yaml:"access_log_max_backups,omitempty"`
AccessLogMaxAge int `yaml:"access_log_max_age,omitempty"`
ErrorLogFilePath string `yaml:"error_log_filepath,omitempty"`
ErrorLogFileExtension string `yaml:"error_log_fileextension,omitempty"`
ErrorLogMaxSize int `yaml:"error_log_max_size,omitempty"`
ErrorLogMaxBackups int `yaml:"error_log_max_backups,omitempty"`
ErrorLogMaxAge int `yaml:"error_log_max_age,omitempty"`
}
// EmailConfig : Config struct for email
type EmailConfig struct {
SendEmail bool `yaml:"send_email,omitempty"`
From string `yaml:"from,omitempty"`
TestTo string `yaml:"test_to,omitempty"`
Host string `yaml:"host,omitempty"`
Username string `yaml:"username,omitempty"`
Password string `yaml:"password,omitempty"`
Port int `yaml:"port,omitempty"`
Timeout int `yaml:"timeout,omitempty"`
}
// ModelsConfig : Config struct for models
type ModelsConfig struct {
LastOldTorrentID uint `yaml:"last_old_torrent_id,omitempty"`
TorrentsTableName string `yaml:"torrents_table_name,omitempty"`
ReportsTableName string `yaml:"reports_table_name,omitempty"`
CommentsTableName string `yaml:"comments_table_name,omitempty"`
UploadsOldTableName string `yaml:"uploads_old_table_name,omitempty"`
FilesTableName string `yaml:"files_table_name,omitempty"`
NotificationsTableName string `yaml:"notifications_table_name,omitempty"`
}
// SearchConfig : Config struct for search
type SearchConfig struct {
ElasticsearchAnalyzer string `yaml:"es_analyze,omitempty"`
ElasticsearchIndex string `yaml:"es_index,omitempty"`
ElasticsearchType string `yaml:"es_type,omitempty"`
}


@ -1,22 +0,0 @@
package config
const (
// TorrentFileStorage = "/var/www/wherever/you/want"
// TorrentStorageLink = "https://your.site/somewhere/%s.torrent"
// TorrentFileStorage : Path to default torrent storage location
TorrentFileStorage = ""
// TorrentStorageLink : Url of torrent file download location
TorrentStorageLink = ""
// TODO: deprecate this and move all files to the same server
// TorrentCacheLink : Url of torrent site cache
TorrentCacheLink = "http://anicache.com/torrent/%s.torrent"
// UploadsDisabled : Disable uploads for everyone except below
UploadsDisabled = false
// AdminsAreStillAllowedTo : Enable admin torrent upload even if UploadsDisabled is true
AdminsAreStillAllowedTo = true
// TrustedUsersAreStillAllowedTo : Enable trusted users torrent upload even if UploadsDisabled is true
TrustedUsersAreStillAllowedTo = true
)


@ -1,20 +0,0 @@
package config
// DefaultUserSettings :
/* Here we config the notifications options
* Uses in user model for default setting
* Be aware, default values in user update form are
* in service/user/form/form_validator.go
*/
var DefaultUserSettings = map[string]bool{
"new_torrent": true,
"new_torrent_email": false,
"new_comment": true,
"new_comment_email": false,
"new_responses": false,
"new_responses_email": false,
"new_follower": false,
"new_follower_email": false,
"followed": false,
"followed_email": false,
}


@ -19,7 +19,7 @@ type Logger interface {
}
// DefaultLogger : use the default gorm logger that prints to stdout
var DefaultLogger Logger = nil
var DefaultLogger Logger
// ORM : Variable for interacting with database
var ORM *gorm.DB
@ -54,7 +54,7 @@ func GormInit(conf *config.Config, logger Logger) (*gorm.DB, error) {
db.DB().SetMaxIdleConns(maxIdleConns)
db.DB().SetMaxOpenConns(400)
if config.Environment == "DEVELOPMENT" {
if config.Conf.Environment == "DEVELOPMENT" {
db.LogMode(true)
}


@ -25,12 +25,19 @@ func (logger *errorLogger) Print(values ...interface{}) {
}
func TestGormInitSqlite(t *testing.T) {
conf := config.New()
conf.DBType = SqliteType
conf.DBParams = ":memory:?cache=shared&mode=memory"
conf.DBLogMode = "detailed"
db, err := GormInit(conf, &errorLogger{t})
config.Conf.DBType = SqliteType
config.Conf.DBParams = ":memory:?cache=shared&mode=memory"
config.Conf.DBLogMode = "detailed"
config.Conf.Models.CommentsTableName = "comments"
config.Conf.Models.FilesTableName = "files"
config.Conf.Models.NotificationsTableName = "notifications"
config.Conf.Models.ReportsTableName = "torrent_reports"
config.Conf.Models.TorrentsTableName = "torrents"
config.Conf.Models.UploadsOldTableName = "user_uploads_old"
config.Conf.Models.LastOldTorrentID = 90000
db, err := GormInit(config.Conf, &errorLogger{t})
if err != nil {
t.Errorf("failed to initialize database: %v", err)
return
@ -62,12 +69,18 @@ func TestGormInitPostgres(t *testing.T) {
t.Skip("skip", testPostgres)
}
conf := config.New()
conf.DBType = "postgres"
conf.DBParams = "host=localhost user=nyaapantsu dbname=nyaapantsu sslmode=disable password=nyaapantsu"
conf.DBLogMode = "detailed"
config.Conf.DBType = "postgres"
config.Conf.DBParams = "host=localhost user=nyaapantsu dbname=nyaapantsu sslmode=disable password=nyaapantsu"
config.Conf.DBLogMode = "detailed"
config.Conf.Models.CommentsTableName = "comments"
config.Conf.Models.FilesTableName = "files"
config.Conf.Models.NotificationsTableName = "notifications"
config.Conf.Models.ReportsTableName = "torrent_reports"
config.Conf.Models.TorrentsTableName = "torrents"
config.Conf.Models.UploadsOldTableName = "user_uploads_old"
config.Conf.Models.LastOldTorrentID = 90000
db, err := GormInit(conf, &errorLogger{t})
db, err := GormInit(config.Conf, &errorLogger{t})
if err != nil {
t.Errorf("failed to initialize database: %v", err)
}


@ -111,11 +111,11 @@ func RunMetainfoFetcher(conf *config.Config) {
}
func main() {
conf := config.New()
conf := config.Conf
processFlags := conf.BindFlags()
defaults := flag.Bool("print-defaults", false, "print the default configuration file on stdout")
mode := flag.String("mode", "webapp", "which mode to run daemon in, either webapp, scraper or metainfo_fetcher")
flag.Float64Var(&conf.Cache.Size, "c", config.DefaultCacheSize, "size of the search cache in MB")
flag.Float64Var(&conf.Cache.Size, "c", config.Conf.Cache.Size, "size of the search cache in MB")
flag.Parse()
if *defaults {
@ -151,8 +151,8 @@ func main() {
log.Fatal(err.Error())
}
signals.Handle()
if len(config.TorrentFileStorage) > 0 {
err := os.MkdirAll(config.TorrentFileStorage, 0700)
if len(config.Conf.Torrents.FileStorage) > 0 {
err := os.MkdirAll(config.Conf.Torrents.FileStorage, 0700)
if err != nil {
log.Fatal(err.Error())
}


@ -27,7 +27,7 @@ func (c Comment) Size() int {
// TableName : Return the name of comment table
func (c Comment) TableName() string {
return config.CommentsTableName
return config.Conf.Models.CommentsTableName
}
// Identifier : Return the identifier of the comment


@ -16,7 +16,7 @@ type File struct {
// TableName : Return the name of files table
func (f File) TableName() string {
return config.FilesTableName
return config.Conf.Models.FilesTableName
}
// Size : Returns the total size of memory allocated for this struct


@ -22,5 +22,5 @@ func NewNotification(identifier string, c string, url string) Notification {
// TableName : Return the name of notification table
func (n *Notification) TableName() string {
return config.NotificationTableName
return config.Conf.Models.NotificationsTableName
}


@ -23,7 +23,7 @@ type TorrentReport struct {
// TableName : Return the name of torrent report table
func (report TorrentReport) TableName() string {
return config.ReportsTableName
return config.Conf.Models.ReportsTableName
}
// TorrentReportJSON : Json struct of torrent report model


@ -82,7 +82,7 @@ func (t Torrent) Size() (s int) {
// TableName : Return the name of torrents table
func (t Torrent) TableName() string {
return config.TorrentsTableName
return config.Conf.Models.TorrentsTableName
}
// Identifier : Return the identifier of a torrent
@ -125,8 +125,8 @@ func (t Torrent) AddToESIndex(client *elastic.Client) error {
ctx := context.Background()
torrentJSON := t.ToJSON()
_, err := client.Index().
Index(config.DefaultElasticsearchIndex).
Type(config.DefaultElasticsearchType).
Index(config.Conf.Search.ElasticsearchIndex).
Type(config.Conf.Search.ElasticsearchType).
Id(strconv.FormatUint(uint64(torrentJSON.ID), 10)).
BodyJson(torrentJSON).
Refresh("true").
@ -138,8 +138,8 @@ func (t Torrent) AddToESIndex(client *elastic.Client) error {
func (t Torrent) DeleteFromESIndex(client *elastic.Client) error {
ctx := context.Background()
_, err := client.Delete().
Index(config.DefaultElasticsearchIndex).
Type(config.DefaultElasticsearchType).
Index(config.Conf.Search.ElasticsearchIndex).
Type(config.Conf.Search.ElasticsearchType).
Id(strconv.FormatInt(int64(t.ID), 10)).
Do(ctx)
return err
@ -148,20 +148,20 @@ func (t Torrent) DeleteFromESIndex(client *elastic.Client) error {
// ParseTrackers : Takes an array of trackers, adds needed trackers and parse it to url string
func (t *Torrent) ParseTrackers(trackers []string) {
v := url.Values{}
if len(config.NeededTrackers) > 0 { // if we have some needed trackers configured
if len(config.Conf.Torrents.Trackers.NeededTrackers) > 0 { // if we have some needed trackers configured
if len(trackers) == 0 {
trackers = config.Trackers
trackers = config.Conf.Torrents.Trackers.Default
} else {
for _, id := range config.NeededTrackers {
for _, id := range config.Conf.Torrents.Trackers.NeededTrackers {
found := false
for _, tracker := range trackers {
if tracker == config.Trackers[id] {
if tracker == config.Conf.Torrents.Trackers.Default[id] {
found = true
break
}
}
if !found {
trackers = append(trackers, config.Trackers[id])
trackers = append(trackers, config.Conf.Torrents.Trackers.Default[id])
}
}
}
@ -232,7 +232,7 @@ type TorrentJSON struct {
func (t *Torrent) ToJSON() TorrentJSON {
var trackers []string
if t.Trackers == "" {
trackers = config.Trackers
trackers = config.Conf.Torrents.Trackers.Default
} else {
trackers = t.GetTrackersArray()
}
@ -277,14 +277,14 @@ func (t *Torrent) ToJSON() TorrentJSON {
uploaderID = t.UploaderID
}
torrentlink := ""
if t.ID <= config.LastOldTorrentID && len(config.TorrentCacheLink) > 0 {
if t.ID <= config.Conf.Models.LastOldTorrentID && len(config.Conf.Torrents.CacheLink) > 0 {
if config.IsSukebei() {
torrentlink = "" // torrent cache doesn't have sukebei torrents
} else {
torrentlink = fmt.Sprintf(config.TorrentCacheLink, t.Hash)
torrentlink = fmt.Sprintf(config.Conf.Torrents.CacheLink, t.Hash)
}
} else if t.ID > config.LastOldTorrentID && len(config.TorrentStorageLink) > 0 {
torrentlink = fmt.Sprintf(config.TorrentStorageLink, t.Hash)
} else if t.ID > config.Conf.Models.LastOldTorrentID && len(config.Conf.Torrents.StorageLink) > 0 {
torrentlink = fmt.Sprintf(config.Conf.Torrents.StorageLink, t.Hash)
}
res := TorrentJSON{
ID: t.ID,


@ -129,7 +129,7 @@ type UserSettings struct {
// TableName : Return the name of OldComment table
func (c UserUploadsOld) TableName() string {
// is this needed here?
return config.UploadsOldTableName
return config.Conf.Models.UploadsOldTableName
}
// ToJSON : Conversion of a user model to json
@ -152,7 +152,7 @@ func (s *UserSettings) Get(key string) bool {
if val, ok := s.Settings[key]; ok {
return val
}
return config.DefaultUserSettings[key]
return config.Conf.Users.DefaultUserSettings[key]
}
// GetSettings : get all user settings
@ -170,7 +170,7 @@ func (s *UserSettings) Set(key string, val bool) {
// ToDefault : Set user settings to default
func (s *UserSettings) ToDefault() {
s.Settings = config.DefaultUserSettings
s.Settings = config.Conf.Users.DefaultUserSettings
}
func (s *UserSettings) initialize() {


@ -40,7 +40,7 @@ func APIHandler(w http.ResponseWriter, r *http.Request) {
}
if req.MaxPerPage == 0 {
req.MaxPerPage = config.TorrentsPerPage
req.MaxPerPage = config.Conf.Navigation.TorrentsPerPage
}
if req.Page <= 0 {
req.Page = 1
@ -53,10 +53,10 @@ func APIHandler(w http.ResponseWriter, r *http.Request) {
if maxString != "" {
req.MaxPerPage, err = strconv.Atoi(maxString)
if !log.CheckError(err) {
req.MaxPerPage = config.TorrentsPerPage
req.MaxPerPage = config.Conf.Navigation.TorrentsPerPage
}
} else {
req.MaxPerPage = config.TorrentsPerPage
req.MaxPerPage = config.Conf.Navigation.TorrentsPerPage
}
req.Page = 1


@ -544,7 +544,7 @@ func torrentManyAction(r *http.Request) {
if r.FormValue("withreport") == "" { // Default behavior for withreport
withReport = false
}
if !config.TorrentStatus[status] { // Check if the status exist
if !config.Conf.Torrents.Status[status] { // Check if the status exist
messages.AddErrorTf("errors", "no_status_exist", status)
status = -1
}


@ -87,7 +87,7 @@ func init() {
userRoutes.HandleFunc("/{id}/{username}/feed/{page}", RSSHandler).Name("feed_user_page")
// Please make EnableSecureCSRF to false when testing locally
if config.EnableSecureCSRF {
if config.Conf.EnableSecureCSRF {
userRoutes.Handle("/", csrf.Protect(config.CSRFTokenHashKey)(userRoutes))
torrentRoutes.Handle("/", csrf.Protect(config.CSRFTokenHashKey)(torrentRoutes))
torrentViewRoutes.Handle("/", csrf.Protect(config.CSRFTokenHashKey)(torrentViewRoutes))


@ -67,7 +67,7 @@ func RSSHandler(w http.ResponseWriter, r *http.Request) {
}
feed := &feeds.Feed{
Title: "Nyaa Pantsu",
Link: &feeds.Link{Href: "https://" + config.WebAddress + "/"},
Link: &feeds.Link{Href: "https://" + config.Conf.WebAddress + "/"},
Created: createdAsTime,
}
feed.Items = make([]*feeds.Item, len(torrents))
@ -75,7 +75,7 @@ func RSSHandler(w http.ResponseWriter, r *http.Request) {
for i, torrent := range torrents {
torrentJSON := torrent.ToJSON()
feed.Items[i] = &feeds.Item{
ID: "https://" + config.WebAddress + "/view/" + strconv.FormatUint(uint64(torrentJSON.ID), 10),
ID: "https://" + config.Conf.WebAddress + "/view/" + strconv.FormatUint(uint64(torrentJSON.ID), 10),
Title: torrent.Name,
Link: &feeds.Link{Href: string("<![CDATA[" + torrentJSON.Magnet + "]]>")},
Description: string(torrentJSON.Description),


@ -191,7 +191,7 @@ var FuncMap = template.FuncMap{
return captchaData{captchaID, T}
},
"DefaultUserSettings": func(s string) bool {
return config.DefaultUserSettings[s]
return config.Conf.Users.DefaultUserSettings[s]
},
"makeTreeViewData": func(f *filelist.FileListFolder, nestLevel int, T publicSettings.TemplateTfunc, identifierChain string) interface{} {
return struct {


@ -70,7 +70,7 @@ var errTorrentPlusMagnet = errors.New("Upload either a torrent file or magnet li
var errPrivateTorrent = errors.New("Torrent is private")
// error indicating a problem with its trackers
var errTrackerProblem = errors.New("Torrent does not have any (working) trackers: https://" + config.WebAddress + "/faq#trackers")
var errTrackerProblem = errors.New("Torrent does not have any (working) trackers: https://" + config.Conf.WebAddress + "/faq#trackers")
// error indicating a torrent's name is invalid
var errInvalidTorrentName = errors.New("Torrent name is invalid")
@ -232,7 +232,7 @@ func (f *uploadForm) ExtractInfo(r *http.Request) error {
}
// after data has been checked & extracted, write it to disk
if len(config.TorrentFileStorage) > 0 {
if len(config.Conf.Torrents.FileStorage) > 0 {
err := writeTorrentToDisk(tfile, f.Infohash+".torrent", &f.Filepath)
if err != nil {
return err
@ -290,7 +290,7 @@ func writeTorrentToDisk(file multipart.File, name string, fullpath *string) erro
if err != nil {
return err
}
*fullpath = fmt.Sprintf("%s%c%s", config.TorrentFileStorage, os.PathSeparator, name)
*fullpath = fmt.Sprintf("%s%c%s", config.Conf.Torrents.FileStorage, os.PathSeparator, name)
return ioutil.WriteFile(*fullpath, b, 0644)
}


@ -105,7 +105,7 @@ func UploadPostHandler(w http.ResponseWriter, r *http.Request) {
url, err := Router.Get("view_torrent").URL("id", strconv.FormatUint(uint64(torrent.ID), 10))
if user.ID > 0 && config.DefaultUserSettings["new_torrent"] { // If we are a member and notifications for new torrents are enabled
if user.ID > 0 && config.Conf.Users.DefaultUserSettings["new_torrent"] { // If we are a member and notifications for new torrents are enabled
userService.GetFollowers(user) // We populate the liked field for users
if len(user.Followers) > 0 { // If we are followed by at least someone
for _, follower := range user.Followers {


@ -257,14 +257,14 @@ func DownloadTorrent(w http.ResponseWriter, r *http.Request) {
vars := mux.Vars(r)
hash := vars["hash"]
if hash == "" && len(config.TorrentFileStorage) == 0 {
if hash == "" && len(config.Conf.Torrents.FileStorage) == 0 {
//File not found, send 404
http.Error(w, "File not found.", 404)
return
}
//Check if file exists and open
Openfile, err := os.Open(fmt.Sprintf("%s%c%s.torrent", config.TorrentFileStorage, os.PathSeparator, hash))
Openfile, err := os.Open(fmt.Sprintf("%s%c%s.torrent", config.Conf.Torrents.FileStorage, os.PathSeparator, hash))
defer Openfile.Close() //Close after function return
if err != nil {
//File not found, send 404


@ -1,13 +1,14 @@
package scraperService
import (
"net"
"net/url"
"time"
"github.com/NyaaPantsu/nyaa/config"
"github.com/NyaaPantsu/nyaa/db"
"github.com/NyaaPantsu/nyaa/model"
"github.com/NyaaPantsu/nyaa/util/log"
"net"
"net/url"
"time"
)
// MTU yes this is the ipv6 mtu
@ -181,7 +182,7 @@ func (sc *Scraper) Scrape(packets uint) {
now := time.Now().Add(0 - sc.interval)
// only scrape torretns uploaded within 90 days
oldest := now.Add(0 - (time.Hour * 24 * 90))
rows, err := db.ORM.Raw("SELECT torrent_id, torrent_hash FROM "+config.TorrentsTableName+" WHERE ( last_scrape IS NULL OR last_scrape < ? ) AND date > ? ORDER BY torrent_id DESC LIMIT ?", now, oldest, packets*ScrapesPerPacket).Rows()
rows, err := db.ORM.Raw("SELECT torrent_id, torrent_hash FROM "+config.Conf.Models.TorrentsTableName+" WHERE ( last_scrape IS NULL OR last_scrape < ? ) AND date > ? ORDER BY torrent_id DESC LIMIT ?", now, oldest, packets*ScrapesPerPacket).Rows()
if err == nil {
counter := 0
var scrape [ScrapesPerPacket]model.Torrent


@ -54,8 +54,8 @@ func (t *Transaction) handleScrapeReply(data []byte) {
}
}
const pgQuery = "UPDATE " + config.TorrentsTableName + " SET seeders = $1 , leechers = $2 , completed = $3 , last_scrape = $4 WHERE torrent_id = $5"
const sqliteQuery = "UPDATE " + config.TorrentsTableName + " SET seeders = ? , leechers = ? , completed = ? , last_scrape = ? WHERE torrent_id = ?"
var pgQuery = "UPDATE " + config.Conf.Models.TorrentsTableName + " SET seeders = $1 , leechers = $2 , completed = $3 , last_scrape = $4 WHERE torrent_id = $5"
var sqliteQuery = "UPDATE " + config.Conf.Models.TorrentsTableName + " SET seeders = ? , leechers = ? , completed = ? , last_scrape = ? WHERE torrent_id = ?"
// Sync syncs models with database
func (t *Transaction) Sync() (err error) {


@ -233,8 +233,8 @@ func (fetcher *MetainfoFetcher) fillQueue() {
excludedIDS = append(excludedIDS, id)
}
tFiles := config.FilesTableName
tTorrents := config.TorrentsTableName
tFiles := config.Conf.Models.FilesTableName
tTorrents := config.Conf.Models.TorrentsTableName
// Select the torrents with no filesize, or without any rows with torrent_id in the files table...
queryString := "((filesize IS NULL OR filesize = 0) OR (" + tTorrents + ".torrent_id NOT " +
"IN (SELECT " + tFiles + ".torrent_id FROM " + tFiles + " WHERE " + tFiles +
@ -254,7 +254,7 @@ func (fetcher *MetainfoFetcher) fillQueue() {
// and, if true, that aren't from the old Nyaa database
if fetcher.newTorrentsOnly {
queryString += " AND torrent_id > ? "
whereParamsArgs = append(whereParamsArgs, config.LastOldTorrentID)
whereParamsArgs = append(whereParamsArgs, config.Conf.Models.LastOldTorrentID)
}
params := serviceBase.CreateWhereParams(queryString, whereParamsArgs...)


@ -39,7 +39,7 @@ func NewFetchOperation(fetcher *MetainfoFetcher, dbEntry model.Torrent) (op *Fet
func (op *FetchOperation) Start(out chan Result) {
defer op.fetcher.wg.Done()
magnet := util.InfoHashToMagnet(strings.TrimSpace(op.torrent.Hash), op.torrent.Name, config.Trackers...)
magnet := util.InfoHashToMagnet(strings.TrimSpace(op.torrent.Hash), op.torrent.Name, config.Conf.Torrents.Trackers.Default...)
downloadingTorrent, err := op.fetcher.torrentClient.AddMagnet(magnet)
if err != nil {
out <- Result{op, err, nil}


@ -27,7 +27,7 @@ func GetFeeds() (result []model.Feed, err error) {
result = make([]model.Feed, 0, 50)
rows, err := db.ORM.DB().
Query(
"SELECT `torrent_id` AS `id`, `torrent_name` AS `name`, `torrent_hash` AS `hash`, `timestamp` FROM `" + config.TorrentsTableName +
"SELECT `torrent_id` AS `id`, `torrent_name` AS `name`, `torrent_hash` AS `hash`, `timestamp` FROM `" + config.Conf.Models.TorrentsTableName +
"` ORDER BY `timestamp` desc LIMIT 50")
if err != nil {
return nil, err
@ -40,7 +40,7 @@ func GetFeeds() (result []model.Feed, err error) {
if err != nil {
return
}
magnet := util.InfoHashToMagnet(strings.TrimSpace(item.Hash), item.Name, config.Trackers...)
magnet := util.InfoHashToMagnet(strings.TrimSpace(item.Hash), item.Name, config.Conf.Torrents.Trackers.Default...)
item.Magnet = magnet
// TODO: memory hog
result = append(result, item)
@ -58,10 +58,10 @@ func GetTorrentByID(id string) (torrent model.Torrent, err error) {
}
tmp := db.ORM.Where("torrent_id = ?", id).Preload("Comments")
if idInt > config.LastOldTorrentID {
if idInt > int64(config.Conf.Models.LastOldTorrentID) {
tmp = tmp.Preload("FileList")
}
if idInt <= config.LastOldTorrentID && !config.IsSukebei() {
if idInt <= int64(config.Conf.Models.LastOldTorrentID) && !config.IsSukebei() {
// only preload old comments if they could actually exist
tmp = tmp.Preload("OldComments")
}
@ -78,7 +78,7 @@ func GetTorrentByID(id string) (torrent model.Torrent, err error) {
torrent.Uploader = new(model.User)
db.ORM.Where("user_id = ?", torrent.UploaderID).Find(torrent.Uploader)
torrent.OldUploader = ""
if torrent.ID <= config.LastOldTorrentID && torrent.UploaderID == 0 {
if torrent.ID <= config.Conf.Models.LastOldTorrentID && torrent.UploaderID == 0 {
var tmp model.UserUploadsOld
if !db.ORM.Where("torrent_id = ?", torrent.ID).Find(&tmp).RecordNotFound() {
torrent.OldUploader = tmp.Username
@ -160,7 +160,7 @@ func getTorrentsOrderBy(parameters *serviceBase.WhereParams, orderBy string, lim
}
// build custom db query for performance reasons
dbQuery := "SELECT * FROM " + config.TorrentsTableName
dbQuery := "SELECT * FROM " + config.Conf.Models.TorrentsTableName
if conditions != "" {
dbQuery = dbQuery + " WHERE " + conditions
}


@ -49,11 +49,11 @@ func CheckTrackers(trackers []string) []string {
// IsUploadEnabled : Check if upload is enabled in config
func IsUploadEnabled(u model.User) bool {
if config.UploadsDisabled {
if config.AdminsAreStillAllowedTo && u.IsModerator() {
if config.Conf.Torrents.UploadsDisabled {
if config.Conf.Torrents.AdminsAreStillAllowedTo && u.IsModerator() {
return true
}
if config.TrustedUsersAreStillAllowedTo && u.IsTrusted() {
if config.Conf.Torrents.TrustedUsersAreStillAllowedTo && u.IsTrusted() {
return true
}
return false


@ -61,7 +61,7 @@ func EncodeCookie(userID uint) (string, error) {
// ClearCookie : Erase cookie session
func ClearCookie(w http.ResponseWriter) (int, error) {
domain := DomainName
if config.Environment == "DEVELOPMENT" {
if config.Conf.Environment == "DEVELOPMENT" {
domain = ""
}
cookie := &http.Cookie{
@ -108,7 +108,7 @@ func SetCookieHandler(w http.ResponseWriter, r *http.Request, email string, pass
return http.StatusInternalServerError, err
}
domain := DomainName
if config.Environment == "DEVELOPMENT" {
if config.Conf.Environment == "DEVELOPMENT" {
domain = ""
}
cookie := &http.Cookie{


@ -24,8 +24,8 @@ func SendEmailVerification(to string, token string) error {
if err != nil {
return err
}
content := T("link") + " : https://" + config.WebAddress + "/verify/email/" + token
contentHTML := T("verify_email_content") + "<br/>" + "<a href=\"https://" + config.WebAddress + "/verify/email/" + token + "\" target=\"_blank\">" + config.WebAddress + "/verify/email/" + token + "</a>"
content := T("link") + " : https://" + config.Conf.WebAddress + "/verify/email/" + token
contentHTML := T("verify_email_content") + "<br/>" + "<a href=\"https://" + config.Conf.WebAddress + "/verify/email/" + token + "\" target=\"_blank\">" + config.Conf.WebAddress + "/verify/email/" + token + "</a>"
return email.SendEmailFromAdmin(to, T("verify_email_title"), content, contentHTML)
}


@ -13,9 +13,9 @@ func GetCategories() map[string]string {
}
if config.IsSukebei() {
categories = config.TorrentSukebeiCategories
categories = config.Conf.Torrents.SukebeiCategories
} else {
categories = config.TorrentCleanCategories
categories = config.Conf.Torrents.CleanCategories
}
return categories


@ -17,15 +17,15 @@ var (
// InitGomail : init the gomail dialer
func InitGomail() *gomail.Dialer {
newMailer := gomail.NewDialer(config.EmailHost, config.EmailPort, config.EmailUsername, config.EmailPassword)
newMailer := gomail.NewDialer(config.Conf.Email.Host, config.Conf.Email.Port, config.Conf.Email.Username, config.Conf.Email.Password)
return newMailer
}
// SendEmailFromAdmin : send an email from system with email address in config/email.go
func SendEmailFromAdmin(to string, subject string, body string, bodyHTML string) error {
msg := gomail.NewMessage()
msg.SetHeader("From", config.EmailFrom)
msg.SetHeader("To", to, config.EmailTestTo)
msg.SetHeader("From", config.Conf.Email.From)
msg.SetHeader("To", to, config.Conf.Email.TestTo)
msg.SetHeader("Subject", subject)
msg.SetBody("text/plain", body)
msg.AddAlternative("text/html", bodyHTML)
@ -33,7 +33,7 @@ func SendEmailFromAdmin(to string, subject string, body string, bodyHTML string)
log.Debugf("subject : %s", subject)
log.Debugf("body : %s", body)
log.Debugf("bodyHTML : %s", bodyHTML)
if config.SendEmail {
if config.Conf.Email.SendEmail {
log.Debug("SendEmail performed.")
err := mailer.DialAndSend(msg)
@ -45,9 +45,9 @@ func SendEmailFromAdmin(to string, subject string, body string, bodyHTML string)
// SendTestEmail : function to send a test email to email address in config/email.go
func SendTestEmail() error {
msg := gomail.NewMessage()
msg.SetHeader("From", config.EmailFrom)
msg.SetHeader("To", config.EmailTestTo)
msg.SetAddressHeader("Cc", config.EmailTestTo, "NyaaPantsu")
msg.SetHeader("From", config.Conf.Email.From)
msg.SetHeader("To", config.Conf.Email.TestTo)
msg.SetAddressHeader("Cc", config.Conf.Email.TestTo, "NyaaPantsu")
msg.SetHeader("Subject", "Hi(안녕하세요)?!")
msg.SetBody("text/plain", "Hi(안녕하세요)?!")
msg.AddAlternative("text/html", "<p><b>Nowplay(나우플레이)</b> means <i>Let's play</i>!!?</p>")


@ -37,7 +37,7 @@ func InitLogToStdout() {
func InitLogToFile() {
logrus.SetFormatter(&logrus.JSONFormatter{})
out := LumberJackLogger(config.ErrorLogFilePath+config.ErrorLogFileExtension, config.ErrorLogMaxSize, config.ErrorLogMaxBackups, config.ErrorLogMaxAge)
out := LumberJackLogger(config.Conf.Log.ErrorLogFilePath+config.Conf.Log.ErrorLogFileExtension, config.Conf.Log.ErrorLogMaxSize, config.Conf.Log.ErrorLogMaxBackups, config.Conf.Log.ErrorLogMaxAge)
logrus.SetOutput(out)
logrus.SetLevel(logrus.WarnLevel)


@ -23,7 +23,7 @@ type UserRetriever interface {
type TemplateTfunc func(string, ...interface{}) template.HTML
var (
defaultLanguage = config.DefaultI18nConfig.DefaultLanguage
defaultLanguage = config.Conf.I18n.DefaultLanguage
userRetriever UserRetriever
)
@ -32,13 +32,13 @@ func InitI18n(conf config.I18nConfig, retriever UserRetriever) error {
defaultLanguage = conf.DefaultLanguage
userRetriever = retriever
defaultFilepath := path.Join(conf.TranslationsDirectory, defaultLanguage+".all.json")
defaultFilepath := path.Join(conf.Directory, defaultLanguage+".all.json")
err := i18n.LoadTranslationFile(defaultFilepath)
if err != nil {
panic(fmt.Sprintf("failed to load default translation file '%s': %v", defaultFilepath, err))
}
paths, err := filepath.Glob(path.Join(conf.TranslationsDirectory, "*.json"))
paths, err := filepath.Glob(path.Join(conf.Directory, "*.json"))
if err != nil {
return fmt.Errorf("failed to get translation files: %v", err)
}


@ -8,8 +8,10 @@ import (
)
func TestInitI18n(t *testing.T) {
conf := config.DefaultI18nConfig
conf.TranslationsDirectory = path.Join("..", "..", conf.TranslationsDirectory)
conf := config.I18nConfig{}
conf.Directory = "translations"
conf.DefaultLanguage = "en-us"
conf.Directory = path.Join("..", "..", conf.Directory)
var retriever UserRetriever // not required during initialization
err := InitI18n(conf, retriever)