diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..c87421b --- /dev/null +++ b/.gitignore @@ -0,0 +1,51 @@ +### OSX ### +.DS_Store +.AppleDouble +.LSOverride +Icon + +# Thumbnails +._* + +# Files that might appear on external disk +.Spotlight-V100 +.Trashes + +### Windows ### +# Windows image file caches +Thumbs.db +ehthumbs.db + +# Folder config file +Desktop.ini + +# Recycle Bin used on file shares +$RECYCLE.BIN/ + +### Linux ### +.* +!.gitignore +*~ + +### JetBrains ### +.idea/* + +### Go ### +# Binaries for programs and plugins +*.exe +*.dll +*.so +*.dylib + +# Test binary, build with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Project-local glide cache, RE: https://github.com/Masterminds/glide/issues/736 +.glide/ + +### PROJECT SPECIFIC ### +hashstorage.struct +downloads.config \ No newline at end of file diff --git a/LICENSE.txt b/LICENSE.txt new file mode 100644 index 0000000..4a87280 --- /dev/null +++ b/LICENSE.txt @@ -0,0 +1,13 @@ + DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE + Version 2, December 2004 + + Copyright (C) 2004 Sam Hocevar + + Everyone is permitted to copy and distribute verbatim or modified + copies of this license document, and changing it is allowed as long + as the name is changed. + + DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. You just DO WHAT THE FUCK YOU WANT TO. \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 0000000..3835141 --- /dev/null +++ b/README.md @@ -0,0 +1,38 @@ +# Vuze Tools + +### Information +Vuze Tools provides ways of recovering damaged active files and finds missing torrent files for the Vuze Bittorrent Client. + +##### Program Options + +Please make sure Vuze is shutdown before running, While it will still work it might read invalid information from your files. 
+I recommend running "Fix Active files", copying the recovered files, launching Vuze and then quitting, then running whichever recovery you want to use.
+
+1. Fix Active files - Fixes damaged active files by looking for .dat._AZ and .dat.saving files, which are created and kept if Vuze crashed while saving.
+2. Simple Recovery - Recovers missing torrent files by filename from backups (Normal - Once a valid torrent is found it will move on)
+3. Advanced Recovery - Recovers missing torrent files by hash from backups (Slow - Scans every torrent file in the backups; however, it is resumable upon completion)
+4. Active Recovery - Recovers missing torrent files by extracting them from their active files. (Fast and Accurate - Checks the given hash for an active file and generates a new torrent)
+
+Recovered files are placed in the same directory that contains your Azureus directory, in a folder named "Azureus-recover".
+The contents of "Azureus-recover" should then be moved into your Azureus directory, except for "AdvancedHashStorage.glob" (a resumable hash storage generated by Advanced Recovery).
+
+### Configuration
+You may override the default_config.yml by creating a config/config.yml file inside the current directory.
+You can also use the command line arguments:
+* '-env="type" [DEV,PROD,PROD-STDOUT,PROD-JSON]'
+* '-azdir="/path/to/azureus/directory"' to set the location of your Vuze configuration
+* '-azconfig="/path/to/azureus/downloads.config"' to override the default azdir/downloads.config path
+* '-azbackups="/path/to/backupfolder1,/path/to/backupfolder2"'
+
+Note: Windows users will have to escape their filepath separator '\' as '\\'
+
+### TODO
+
+- [ ] Add Testing
+- [ ] Add inline documentation
+
+### LICENSE
+This project is licensed under the WTFPL. Please see LICENSE.txt for more details.
+
+### Disclaimer
+While the tool is safe to use and will not modify any non-tool-related files, USE THIS AT YOUR OWN RISK.
\ No newline at end of file diff --git a/config/config.go b/config/config.go new file mode 100644 index 0000000..7f77315 --- /dev/null +++ b/config/config.go @@ -0,0 +1,122 @@ +package config + +import ( + "flag" + "fmt" + "github.com/blaize9/vuze-tools/utils" + "github.com/jinzhu/configor" + "path/filepath" + "strings" + "sync" +) + +var ( + DefaultConfigPath = "config/default_config.yml" + ConfigPath = "config/config.yml" +) + +var config *Config +var once sync.Once + +type Config struct { + Port int `json:"port" yaml:"port"` + Version string `json:"build_version" yaml:"build_version"` + LockFilename string `json:"lock_filename" yaml:"lock_filename"` + AzureusDirectory string `json:"azureus_directory" yaml:"azureus_directory"` + AzureusTorrentsDirectory string `json:"azureus_torrents_directory" yaml:"azureus_torrents_directory"` + AzureusDownloadsConfig string `json:"azureus_downloads_config" yaml:"azureus_downloads_config,omitempty"` + AzureusRecoverTempDirectory string `json:"azureus_recover_temp_directory" yaml:"azureus_recover_temp_directory,omitempty"` + + SimpleRecoverWorkers int `json:"simple_recovery_workers" yaml:"simple_recovery_workers,omitempty"` + AdvancedRecoverMaxWorkers int `json:"advanced_recovery_max_workers" yaml:"advanced_recovery_max_workers,omitempty"` + AzureusBackupDirectories AzDirectories `yaml:"azureus_backup_directories,flow,omitempty"` + + Environment string `json:"environment" yaml:"environment,omitempty"` + Log LogConfig `yaml:"log,flow,omitempty"` +} + +type LogConfig struct { + AccessLogFilePath string `yaml:"access_log_filepath,omitempty"` + AccessLogFileExtension string `yaml:"access_log_fileextension,omitempty"` + AccessLogMaxSize int `yaml:"access_log_max_size,omitempty"` + AccessLogMaxBackups int `yaml:"access_log_max_backups,omitempty"` + AccessLogMaxAge int `yaml:"access_log_max_age,omitempty"` + ErrorLogFilePath string `yaml:"error_log_filepath,omitempty"` + ErrorLogFileExtension string 
`yaml:"error_log_fileextension,omitempty"` + ErrorLogMaxSize int `yaml:"error_log_max_size,omitempty"` + ErrorLogMaxBackups int `yaml:"error_log_max_backups,omitempty"` + ErrorLogMaxAge int `yaml:"error_log_max_age,omitempty"` +} + +type AzDirectories []struct { + Directory string +} + +func init() { + configor.Load(Get(), ConfigPath, DefaultConfigPath) +} + +func Get() *Config { + once.Do(func() { + config = &Config{} + }) + return config +} + +func GetAzDownloadsConfig() string { + if filepath.Dir(Get().AzureusDownloadsConfig) == "." { + //file + return filepath.Join(Get().AzureusDirectory, Get().AzureusDownloadsConfig) + } else { + //filepath + return Get().AzureusDownloadsConfig + } +} + +func GetAzTorrentsPath() string { + var path string + if Get().AzureusTorrentsDirectory == "" { + path = filepath.Join(Get().AzureusDirectory, "torrents") + } else { + path = filepath.Join(Get().AzureusDirectory, Get().AzureusTorrentsDirectory) + } + return path + +} + +func GetAzActivePath() string { + return filepath.Join(Get().AzureusDirectory, "active") + +} + +func GetAzRecoverPath() string { + var path string + if Get().AzureusRecoverTempDirectory == "" || Get().AzureusRecoverTempDirectory == "azureus-recover" { + path = filepath.Join(Get().AzureusDirectory, "../azureus-recover") + } else { + path = filepath.Join(Get().AzureusDirectory, Get().AzureusRecoverTempDirectory) + } + + return path +} + +func BindFLags() func() { + var backupdirs string + flag.StringVar(&Get().Environment, "env", Get().Environment, "Environment [DEV,PROD,PROD-STDOUT,PROD-JSON]") + flag.StringVar(&Get().AzureusDirectory, "azdir", Get().AzureusDirectory, "Directory that contains Azureus storage") + flag.StringVar(&Get().AzureusDownloadsConfig, "azconfig", Get().AzureusDownloadsConfig, "File or FilePath to the downloads.config") + flag.Parse() + if backupdirs != "" { + for _, dir := range strings.Split(backupdirs, ",") { + dir = strings.TrimSpace(dir) + if utils.DirExists(dir) { + 
Get().AzureusBackupDirectories = append(Get().AzureusBackupDirectories, struct{ Directory string }{Directory: dir}) + } else { + fmt.Println("Backup directory %s does not exist!", dir) + } + } + } + return func() { + configor.Load(Get(), ConfigPath, DefaultConfigPath) + } +} diff --git a/config/config.yml b/config/config.yml new file mode 100644 index 0000000..d49d2ac --- /dev/null +++ b/config/config.yml @@ -0,0 +1,4 @@ +#Override default config values here +#azureus_backup_directories: +# -directory: /path/to/directory/that/contains/backups +# -directory: /path/to/other/directory/that/contains/backups \ No newline at end of file diff --git a/config/default_config.yml b/config/default_config.yml new file mode 100644 index 0000000..c4e4106 --- /dev/null +++ b/config/default_config.yml @@ -0,0 +1,26 @@ +port: 9955 +build_version: "0.9.0" +environment: PRODUCTION-STDOUT +lock_filename: "vuze-tools.lck" + +azureus_directory: "" +azureus_torrents_directory: "torrents" +azureus_downloads_config: "downloads.config" +azureus_recover_temp_directory: "azureus-recover" +azureus_backup_directories: + +simple_recovery_workers: 15 +advanced_recovery_max_workers: 50 + +log: + access_log_filepath: log/access + access_log_fileextension: .txt + access_log_max_size: 5 + access_log_max_backups: 7 + access_log_max_age: 30 + error_log_filepath: log/error + error_log_fileextension: .json + error_log_max_size: 10 + error_log_max_backups: 7 + error_log_max_age: 30 + diff --git a/main.go b/main.go new file mode 100644 index 0000000..61d737d --- /dev/null +++ b/main.go @@ -0,0 +1,421 @@ +package main + +import ( + "bufio" + "encoding/hex" + "fmt" + "github.com/IncSW/go-bencode" + "github.com/blaize9/vuze-tools/config" + "github.com/blaize9/vuze-tools/utils" + "github.com/blaize9/vuze-tools/utils/log" + "github.com/blaize9/vuze-tools/vuze" + "github.com/mitchellh/go-ps" + pbar "github.com/pmalek/pb" + "io/ioutil" + "os" + "path" + "path/filepath" + "reflect" + "runtime" + "sort" + "strings" 
+ "time" +) + +var azureusBackupDirectories []string + +// TODO: Add Tests +// TODO: Add Documentation + +func main() { + runtime.GOMAXPROCS(runtime.NumCPU()) + config.BindFLags() + log.Init(config.Get().Environment) + + log.Debugf("Config: %v", config.Get()) + log.Infof("Using %d CPUs", runtime.NumCPU()) + log.Infof("Azureus Directory: %s", config.Get().AzureusDirectory) + log.Infof("Recovery Directory: %s", config.GetAzRecoverPath()) + + if !utils.FileExists(config.GetAzDownloadsConfig()) { + log.Fatalf("Azureus downloads.config in %s does not exist. Have you set your Azureus Directory?", config.Get().AzureusDirectory) + } + + if !utils.DirExists(config.GetAzRecoverPath()) { + os.MkdirAll(config.GetAzRecoverPath(), os.FileMode(0644)) + } + + if !utils.DirExists(filepath.Join(config.GetAzRecoverPath(), "torrents")) { + os.Mkdir(filepath.Join(config.GetAzRecoverPath(), "torrents"), os.FileMode(0644)) + } + + if !utils.DirExists(filepath.Join(config.GetAzRecoverPath(), "active")) { + os.Mkdir(filepath.Join(config.GetAzRecoverPath(), "active"), os.FileMode(0644)) + } + + processes, _ := ps.Processes() + for _, process := range processes { + if strings.Contains(strings.ToLower(process.Executable()), "azureus") { + if !utils.AskForconfirmation(fmt.Sprintf("Found (%d) %s running. Would you like to continue?", process.Pid(), process.Executable())) { + os.Exit(2) + } + } + } + + if len(config.Get().AzureusBackupDirectories) == 0 { + log.Infof("You have not entered any backup directories to search. 
Please add them if you want to run Simple or Advanced recoveries.\n") + } + + for _, directories := range config.Get().AzureusBackupDirectories { + if directories.Directory == "" { + continue + } + for _, directory := range vuze.GetAllVuzeBackupDirectores(directories.Directory) { + if !utils.DirExists(directory) { + continue + } + azureusBackupDirectories = append(azureusBackupDirectories, directory) + } + } + + vuze.ShuffleBackupDirectories(azureusBackupDirectories) + azureusBackupDirectories = append([]string{config.Get().AzureusDirectory}, azureusBackupDirectories...) + azureusBackupDirectories = utils.UniqueStringSlice(azureusBackupDirectories) + + reader := bufio.NewReader(os.Stdin) + fmt.Println() + fmt.Printf("Please select a program to run\n" + + "1. Fix Active files (Scans active for dat files and attempts to fix them)\n" + + "2. Simple Recovery (Scans backups for missing torrents by filename)\n" + + "3. Advanced Recovery (Scans backup's torrents and recovers them using hashes.) *Long*\n" + + "4. Active Recovery (Scans backup.config and recovers torrents from active.dat files) *Fast and accurate*\n" + + "5. 
Exit\nSelection: ") + selection, _ := reader.ReadString('\n') + selection = strings.TrimSpace(selection) + + switch selection { + case "1": + FixActiveDatFiles() + case "2": + SimpleRecover() + case "3": + AdvancedRecover() + case "4": + ActiveRecover() + default: + fmt.Println("Exiting") + os.Exit(2) + } + + fmt.Println("Press enter key to exit.") + bufio.NewReader(os.Stdin).ReadBytes('\n') +} + +func FixActiveDatFiles() { + log.Info("Fix Active Dat Files\n-------------------------------") + + dir := config.GetAzActivePath() + fixedDir := path.Join(config.GetAzRecoverPath(), "active") + + hashes := vuze.ProcessActiveDirectory(config.GetAzActivePath()) + totalHashes := len(hashes) + bar := pbar.StartNew(totalHashes) + + valid := 0 + recovered := 0 + unrecoverable := 0 + + for hash, m := range hashes { + bar.Increment() + if m.IsDatValid && m.IsBakValid { + valid++ + } else { + // .dat -> .dat.bak + if m.IsDatValid && !m.IsBakValid { + recovered++ + err := utils.CopyFile(path.Join(dir, hash+".dat"), path.Join(fixedDir, hash+".dat.bak")) + if err != nil { + fmt.Println(err) + } + err = utils.CopyFile(path.Join(dir, hash+".dat"), path.Join(fixedDir, hash+".dat")) + if err != nil { + fmt.Println(err) + } + continue + } + // .dat.bak -> .dat + if m.IsBakValid && !m.IsDatValid { + recovered++ + err := utils.CopyFile(path.Join(dir, hash+".dat.bak"), path.Join(fixedDir, hash+".dat")) + if err != nil { + fmt.Println(err) + } + err = utils.CopyFile(path.Join(dir, hash+".dat.bak"), path.Join(fixedDir, hash+".dat.bak")) + if err != nil { + fmt.Println(err) + } + continue + } + // .dat._AZ -> .dat .dat.bak + if m.IsAZValid && !m.IsBakValid && !m.IsDatValid { + recovered++ + err := utils.CopyFile(path.Join(dir, hash+".dat._AZ"), path.Join(fixedDir, hash+".dat")) + if err != nil { + fmt.Println(err) + } + err = utils.CopyFile(path.Join(dir, hash+".dat._AZ"), path.Join(fixedDir, hash+".dat.bak")) + if err != nil { + fmt.Println(err) + } + continue + } + if m.IsSavingValid && 
!m.IsDatValid && !m.IsBakValid { + recovered++ + err := utils.CopyFile(path.Join(dir, hash+".dat.saving"), path.Join(fixedDir, hash+".dat")) + if err != nil { + fmt.Println(err) + } + err = utils.CopyFile(path.Join(dir, hash+".dat.saving"), path.Join(fixedDir, hash+".dat.bak")) + if err != nil { + fmt.Println(err) + } + continue + } + if !m.IsDatValid && !m.IsBakValid && !m.IsAZValid && !m.IsSavingValid { + unrecoverable++ + log.Warnf("%s is unrecoverable [%v]\n", hash, m) + } else { + unrecoverable++ + log.Warnf("%s is unrecoverable [%v]\n", hash, m) + } + } + } + bar.FinishPrint("Recovery Finished! Please copy the files from " + config.GetAzRecoverPath()) + + log.Infof("Total: %d, Valid: %d, Recoverable: %d Unrecoverable: %d", valid, totalHashes, recovered, unrecoverable) +} + +func SimpleRecover() { + log.Info("Simple Recovery\n-------------------------------") + recoveredMap := make(chan vuze.RecoveredTorrent, 1) + chFinished := make(chan bool) + chRecovered := make(chan int) + chUnrecoverable := make(chan int) + chTorrentFiles := make(chan string, 4550) + chFilesCompleted := make(chan int) + + log.Infof("Scanning Downloads config") + torrents, err := vuze.ScanDownloadsConfig() + if err != nil { + log.Fatalf("%v", err) + return + } + + torrentFiles := []string{} + files := 0 + valid := 0 + for _, torrent := range torrents { + if torrent.Valid && torrent.Found { + valid++ + continue + } + files++ + torrentFiles = append(torrentFiles, torrent.Filepath) + } + + log.Infof("Sending %d torrents to Torrent Finder Worker", len(torrentFiles)) + go func() { + for _, torrent_filepath := range torrentFiles { + for len(chTorrentFiles) == cap(chTorrentFiles) { + time.Sleep(time.Millisecond * 100) + } + chTorrentFiles <- torrent_filepath + } + }() + + files_recovered_map := map[string]vuze.RecoveredTorrent{} + files_complete := 0 + files_recovered := 0 + files_unrecoverable := 0 + + for i := 0; i < config.Get().SimpleRecoverWorkers; i++ { + go vuze.TorrentFinderWorker(i, 
chRecovered, chUnrecoverable, chTorrentFiles, chFinished, chFilesCompleted, recoveredMap, &azureusBackupDirectories) + } + for { + if files == files_complete && files_recovered+files_unrecoverable == files_complete { + break + } + + select { + case recovered := <-recoveredMap: + files_recovered_map[recovered.OrigFilepath] = recovered + case <-chUnrecoverable: + files_unrecoverable++ + case <-chRecovered: + files_recovered++ + case <-chFilesCompleted: + files_complete++ + } + + } + + log.Infof("Found: %d Recoverable: %d Unrecoverable %d", files, files_recovered, files_unrecoverable) + + data, err := vuze.ReadDownloadsConfig() + if err != nil { + fmt.Errorf("%v\n", err) + } + datam := data.(map[string]interface{}) + recoverTorrents(&datam, files_recovered_map, false) + +} + +func AdvancedRecover() { + log.Info("Advanced Recovery\n-------------------------------") + HashStorage := vuze.BackupHashFinder(&azureusBackupDirectories) + + log.Infof("Sorting HashStorage Hashes by newest") + for _, hash := range HashStorage.HashMap { + sort.Sort(hash) + } + + log.Infof("Scanning Downloads config") + torrents, err := vuze.ScanDownloadsConfig() + if err != nil { + log.Fatalf("%v\n", err) + return + } + + log.Infof("Selecting torrents to recover") + + valid := 0 + recovered := 0 + unrecoverable := 0 + + files_recovered_map := map[string]vuze.RecoveredTorrent{} + for i, torrent := range torrents { + if torrent.Valid && torrent.Found { + valid++ + continue + } + if _, ok := HashStorage.HashMap[hex.EncodeToString(torrent.Hash)]; ok { + recovered++ + log.Infof("[%d] Recovering %s", i, torrent.Filepath, torrent.Valid, torrent.Found) + first := HashStorage.HashMap[hex.EncodeToString(torrent.Hash)][0] + files_recovered_map[torrent.Filepath] = vuze.RecoveredTorrent{Filename: filepath.Base(torrent.Filepath), BackupFilepath: first.Filepath} + } else { + unrecoverable++ + log.Warnf("[%d] Unable to recover %s [Found: %v, Valid: %v]", i, torrent.Filepath, torrent.Found, torrent.Valid) + } + 
+ } + + log.Infof("Total: %d, Valid: %d, Recovered: %d, Unrecoverable: %d\n", len(torrents), valid, recovered, unrecoverable) + + data, err := vuze.ReadDownloadsConfig() + if err != nil { + fmt.Errorf("%v", err) + } + datam := data.(map[string]interface{}) + recoverTorrents(&datam, files_recovered_map, false) + +} + +func ActiveRecover() { + log.Info("Active Recovery\n-------------------------------") + log.Infof("Scanning Downloads config") + files_recovered_map := map[string]vuze.RecoveredTorrent{} + torrents, err := vuze.ScanDownloadsConfig() + if err != nil { + log.Fatalf("%v", err) + return + } + + valid := len(torrents) + recovered := 0 + unrecoverable := 0 + + for _, torrent := range torrents { + log.Infof("%v\n", torrent) + activedat := filepath.Join(config.Get().AzureusDirectory, "active", strings.ToUpper(hex.EncodeToString(torrent.Hash))+".dat") + log.Infof("Active File: %s\n", activedat) + + if !utils.FileExists(torrent.Filepath) { + hashstring := strings.ToUpper(hex.EncodeToString(torrent.Hash)) + activedat := filepath.Join(config.Get().AzureusDirectory, "active", hashstring+".dat") + if utils.FileExists(activedat) { + saved, err := vuze.SaveTorrentFromActive(activedat, filepath.Join(config.GetAzRecoverPath(), "torrents", filepath.Base(torrent.Filepath))) + if err != nil { + log.Errorf("[%s] Unable to Save torrent from active to %s [%v]", hashstring, torrent.Filepath, err) + unrecoverable++ + continue + } + if saved { + recovered++ + files_recovered_map[torrent.Filepath] = vuze.RecoveredTorrent{Filename: filepath.Base(torrent.Filepath), BackupFilepath: activedat} + } + } else { + log.Warnf("[%s] Unable to find active for %s", hashstring, torrent.Filepath) + unrecoverable++ + } + + } + } + + log.Infof("Total: %d, Valid: %d, Recovered: %d, Unrecoverable: %d", len(torrents), valid, recovered, unrecoverable) +} + +func recoverTorrents(datam *map[string]interface{}, files_recovered_map map[string]vuze.RecoveredTorrent, updateOnly bool) { + recoverTorrentsDir 
:= filepath.Join(config.GetAzRecoverPath(), config.Get().AzureusTorrentsDirectory) + + if !updateOnly { + for _, recovered := range files_recovered_map { + if recovered.Err != nil { + continue + } + backupfile := recovered.BackupFilepath + newfile := filepath.Join(recoverTorrentsDir, recovered.Filename) + + if !utils.FileExists(backupfile) { + log.Errorf("recover file %s does not exist in expected location", backupfile) + } + + if !utils.FileExists(newfile) && !utils.FileExists(filepath.Join(config.GetAzTorrentsPath(), recovered.Filename)) { + err := utils.CopyFile(backupfile, newfile) + if err != nil { + log.Warnf("Unable to copy %s to %s [%v]", backupfile, newfile, err.Error()) + } + } + } + + } + + for _, v := range *datam { + for _, vv := range v.([]interface{}) { + for kkk, vvv := range vv.(map[string]interface{}) { + if kkk == "torrent" { + torrent_filepath := utils.ByteToString(vvv.([]uint8)) + if recovered, ok := files_recovered_map[torrent_filepath]; ok { + if recovered.Err != nil { + continue + } + newfile := filepath.Join(config.GetAzTorrentsPath(), recovered.Filename) + reflect.ValueOf(vv).SetMapIndex(reflect.ValueOf("torrent"), reflect.ValueOf([]uint8(newfile))) + } + } + } + } + } + + dataMarshal, err := bencode.Marshal(*datam) + if err != nil { + log.Fatal("Unable to marshal downloads.config") + } + + err = ioutil.WriteFile(filepath.Join(config.GetAzRecoverPath(), "downloads.config"), dataMarshal, 0644) + if err != nil { + log.Errorf("Unabel to write new download config [%v]", err) + } + + fmt.Println("Recovery Complete") +} diff --git a/utils/log/logger.go b/utils/log/logger.go new file mode 100644 index 0000000..124d654 --- /dev/null +++ b/utils/log/logger.go @@ -0,0 +1,143 @@ +package log + +import ( + "fmt" + "github.com/blaize9/vuze-tools/config" + "go.uber.org/zap" + "go.uber.org/zap/zapcore" + "gopkg.in/natefinch/lumberjack.v2" +) + +var logger *zap.Logger +var sugar *zap.SugaredLogger + +func Init(environment string) { + switch environment { 
+ case "DEVELOPMENT": + InitLogToStdoutDebug() + case "DEV": + InitLogToStdoutDebug() + + case "PRODUCTION-STDOUT": + InitLogToStdout() + case "PROD-STDOUT": + InitLogToStdout() + + case "PRODUCTION-JSON": + InitLogToJsonFile() + fmt.Printf("Outputting Log to %s\n", config.Get().Log.ErrorLogFilePath+config.Get().Log.ErrorLogFileExtension) + case "PROD-JSON": + InitLogToJsonFile() + fmt.Printf("Outputting Log to %s\n", config.Get().Log.ErrorLogFilePath+config.Get().Log.ErrorLogFileExtension) + + case "PRODUCTION": + InitLogToFile() + fmt.Printf("Outputting Log to %s.log\n", config.Get().Log.ErrorLogFilePath) + case "PROD": + InitLogToFile() + fmt.Printf("Outputting Log to %s.log\n", config.Get().Log.ErrorLogFilePath) + + default: + InitLogToStdoutDebug() + } + Debugf("Environment: %s\n", environment) +} + +func InitLogToStdoutDebug() { + configZ := zap.NewDevelopmentConfig() + configZ.EncoderConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder + logger, _ = configZ.Build() + sugar = logger.Sugar() +} + +func InitLogToStdout() { + configZ := zap.NewDevelopmentConfig() + configZ.Level = zap.NewAtomicLevelAt(zap.InfoLevel) + configZ.EncoderConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder + logger, _ = configZ.Build() + sugar = logger.Sugar() +} + +func InitLogToFile() { + w := zapcore.AddSync(&lumberjack.Logger{ + Filename: config.Get().Log.ErrorLogFilePath + ".log", + MaxSize: config.Get().Log.ErrorLogMaxSize, // megabytes + MaxBackups: config.Get().Log.ErrorLogMaxBackups, + MaxAge: config.Get().Log.ErrorLogMaxAge, // days + }) + configZ := zap.NewProductionEncoderConfig() + configZ.EncodeTime = zapcore.ISO8601TimeEncoder + core := zapcore.NewCore( + zapcore.NewConsoleEncoder(configZ), + w, + zap.InfoLevel, + ) + logger = zap.New(core) + sugar = logger.Sugar() +} + +func InitLogToJsonFile() { + w := zapcore.AddSync(&lumberjack.Logger{ + Filename: config.Get().Log.ErrorLogFilePath + config.Get().Log.ErrorLogFileExtension, + MaxSize: 
config.Get().Log.ErrorLogMaxSize, // megabytes + MaxBackups: config.Get().Log.ErrorLogMaxBackups, + MaxAge: config.Get().Log.ErrorLogMaxAge, // days + }) + configZ := zap.NewProductionEncoderConfig() + configZ.EncodeTime = zapcore.ISO8601TimeEncoder + core := zapcore.NewCore( + zapcore.NewJSONEncoder(configZ), + w, + zap.InfoLevel, + ) + logger = zap.New(core) + sugar = logger.Sugar() +} + +func Debug(msg string) { + logger.Debug(msg) +} + +func Debugf(msg string, args ...interface{}) { + sugar.Debugf(msg, args...) +} + +func Info(msg string) { + logger.Info(msg) +} + +func Infof(msg string, args ...interface{}) { + sugar.Infof(msg, args...) +} + +func Warn(msg string) { + logger.Warn(msg) +} + +func Warnf(msg string, args ...interface{}) { + sugar.Warnf(msg, args...) +} + +func Error(msg string) { + logger.Error(msg) +} + +func Errorf(msg string, args ...interface{}) { + sugar.Errorf(msg, args...) +} + +func Fatal(msg string) { + logger.Fatal(msg) +} + +func Fatalf(msg string, args ...interface{}) { + sugar.Fatalf(msg, args...) +} + +func Panic(msg string) { + logger.Panic(msg) +} + +func Panicf(msg string, args ...interface{}) { + sugar.Panicf(msg, args...) 
+} diff --git a/utils/util.go b/utils/util.go new file mode 100644 index 0000000..3af4105 --- /dev/null +++ b/utils/util.go @@ -0,0 +1,268 @@ +package utils + +import ( + "bufio" + "encoding/gob" + "errors" + "fmt" + "github.com/IncSW/go-bencode" + "github.com/djherbis/times" + "io" + "io/ioutil" + "math/rand" + "os" + "path/filepath" + "reflect" + "strings" + "time" +) + +func FileExists(Path string) bool { + if _, err := os.Stat(Path); os.IsNotExist(err) { + return false + } + return true +} + +func DirExists(Path string) bool { + return FileExists(Path) +} + +func GetAllSubDirectories(Path string) (dirs []string) { + files, _ := ioutil.ReadDir(Path) + for _, f := range files { + if f.IsDir() { + dirs = append(dirs, filepath.Join(Path, f.Name())) + } + } + return dirs +} + +func CopyFile(Filepath string, destFilepath string) error { + srcFile, err := os.Open(Filepath) + if err != nil { + return err + } + defer srcFile.Close() + + srcStat, err := times.Stat(Filepath) + if err != nil { + return err + } + + destFile, err := os.Create(destFilepath) + if err != nil { + return err + } + defer destFile.Close() + + _, err = io.Copy(destFile, srcFile) + if err != nil { + return err + } + + err = destFile.Sync() + + if err := os.Chtimes(destFilepath, srcStat.AccessTime(), srcStat.ModTime()); err != nil { + return err + } + + return nil +} + +func CopyFileDateCreated(Filepath string, destFilepath string) error { + srcFile, err := os.Open(Filepath) + if err != nil { + return err + } + defer srcFile.Close() + + srcStat, err := times.Stat(Filepath) + if err != nil { + return err + } + + destFile, err := os.Create(destFilepath) + if err != nil { + return err + } + defer destFile.Close() + + _, err = io.Copy(destFile, srcFile) + if err != nil { + return err + } + + err = destFile.Sync() + var createdTime time.Time + if srcStat.HasBirthTime() { + createdTime = srcStat.BirthTime() + } else { + createdTime = srcStat.ModTime() + } + + if err := os.Chtimes(destFilepath, createdTime, 
// SliceContains reports whether item is present in slice.
// A linear scan replaces the previous implementation, which allocated and
// filled a throwaway map on every call just to do one membership test.
func SliceContains(slice []string, item string) bool {
	for _, s := range slice {
		if s == item {
			return true
		}
	}
	return false
}
// UniqueStringSlice returns a new slice containing the elements of input in
// their original order with duplicates removed.
func UniqueStringSlice(input []string) []string {
	unique := make([]string, 0, len(input))
	// map[string]struct{} stores no per-entry value; the previous version
	// used map[string]bool and also shadowed the builtin `string` identifier
	// with its loop variable.
	seen := make(map[string]struct{}, len(input))

	for _, s := range input {
		if _, ok := seen[s]; !ok {
			seen[s] = struct{}{}
			unique = append(unique, s)
		}
	}

	return unique
}
b/vuze/vuze.go new file mode 100644 index 0000000..e66ab18 --- /dev/null +++ b/vuze/vuze.go @@ -0,0 +1,221 @@ +package vuze + +import ( + "errors" + "github.com/IncSW/go-bencode" + "github.com/KyleBanks/go-kit/log" + "github.com/blaize9/vuze-tools/config" + "github.com/blaize9/vuze-tools/utils" + "github.com/djherbis/times" + pbar "github.com/pmalek/pb" + "io" + "io/ioutil" + "math/rand" + "os" + "path/filepath" + "regexp" + "sort" + "strings" + "time" +) + +func CheckHashStorage() (storage HashStorage, lastMod time.Time, BackupDirCount int, UniqueHashCount int) { + err := utils.LoadStruct(filepath.Join(config.GetAzRecoverPath(), "hashstorage.struct"), &storage) + if err != nil { + log.Errorf("Error reading hashstorage [%s]", err) + } + lastMod = storage.LastModified + BackupDirCount = len(storage.BackupDirectories) + UniqueHashCount = len(storage.HashMap) + + log.Infof("HashStorage was last modified %s and contains %d directories with %d unique hashes", lastMod, BackupDirCount, UniqueHashCount) + return +} + +func ReadDownloadsConfig() (interface{}, error) { + file, er := ioutil.ReadFile(config.GetAzDownloadsConfig()) + if er != nil { + return nil, errors.New("Unable to open vuze downloads config") + } + data, err := bencode.Unmarshal(file) + if err != nil { + return nil, errors.New("Unable to unmarshal vuze downloads config") + } + return data, nil +} + +func SaveTorrentFromActive(ActivePath string, destFilepath string) (bool, error) { + file, err := ioutil.ReadFile(ActivePath) + if err != nil { + return false, err + } + data, err := bencode.Unmarshal(file) + if err != nil { + return false, err + } + + datam := data.(map[string]interface{}) + bencodeM := map[string]interface{}{} + + for k, v := range datam { + if k == "comment" || k == "created by" || k == "creation date" || k == "encoding" || + k == "info" || k == "announce" || k == "announce-list" { + bencodeM[k] = v + } + } + + m, _ := bencode.Marshal(bencodeM) + + destFile, err := os.Create(destFilepath) + if 
err != nil { + return false, err + } + defer destFile.Close() + + srcStat, err := times.Stat(ActivePath) + if err != nil { + return false, err + } + + _, err = io.WriteString(destFile, utils.ByteToString(m)) + if err != nil { + return false, err + } + err = destFile.Sync() + + var createdTime time.Time + if srcStat.HasBirthTime() { + createdTime = srcStat.BirthTime() + } else { + createdTime = srcStat.ModTime() + } + + if err := os.Chtimes(destFilepath, createdTime, createdTime); err != nil { + return false, err + } + + return true, err +} + +// Directories inside Path must contain ####-##-## +func GetAllVuzeBackupDirectores(Path string) (dirs []string) { + dirmatch, _ := regexp.Compile("\\d{4}-\\d{2}-\\d{2}") + foundDirs := utils.GetAllSubDirectories(Path) + sort.Sort(sort.Reverse(sort.StringSlice(foundDirs))) + + for _, dir := range foundDirs { + if dirmatch.MatchString(dir) { + if utils.DirExists(dir) { + dirs = append(dirs, dir) + } + } + } + + return dirs +} + +func ProcessActiveDirectory(activePath string) map[string]VuzeDat { + Hashes := map[string]VuzeDat{} + + datFileCount := 0 + files, _ := ioutil.ReadDir(activePath) + for _, finfo := range files { + if filepath.Ext(finfo.Name()) == ".dat" { + datFileCount++ + } + } + + bar := pbar.StartNew(datFileCount) + for _, finfo := range files { + ActivePath := config.GetAzActivePath() + "/" + finfo.Name() + + hash := strings.TrimSuffix(ActivePath, filepath.Ext(ActivePath)) + baseFilename := filepath.Base(ActivePath) + baseFilenameWithoutExt := strings.TrimSuffix(baseFilename, filepath.Ext(baseFilename)) + if filepath.Ext(ActivePath) == ".dat" { + vuzeDat := VuzeDat{} + + AZ := hash + ".dat._AZ" + BAK := hash + ".dat.bak" + SAVING := hash + ".dat.saving" + + if utils.IsBencodeFileValid(ActivePath) { + vuzeDat.IsDatValid = true + } + if utils.FileExists(AZ) { + vuzeDat.HasAZ = true + if utils.IsBencodeFileValid(AZ) { + vuzeDat.IsAZValid = true + } + } + if utils.FileExists(BAK) { + vuzeDat.HasBak = true + if 
utils.IsBencodeFileValid(BAK) { + vuzeDat.IsBakValid = true + } + } + if utils.FileExists(SAVING) { + vuzeDat.HasSaving = true + if utils.IsBencodeFileValid(SAVING) { + vuzeDat.IsSavingValid = true + } + } + + Hashes[baseFilenameWithoutExt] = vuzeDat + bar.Increment() + } + + } + + bar.FinishPrint("Finished Scanning. Start the recovery!") + return Hashes +} + +func ScanDownloadsConfig() ([]TorrentPathHash, error) { + data, err := ReadDownloadsConfig() + if err != nil { + log.Errorf("%v", err) + return nil, err + } + datam := data.(map[string]interface{}) + torrents := []TorrentPathHash{} + if utils.IsMap(datam) { + for k, v := range datam { + if k == "downloads" { + for _, vv := range v.([]interface{}) { + torrent := TorrentPathHash{} + for kkk, vvv := range vv.(map[string]interface{}) { + if kkk == "torrent" { + //files++ + torrent_filepath := utils.ByteToString(vvv.([]uint8)) + if torrent_filepath != "" { + torrent.Filepath = torrent_filepath + if utils.FileExists(torrent_filepath) { + torrent.Found = true + if utils.IsTorrentValid(torrent_filepath) == nil { + torrent.Valid = true + } + } + } + + } + + if kkk == "torrent_hash" { + torrent.Hash = vvv.([]uint8) + } + } + torrents = append(torrents, torrent) + } + } + } + return torrents, nil + } + return torrents, errors.New("downloads.config is not valid!") +} + +func ShuffleBackupDirectories(slice []string) { + for i := range slice { + j := rand.Intn(i + 1) + slice[i], slice[j] = slice[j], slice[i] + } +} diff --git a/vuze/worker.go b/vuze/worker.go new file mode 100644 index 0000000..10464f1 --- /dev/null +++ b/vuze/worker.go @@ -0,0 +1,171 @@ +package vuze + +import ( + "fmt" + "github.com/blaize9/vuze-tools/config" + "github.com/blaize9/vuze-tools/utils" + "github.com/blaize9/vuze-tools/utils/log" + "github.com/djherbis/times" + torrentParser "github.com/j-muller/go-torrent-parser" + "io/ioutil" + "path/filepath" + "sort" + "sync" + "time" +) + +type HashStorage struct { + BackupDirectories []string + HashMap 
HashMap + LastModified time.Time +} + +type HashMap map[string]FilepathSlice + +type FilepathSlice []Filepath + +type Filepath struct { + Filepath string + DateModified time.Time +} + +type HashMapSorter struct { + hashes []FilepathSlice +} + +func (f FilepathSlice) Sort() { + sort.Sort(f) +} + +func (f FilepathSlice) Len() int { + return len(f) +} + +func (f FilepathSlice) Swap(i, j int) { + f[i], f[j] = f[j], f[i] +} + +func (f FilepathSlice) Less(i, j int) bool { + return f[i].DateModified.After(f[j].DateModified) +} + +func BackupHashFinder(vuzeBackupDirectories *[]string) HashStorage { + var HashStorage = HashStorage{BackupDirectories: *vuzeBackupDirectories} + HashStoragePath := filepath.Join(config.GetAzRecoverPath(), "hashstorage.struct") + var ResumeHashStorage bool + var NewBackupDirectories []string + if utils.FileExists(HashStoragePath) { + fmt.Printf("Checking existing hashstorage.struct\n") + FileHashStorage, _, hashStorageDirCount, _ := CheckHashStorage() + fmt.Printf("Current Backup Dirs: %d\nHashStorage File Dirs: %d\n", len(*vuzeBackupDirectories), hashStorageDirCount) + + for _, dir := range *vuzeBackupDirectories { + if !utils.SliceContains(FileHashStorage.BackupDirectories, dir) { + NewBackupDirectories = append(NewBackupDirectories, dir) + fmt.Printf("%s was not found in HashStorage file\n", dir) + } + } + fmt.Println() + + if utils.AskForconfirmation("Would you like to load hashstorage.struct?") { + if len(*vuzeBackupDirectories) != len(FileHashStorage.BackupDirectories) { + if utils.AskForconfirmation("Would you like to scan new directories?") { + ResumeHashStorage = true + HashStorage = FileHashStorage + } else { + return FileHashStorage + } + } else { + return FileHashStorage + } + } else { + HashStorage.BackupDirectories = *vuzeBackupDirectories + } + } + + var wg sync.WaitGroup + var BackupDirectories []string + if ResumeHashStorage { + BackupDirectories = NewBackupDirectories + HashStorage.BackupDirectories = 
append(HashStorage.BackupDirectories, NewBackupDirectories...) + HashStorage.BackupDirectories = utils.UniqueStringSlice(HashStorage.BackupDirectories) + } else { + BackupDirectories = *vuzeBackupDirectories + } + + wg.Add(len(BackupDirectories)) + start := time.Now() + var mutex = &sync.Mutex{} + + var hashMap = make(map[string]FilepathSlice) + workers := 0 + for _, bkdir := range BackupDirectories { + for workers > config.Get().AdvancedRecoverMaxWorkers { + time.Sleep(time.Second * 25) + } + workers++ + go func(bkdir string) { + defer wg.Done() + torrentDir := filepath.Join(bkdir, config.Get().AzureusTorrentsDirectory) + if utils.DirExists(torrentDir) { + files, _ := ioutil.ReadDir(torrentDir + "/") + for _, tfile := range files { + if filepath.Ext(tfile.Name()) == ".torrent" { + tfilepath := filepath.Join(torrentDir, tfile.Name()) + torrent, err := torrentParser.ParseFromFile(tfilepath) + if err != nil { + continue + } + ftime, _ := times.Stat(tfilepath) + mutex.Lock() + hashMap[torrent.InfoHash] = append(hashMap[torrent.InfoHash], Filepath{Filepath: tfilepath, DateModified: ftime.ModTime()}) + mutex.Unlock() + } + } + log.Infof("[W%s] Finished scanning %s (%d files)\n", time.Since(start), torrentDir, len(files)) + } + workers-- + }(bkdir) + } + wg.Wait() + HashStorage.LastModified = time.Now() + HashStorage.HashMap = hashMap + err := utils.SaveStruct(HashStoragePath, HashStorage) + if err != nil { + log.Errorf("Error saving hashstorage [%s]", err) + } + + log.Infof("Total time taken to scan %s", time.Since(start).String()) + return HashStorage +} + +func TorrentFinderWorker(worker int, recovered chan<- int, unrecovered chan<- int, torrentFiles <-chan string, finished chan<- bool, chFilesCompleted chan<- int, recoveredMap chan<- RecoveredTorrent, vuzeBackupDirectories *[]string) { + log.Infof("Worker %d started", worker) + + //defer wg.Done() + + for torrentFilepath := range torrentFiles { + tfilepath := torrentFilepath + log.Debugf("Worker %d Working on %s\n", 
worker, tfilepath) + filename := filepath.Base(tfilepath) + var foundTorrentFile bool + if !utils.FileExists(torrentFilepath) { + for _, bkdir := range *vuzeBackupDirectories { + findTorrent := filepath.Join(bkdir, config.Get().AzureusTorrentsDirectory, filename) + if utils.FileExists(findTorrent) && utils.IsTorrentValid(findTorrent) == nil { + log.Debugf("[%d] %s FOUND\n", worker, findTorrent) + recoveredMap <- RecoveredTorrent{Filename: filename, OrigFilepath: torrentFilepath, BackupFilepath: findTorrent} + recovered <- 1 + foundTorrentFile = true + break + } + } + } + if foundTorrentFile == false { + log.Warnf("[W%d] %s NOT FOUND", worker, tfilepath) + unrecovered <- 1 + } + chFilesCompleted <- 1 + } + log.Infof("Worker %d Closed", worker) +}