diff --git a/RombaSharp/RombaSharp.Help.cs b/RombaSharp/RombaSharp.Help.cs index 1838123a..1ea6922b 100644 --- a/RombaSharp/RombaSharp.Help.cs +++ b/RombaSharp/RombaSharp.Help.cs @@ -5,667 +5,667 @@ using SabreTools.Library.Help; namespace RombaSharp { - public partial class RombaSharp - { - #region Private Flag features + public partial class RombaSharp + { + #region Private Flag features - private static Feature copyFlag - { - get - { - return new Feature( - "copy", - "-copy", - "Copy files to output instead of rebuilding", - FeatureType.Flag); - } - } // Unique to RombaSharp - private static Feature fixdatOnlyFlag - { - get - { - return new Feature( - "fixdatOnly", - "-fixdatOnly", - "only fix dats and don't generate torrentzips", - FeatureType.Flag); - } - } - private static Feature logOnlyFlag - { - get - { - return new Feature( - "log-only", - "-log-only", - "Only write out actions to log", - FeatureType.Flag); - } - } - private static Feature noDbFlag - { - get - { - return new Feature( - "no-db", - "-no-db", - "archive into depot but do not touch DB index and ignore only-needed flag", - FeatureType.Flag); - } - } - private static Feature onlyNeededFlag - { - get - { - return new Feature( - "only-needed", - "-only-needed", - "only archive ROM files actually referenced by DAT files from the DAT index", - FeatureType.Flag); - } - } - private static Feature skipInitialScanFlag - { - get - { - return new Feature( - "skip-initial-scan", - "-skip-initial-scan", - "skip the initial scan of the files to determine amount of work", - FeatureType.Flag); - } - } - private static Feature useGolangZipFlag - { - get - { - return new Feature( - "use-golang-zip", - "-use-golang-zip", - "use go zip implementation instead of zlib", - FeatureType.Flag); - } - } + private static Feature copyFlag + { + get + { + return new Feature( + "copy", + "-copy", + "Copy files to output instead of rebuilding", + FeatureType.Flag); + } + } // Unique to RombaSharp + private static Feature fixdatOnlyFlag + { + get + { + return new Feature( + "fixdatOnly", + "-fixdatOnly", + "only fix dats and don't generate torrentzips", + FeatureType.Flag); + } + } + private static Feature logOnlyFlag + { + get + { + return new Feature( + "log-only", + "-log-only", + "Only write out actions to log", + FeatureType.Flag); + } + } + private static Feature noDbFlag + { + get + { + return new Feature( + "no-db", + "-no-db", + "archive into depot but do not touch DB index and ignore only-needed flag", + FeatureType.Flag); + } + } + private static Feature onlyNeededFlag + { + get + { + return new Feature( + "only-needed", + "-only-needed", + "only archive ROM files actually referenced by DAT files from the DAT index", + FeatureType.Flag); + } + } + private static Feature skipInitialScanFlag + { + get + { + return new Feature( + "skip-initial-scan", + "-skip-initial-scan", + "skip the initial scan of the files to determine amount of work", + FeatureType.Flag); + } + } + private static Feature useGolangZipFlag + { + get + { + return new Feature( + "use-golang-zip", + "-use-golang-zip", + "use go zip implementation instead of zlib", + FeatureType.Flag); + } + } - #endregion + #endregion - #region Private Int32 features + #region Private Int32 features - private static Feature include7ZipsInt32Input - { - get - { - return new Feature( - "include-7zips", - "-include-7zips", - "flag value == 0 means: add 7zip files themselves into the depot in addition to their contents, flag value == 2 means add 7zip files themselves but don't add content", - 
FeatureType.Int32); - } - } - private static Feature includeGZipsInt32Input - { - get - { - return new Feature( - "include-gzips", - "-include-gzips", - "flag value == 0 means: add gzip files themselves into the depot in addition to their contents, flag value == 2 means add gzip files themselves but don't add content", - FeatureType.Int32); - } - } - private static Feature includeZipsInt32Input - { - get - { - return new Feature( - "include-zips", - "-include-zips", - "flag value == 0 means: add zip files themselves into the depot in addition to their contents, flag value == 2 means add zip files themselves but don't add content", - FeatureType.Int32); - } - } - private static Feature subworkersInt32Input - { - get - { - return new Feature( - "subworkers", - "-subworkers", - "how many subworkers to launch for each worker", - FeatureType.Int32); - } - } // Defaults to Workers count in config - private static Feature workersInt32Input - { - get - { - return new Feature( - "workers", - "-workers", - "how many workers to launch for the job", - FeatureType.Int32); - } - } // Defaults to Workers count in config + private static Feature include7ZipsInt32Input + { + get + { + return new Feature( + "include-7zips", + "-include-7zips", + "flag value == 0 means: add 7zip files themselves into the depot in addition to their contents, flag value == 2 means add 7zip files themselves but don't add content", + FeatureType.Int32); + } + } + private static Feature includeGZipsInt32Input + { + get + { + return new Feature( + "include-gzips", + "-include-gzips", + "flag value == 0 means: add gzip files themselves into the depot in addition to their contents, flag value == 2 means add gzip files themselves but don't add content", + FeatureType.Int32); + } + } + private static Feature includeZipsInt32Input + { + get + { + return new Feature( + "include-zips", + "-include-zips", + "flag value == 0 means: add zip files themselves into the depot in addition to their contents, flag value == 2 means add zip files themselves but don't add content", + FeatureType.Int32); + } + } + private static Feature subworkersInt32Input + { + get + { + return new Feature( + "subworkers", + "-subworkers", + "how many subworkers to launch for each worker", + FeatureType.Int32); + } + } // Defaults to Workers count in config + private static Feature workersInt32Input + { + get + { + return new Feature( + "workers", + "-workers", + "how many workers to launch for the job", + FeatureType.Int32); + } + } // Defaults to Workers count in config - #endregion + #endregion - #region Private Int64 features + #region Private Int64 features - private static Feature sizeInt64Input - { - get - { - return new Feature( - "size", - "-size", - "size of the rom to lookup", - FeatureType.Int64); - } - } + private static Feature sizeInt64Input + { + get + { + return new Feature( + "size", + "-size", + "size of the rom to lookup", + FeatureType.Int64); + } + } - #endregion + #endregion - #region Private List features + #region Private List features - private static Feature datsListStringInput - { - get - { - return new Feature( - "dats", - "-dats", - "purge only roms declared in these dats", - FeatureType.List); - } - } - private static Feature depotListStringInput - { - get - { - return new Feature( - "depot", - "-depot", - "work only on specified depot path", - FeatureType.List); - } - } + private static Feature datsListStringInput + { + get + { + return new Feature( + "dats", + "-dats", + "purge only roms declared in these dats", + FeatureType.List); 
+ } + } + private static Feature depotListStringInput + { + get + { + return new Feature( + "depot", + "-depot", + "work only on specified depot path", + FeatureType.List); + } + } - #endregion + #endregion - #region Private String features + #region Private String features - private static Feature backupStringInput - { - get - { - return new Feature( - "backup", - "-backup", - "backup directory where backup files are moved to", - FeatureType.String); - } - } - private static Feature descriptionStringInput - { - get - { - return new Feature( - "description", - "-description", - "description value in DAT header", - FeatureType.String); - } - } - private static Feature missingSha1sStringInput - { - get - { - return new Feature( - "missingSha1s", - "-missingSha1s", - "write paths of dats with missing sha1s into this file", - FeatureType.String); - } - } - private static Feature nameStringInput - { - get - { - return new Feature( - "name", - "-name", - "name value in DAT header", - FeatureType.String); - } - } - private static Feature newStringInput - { - get - { - return new Feature( - "new", - "-new", - "new DAT file", - FeatureType.String); - } - } - private static Feature oldStringInput - { - get - { - return new Feature( - "old", - "-old", - "old DAT file", - FeatureType.String); - } - } - private static Feature outStringInput - { - get - { - return new Feature( - "out", - "-out", - "output file", - FeatureType.String); - } - } - private static Feature resumeStringInput - { - get - { - return new Feature( - "resume", - "-resume", - "resume a previously interrupted operation from the specified path", - FeatureType.String); - } - } - private static Feature sourceStringInput - { - get - { - return new Feature( - "source", - "-source", - "source directory", - FeatureType.String); - } - } + private static Feature backupStringInput + { + get + { + return new Feature( + "backup", + "-backup", + "backup directory where backup files are moved to", + FeatureType.String); + } + } + private static Feature descriptionStringInput + { + get + { + return new Feature( + "description", + "-description", + "description value in DAT header", + FeatureType.String); + } + } + private static Feature missingSha1sStringInput + { + get + { + return new Feature( + "missingSha1s", + "-missingSha1s", + "write paths of dats with missing sha1s into this file", + FeatureType.String); + } + } + private static Feature nameStringInput + { + get + { + return new Feature( + "name", + "-name", + "name value in DAT header", + FeatureType.String); + } + } + private static Feature newStringInput + { + get + { + return new Feature( + "new", + "-new", + "new DAT file", + FeatureType.String); + } + } + private static Feature oldStringInput + { + get + { + return new Feature( + "old", + "-old", + "old DAT file", + FeatureType.String); + } + } + private static Feature outStringInput + { + get + { + return new Feature( + "out", + "-out", + "output file", + FeatureType.String); + } + } + private static Feature resumeStringInput + { + get + { + return new Feature( + "resume", + "-resume", + "resume a previously interrupted operation from the specified path", + FeatureType.String); + } + } + private static Feature sourceStringInput + { + get + { + return new Feature( + "source", + "-source", + "source directory", + FeatureType.String); + } + } - #endregion + #endregion - public static Help RetrieveHelp() - { - // Create and add the header to the Help object - string barrier = "-----------------------------------------"; - List helpHeader 
= new List() - { - "RombaSharp - C# port of the Romba rom management tool", - barrier, - "Usage: RombaSharp [option] [filename|dirname] ...", - "" - }; - Help help = new Help(helpHeader); + public static Help RetrieveHelp() + { + // Create and add the header to the Help object + string barrier = "-----------------------------------------"; + List helpHeader = new List() + { + "RombaSharp - C# port of the Romba rom management tool", + barrier, + "Usage: RombaSharp [option] [filename|dirname] ...", + "" + }; + Help help = new Help(helpHeader); - #region Help + #region Help - Feature helpFeature = new Feature( - "Help", - new List() { "-?", "-h", "--help" }, - "Show this help", - FeatureType.Flag); + Feature helpFeature = new Feature( + "Help", + new List() { "-?", "-h", "--help" }, + "Show this help", + FeatureType.Flag); - #endregion + #endregion - #region Script + #region Script - Feature script = new Feature( - "Script", - "--script", - "Enable script mode (no clear screen)", - FeatureType.Flag, - longDescription: "For times when RombaSharp is being used in a scripted environment, the user may not want the screen to be cleared every time that it is called. This flag allows the user to skip clearing the screen on run just like if the console was being redirected."); + Feature script = new Feature( + "Script", + "--script", + "Enable script mode (no clear screen)", + FeatureType.Flag, + longDescription: "For times when RombaSharp is being used in a scripted environment, the user may not want the screen to be cleared every time that it is called. This flag allows the user to skip clearing the screen on run just like if the console was being redirected."); - #endregion + #endregion - #region Archive + #region Archive - Feature archive = new Feature( - "Archive", - "archive", - "Adds ROM files from the specified directories to the ROM archive.", - FeatureType.Flag, - longDescription: @"Adds ROM files from the specified directories to the ROM archive. + Feature archive = new Feature( + "Archive", + "archive", + "Adds ROM files from the specified directories to the ROM archive.", + FeatureType.Flag, + longDescription: @"Adds ROM files from the specified directories to the ROM archive. Traverses the specified directory trees looking for zip files and normal files. Unpacked files will be stored as individual entries. Prior to unpacking a zip file, the external SHA1 is checked against the DAT index. 
If -only-needed is set, only those files are put in the ROM archive that have a current entry in the DAT index."); - archive.AddFeature(onlyNeededFlag); - archive.AddFeature(resumeStringInput); - archive.AddFeature(includeZipsInt32Input); // Defaults to 0 - archive.AddFeature(workersInt32Input); - archive.AddFeature(includeGZipsInt32Input); // Defaults to 0 - archive.AddFeature(include7ZipsInt32Input); // Defaults to 0 - archive.AddFeature(skipInitialScanFlag); - archive.AddFeature(useGolangZipFlag); - archive.AddFeature(noDbFlag); + archive.AddFeature(onlyNeededFlag); + archive.AddFeature(resumeStringInput); + archive.AddFeature(includeZipsInt32Input); // Defaults to 0 + archive.AddFeature(workersInt32Input); + archive.AddFeature(includeGZipsInt32Input); // Defaults to 0 + archive.AddFeature(include7ZipsInt32Input); // Defaults to 0 + archive.AddFeature(skipInitialScanFlag); + archive.AddFeature(useGolangZipFlag); + archive.AddFeature(noDbFlag); - #endregion + #endregion - #region Build + #region Build - Feature build = new Feature( - "Build", - "build", - "For each specified DAT file it creates the torrentzip files.", - FeatureType.Flag, - longDescription: @"For each specified DAT file it creates the torrentzip files in the specified + Feature build = new Feature( + "Build", + "build", + "For each specified DAT file it creates the torrentzip files.", + FeatureType.Flag, + longDescription: @"For each specified DAT file it creates the torrentzip files in the specified output dir. The files will be placed in the specified location using a folder structure according to the original DAT master directory tree structure."); - build.AddFeature(outStringInput); - build.AddFeature(fixdatOnlyFlag); - build.AddFeature(copyFlag); - build.AddFeature(workersInt32Input); - build.AddFeature(subworkersInt32Input); + build.AddFeature(outStringInput); + build.AddFeature(fixdatOnlyFlag); + build.AddFeature(copyFlag); + build.AddFeature(workersInt32Input); + build.AddFeature(subworkersInt32Input); - #endregion + #endregion - #region Cancel + #region Cancel - Feature cancel = new Feature( - "Cancel", - "cancel", - "Cancels current long-running job", - FeatureType.Flag, - longDescription: @"Cancels current long-running job."); + Feature cancel = new Feature( + "Cancel", + "cancel", + "Cancels current long-running job", + FeatureType.Flag, + longDescription: @"Cancels current long-running job."); - #endregion + #endregion - #region DatStats + #region DatStats - Feature datstats = new Feature( - "DatStats", - "datstats", - "Prints dat stats.", - FeatureType.Flag, - longDescription: @"Print dat stats."); + Feature datstats = new Feature( + "DatStats", + "datstats", + "Prints dat stats.", + FeatureType.Flag, + longDescription: @"Print dat stats."); - #endregion + #endregion - #region DbStats + #region DbStats - Feature dbstats = new Feature( - "DbStats", - "dbstats", - "Prints db stats.", - FeatureType.Flag, - longDescription: @"Print db stats."); + Feature dbstats = new Feature( + "DbStats", + "dbstats", + "Prints db stats.", + FeatureType.Flag, + longDescription: @"Print db stats."); - #endregion + #endregion - #region Diffdat + #region Diffdat - Feature diffdat = new Feature( - "Diffdat", - "diffdat", - "Creates a DAT file with those entries that are in -new DAT.", - FeatureType.Flag, - longDescription: @"Creates a DAT file with those entries that are in -new DAT file and not + Feature diffdat = new Feature( + "Diffdat", + "diffdat", + "Creates a DAT file with those entries that are in -new DAT.", + 
FeatureType.Flag, + longDescription: @"Creates a DAT file with those entries that are in -new DAT file and not in -old DAT file. Ignores those entries in -old that are not in -new."); - diffdat.AddFeature(outStringInput); - diffdat.AddFeature(oldStringInput); - diffdat.AddFeature(newStringInput); - diffdat.AddFeature(nameStringInput); - diffdat.AddFeature(descriptionStringInput); + diffdat.AddFeature(outStringInput); + diffdat.AddFeature(oldStringInput); + diffdat.AddFeature(newStringInput); + diffdat.AddFeature(nameStringInput); + diffdat.AddFeature(descriptionStringInput); - #endregion + #endregion - #region Dir2Dat + #region Dir2Dat - Feature dir2dat = new Feature( - "Dir2Dat", - "dir2dat", - "Creates a DAT file for the specified input directory and saves it to the -out filename.", - FeatureType.Flag); - dir2dat.AddFeature(outStringInput); - dir2dat.AddFeature(sourceStringInput); - dir2dat.AddFeature(nameStringInput); // Defaults to "untitled" - dir2dat.AddFeature(descriptionStringInput); + Feature dir2dat = new Feature( + "Dir2Dat", + "dir2dat", + "Creates a DAT file for the specified input directory and saves it to the -out filename.", + FeatureType.Flag); + dir2dat.AddFeature(outStringInput); + dir2dat.AddFeature(sourceStringInput); + dir2dat.AddFeature(nameStringInput); // Defaults to "untitled" + dir2dat.AddFeature(descriptionStringInput); - #endregion + #endregion - #region EDiffdat + #region EDiffdat - Feature ediffdat = new Feature( - "EDiffdat", - "ediffdat", - "Creates a DAT file with those entries that are in -new DAT.", - FeatureType.Flag, - longDescription: @"Creates a DAT file with those entries that are in -new DAT files and not + Feature ediffdat = new Feature( + "EDiffdat", + "ediffdat", + "Creates a DAT file with those entries that are in -new DAT.", + FeatureType.Flag, + longDescription: @"Creates a DAT file with those entries that are in -new DAT files and not in -old DAT files. Ignores those entries in -old that are not in -new."); - ediffdat.AddFeature(outStringInput); - ediffdat.AddFeature(oldStringInput); - ediffdat.AddFeature(newStringInput); + ediffdat.AddFeature(outStringInput); + ediffdat.AddFeature(oldStringInput); + ediffdat.AddFeature(newStringInput); - #endregion + #endregion - #region Export + #region Export - // Unique to RombaSharp - Feature export = new Feature( - "Export", - "export", - "Exports db to export.csv", - FeatureType.Flag, - longDescription: "Exports db to standardized export.csv"); + // Unique to RombaSharp + Feature export = new Feature( + "Export", + "export", + "Exports db to export.csv", + FeatureType.Flag, + longDescription: "Exports db to standardized export.csv"); - #endregion + #endregion - #region Fixdat + #region Fixdat - Feature fixdat = new Feature( - "Fixdat", - "fixdat", - "For each specified DAT file it creates a fix DAT.", - FeatureType.Flag, - longDescription: @"For each specified DAT file it creates a fix DAT with the missing entries for + Feature fixdat = new Feature( + "Fixdat", + "fixdat", + "For each specified DAT file it creates a fix DAT.", + FeatureType.Flag, + longDescription: @"For each specified DAT file it creates a fix DAT with the missing entries for that DAT. 
If nothing is missing it doesn't create a fix DAT for that particular DAT."); - fixdat.AddFeature(outStringInput); - fixdat.AddFeature(fixdatOnlyFlag); // Enabled by default - fixdat.AddFeature(workersInt32Input); - fixdat.AddFeature(subworkersInt32Input); + fixdat.AddFeature(outStringInput); + fixdat.AddFeature(fixdatOnlyFlag); // Enabled by default + fixdat.AddFeature(workersInt32Input); + fixdat.AddFeature(subworkersInt32Input); - #endregion + #endregion - #region Import + #region Import - // Unique to RombaSharp - Feature import = new Feature( - "Import", - "import", - "Import a database from a formatted CSV file", - FeatureType.Flag, - longDescription: @"Import a database from a formatted CSV file"); + // Unique to RombaSharp + Feature import = new Feature( + "Import", + "import", + "Import a database from a formatted CSV file", + FeatureType.Flag, + longDescription: @"Import a database from a formatted CSV file"); - #endregion + #endregion - #region Lookup + #region Lookup - Feature lookup = new Feature( - "Lookup", - "lookup", - "For each specified hash it looks up any available information.", - FeatureType.Flag, - longDescription: @"For each specified hash it looks up any available information (dat or rom)."); - lookup.AddFeature(sizeInt64Input); // Defaults to -1 - lookup.AddFeature(outStringInput); + Feature lookup = new Feature( + "Lookup", + "lookup", + "For each specified hash it looks up any available information.", + FeatureType.Flag, + longDescription: @"For each specified hash it looks up any available information (dat or rom)."); + lookup.AddFeature(sizeInt64Input); // Defaults to -1 + lookup.AddFeature(outStringInput); - #endregion + #endregion - #region Memstats + #region Memstats - Feature memstats = new Feature( - "Memstats", - "memstats", - "Prints memory stats.", - FeatureType.Flag, - longDescription: @"Print memory stats."); + Feature memstats = new Feature( + "Memstats", + "memstats", + "Prints memory stats.", + FeatureType.Flag, + longDescription: @"Print memory stats."); - #endregion + #endregion - #region Merge + #region Merge - Feature merge = new Feature( - "Merge", - "merge", - "Merges depot", - FeatureType.Flag, - longDescription: @"Merges specified depot into current depot."); - merge.AddFeature(onlyNeededFlag); - merge.AddFeature(resumeStringInput); - merge.AddFeature(workersInt32Input); - merge.AddFeature(skipInitialScanFlag); + Feature merge = new Feature( + "Merge", + "merge", + "Merges depot", + FeatureType.Flag, + longDescription: @"Merges specified depot into current depot."); + merge.AddFeature(onlyNeededFlag); + merge.AddFeature(resumeStringInput); + merge.AddFeature(workersInt32Input); + merge.AddFeature(skipInitialScanFlag); - #endregion + #endregion - #region Miss + #region Miss - // Unique to RombaSharp - Feature miss = new Feature( - "Miss", - "miss", - "Create miss and have file", - FeatureType.Flag, - longDescription: @"For each specified DAT file, create miss and have file"); + // Unique to RombaSharp + Feature miss = new Feature( + "Miss", + "miss", + "Create miss and have file", + FeatureType.Flag, + longDescription: @"For each specified DAT file, create miss and have file"); - #endregion + #endregion - #region Progress + #region Progress - Feature progress = new Feature( - "Progress", - "progress", - "Shows progress of the currently running command.", - FeatureType.Flag, - longDescription: @"Shows progress of the currently running command."); + Feature progress = new Feature( + "Progress", + "progress", + "Shows progress of the 
currently running command.", + FeatureType.Flag, + longDescription: @"Shows progress of the currently running command."); - #endregion + #endregion - #region Purge Backup + #region Purge Backup - Feature purgeBackup = new Feature( - "Purge Backup", - "purge-backup", - "Moves DAT index entries for orphaned DATs.", - FeatureType.Flag, - longDescription: @"Deletes DAT index entries for orphaned DATs and moves ROM files that are no + Feature purgeBackup = new Feature( + "Purge Backup", + "purge-backup", + "Moves DAT index entries for orphaned DATs.", + FeatureType.Flag, + longDescription: @"Deletes DAT index entries for orphaned DATs and moves ROM files that are no longer associated with any current DATs to the specified backup folder. The files will be placed in the backup location using a folder structure according to the original DAT master directory tree structure. It also deletes the specified DATs from the DAT index."); - purgeBackup.AddFeature(backupStringInput); - purgeBackup.AddFeature(workersInt32Input); - purgeBackup.AddFeature(depotListStringInput); - purgeBackup.AddFeature(datsListStringInput); - purgeBackup.AddFeature(logOnlyFlag); + purgeBackup.AddFeature(backupStringInput); + purgeBackup.AddFeature(workersInt32Input); + purgeBackup.AddFeature(depotListStringInput); + purgeBackup.AddFeature(datsListStringInput); + purgeBackup.AddFeature(logOnlyFlag); - #endregion + #endregion - #region Purge Delete + #region Purge Delete - // Unique to RombaSharp - Feature purgeDelete = new Feature( - "Purge Delete", - "purge-delete", - "Deletes DAT index entries for orphaned DATs", - FeatureType.Flag); - purgeDelete.AddFeature(workersInt32Input); - purgeDelete.AddFeature(depotListStringInput); - purgeDelete.AddFeature(datsListStringInput); - purgeDelete.AddFeature(logOnlyFlag); + // Unique to RombaSharp + Feature purgeDelete = new Feature( + "Purge Delete", + "purge-delete", + "Deletes DAT index entries for orphaned DATs", + FeatureType.Flag); + purgeDelete.AddFeature(workersInt32Input); + purgeDelete.AddFeature(depotListStringInput); + purgeDelete.AddFeature(datsListStringInput); + purgeDelete.AddFeature(logOnlyFlag); - #endregion + #endregion - #region Refresh DATs + #region Refresh DATs - Feature refreshDats = new Feature( - "Refresh DATs", - "refresh-dats", - "Refreshes the DAT index from the files in the DAT master directory tree.", - FeatureType.Flag, - longDescription: @"Refreshes the DAT index from the files in the DAT master directory tree. + Feature refreshDats = new Feature( + "Refresh DATs", + "refresh-dats", + "Refreshes the DAT index from the files in the DAT master directory tree.", + FeatureType.Flag, + longDescription: @"Refreshes the DAT index from the files in the DAT master directory tree. 
Detects any changes in the DAT master directory tree and updates the DAT index accordingly, marking deleted or overwritten dats as orphaned and updating contents of any changed dats."); - refreshDats.AddFeature(workersInt32Input); - refreshDats.AddFeature(missingSha1sStringInput); + refreshDats.AddFeature(workersInt32Input); + refreshDats.AddFeature(missingSha1sStringInput); - #endregion + #endregion - #region Recan Depots + #region Recan Depots - // Unique to RombaSharp - Feature rescanDepots = new Feature( - "Rescan Depots", - "depot-rescan", - "Rescan a specific depot to get new information", - FeatureType.Flag); + // Unique to RombaSharp + Feature rescanDepots = new Feature( + "Rescan Depots", + "depot-rescan", + "Rescan a specific depot to get new information", + FeatureType.Flag); - #endregion + #endregion - #region Shutdown + #region Shutdown - Feature shutdown = new Feature( - "Shutdown", - "shutdown", - "Gracefully shuts down server.", - FeatureType.Flag, - longDescription: @"Gracefully shuts down server saving all the cached data."); + Feature shutdown = new Feature( + "Shutdown", + "shutdown", + "Gracefully shuts down server.", + FeatureType.Flag, + longDescription: @"Gracefully shuts down server saving all the cached data."); - #endregion + #endregion - #region Version + #region Version - Feature version = new Feature( - "Version", - "version", - "Prints version", - FeatureType.Flag, - longDescription: @"Prints version."); + Feature version = new Feature( + "Version", + "version", + "Prints version", + FeatureType.Flag, + longDescription: @"Prints version."); - #endregion + #endregion - // Now, add all of the main features to the Help object - help.Add(helpFeature); - help.Add(script); - help.Add(archive); - help.Add(build); - help.Add(cancel); - help.Add(datstats); - help.Add(dbstats); - help.Add(diffdat); - help.Add(dir2dat); - help.Add(ediffdat); - help.Add(export); - help.Add(fixdat); - help.Add(import); - help.Add(lookup); - help.Add(memstats); - help.Add(merge); - help.Add(miss); - help.Add(purgeBackup); - help.Add(purgeDelete); - help.Add(refreshDats); - help.Add(rescanDepots); - help.Add(progress); - help.Add(shutdown); - help.Add(version); + // Now, add all of the main features to the Help object + help.Add(helpFeature); + help.Add(script); + help.Add(archive); + help.Add(build); + help.Add(cancel); + help.Add(datstats); + help.Add(dbstats); + help.Add(diffdat); + help.Add(dir2dat); + help.Add(ediffdat); + help.Add(export); + help.Add(fixdat); + help.Add(import); + help.Add(lookup); + help.Add(memstats); + help.Add(merge); + help.Add(miss); + help.Add(purgeBackup); + help.Add(purgeDelete); + help.Add(refreshDats); + help.Add(rescanDepots); + help.Add(progress); + help.Add(shutdown); + help.Add(version); - return help; - } - } + return help; + } + } } diff --git a/RombaSharp/RombaSharp.Helpers.cs b/RombaSharp/RombaSharp.Helpers.cs index abd2b518..aa00cc9e 100644 --- a/RombaSharp/RombaSharp.Helpers.cs +++ b/RombaSharp/RombaSharp.Helpers.cs @@ -21,362 +21,362 @@ using StreamWriter = System.IO.StreamWriter; namespace RombaSharp { - public partial class RombaSharp - { - #region Helper methods + public partial class RombaSharp + { + #region Helper methods - /// - /// Gets all valid DATs that match in the DAT root - /// - /// List of input strings to check for, presumably file names - /// Dictionary of hash/full path for each of the valid DATs - private static Dictionary GetValidDats(List inputs) - { - // Get a dictionary of filenames that actually exist in the DATRoot, 
logging which ones are not - List datRootDats = Directory.EnumerateFiles(_dats, "*", SearchOption.AllDirectories).ToList(); - List lowerCaseDats = datRootDats.ConvertAll(i => Path.GetFileName(i).ToLowerInvariant()); - Dictionary foundDats = new Dictionary(); - foreach (string input in inputs) - { - if (lowerCaseDats.Contains(input.ToLowerInvariant())) - { - string fullpath = Path.GetFullPath(datRootDats[lowerCaseDats.IndexOf(input.ToLowerInvariant())]); - string sha1 = Utilities.ByteArrayToString(Utilities.GetFileInfo(fullpath).SHA1); - foundDats.Add(sha1, fullpath); - } - else - { - Globals.Logger.Warning("The file '{0}' could not be found in the DAT root", input); - } - } + /// + /// Gets all valid DATs that match in the DAT root + /// + /// List of input strings to check for, presumably file names + /// Dictionary of hash/full path for each of the valid DATs + private static Dictionary GetValidDats(List inputs) + { + // Get a dictionary of filenames that actually exist in the DATRoot, logging which ones are not + List datRootDats = Directory.EnumerateFiles(_dats, "*", SearchOption.AllDirectories).ToList(); + List lowerCaseDats = datRootDats.ConvertAll(i => Path.GetFileName(i).ToLowerInvariant()); + Dictionary foundDats = new Dictionary(); + foreach (string input in inputs) + { + if (lowerCaseDats.Contains(input.ToLowerInvariant())) + { + string fullpath = Path.GetFullPath(datRootDats[lowerCaseDats.IndexOf(input.ToLowerInvariant())]); + string sha1 = Utilities.ByteArrayToString(Utilities.GetFileInfo(fullpath).SHA1); + foundDats.Add(sha1, fullpath); + } + else + { + Globals.Logger.Warning("The file '{0}' could not be found in the DAT root", input); + } + } - return foundDats; - } + return foundDats; + } - /// - /// Initialize the Romba application from XML config - /// - private static void InitializeConfiguration() - { - // Get default values if they're not written - int workers = 4, - verbosity = 1, - cores = 4, - port = 4003; - string logdir = "logs", - tmpdir = "tmp", - webdir = "web", - baddir = "bad", - dats = "dats", - db = "db", - connectionString = ""; - Dictionary> depots = new Dictionary>(); + /// + /// Initialize the Romba application from XML config + /// + private static void InitializeConfiguration() + { + // Get default values if they're not written + int workers = 4, + verbosity = 1, + cores = 4, + port = 4003; + string logdir = "logs", + tmpdir = "tmp", + webdir = "web", + baddir = "bad", + dats = "dats", + db = "db", + connectionString = ""; + Dictionary> depots = new Dictionary>(); - // Get the XML text reader for the configuration file, if possible - XmlReader xtr = Utilities.GetXmlTextReader(_config); + // Get the XML text reader for the configuration file, if possible + XmlReader xtr = Utilities.GetXmlTextReader(_config); - // Now parse the XML file for settings - if (xtr != null) - { - xtr.MoveToContent(); - while (!xtr.EOF) - { - // We only want elements - if (xtr.NodeType != XmlNodeType.Element) - { - xtr.Read(); - continue; - } + // Now parse the XML file for settings + if (xtr != null) + { + xtr.MoveToContent(); + while (!xtr.EOF) + { + // We only want elements + if (xtr.NodeType != XmlNodeType.Element) + { + xtr.Read(); + continue; + } - switch (xtr.Name) - { - case "workers": - workers = xtr.ReadElementContentAsInt(); - break; - case "logdir": - logdir = xtr.ReadElementContentAsString(); - break; - case "tmpdir": - tmpdir = xtr.ReadElementContentAsString(); - break; - case "webdir": - webdir = xtr.ReadElementContentAsString(); - break; - case "baddir": - 
baddir = xtr.ReadElementContentAsString(); - break; - case "verbosity": - verbosity = xtr.ReadElementContentAsInt(); - break; - case "cores": - cores = xtr.ReadElementContentAsInt(); - break; - case "dats": - dats = xtr.ReadElementContentAsString(); - break; - case "db": - db = xtr.ReadElementContentAsString(); - break; - case "depot": - XmlReader subreader = xtr.ReadSubtree(); - if (subreader != null) - { - string root = ""; - long maxsize = -1; - bool online = true; + switch (xtr.Name) + { + case "workers": + workers = xtr.ReadElementContentAsInt(); + break; + case "logdir": + logdir = xtr.ReadElementContentAsString(); + break; + case "tmpdir": + tmpdir = xtr.ReadElementContentAsString(); + break; + case "webdir": + webdir = xtr.ReadElementContentAsString(); + break; + case "baddir": + baddir = xtr.ReadElementContentAsString(); + break; + case "verbosity": + verbosity = xtr.ReadElementContentAsInt(); + break; + case "cores": + cores = xtr.ReadElementContentAsInt(); + break; + case "dats": + dats = xtr.ReadElementContentAsString(); + break; + case "db": + db = xtr.ReadElementContentAsString(); + break; + case "depot": + XmlReader subreader = xtr.ReadSubtree(); + if (subreader != null) + { + string root = ""; + long maxsize = -1; + bool online = true; - while (!subreader.EOF) - { - // We only want elements - if (subreader.NodeType != XmlNodeType.Element) - { - subreader.Read(); - continue; - } + while (!subreader.EOF) + { + // We only want elements + if (subreader.NodeType != XmlNodeType.Element) + { + subreader.Read(); + continue; + } - switch (subreader.Name) - { - case "root": - root = subreader.ReadElementContentAsString(); - break; - case "maxsize": - maxsize = subreader.ReadElementContentAsLong(); - break; - case "online": - online = subreader.ReadElementContentAsBoolean(); - break; - default: - subreader.Read(); - break; - } - } + switch (subreader.Name) + { + case "root": + root = subreader.ReadElementContentAsString(); + break; + case "maxsize": + maxsize = subreader.ReadElementContentAsLong(); + break; + case "online": + online = subreader.ReadElementContentAsBoolean(); + break; + default: + subreader.Read(); + break; + } + } - try - { - depots.Add(root, new Tuple(maxsize, online)); - } - catch - { - // Ignore add errors - } - } + try + { + depots.Add(root, new Tuple(maxsize, online)); + } + catch + { + // Ignore add errors + } + } - xtr.Skip(); - break; - case "port": - port = xtr.ReadElementContentAsInt(); - break; - default: - xtr.Read(); - break; - } - } - } + xtr.Skip(); + break; + case "port": + port = xtr.ReadElementContentAsInt(); + break; + default: + xtr.Read(); + break; + } + } + } - // Now validate the values given - if (workers < 1) - { - workers = 1; - } - if (workers > 8) - { - workers = 8; - } - if (!Directory.Exists(logdir)) - { - Directory.CreateDirectory(logdir); - } - if (!Directory.Exists(tmpdir)) - { - Directory.CreateDirectory(tmpdir); - } - if (!Directory.Exists(webdir)) - { - Directory.CreateDirectory(webdir); - } - if (!Directory.Exists(baddir)) - { - Directory.CreateDirectory(baddir); - } - if (verbosity < 0) - { - verbosity = 0; - } - if (verbosity > 3) - { - verbosity = 3; - } - if (cores < 1) - { - cores = 1; - } - if (cores > 16) - { - cores = 16; - } - if (!Directory.Exists(dats)) - { - Directory.CreateDirectory(dats); - } - db = Path.GetFileNameWithoutExtension(db) + ".sqlite"; - connectionString = "Data Source=" + db + ";Version = 3;"; - foreach (string key in depots.Keys) - { - if (!Directory.Exists(key)) - { - Directory.CreateDirectory(key); - 
File.CreateText(Path.Combine(key, ".romba_size")); - File.CreateText(Path.Combine(key, ".romba_size.backup")); - } - else - { - if (!File.Exists(Path.Combine(key, ".romba_size"))) - { - File.CreateText(Path.Combine(key, ".romba_size")); - } - if (!File.Exists(Path.Combine(key, ".romba_size.backup"))) - { - File.CreateText(Path.Combine(key, ".romba_size.backup")); - } - } - } - if (port < 0) - { - port = 0; - } - if (port > 65535) - { - port = 65535; - } + // Now validate the values given + if (workers < 1) + { + workers = 1; + } + if (workers > 8) + { + workers = 8; + } + if (!Directory.Exists(logdir)) + { + Directory.CreateDirectory(logdir); + } + if (!Directory.Exists(tmpdir)) + { + Directory.CreateDirectory(tmpdir); + } + if (!Directory.Exists(webdir)) + { + Directory.CreateDirectory(webdir); + } + if (!Directory.Exists(baddir)) + { + Directory.CreateDirectory(baddir); + } + if (verbosity < 0) + { + verbosity = 0; + } + if (verbosity > 3) + { + verbosity = 3; + } + if (cores < 1) + { + cores = 1; + } + if (cores > 16) + { + cores = 16; + } + if (!Directory.Exists(dats)) + { + Directory.CreateDirectory(dats); + } + db = Path.GetFileNameWithoutExtension(db) + ".sqlite"; + connectionString = "Data Source=" + db + ";Version = 3;"; + foreach (string key in depots.Keys) + { + if (!Directory.Exists(key)) + { + Directory.CreateDirectory(key); + File.CreateText(Path.Combine(key, ".romba_size")); + File.CreateText(Path.Combine(key, ".romba_size.backup")); + } + else + { + if (!File.Exists(Path.Combine(key, ".romba_size"))) + { + File.CreateText(Path.Combine(key, ".romba_size")); + } + if (!File.Exists(Path.Combine(key, ".romba_size.backup"))) + { + File.CreateText(Path.Combine(key, ".romba_size.backup")); + } + } + } + if (port < 0) + { + port = 0; + } + if (port > 65535) + { + port = 65535; + } - // Finally set all of the fields - Globals.MaxThreads = workers; - _logdir = logdir; - _tmpdir = tmpdir; - _webdir = webdir; - _baddir = baddir; - _verbosity = verbosity; - _cores = cores; - _dats = dats; - _db = db; - _connectionString = connectionString; - _depots = depots; - _port = port; - } + // Finally set all of the fields + Globals.MaxThreads = workers; + _logdir = logdir; + _tmpdir = tmpdir; + _webdir = webdir; + _baddir = baddir; + _verbosity = verbosity; + _cores = cores; + _dats = dats; + _db = db; + _connectionString = connectionString; + _depots = depots; + _port = port; + } - /// - /// Add a new DAT to the database - /// - /// DatFile hash information to add - /// Database connection to use - private static void AddDatToDatabase(Rom dat, SqliteConnection dbc) - { - // Get the dat full path - string fullpath = Path.Combine(_dats, (dat.MachineName == "dats" ? "" : dat.MachineName), dat.Name); + /// + /// Add a new DAT to the database + /// + /// DatFile hash information to add + /// Database connection to use + private static void AddDatToDatabase(Rom dat, SqliteConnection dbc) + { + // Get the dat full path + string fullpath = Path.Combine(_dats, (dat.MachineName == "dats" ? 
"" : dat.MachineName), dat.Name); - // Parse the Dat if possible - Globals.Logger.User("Adding from '" + dat.Name + "'"); - DatFile tempdat = new DatFile(); - tempdat.Parse(fullpath, 0, 0); + // Parse the Dat if possible + Globals.Logger.User("Adding from '" + dat.Name + "'"); + DatFile tempdat = new DatFile(); + tempdat.Parse(fullpath, 0, 0); - // If the Dat wasn't empty, add the information - SqliteCommand slc = new SqliteCommand(); - if (tempdat.Count != 0) - { - string crcquery = "INSERT OR IGNORE INTO crc (crc) VALUES"; - string md5query = "INSERT OR IGNORE INTO md5 (md5) VALUES"; - string sha1query = "INSERT OR IGNORE INTO sha1 (sha1) VALUES"; - string crcsha1query = "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES"; - string md5sha1query = "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES"; + // If the Dat wasn't empty, add the information + SqliteCommand slc = new SqliteCommand(); + if (tempdat.Count != 0) + { + string crcquery = "INSERT OR IGNORE INTO crc (crc) VALUES"; + string md5query = "INSERT OR IGNORE INTO md5 (md5) VALUES"; + string sha1query = "INSERT OR IGNORE INTO sha1 (sha1) VALUES"; + string crcsha1query = "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES"; + string md5sha1query = "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES"; - // Loop through the parsed entries - foreach (string romkey in tempdat.Keys) - { - foreach (DatItem datItem in tempdat[romkey]) - { - Globals.Logger.Verbose("Checking and adding file '{0}'", datItem.Name); + // Loop through the parsed entries + foreach (string romkey in tempdat.Keys) + { + foreach (DatItem datItem in tempdat[romkey]) + { + Globals.Logger.Verbose("Checking and adding file '{0}'", datItem.Name); - if (datItem.ItemType == ItemType.Rom) - { - Rom rom = (Rom)datItem; + if (datItem.ItemType == ItemType.Rom) + { + Rom rom = (Rom)datItem; - if (!String.IsNullOrWhiteSpace(rom.CRC)) - { - crcquery += " (\"" + rom.CRC + "\"),"; - } - if (!String.IsNullOrWhiteSpace(rom.MD5)) - { - md5query += " (\"" + rom.MD5 + "\"),"; - } - if (!String.IsNullOrWhiteSpace(rom.SHA1)) - { - sha1query += " (\"" + rom.SHA1 + "\"),"; + if (!String.IsNullOrWhiteSpace(rom.CRC)) + { + crcquery += " (\"" + rom.CRC + "\"),"; + } + if (!String.IsNullOrWhiteSpace(rom.MD5)) + { + md5query += " (\"" + rom.MD5 + "\"),"; + } + if (!String.IsNullOrWhiteSpace(rom.SHA1)) + { + sha1query += " (\"" + rom.SHA1 + "\"),"; - if (!String.IsNullOrWhiteSpace(rom.CRC)) - { - crcsha1query += " (\"" + rom.CRC + "\", \"" + rom.SHA1 + "\"),"; - } - if (!String.IsNullOrWhiteSpace(rom.MD5)) - { - md5sha1query += " (\"" + rom.MD5 + "\", \"" + rom.SHA1 + "\"),"; - } - } - } - else if (datItem.ItemType == ItemType.Disk) - { - Disk disk = (Disk)datItem; + if (!String.IsNullOrWhiteSpace(rom.CRC)) + { + crcsha1query += " (\"" + rom.CRC + "\", \"" + rom.SHA1 + "\"),"; + } + if (!String.IsNullOrWhiteSpace(rom.MD5)) + { + md5sha1query += " (\"" + rom.MD5 + "\", \"" + rom.SHA1 + "\"),"; + } + } + } + else if (datItem.ItemType == ItemType.Disk) + { + Disk disk = (Disk)datItem; - if (!String.IsNullOrWhiteSpace(disk.MD5)) - { - md5query += " (\"" + disk.MD5 + "\"),"; - } - if (!String.IsNullOrWhiteSpace(disk.SHA1)) - { - sha1query += " (\"" + disk.SHA1 + "\"),"; + if (!String.IsNullOrWhiteSpace(disk.MD5)) + { + md5query += " (\"" + disk.MD5 + "\"),"; + } + if (!String.IsNullOrWhiteSpace(disk.SHA1)) + { + sha1query += " (\"" + disk.SHA1 + "\"),"; - if (!String.IsNullOrWhiteSpace(disk.MD5)) - { - md5sha1query += " (\"" + disk.MD5 + "\", \"" + disk.SHA1 + "\"),"; - } - } - } - } - } + if 
(!String.IsNullOrWhiteSpace(disk.MD5)) + { + md5sha1query += " (\"" + disk.MD5 + "\", \"" + disk.SHA1 + "\"),"; + } + } + } + } + } - // Now run the queries after fixing them - if (crcquery != "INSERT OR IGNORE INTO crc (crc) VALUES") - { - slc = new SqliteCommand(crcquery.TrimEnd(','), dbc); - slc.ExecuteNonQuery(); - } - if (md5query != "INSERT OR IGNORE INTO md5 (md5) VALUES") - { - slc = new SqliteCommand(md5query.TrimEnd(','), dbc); - slc.ExecuteNonQuery(); - } - if (sha1query != "INSERT OR IGNORE INTO sha1 (sha1) VALUES") - { - slc = new SqliteCommand(sha1query.TrimEnd(','), dbc); - slc.ExecuteNonQuery(); - } - if (crcsha1query != "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES") - { - slc = new SqliteCommand(crcsha1query.TrimEnd(','), dbc); - slc.ExecuteNonQuery(); - } - if (md5sha1query != "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES") - { - slc = new SqliteCommand(md5sha1query.TrimEnd(','), dbc); - slc.ExecuteNonQuery(); - } - } + // Now run the queries after fixing them + if (crcquery != "INSERT OR IGNORE INTO crc (crc) VALUES") + { + slc = new SqliteCommand(crcquery.TrimEnd(','), dbc); + slc.ExecuteNonQuery(); + } + if (md5query != "INSERT OR IGNORE INTO md5 (md5) VALUES") + { + slc = new SqliteCommand(md5query.TrimEnd(','), dbc); + slc.ExecuteNonQuery(); + } + if (sha1query != "INSERT OR IGNORE INTO sha1 (sha1) VALUES") + { + slc = new SqliteCommand(sha1query.TrimEnd(','), dbc); + slc.ExecuteNonQuery(); + } + if (crcsha1query != "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES") + { + slc = new SqliteCommand(crcsha1query.TrimEnd(','), dbc); + slc.ExecuteNonQuery(); + } + if (md5sha1query != "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES") + { + slc = new SqliteCommand(md5sha1query.TrimEnd(','), dbc); + slc.ExecuteNonQuery(); + } + } - string datquery = "INSERT OR IGNORE INTO dat (hash) VALUES (\"" + dat.SHA1 + "\")"; - slc = new SqliteCommand(datquery, dbc); - slc.ExecuteNonQuery(); - slc.Dispose(); - } + string datquery = "INSERT OR IGNORE INTO dat (hash) VALUES (\"" + dat.SHA1 + "\")"; + slc = new SqliteCommand(datquery, dbc); + slc.ExecuteNonQuery(); + slc.Dispose(); + } - #endregion - } + #endregion + } } diff --git a/RombaSharp/RombaSharp.Inits.cs b/RombaSharp/RombaSharp.Inits.cs index 0fc22124..1c87a861 100644 --- a/RombaSharp/RombaSharp.Inits.cs +++ b/RombaSharp/RombaSharp.Inits.cs @@ -20,1113 +20,1113 @@ using StreamWriter = System.IO.StreamWriter; namespace RombaSharp { - public partial class RombaSharp - { - #region Init Methods - - /// - /// Wrap adding files to the depots - /// - /// List of input folders to use - /// True if only files in the database and don't exist are added, false otherwise - /// Resume a previously interrupted operation from the specified path - /// flag value == 0 means: add Zip files themselves into the depot in addition to their contents, flag value == 2 means add Zip files themselves but don't add content - /// How many workers to launch for the job, default from config - /// flag value == 0 means: add GZip files themselves into the depot in addition to their contents, flag value == 2 means add GZip files themselves but don't add content - /// flag value == 0 means: add 7Zip files themselves into the depot in addition to their contents, flag value == 2 means add 7Zip files themselves but don't add content - /// True to skip the initial scan of the files to determine amount of work, false otherwise - /// True to use go zip implementation instead of zlib, false otherwise - /// True to archive into depot but do not touch DB index 
and ignore only-needed flag, false otherwise - /// TODO: Add ability to update .romba files with proper size AND use the correct depot if it fills up - /// TODO: Add ability correctly to mark which depot the files are being rebuilt to in the DB - private static void InitArchive( - List inputs, - bool onlyNeeded, - string resume, - int includeZips, - int workers, - int includeGZips, - int include7Zips, - bool skipInitialScan, - bool useGolangZip, // Obsolete - bool noDb) - { - // First we want to get just all directories from the inputs - List onlyDirs = new List(); - foreach (string input in inputs) - { - if (Directory.Exists(input)) - { - onlyDirs.Add(Path.GetFullPath(input)); - } - } - - // Then process all of the input directories into an internal DAT - DatFile df = new DatFile(); - foreach (string dir in onlyDirs) - { - // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually - df.PopulateFromDir(dir, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, null, true, null); - df.PopulateFromDir(dir, Hash.DeepHashes, false, true, SkipFileType.None, false, false, _tmpdir, false, null, true, null); - } - - // Create an empty Dat for files that need to be rebuilt - DatFile need = new DatFile(); - - // Open the database connection - SqliteConnection dbc = new SqliteConnection(_connectionString); - dbc.Open(); - - // Now that we have the Dats, add the files to the database - string crcquery = "INSERT OR IGNORE INTO crc (crc) VALUES"; - string md5query = "INSERT OR IGNORE INTO md5 (md5) VALUES"; - string sha1query = "INSERT OR IGNORE INTO sha1 (sha1, depot) VALUES"; - string crcsha1query = "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES"; - string md5sha1query = "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES"; - - foreach (string key in df.Keys) - { - List datItems = df[key]; - foreach (Rom rom in datItems) - { - // If we care about if the file exists, check the databse first - if (onlyNeeded && !noDb) - { - string query = "SELECT * FROM crcsha1 JOIN md5sha1 ON crcsha1.sha1=md5sha1.sha1" - + " WHERE crcsha1.crc=\"" + rom.CRC + "\"" - + " OR md5sha1.md5=\"" + rom.MD5 + "\"" - + " OR md5sha1.sha1=\"" + rom.SHA1 + "\""; - SqliteCommand slc = new SqliteCommand(query, dbc); - SqliteDataReader sldr = slc.ExecuteReader(); - - if (sldr.HasRows) - { - // Add to the queries - if (!String.IsNullOrWhiteSpace(rom.CRC)) - { - crcquery += " (\"" + rom.CRC + "\"),"; - } - if (!String.IsNullOrWhiteSpace(rom.MD5)) - { - md5query += " (\"" + rom.MD5 + "\"),"; - } - if (!String.IsNullOrWhiteSpace(rom.SHA1)) - { - sha1query += " (\"" + rom.SHA1 + "\", \"" + _depots.Keys.ToList()[0] + "\"),"; - - if (!String.IsNullOrWhiteSpace(rom.CRC)) - { - crcsha1query += " (\"" + rom.CRC + "\", \"" + rom.SHA1 + "\"),"; - } - if (!String.IsNullOrWhiteSpace(rom.MD5)) - { - md5sha1query += " (\"" + rom.MD5 + "\", \"" + rom.SHA1 + "\"),"; - } - } - - // Add to the Dat - need.Add(key, rom); - } - } - // Otherwise, just add the file to the list - else - { - // Add to the queries - if (!noDb) - { - if (!String.IsNullOrWhiteSpace(rom.CRC)) - { - crcquery += " (\"" + rom.CRC + "\"),"; - } - if (!String.IsNullOrWhiteSpace(rom.MD5)) - { - md5query += " (\"" + rom.MD5 + "\"),"; - } - if (!String.IsNullOrWhiteSpace(rom.SHA1)) - { - sha1query += " (\"" + rom.SHA1 + "\", \"" + _depots.Keys.ToList()[0] + "\"),"; - - if (!String.IsNullOrWhiteSpace(rom.CRC)) - { - crcsha1query += " (\"" + rom.CRC + "\", \"" + rom.SHA1 + "\"),"; - } - if (!String.IsNullOrWhiteSpace(rom.MD5)) - { - md5sha1query += 
" (\"" + rom.MD5 + "\", \"" + rom.SHA1 + "\"),"; - } - } - } - - // Add to the Dat - need.Add(key, rom); - } - } - } - - // Now run the queries, if they're populated - if (crcquery != "INSERT OR IGNORE INTO crc (crc) VALUES") - { - SqliteCommand slc = new SqliteCommand(crcquery.TrimEnd(','), dbc); - slc.ExecuteNonQuery(); - slc.Dispose(); - } - if (md5query != "INSERT OR IGNORE INTO md5 (md5) VALUES") - { - SqliteCommand slc = new SqliteCommand(md5query.TrimEnd(','), dbc); - slc.ExecuteNonQuery(); - slc.Dispose(); - } - if (sha1query != "INSERT OR IGNORE INTO sha1 (sha1, depot) VALUES") - { - SqliteCommand slc = new SqliteCommand(sha1query.TrimEnd(','), dbc); - slc.ExecuteNonQuery(); - slc.Dispose(); - } - if (crcsha1query != "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES") - { - SqliteCommand slc = new SqliteCommand(crcsha1query.TrimEnd(','), dbc); - slc.ExecuteNonQuery(); - slc.Dispose(); - } - if (md5sha1query != "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES") - { - SqliteCommand slc = new SqliteCommand(md5sha1query.TrimEnd(','), dbc); - slc.ExecuteNonQuery(); - slc.Dispose(); - } - - // Create the sorting object to use and rebuild the needed files - ArchiveScanLevel asl = Utilities.GetArchiveScanLevelFromNumbers(include7Zips, includeGZips, 2, includeZips); - need.RebuildGeneric(onlyDirs, _depots.Keys.ToList()[0], false /*quickScan*/, false /*date*/, - false /*delete*/, false /*inverse*/, OutputFormat.TorrentGzip, true /*romba*/, asl, false /*updateDat*/, - null /*headerToCheckAgainst*/, true /* chdsAsFiles */); - } - - /// - /// Wrap building all files from a set of DATs - /// - /// List of input DATs to rebuild from - /// Output file - /// True to only fix dats and don't generate torrentzips, false otherwise - /// True if files should be copied to output, false for rebuild - /// How many workers to launch for the job, default from config - /// How many subworkers to launch for each worker, default from config - private static void InitBuild( - List inputs, - string outdat, - bool fixdatOnly, - bool copy, - int workers, - int subworkers) - { - // Verify the filenames - Dictionary foundDats = GetValidDats(inputs); - - // Ensure the output directory is set - if (String.IsNullOrWhiteSpace(outdat)) - { - outdat = "out"; - } - - // Now that we have the dictionary, we can loop through and output to a new folder for each - foreach (string key in foundDats.Keys) - { - // Get the DAT file associated with the key - DatFile datFile = new DatFile(); - datFile.Parse(Path.Combine(_dats, foundDats[key]), 0, 0); - - // Create the new output directory if it doesn't exist - string outputFolder = Path.Combine(outdat, Path.GetFileNameWithoutExtension(foundDats[key])); - Utilities.EnsureOutputDirectory(outputFolder, create: true); - - // Get all online depots - List onlineDepots = _depots.Where(d => d.Value.Item2).Select(d => d.Key).ToList(); - - // Now scan all of those depots and rebuild - ArchiveScanLevel asl = Utilities.GetArchiveScanLevelFromNumbers(1, 1, 1, 1); - datFile.RebuildDepot(onlineDepots, outputFolder, false /*date*/, - false /*delete*/, false /*inverse*/, (copy ? 
OutputFormat.TorrentGzip : OutputFormat.TorrentZip), copy, - false /*updateDat*/, null /*headerToCheckAgainst*/); - } - } - - /// - /// Wrap cancelling a long-running job - /// - /// TODO: Implement - private static void InitCancel() - { - Globals.Logger.User("This feature is not yet implemented: cancel"); - } - - /// - /// Wrap printing dat stats - /// - /// List of input DATs to get stats from - private static void InitDatStats(List inputs) - { - // If we have no inputs listed, we want to use datroot - if (inputs == null || inputs.Count == 0) - { - inputs = new List(); - inputs.Add(Path.GetFullPath(_dats)); - } - - // Now output the stats for all inputs - DatFile.OutputStats(inputs, "rombasharp-datstats", null /* outDir */, true /* single */, true /* baddumpCol */, true /* nodumpCol */, StatReportFormat.Textfile); - } - - /// - /// Wrap printing db stats - /// - private static void InitDbStats() - { - SqliteConnection dbc = new SqliteConnection(_connectionString); - dbc.Open(); - - // Total number of CRCs - string query = "SELECT COUNT(*) FROM crc"; - SqliteCommand slc = new SqliteCommand(query, dbc); - Globals.Logger.User("Total CRCs: {0}", (long)slc.ExecuteScalar()); - - // Total number of MD5s - query = "SELECT COUNT(*) FROM md5"; - slc = new SqliteCommand(query, dbc); - Globals.Logger.User("Total MD5s: {0}", (long)slc.ExecuteScalar()); - - // Total number of SHA1s - query = "SELECT COUNT(*) FROM sha1"; - slc = new SqliteCommand(query, dbc); - Globals.Logger.User("Total SHA1s: {0}", (long)slc.ExecuteScalar()); - - // Total number of DATs - query = "SELECT COUNT(*) FROM dat"; - slc = new SqliteCommand(query, dbc); - Globals.Logger.User("Total DATs: {0}", (long)slc.ExecuteScalar()); - - slc.Dispose(); - dbc.Dispose(); - } - - /// - /// Wrap creating a diffdat for a given old and new dat - /// - /// Output file - /// Old DAT file - /// New DAT file - /// Name value in DAT header - /// Description value in DAT header - private static void InitDiffDat( - string outdat, - string old, - string newdat, - string name, - string description) - { - // Ensure the output directory - Utilities.EnsureOutputDirectory(outdat, create: true); - - // Check that all required files exist - if (!File.Exists(old)) - { - Globals.Logger.Error("File '{0}' does not exist!", old); - return; - } - if (!File.Exists(newdat)) - { - Globals.Logger.Error("File '{0}' does not exist!", newdat); - return; - } - - // Create the encapsulating datfile - DatFile datfile = new DatFile() - { - Name = name, - Description = description, - }; - - // Create the inputs - List dats = new List(); - dats.Add(newdat); - List basedats = new List(); - basedats.Add(old); - - // Now run the diff on the inputs - datfile.DetermineUpdateType(dats, basedats, outdat, UpdateMode.DiffAgainst, false /* inplace */, false /* skip */, - false /* clean */, false /* remUnicode */, false /* descAsName */, new Filter(), SplitType.None, - ReplaceMode.None, false /* onlySame */); - } - - /// - /// Wrap creating a dir2dat from a given source - /// - /// Output file - /// Source directory - /// Name value in DAT header - /// Description value in DAT header - private static void InitDir2Dat( - string outdat, - string source, - string name, - string description) - { - // Ensure the output directory - Utilities.EnsureOutputDirectory(outdat, create: true); - - // Check that all required directories exist - if (!Directory.Exists(source)) - { - Globals.Logger.Error("File '{0}' does not exist!", source); - return; - } - - // Create the encapsulating datfile - DatFile 
datfile = new DatFile() - { - Name = (String.IsNullOrWhiteSpace(name) ? "untitled" : name), - Description = description, - }; - - // Now run the D2D on the input and write out - // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually - datfile.PopulateFromDir(source, Hash.DeepHashes, true /* bare */, false /* archivesAsFiles */, SkipFileType.None, false /* addBlanks */, - false /* addDate */, _tmpdir, false /* copyFiles */, null /* headerToCheckAgainst */, true /* chdsAsFiles */, null /* filter */); - datfile.Write(outDir: outdat); - } - - /// - /// Wrap creating a diffdat for a given old and new dat - /// - /// Output file - /// Old DAT file - /// New DAT file - private static void InitEDiffDat( - string outdat, - string old, - string newdat) - { - // Ensure the output directory - Utilities.EnsureOutputDirectory(outdat, create: true); - - // Check that all required files exist - if (!File.Exists(old)) - { - Globals.Logger.Error("File '{0}' does not exist!", old); - return; - } - if (!File.Exists(newdat)) - { - Globals.Logger.Error("File '{0}' does not exist!", newdat); - return; - } - - // Create the encapsulating datfile - DatFile datfile = new DatFile(); - - // Create the inputs - List dats = new List(); - dats.Add(newdat); - List basedats = new List(); - basedats.Add(old); - - // Now run the diff on the inputs - datfile.DetermineUpdateType(dats, basedats, outdat, UpdateMode.DiffAgainst, false /* inplace */, false /* skip */, - false /* clean */, false /* remUnicode */, false /* descAsName */, new Filter(), SplitType.None, - ReplaceMode.None, false /* onlySame */); - } - - /// - /// Wrap exporting the database to CSV - /// - /// TODO: Add ability to say which depot the files are found in - private static void InitExport() - { - SqliteConnection dbc = new SqliteConnection(_connectionString); - dbc.Open(); - StreamWriter sw = new StreamWriter(Utilities.TryCreate("export.csv")); - - // First take care of all file hashes - sw.WriteLine("CRC,MD5,SHA-1"); // ,Depot - - string query = "SELECT crcsha1.crc, md5sha1.md5, md5sha1.sha1 FROM crcsha1 JOIN md5sha1 ON crcsha1.sha1=md5sha1.sha1"; // md5sha1.sha1=sha1depot.sha1 - SqliteCommand slc = new SqliteCommand(query, dbc); - SqliteDataReader sldr = slc.ExecuteReader(); - - if (sldr.HasRows) - { - while (sldr.Read()) - { - string line = sldr.GetString(0) + "," - + sldr.GetString(1) + "," - + sldr.GetString(2); // + "," - // + sldr.GetString(3); - sw.WriteLine(line); - } - } - - // Then take care of all DAT hashes - sw.WriteLine(); - sw.WriteLine("DAT Hash"); - - query = "SELECT hash FROM dat"; - slc = new SqliteCommand(query, dbc); - sldr = slc.ExecuteReader(); - - if (sldr.HasRows) - { - while (sldr.Read()) - { - sw.WriteLine(sldr.GetString(0)); - } - } - - sldr.Dispose(); - slc.Dispose(); - sw.Dispose(); - dbc.Dispose(); - } - - /// - /// Wrap creating a fixdat for each Dat - /// - /// List of input DATs to get fixdats for - /// Output directory - /// True to only fix dats and don't generate torrentzips, false otherwise - /// How many workers to launch for the job, default from config - /// How many subworkers to launch for each worker, default from config - /// TODO: Implement - private static void InitFixdat( - List inputs, - string outdat, - bool fixdatOnly, - int workers, - int subworkers) - { - Globals.Logger.Error("This feature is not yet implemented: fixdat"); - } - - /// - /// Wrap importing CSVs into the database - /// - /// List of input CSV files to import information from - private static void InitImport(List 
inputs) - { - Globals.Logger.Error("This feature is not yet implemented: import"); - - // First ensure the inputs and database connection - inputs = Utilities.GetOnlyFilesFromInputs(inputs); - SqliteConnection dbc = new SqliteConnection(_connectionString); - SqliteCommand slc = new SqliteCommand(); - dbc.Open(); - - // Now, for each of these files, attempt to add the data found inside - foreach (string input in inputs) - { - StreamReader sr = new StreamReader(Utilities.TryOpenRead(input)); - - // The first line should be the hash header - string line = sr.ReadLine(); - if (line != "CRC,MD5,SHA-1") // ,Depot - { - Globals.Logger.Error("{0} is not a valid export file"); - continue; - } - - // Define the insert queries - string crcquery = "INSERT OR IGNORE INTO crc (crc) VALUES"; - string md5query = "INSERT OR IGNORE INTO md5 (md5) VALUES"; - string sha1query = "INSERT OR IGNORE INTO sha1 (sha1) VALUES"; - string crcsha1query = "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES"; - string md5sha1query = "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES"; - - // For each line until we hit a blank line... - while (!sr.EndOfStream && line != "") - { - line = sr.ReadLine(); - string[] hashes = line.Split(','); - - // Loop through the parsed entries - if (!String.IsNullOrWhiteSpace(hashes[0])) - { - crcquery += " (\"" + hashes[0] + "\"),"; - } - if (!String.IsNullOrWhiteSpace(hashes[1])) - { - md5query += " (\"" + hashes[1] + "\"),"; - } - if (!String.IsNullOrWhiteSpace(hashes[2])) - { - sha1query += " (\"" + hashes[2] + "\"),"; - - if (!String.IsNullOrWhiteSpace(hashes[0])) - { - crcsha1query += " (\"" + hashes[0] + "\", \"" + hashes[2] + "\"),"; - } - if (!String.IsNullOrWhiteSpace(hashes[1])) - { - md5sha1query += " (\"" + hashes[1] + "\", \"" + hashes[2] + "\"),"; - } - } - } - - // Now run the queries after fixing them - if (crcquery != "INSERT OR IGNORE INTO crc (crc) VALUES") - { - slc = new SqliteCommand(crcquery.TrimEnd(','), dbc); - slc.ExecuteNonQuery(); - } - if (md5query != "INSERT OR IGNORE INTO md5 (md5) VALUES") - { - slc = new SqliteCommand(md5query.TrimEnd(','), dbc); - slc.ExecuteNonQuery(); - } - if (sha1query != "INSERT OR IGNORE INTO sha1 (sha1) VALUES") - { - slc = new SqliteCommand(sha1query.TrimEnd(','), dbc); - slc.ExecuteNonQuery(); - } - if (crcsha1query != "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES") - { - slc = new SqliteCommand(crcsha1query.TrimEnd(','), dbc); - slc.ExecuteNonQuery(); - } - if (md5sha1query != "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES") - { - slc = new SqliteCommand(md5sha1query.TrimEnd(','), dbc); - slc.ExecuteNonQuery(); - } - - // Now add all of the DAT hashes - // TODO: Do we really need to save the DAT hashes? 
- - sr.Dispose(); - } - - slc.Dispose(); - dbc.Dispose(); - } - - /// - /// Wrap looking up if hashes exist in the database - /// - /// List of input strings representing hashes to check for - /// Size to limit hash by, -1 otherwise - /// Output directory - private static void InitLookup( - List inputs, - long size, - string outdat) - { - // First, try to figure out what type of hash each is by length and clean it - List crc = new List(); - List md5 = new List(); - List sha1 = new List(); - foreach (string input in inputs) - { - string temp = ""; - if (input.Length == Constants.CRCLength) - { - temp = Utilities.CleanHashData(input, Constants.CRCLength); - if (!String.IsNullOrWhiteSpace(temp)) - { - crc.Add(temp); - } - } - else if (input.Length == Constants.MD5Length) - { - temp = Utilities.CleanHashData(input, Constants.MD5Length); - if (!String.IsNullOrWhiteSpace(temp)) - { - md5.Add(temp); - } - } - else if (input.Length == Constants.SHA1Length) - { - temp = Utilities.CleanHashData(input, Constants.SHA1Length); - if (!String.IsNullOrWhiteSpace(temp)) - { - sha1.Add(temp); - } - } - } - - SqliteConnection dbc = new SqliteConnection(_connectionString); - dbc.Open(); - - // Now, search for each of them and return true or false for each - foreach (string input in crc) - { - string query = "SELECT * FROM crc WHERE crc=\"" + input + "\""; - SqliteCommand slc = new SqliteCommand(query, dbc); - SqliteDataReader sldr = slc.ExecuteReader(); - if (sldr.HasRows) - { - int count = 0; - while (sldr.Read()) - { - count++; - } - - Globals.Logger.User("For hash '{0}' there were {1} matches in the database", input, count); - } - else - { - Globals.Logger.User("Hash '{0}' had no matches in the database", input); - } - - sldr.Dispose(); - slc.Dispose(); - } - foreach (string input in md5) - { - string query = "SELECT * FROM md5 WHERE md5=\"" + input + "\""; - SqliteCommand slc = new SqliteCommand(query, dbc); - SqliteDataReader sldr = slc.ExecuteReader(); - if (sldr.HasRows) - { - int count = 0; - while (sldr.Read()) - { - count++; - } - - Globals.Logger.User("For hash '{0}' there were {1} matches in the database", input, count); - } - else - { - Globals.Logger.User("Hash '{0}' had no matches in the database", input); - } - - sldr.Dispose(); - slc.Dispose(); - } - foreach (string input in sha1) - { - string query = "SELECT * FROM sha1 WHERE sha1=\"" + input + "\""; - SqliteCommand slc = new SqliteCommand(query, dbc); - SqliteDataReader sldr = slc.ExecuteReader(); - if (sldr.HasRows) - { - int count = 0; - while (sldr.Read()) - { - count++; - } - - Globals.Logger.User("For hash '{0}' there were {1} matches in the database", input, count); - } - else - { - Globals.Logger.User("Hash '{0}' had no matches in the database", input); - } - - sldr.Dispose(); - slc.Dispose(); - } - - dbc.Dispose(); - } - - /// - /// Wrap printing memory stats - /// - /// TODO: Implement - private static void InitMemstats() - { - Globals.Logger.User("This feature is not yet implemented: memstats"); - } - - /// - /// Wrap merging an external depot into an existing one - /// - /// List of input depots to merge in - /// True if only files in the database and don't exist are added, false otherwise - /// Resume a previously interrupted operation from the specified path - /// How many workers to launch for the job, default from config - /// True to skip the initial scan of the files to determine amount of work, false otherwise - /// TODO: Add way of specifying "current depot" since that's what Romba relies on - /// TODO: Implement - 
private static void InitMerge( - List inputs, - bool onlyNeeded, - string resume, - int workers, - bool skipInitialscan) - { - Globals.Logger.Error("This feature is not yet implemented: merge"); - - // Verify that the inputs are valid directories - inputs = Utilities.GetOnlyDirectoriesFromInputs(inputs); - - // Loop over all input directories - foreach (string input in inputs) - { - List depotFiles = Directory.EnumerateFiles(input, "*.gz", SearchOption.AllDirectories).ToList(); - - // If we are copying all that is possible but we want to scan first - if (!onlyNeeded && !skipInitialscan) - { - - } - // If we are copying all that is possible but we don't care to scan first - else if (!onlyNeeded && skipInitialscan) - { - - } - // If we are copying only what is needed but we want to scan first - else if (onlyNeeded && !skipInitialscan) - { - - } - // If we are copying only what is needed but we don't care to scan first - else if (onlyNeeded && skipInitialscan) - { - - } - } - } - - /// - /// Wrap creating a havefile and a missfile for each Dat - /// - /// List of DAT files to get a miss and have for, empty means all - /// TODO: Implement - private static void InitMiss(List inputs) - { - // Verify the filenames - Dictionary foundDats = GetValidDats(inputs); - - // Create the new output directory if it doesn't exist - Utilities.EnsureOutputDirectory(Path.Combine(Globals.ExeDir, "out"), create: true); - - // Now that we have the dictionary, we can loop through and output to a new folder for each - foreach (string key in foundDats.Keys) - { - // Get the DAT file associated with the key - DatFile datFile = new DatFile(); - datFile.Parse(Path.Combine(_dats, foundDats[key]), 0, 0); - - // Now loop through and see if all of the hash combinations exist in the database - /* ended here */ - } - - Globals.Logger.Error("This feature is not yet implemented: miss"); - } - - /// - /// Wrap showing progress of currently running command - /// - /// TODO: Implement - private static void InitProgress() - { - Globals.Logger.User("This feature is not yet implemented: progress"); - } - - /// - /// Wrap backing up of no longer needed files from the depots - /// - /// Backup directory where backup files are moved to - /// How many workers to launch for the job, default from config - /// List of depots to scan files in, empty means all - /// List of DATs to use as the basis of scanning, empty means all - /// True if only the output of the operation is shown, false to actually run - /// TODO: Implement - private static void InitPurgeBackup( - string backup, - int workers, - List depot, - List dats, - bool logOnly) - { - Globals.Logger.Error("This feature is not yet implemented: purge-backup"); - } - - /// - /// Wrap deleting of no longer needed files from the depots - /// - /// How many workers to launch for the job, default from config - /// List of depots to scan files in, empty means all - /// List of DATs to use as the basis of scanning, empty means all - /// True if only the output of the operation is shown, false to actually run - /// TODO: Implement - private static void InitPurgeDelete( - int workers, - List depot, - List dats, - bool logOnly) - { - Globals.Logger.Error("This feature is not yet implemented: purge-delete"); - } - - /// - /// Wrap refreshing the database with potentially new dats - /// - /// How many workers to launch for the job, default from config - /// Write paths of dats with missing sha1s into this file - private static void InitRefreshDats( - int workers, - string missingSha1s) - { - // 
Make sure the db is set - if (String.IsNullOrWhiteSpace(_db)) - { - _db = "db.sqlite"; - _connectionString = "Data Source=" + _db + ";Version = 3;"; - } - - // Make sure the file exists - if (!File.Exists(_db)) - { - DatabaseTools.EnsureDatabase(_dbSchema, _db, _connectionString); - } - - // Make sure the dats dir is set - if (String.IsNullOrWhiteSpace(_dats)) - { - _dats = "dats"; - } - - _dats = Path.Combine(Globals.ExeDir, _dats); - - // Make sure the folder exists - if (!Directory.Exists(_dats)) - { - Directory.CreateDirectory(_dats); - } - - // First get a list of SHA-1's from the input DATs - DatFile datroot = new DatFile { Type = "SuperDAT", }; - // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually - datroot.PopulateFromDir(_dats, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, null, true, null); - datroot.BucketBy(SortedBy.SHA1, DedupeType.None); - - // Create a List of dat hashes in the database (SHA-1) - List databaseDats = new List(); - List unneeded = new List(); - - SqliteConnection dbc = new SqliteConnection(_connectionString); - dbc.Open(); - - // Populate the List from the database - InternalStopwatch watch = new InternalStopwatch("Populating the list of existing DATs"); - - string query = "SELECT DISTINCT hash FROM dat"; - SqliteCommand slc = new SqliteCommand(query, dbc); - SqliteDataReader sldr = slc.ExecuteReader(); - if (sldr.HasRows) - { - sldr.Read(); - string hash = sldr.GetString(0); - if (datroot.Contains(hash)) - { - datroot.Remove(hash); - databaseDats.Add(hash); - } - else if (!databaseDats.Contains(hash)) - { - unneeded.Add(hash); - } - } - datroot.BucketBy(SortedBy.Game, DedupeType.None, norename: true); - - watch.Stop(); - - slc.Dispose(); - sldr.Dispose(); - - // Loop through the Dictionary and add all data - watch.Start("Adding new DAT information"); - foreach (string key in datroot.Keys) - { - foreach (Rom value in datroot[key]) - { - AddDatToDatabase(value, dbc); - } - } - - watch.Stop(); - - // Now loop through and remove all references to old Dats - if (unneeded.Count > 0) - { - watch.Start("Removing unmatched DAT information"); - - query = "DELETE FROM dat WHERE"; - foreach (string dathash in unneeded) - { - query += " OR hash=\"" + dathash + "\""; - } - - query = query.Replace("WHERE OR", "WHERE"); - slc = new SqliteCommand(query, dbc); - slc.ExecuteNonQuery(); - slc.Dispose(); - - watch.Stop(); - } - - dbc.Dispose(); - } - - /// - /// Wrap rescanning depots - /// - /// List of depots to rescan, empty means all - /// TODO: Verify implementation - private static void InitRescanDepots(List inputs) - { - Globals.Logger.Error("This feature is not yet implemented: rescan-depots"); - - foreach (string depotname in inputs) - { - // Check that it's a valid depot first - if (!_depots.ContainsKey(depotname)) - { - Globals.Logger.User("'{0}' is not a recognized depot. Please add it to your configuration file and try again", depotname); - return; - } - - // Then check that the depot is online - if (!Directory.Exists(depotname)) - { - Globals.Logger.User("'{0}' does not appear to be online. 
Please check its status and try again", depotname); - return; - } - - // Open the database connection - SqliteConnection dbc = new SqliteConnection(_connectionString); - dbc.Open(); - - // If we have it, then check for all hashes that are in that depot - List hashes = new List(); - string query = "SELECT sha1 FROM sha1 WHERE depot=\"" + depotname + "\""; - SqliteCommand slc = new SqliteCommand(query, dbc); - SqliteDataReader sldr = slc.ExecuteReader(); - if (sldr.HasRows) - { - while (sldr.Read()) - { - hashes.Add(sldr.GetString(0)); - } - } - - // Now rescan the depot itself - DatFile depot = new DatFile(); - // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually - depot.PopulateFromDir(depotname, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, null, true, null); - depot.BucketBy(SortedBy.SHA1, DedupeType.None); - - // Set the base queries to use - string crcquery = "INSERT OR IGNORE INTO crc (crc) VALUES"; - string md5query = "INSERT OR IGNORE INTO md5 (md5) VALUES"; - string sha1query = "INSERT OR IGNORE INTO sha1 (sha1, depot) VALUES"; - string crcsha1query = "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES"; - string md5sha1query = "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES"; - - // Once we have both, check for any new files - List dupehashes = new List(); - List keys = depot.Keys; - foreach (string key in keys) - { - List roms = depot[key]; - foreach (Rom rom in roms) - { - if (hashes.Contains(rom.SHA1)) - { - dupehashes.Add(rom.SHA1); - hashes.Remove(rom.SHA1); - } - else if (!dupehashes.Contains(rom.SHA1)) - { - if (!String.IsNullOrWhiteSpace(rom.CRC)) - { - crcquery += " (\"" + rom.CRC + "\"),"; - } - if (!String.IsNullOrWhiteSpace(rom.MD5)) - { - md5query += " (\"" + rom.MD5 + "\"),"; - } - if (!String.IsNullOrWhiteSpace(rom.SHA1)) - { - sha1query += " (\"" + rom.SHA1 + "\", \"" + depotname + "\"),"; - - if (!String.IsNullOrWhiteSpace(rom.CRC)) - { - crcsha1query += " (\"" + rom.CRC + "\", \"" + rom.SHA1 + "\"),"; - } - if (!String.IsNullOrWhiteSpace(rom.MD5)) - { - md5sha1query += " (\"" + rom.MD5 + "\", \"" + rom.SHA1 + "\"),"; - } - } - } - } - } - - // Now run the queries after fixing them - if (crcquery != "INSERT OR IGNORE INTO crc (crc) VALUES") - { - slc = new SqliteCommand(crcquery.TrimEnd(','), dbc); - slc.ExecuteNonQuery(); - } - if (md5query != "INSERT OR IGNORE INTO md5 (md5) VALUES") - { - slc = new SqliteCommand(md5query.TrimEnd(','), dbc); - slc.ExecuteNonQuery(); - } - if (sha1query != "INSERT OR IGNORE INTO sha1 (sha1, depot) VALUES") - { - slc = new SqliteCommand(sha1query.TrimEnd(','), dbc); - slc.ExecuteNonQuery(); - } - if (crcsha1query != "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES") - { - slc = new SqliteCommand(crcsha1query.TrimEnd(','), dbc); - slc.ExecuteNonQuery(); - } - if (md5sha1query != "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES") - { - slc = new SqliteCommand(md5sha1query.TrimEnd(','), dbc); - slc.ExecuteNonQuery(); - } - - // Now that we've added the information, we get to remove all of the hashes that we want to - query = @"DELETE FROM sha1 + public partial class RombaSharp + { + #region Init Methods + + /// + /// Wrap adding files to the depots + /// + /// List of input folders to use + /// True if only files in the database and don't exist are added, false otherwise + /// Resume a previously interrupted operation from the specified path + /// flag value == 0 means: add Zip files themselves into the depot in addition to their contents, flag value == 2 means add Zip files 
themselves but don't add content + /// How many workers to launch for the job, default from config + /// flag value == 0 means: add GZip files themselves into the depot in addition to their contents, flag value == 2 means add GZip files themselves but don't add content + /// flag value == 0 means: add 7Zip files themselves into the depot in addition to their contents, flag value == 2 means add 7Zip files themselves but don't add content + /// True to skip the initial scan of the files to determine amount of work, false otherwise + /// True to use go zip implementation instead of zlib, false otherwise + /// True to archive into depot but do not touch DB index and ignore only-needed flag, false otherwise + /// TODO: Add ability to update .romba files with proper size AND use the correct depot if it fills up + /// TODO: Add ability correctly to mark which depot the files are being rebuilt to in the DB + private static void InitArchive( + List inputs, + bool onlyNeeded, + string resume, + int includeZips, + int workers, + int includeGZips, + int include7Zips, + bool skipInitialScan, + bool useGolangZip, // Obsolete + bool noDb) + { + // First we want to get just all directories from the inputs + List onlyDirs = new List(); + foreach (string input in inputs) + { + if (Directory.Exists(input)) + { + onlyDirs.Add(Path.GetFullPath(input)); + } + } + + // Then process all of the input directories into an internal DAT + DatFile df = new DatFile(); + foreach (string dir in onlyDirs) + { + // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually + df.PopulateFromDir(dir, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, null, true, null); + df.PopulateFromDir(dir, Hash.DeepHashes, false, true, SkipFileType.None, false, false, _tmpdir, false, null, true, null); + } + + // Create an empty Dat for files that need to be rebuilt + DatFile need = new DatFile(); + + // Open the database connection + SqliteConnection dbc = new SqliteConnection(_connectionString); + dbc.Open(); + + // Now that we have the Dats, add the files to the database + string crcquery = "INSERT OR IGNORE INTO crc (crc) VALUES"; + string md5query = "INSERT OR IGNORE INTO md5 (md5) VALUES"; + string sha1query = "INSERT OR IGNORE INTO sha1 (sha1, depot) VALUES"; + string crcsha1query = "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES"; + string md5sha1query = "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES"; + + foreach (string key in df.Keys) + { + List datItems = df[key]; + foreach (Rom rom in datItems) + { + // If we care about if the file exists, check the databse first + if (onlyNeeded && !noDb) + { + string query = "SELECT * FROM crcsha1 JOIN md5sha1 ON crcsha1.sha1=md5sha1.sha1" + + " WHERE crcsha1.crc=\"" + rom.CRC + "\"" + + " OR md5sha1.md5=\"" + rom.MD5 + "\"" + + " OR md5sha1.sha1=\"" + rom.SHA1 + "\""; + SqliteCommand slc = new SqliteCommand(query, dbc); + SqliteDataReader sldr = slc.ExecuteReader(); + + if (sldr.HasRows) + { + // Add to the queries + if (!String.IsNullOrWhiteSpace(rom.CRC)) + { + crcquery += " (\"" + rom.CRC + "\"),"; + } + if (!String.IsNullOrWhiteSpace(rom.MD5)) + { + md5query += " (\"" + rom.MD5 + "\"),"; + } + if (!String.IsNullOrWhiteSpace(rom.SHA1)) + { + sha1query += " (\"" + rom.SHA1 + "\", \"" + _depots.Keys.ToList()[0] + "\"),"; + + if (!String.IsNullOrWhiteSpace(rom.CRC)) + { + crcsha1query += " (\"" + rom.CRC + "\", \"" + rom.SHA1 + "\"),"; + } + if (!String.IsNullOrWhiteSpace(rom.MD5)) + { + md5sha1query += " (\"" + rom.MD5 + "\", \"" + 
rom.SHA1 + "\"),"; + } + } + + // Add to the Dat + need.Add(key, rom); + } + } + // Otherwise, just add the file to the list + else + { + // Add to the queries + if (!noDb) + { + if (!String.IsNullOrWhiteSpace(rom.CRC)) + { + crcquery += " (\"" + rom.CRC + "\"),"; + } + if (!String.IsNullOrWhiteSpace(rom.MD5)) + { + md5query += " (\"" + rom.MD5 + "\"),"; + } + if (!String.IsNullOrWhiteSpace(rom.SHA1)) + { + sha1query += " (\"" + rom.SHA1 + "\", \"" + _depots.Keys.ToList()[0] + "\"),"; + + if (!String.IsNullOrWhiteSpace(rom.CRC)) + { + crcsha1query += " (\"" + rom.CRC + "\", \"" + rom.SHA1 + "\"),"; + } + if (!String.IsNullOrWhiteSpace(rom.MD5)) + { + md5sha1query += " (\"" + rom.MD5 + "\", \"" + rom.SHA1 + "\"),"; + } + } + } + + // Add to the Dat + need.Add(key, rom); + } + } + } + + // Now run the queries, if they're populated + if (crcquery != "INSERT OR IGNORE INTO crc (crc) VALUES") + { + SqliteCommand slc = new SqliteCommand(crcquery.TrimEnd(','), dbc); + slc.ExecuteNonQuery(); + slc.Dispose(); + } + if (md5query != "INSERT OR IGNORE INTO md5 (md5) VALUES") + { + SqliteCommand slc = new SqliteCommand(md5query.TrimEnd(','), dbc); + slc.ExecuteNonQuery(); + slc.Dispose(); + } + if (sha1query != "INSERT OR IGNORE INTO sha1 (sha1, depot) VALUES") + { + SqliteCommand slc = new SqliteCommand(sha1query.TrimEnd(','), dbc); + slc.ExecuteNonQuery(); + slc.Dispose(); + } + if (crcsha1query != "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES") + { + SqliteCommand slc = new SqliteCommand(crcsha1query.TrimEnd(','), dbc); + slc.ExecuteNonQuery(); + slc.Dispose(); + } + if (md5sha1query != "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES") + { + SqliteCommand slc = new SqliteCommand(md5sha1query.TrimEnd(','), dbc); + slc.ExecuteNonQuery(); + slc.Dispose(); + } + + // Create the sorting object to use and rebuild the needed files + ArchiveScanLevel asl = Utilities.GetArchiveScanLevelFromNumbers(include7Zips, includeGZips, 2, includeZips); + need.RebuildGeneric(onlyDirs, _depots.Keys.ToList()[0], false /*quickScan*/, false /*date*/, + false /*delete*/, false /*inverse*/, OutputFormat.TorrentGzip, true /*romba*/, asl, false /*updateDat*/, + null /*headerToCheckAgainst*/, true /* chdsAsFiles */); + } + + /// + /// Wrap building all files from a set of DATs + /// + /// List of input DATs to rebuild from + /// Output file + /// True to only fix dats and don't generate torrentzips, false otherwise + /// True if files should be copied to output, false for rebuild + /// How many workers to launch for the job, default from config + /// How many subworkers to launch for each worker, default from config + private static void InitBuild( + List inputs, + string outdat, + bool fixdatOnly, + bool copy, + int workers, + int subworkers) + { + // Verify the filenames + Dictionary foundDats = GetValidDats(inputs); + + // Ensure the output directory is set + if (String.IsNullOrWhiteSpace(outdat)) + { + outdat = "out"; + } + + // Now that we have the dictionary, we can loop through and output to a new folder for each + foreach (string key in foundDats.Keys) + { + // Get the DAT file associated with the key + DatFile datFile = new DatFile(); + datFile.Parse(Path.Combine(_dats, foundDats[key]), 0, 0); + + // Create the new output directory if it doesn't exist + string outputFolder = Path.Combine(outdat, Path.GetFileNameWithoutExtension(foundDats[key])); + Utilities.EnsureOutputDirectory(outputFolder, create: true); + + // Get all online depots + List onlineDepots = _depots.Where(d => d.Value.Item2).Select(d => 
d.Key).ToList(); + + // Now scan all of those depots and rebuild + ArchiveScanLevel asl = Utilities.GetArchiveScanLevelFromNumbers(1, 1, 1, 1); + datFile.RebuildDepot(onlineDepots, outputFolder, false /*date*/, + false /*delete*/, false /*inverse*/, (copy ? OutputFormat.TorrentGzip : OutputFormat.TorrentZip), copy, + false /*updateDat*/, null /*headerToCheckAgainst*/); + } + } + + /// + /// Wrap cancelling a long-running job + /// + /// TODO: Implement + private static void InitCancel() + { + Globals.Logger.User("This feature is not yet implemented: cancel"); + } + + /// + /// Wrap printing dat stats + /// + /// List of input DATs to get stats from + private static void InitDatStats(List inputs) + { + // If we have no inputs listed, we want to use datroot + if (inputs == null || inputs.Count == 0) + { + inputs = new List(); + inputs.Add(Path.GetFullPath(_dats)); + } + + // Now output the stats for all inputs + DatFile.OutputStats(inputs, "rombasharp-datstats", null /* outDir */, true /* single */, true /* baddumpCol */, true /* nodumpCol */, StatReportFormat.Textfile); + } + + /// + /// Wrap printing db stats + /// + private static void InitDbStats() + { + SqliteConnection dbc = new SqliteConnection(_connectionString); + dbc.Open(); + + // Total number of CRCs + string query = "SELECT COUNT(*) FROM crc"; + SqliteCommand slc = new SqliteCommand(query, dbc); + Globals.Logger.User("Total CRCs: {0}", (long)slc.ExecuteScalar()); + + // Total number of MD5s + query = "SELECT COUNT(*) FROM md5"; + slc = new SqliteCommand(query, dbc); + Globals.Logger.User("Total MD5s: {0}", (long)slc.ExecuteScalar()); + + // Total number of SHA1s + query = "SELECT COUNT(*) FROM sha1"; + slc = new SqliteCommand(query, dbc); + Globals.Logger.User("Total SHA1s: {0}", (long)slc.ExecuteScalar()); + + // Total number of DATs + query = "SELECT COUNT(*) FROM dat"; + slc = new SqliteCommand(query, dbc); + Globals.Logger.User("Total DATs: {0}", (long)slc.ExecuteScalar()); + + slc.Dispose(); + dbc.Dispose(); + } + + /// + /// Wrap creating a diffdat for a given old and new dat + /// + /// Output file + /// Old DAT file + /// New DAT file + /// Name value in DAT header + /// Description value in DAT header + private static void InitDiffDat( + string outdat, + string old, + string newdat, + string name, + string description) + { + // Ensure the output directory + Utilities.EnsureOutputDirectory(outdat, create: true); + + // Check that all required files exist + if (!File.Exists(old)) + { + Globals.Logger.Error("File '{0}' does not exist!", old); + return; + } + if (!File.Exists(newdat)) + { + Globals.Logger.Error("File '{0}' does not exist!", newdat); + return; + } + + // Create the encapsulating datfile + DatFile datfile = new DatFile() + { + Name = name, + Description = description, + }; + + // Create the inputs + List dats = new List(); + dats.Add(newdat); + List basedats = new List(); + basedats.Add(old); + + // Now run the diff on the inputs + datfile.DetermineUpdateType(dats, basedats, outdat, UpdateMode.DiffAgainst, false /* inplace */, false /* skip */, + false /* clean */, false /* remUnicode */, false /* descAsName */, new Filter(), SplitType.None, + ReplaceMode.None, false /* onlySame */); + } + + /// + /// Wrap creating a dir2dat from a given source + /// + /// Output file + /// Source directory + /// Name value in DAT header + /// Description value in DAT header + private static void InitDir2Dat( + string outdat, + string source, + string name, + string description) + { + // Ensure the output directory + 
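+            // Editorial note: `outdat` is later passed to Write as an output
+            // directory rather than a file name, so a sketch invocation (argument
+            // values are illustrative, not from the source) would be:
+            //   InitDir2Dat("out", @"C:\roms", "My Games", "dir2dat of C:\roms");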
Utilities.EnsureOutputDirectory(outdat, create: true); + + // Check that all required directories exist + if (!Directory.Exists(source)) + { + Globals.Logger.Error("File '{0}' does not exist!", source); + return; + } + + // Create the encapsulating datfile + DatFile datfile = new DatFile() + { + Name = (String.IsNullOrWhiteSpace(name) ? "untitled" : name), + Description = description, + }; + + // Now run the D2D on the input and write out + // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually + datfile.PopulateFromDir(source, Hash.DeepHashes, true /* bare */, false /* archivesAsFiles */, SkipFileType.None, false /* addBlanks */, + false /* addDate */, _tmpdir, false /* copyFiles */, null /* headerToCheckAgainst */, true /* chdsAsFiles */, null /* filter */); + datfile.Write(outDir: outdat); + } + + /// + /// Wrap creating a diffdat for a given old and new dat + /// + /// Output file + /// Old DAT file + /// New DAT file + private static void InitEDiffDat( + string outdat, + string old, + string newdat) + { + // Ensure the output directory + Utilities.EnsureOutputDirectory(outdat, create: true); + + // Check that all required files exist + if (!File.Exists(old)) + { + Globals.Logger.Error("File '{0}' does not exist!", old); + return; + } + if (!File.Exists(newdat)) + { + Globals.Logger.Error("File '{0}' does not exist!", newdat); + return; + } + + // Create the encapsulating datfile + DatFile datfile = new DatFile(); + + // Create the inputs + List dats = new List(); + dats.Add(newdat); + List basedats = new List(); + basedats.Add(old); + + // Now run the diff on the inputs + datfile.DetermineUpdateType(dats, basedats, outdat, UpdateMode.DiffAgainst, false /* inplace */, false /* skip */, + false /* clean */, false /* remUnicode */, false /* descAsName */, new Filter(), SplitType.None, + ReplaceMode.None, false /* onlySame */); + } + + /// + /// Wrap exporting the database to CSV + /// + /// TODO: Add ability to say which depot the files are found in + private static void InitExport() + { + SqliteConnection dbc = new SqliteConnection(_connectionString); + dbc.Open(); + StreamWriter sw = new StreamWriter(Utilities.TryCreate("export.csv")); + + // First take care of all file hashes + sw.WriteLine("CRC,MD5,SHA-1"); // ,Depot + + string query = "SELECT crcsha1.crc, md5sha1.md5, md5sha1.sha1 FROM crcsha1 JOIN md5sha1 ON crcsha1.sha1=md5sha1.sha1"; // md5sha1.sha1=sha1depot.sha1 + SqliteCommand slc = new SqliteCommand(query, dbc); + SqliteDataReader sldr = slc.ExecuteReader(); + + if (sldr.HasRows) + { + while (sldr.Read()) + { + string line = sldr.GetString(0) + "," + + sldr.GetString(1) + "," + + sldr.GetString(2); // + "," + // + sldr.GetString(3); + sw.WriteLine(line); + } + } + + // Then take care of all DAT hashes + sw.WriteLine(); + sw.WriteLine("DAT Hash"); + + query = "SELECT hash FROM dat"; + slc = new SqliteCommand(query, dbc); + sldr = slc.ExecuteReader(); + + if (sldr.HasRows) + { + while (sldr.Read()) + { + sw.WriteLine(sldr.GetString(0)); + } + } + + sldr.Dispose(); + slc.Dispose(); + sw.Dispose(); + dbc.Dispose(); + } + + /// + /// Wrap creating a fixdat for each Dat + /// + /// List of input DATs to get fixdats for + /// Output directory + /// True to only fix dats and don't generate torrentzips, false otherwise + /// How many workers to launch for the job, default from config + /// How many subworkers to launch for each worker, default from config + /// TODO: Implement + private static void InitFixdat( + List inputs, + string outdat, + bool fixdatOnly, + 
int workers,
+            int subworkers)
+        {
+            Globals.Logger.Error("This feature is not yet implemented: fixdat");
+        }
+
+        /// <summary>
+        /// Wrap importing CSVs into the database
+        /// </summary>
+        /// <param name="inputs">List of input CSV files to import information from</param>
+        private static void InitImport(List<string> inputs)
+        {
+            Globals.Logger.Error("This feature is not yet implemented: import");
+
+            // First ensure the inputs and database connection
+            inputs = Utilities.GetOnlyFilesFromInputs(inputs);
+            SqliteConnection dbc = new SqliteConnection(_connectionString);
+            SqliteCommand slc = new SqliteCommand();
+            dbc.Open();
+
+            // Now, for each of these files, attempt to add the data found inside
+            foreach (string input in inputs)
+            {
+                StreamReader sr = new StreamReader(Utilities.TryOpenRead(input));
+
+                // The first line should be the hash header
+                string line = sr.ReadLine();
+                if (line != "CRC,MD5,SHA-1") // ,Depot
+                {
+                    Globals.Logger.Error("{0} is not a valid export file", input);
+                    continue;
+                }
+
+                // Define the insert queries
+                string crcquery = "INSERT OR IGNORE INTO crc (crc) VALUES";
+                string md5query = "INSERT OR IGNORE INTO md5 (md5) VALUES";
+                string sha1query = "INSERT OR IGNORE INTO sha1 (sha1) VALUES";
+                string crcsha1query = "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES";
+                string md5sha1query = "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES";
+
+                // For each line until we hit a blank line...
+                while (!sr.EndOfStream && line != "")
+                {
+                    line = sr.ReadLine();
+                    string[] hashes = line.Split(',');
+
+                    // Guard against blank or malformed lines before indexing
+                    if (hashes.Length < 3)
+                    {
+                        continue;
+                    }
+
+                    // Loop through the parsed entries
+                    if (!String.IsNullOrWhiteSpace(hashes[0]))
+                    {
+                        crcquery += " (\"" + hashes[0] + "\"),";
+                    }
+                    if (!String.IsNullOrWhiteSpace(hashes[1]))
+                    {
+                        md5query += " (\"" + hashes[1] + "\"),";
+                    }
+                    if (!String.IsNullOrWhiteSpace(hashes[2]))
+                    {
+                        sha1query += " (\"" + hashes[2] + "\"),";
+
+                        if (!String.IsNullOrWhiteSpace(hashes[0]))
+                        {
+                            crcsha1query += " (\"" + hashes[0] + "\", \"" + hashes[2] + "\"),";
+                        }
+                        if (!String.IsNullOrWhiteSpace(hashes[1]))
+                        {
+                            md5sha1query += " (\"" + hashes[1] + "\", \"" + hashes[2] + "\"),";
+                        }
+                    }
+                }
+
+                // Now run the queries after fixing them
+                if (crcquery != "INSERT OR IGNORE INTO crc (crc) VALUES")
+                {
+                    slc = new SqliteCommand(crcquery.TrimEnd(','), dbc);
+                    slc.ExecuteNonQuery();
+                }
+                if (md5query != "INSERT OR IGNORE INTO md5 (md5) VALUES")
+                {
+                    slc = new SqliteCommand(md5query.TrimEnd(','), dbc);
+                    slc.ExecuteNonQuery();
+                }
+                if (sha1query != "INSERT OR IGNORE INTO sha1 (sha1) VALUES")
+                {
+                    slc = new SqliteCommand(sha1query.TrimEnd(','), dbc);
+                    slc.ExecuteNonQuery();
+                }
+                if (crcsha1query != "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES")
+                {
+                    slc = new SqliteCommand(crcsha1query.TrimEnd(','), dbc);
+                    slc.ExecuteNonQuery();
+                }
+                if (md5sha1query != "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES")
+                {
+                    slc = new SqliteCommand(md5sha1query.TrimEnd(','), dbc);
+                    slc.ExecuteNonQuery();
+                }
+
+                // Now add all of the DAT hashes
+                // TODO: Do we really need to save the DAT hashes?
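+                // For reference, when at least one CRC was parsed above, the first
+                // query has this shape before its trailing comma is trimmed
+                // (hash values here are illustrative):
+                //   INSERT OR IGNORE INTO crc (crc) VALUES ("0a1b2c3d"), ("deadbeef"),
+                // Values are spliced in as quoted literals rather than bound as
+                // parameters, so the import trusts its input to be plain hex strings.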
+ + sr.Dispose(); + } + + slc.Dispose(); + dbc.Dispose(); + } + + /// + /// Wrap looking up if hashes exist in the database + /// + /// List of input strings representing hashes to check for + /// Size to limit hash by, -1 otherwise + /// Output directory + private static void InitLookup( + List inputs, + long size, + string outdat) + { + // First, try to figure out what type of hash each is by length and clean it + List crc = new List(); + List md5 = new List(); + List sha1 = new List(); + foreach (string input in inputs) + { + string temp = ""; + if (input.Length == Constants.CRCLength) + { + temp = Utilities.CleanHashData(input, Constants.CRCLength); + if (!String.IsNullOrWhiteSpace(temp)) + { + crc.Add(temp); + } + } + else if (input.Length == Constants.MD5Length) + { + temp = Utilities.CleanHashData(input, Constants.MD5Length); + if (!String.IsNullOrWhiteSpace(temp)) + { + md5.Add(temp); + } + } + else if (input.Length == Constants.SHA1Length) + { + temp = Utilities.CleanHashData(input, Constants.SHA1Length); + if (!String.IsNullOrWhiteSpace(temp)) + { + sha1.Add(temp); + } + } + } + + SqliteConnection dbc = new SqliteConnection(_connectionString); + dbc.Open(); + + // Now, search for each of them and return true or false for each + foreach (string input in crc) + { + string query = "SELECT * FROM crc WHERE crc=\"" + input + "\""; + SqliteCommand slc = new SqliteCommand(query, dbc); + SqliteDataReader sldr = slc.ExecuteReader(); + if (sldr.HasRows) + { + int count = 0; + while (sldr.Read()) + { + count++; + } + + Globals.Logger.User("For hash '{0}' there were {1} matches in the database", input, count); + } + else + { + Globals.Logger.User("Hash '{0}' had no matches in the database", input); + } + + sldr.Dispose(); + slc.Dispose(); + } + foreach (string input in md5) + { + string query = "SELECT * FROM md5 WHERE md5=\"" + input + "\""; + SqliteCommand slc = new SqliteCommand(query, dbc); + SqliteDataReader sldr = slc.ExecuteReader(); + if (sldr.HasRows) + { + int count = 0; + while (sldr.Read()) + { + count++; + } + + Globals.Logger.User("For hash '{0}' there were {1} matches in the database", input, count); + } + else + { + Globals.Logger.User("Hash '{0}' had no matches in the database", input); + } + + sldr.Dispose(); + slc.Dispose(); + } + foreach (string input in sha1) + { + string query = "SELECT * FROM sha1 WHERE sha1=\"" + input + "\""; + SqliteCommand slc = new SqliteCommand(query, dbc); + SqliteDataReader sldr = slc.ExecuteReader(); + if (sldr.HasRows) + { + int count = 0; + while (sldr.Read()) + { + count++; + } + + Globals.Logger.User("For hash '{0}' there were {1} matches in the database", input, count); + } + else + { + Globals.Logger.User("Hash '{0}' had no matches in the database", input); + } + + sldr.Dispose(); + slc.Dispose(); + } + + dbc.Dispose(); + } + + /// + /// Wrap printing memory stats + /// + /// TODO: Implement + private static void InitMemstats() + { + Globals.Logger.User("This feature is not yet implemented: memstats"); + } + + /// + /// Wrap merging an external depot into an existing one + /// + /// List of input depots to merge in + /// True if only files in the database and don't exist are added, false otherwise + /// Resume a previously interrupted operation from the specified path + /// How many workers to launch for the job, default from config + /// True to skip the initial scan of the files to determine amount of work, false otherwise + /// TODO: Add way of specifying "current depot" since that's what Romba relies on + /// TODO: Implement + 
private static void InitMerge( + List inputs, + bool onlyNeeded, + string resume, + int workers, + bool skipInitialscan) + { + Globals.Logger.Error("This feature is not yet implemented: merge"); + + // Verify that the inputs are valid directories + inputs = Utilities.GetOnlyDirectoriesFromInputs(inputs); + + // Loop over all input directories + foreach (string input in inputs) + { + List depotFiles = Directory.EnumerateFiles(input, "*.gz", SearchOption.AllDirectories).ToList(); + + // If we are copying all that is possible but we want to scan first + if (!onlyNeeded && !skipInitialscan) + { + + } + // If we are copying all that is possible but we don't care to scan first + else if (!onlyNeeded && skipInitialscan) + { + + } + // If we are copying only what is needed but we want to scan first + else if (onlyNeeded && !skipInitialscan) + { + + } + // If we are copying only what is needed but we don't care to scan first + else if (onlyNeeded && skipInitialscan) + { + + } + } + } + + /// + /// Wrap creating a havefile and a missfile for each Dat + /// + /// List of DAT files to get a miss and have for, empty means all + /// TODO: Implement + private static void InitMiss(List inputs) + { + // Verify the filenames + Dictionary foundDats = GetValidDats(inputs); + + // Create the new output directory if it doesn't exist + Utilities.EnsureOutputDirectory(Path.Combine(Globals.ExeDir, "out"), create: true); + + // Now that we have the dictionary, we can loop through and output to a new folder for each + foreach (string key in foundDats.Keys) + { + // Get the DAT file associated with the key + DatFile datFile = new DatFile(); + datFile.Parse(Path.Combine(_dats, foundDats[key]), 0, 0); + + // Now loop through and see if all of the hash combinations exist in the database + /* ended here */ + } + + Globals.Logger.Error("This feature is not yet implemented: miss"); + } + + /// + /// Wrap showing progress of currently running command + /// + /// TODO: Implement + private static void InitProgress() + { + Globals.Logger.User("This feature is not yet implemented: progress"); + } + + /// + /// Wrap backing up of no longer needed files from the depots + /// + /// Backup directory where backup files are moved to + /// How many workers to launch for the job, default from config + /// List of depots to scan files in, empty means all + /// List of DATs to use as the basis of scanning, empty means all + /// True if only the output of the operation is shown, false to actually run + /// TODO: Implement + private static void InitPurgeBackup( + string backup, + int workers, + List depot, + List dats, + bool logOnly) + { + Globals.Logger.Error("This feature is not yet implemented: purge-backup"); + } + + /// + /// Wrap deleting of no longer needed files from the depots + /// + /// How many workers to launch for the job, default from config + /// List of depots to scan files in, empty means all + /// List of DATs to use as the basis of scanning, empty means all + /// True if only the output of the operation is shown, false to actually run + /// TODO: Implement + private static void InitPurgeDelete( + int workers, + List depot, + List dats, + bool logOnly) + { + Globals.Logger.Error("This feature is not yet implemented: purge-delete"); + } + + /// + /// Wrap refreshing the database with potentially new dats + /// + /// How many workers to launch for the job, default from config + /// Write paths of dats with missing sha1s into this file + private static void InitRefreshDats( + int workers, + string missingSha1s) + { + // 
Make sure the db is set + if (String.IsNullOrWhiteSpace(_db)) + { + _db = "db.sqlite"; + _connectionString = "Data Source=" + _db + ";Version = 3;"; + } + + // Make sure the file exists + if (!File.Exists(_db)) + { + DatabaseTools.EnsureDatabase(_dbSchema, _db, _connectionString); + } + + // Make sure the dats dir is set + if (String.IsNullOrWhiteSpace(_dats)) + { + _dats = "dats"; + } + + _dats = Path.Combine(Globals.ExeDir, _dats); + + // Make sure the folder exists + if (!Directory.Exists(_dats)) + { + Directory.CreateDirectory(_dats); + } + + // First get a list of SHA-1's from the input DATs + DatFile datroot = new DatFile { Type = "SuperDAT", }; + // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually + datroot.PopulateFromDir(_dats, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, null, true, null); + datroot.BucketBy(SortedBy.SHA1, DedupeType.None); + + // Create a List of dat hashes in the database (SHA-1) + List databaseDats = new List(); + List unneeded = new List(); + + SqliteConnection dbc = new SqliteConnection(_connectionString); + dbc.Open(); + + // Populate the List from the database + InternalStopwatch watch = new InternalStopwatch("Populating the list of existing DATs"); + + string query = "SELECT DISTINCT hash FROM dat"; + SqliteCommand slc = new SqliteCommand(query, dbc); + SqliteDataReader sldr = slc.ExecuteReader(); + if (sldr.HasRows) + { + sldr.Read(); + string hash = sldr.GetString(0); + if (datroot.Contains(hash)) + { + datroot.Remove(hash); + databaseDats.Add(hash); + } + else if (!databaseDats.Contains(hash)) + { + unneeded.Add(hash); + } + } + datroot.BucketBy(SortedBy.Game, DedupeType.None, norename: true); + + watch.Stop(); + + slc.Dispose(); + sldr.Dispose(); + + // Loop through the Dictionary and add all data + watch.Start("Adding new DAT information"); + foreach (string key in datroot.Keys) + { + foreach (Rom value in datroot[key]) + { + AddDatToDatabase(value, dbc); + } + } + + watch.Stop(); + + // Now loop through and remove all references to old Dats + if (unneeded.Count > 0) + { + watch.Start("Removing unmatched DAT information"); + + query = "DELETE FROM dat WHERE"; + foreach (string dathash in unneeded) + { + query += " OR hash=\"" + dathash + "\""; + } + + query = query.Replace("WHERE OR", "WHERE"); + slc = new SqliteCommand(query, dbc); + slc.ExecuteNonQuery(); + slc.Dispose(); + + watch.Stop(); + } + + dbc.Dispose(); + } + + /// + /// Wrap rescanning depots + /// + /// List of depots to rescan, empty means all + /// TODO: Verify implementation + private static void InitRescanDepots(List inputs) + { + Globals.Logger.Error("This feature is not yet implemented: rescan-depots"); + + foreach (string depotname in inputs) + { + // Check that it's a valid depot first + if (!_depots.ContainsKey(depotname)) + { + Globals.Logger.User("'{0}' is not a recognized depot. Please add it to your configuration file and try again", depotname); + return; + } + + // Then check that the depot is online + if (!Directory.Exists(depotname)) + { + Globals.Logger.User("'{0}' does not appear to be online. 
Please check its status and try again", depotname); + return; + } + + // Open the database connection + SqliteConnection dbc = new SqliteConnection(_connectionString); + dbc.Open(); + + // If we have it, then check for all hashes that are in that depot + List hashes = new List(); + string query = "SELECT sha1 FROM sha1 WHERE depot=\"" + depotname + "\""; + SqliteCommand slc = new SqliteCommand(query, dbc); + SqliteDataReader sldr = slc.ExecuteReader(); + if (sldr.HasRows) + { + while (sldr.Read()) + { + hashes.Add(sldr.GetString(0)); + } + } + + // Now rescan the depot itself + DatFile depot = new DatFile(); + // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually + depot.PopulateFromDir(depotname, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, null, true, null); + depot.BucketBy(SortedBy.SHA1, DedupeType.None); + + // Set the base queries to use + string crcquery = "INSERT OR IGNORE INTO crc (crc) VALUES"; + string md5query = "INSERT OR IGNORE INTO md5 (md5) VALUES"; + string sha1query = "INSERT OR IGNORE INTO sha1 (sha1, depot) VALUES"; + string crcsha1query = "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES"; + string md5sha1query = "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES"; + + // Once we have both, check for any new files + List dupehashes = new List(); + List keys = depot.Keys; + foreach (string key in keys) + { + List roms = depot[key]; + foreach (Rom rom in roms) + { + if (hashes.Contains(rom.SHA1)) + { + dupehashes.Add(rom.SHA1); + hashes.Remove(rom.SHA1); + } + else if (!dupehashes.Contains(rom.SHA1)) + { + if (!String.IsNullOrWhiteSpace(rom.CRC)) + { + crcquery += " (\"" + rom.CRC + "\"),"; + } + if (!String.IsNullOrWhiteSpace(rom.MD5)) + { + md5query += " (\"" + rom.MD5 + "\"),"; + } + if (!String.IsNullOrWhiteSpace(rom.SHA1)) + { + sha1query += " (\"" + rom.SHA1 + "\", \"" + depotname + "\"),"; + + if (!String.IsNullOrWhiteSpace(rom.CRC)) + { + crcsha1query += " (\"" + rom.CRC + "\", \"" + rom.SHA1 + "\"),"; + } + if (!String.IsNullOrWhiteSpace(rom.MD5)) + { + md5sha1query += " (\"" + rom.MD5 + "\", \"" + rom.SHA1 + "\"),"; + } + } + } + } + } + + // Now run the queries after fixing them + if (crcquery != "INSERT OR IGNORE INTO crc (crc) VALUES") + { + slc = new SqliteCommand(crcquery.TrimEnd(','), dbc); + slc.ExecuteNonQuery(); + } + if (md5query != "INSERT OR IGNORE INTO md5 (md5) VALUES") + { + slc = new SqliteCommand(md5query.TrimEnd(','), dbc); + slc.ExecuteNonQuery(); + } + if (sha1query != "INSERT OR IGNORE INTO sha1 (sha1, depot) VALUES") + { + slc = new SqliteCommand(sha1query.TrimEnd(','), dbc); + slc.ExecuteNonQuery(); + } + if (crcsha1query != "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES") + { + slc = new SqliteCommand(crcsha1query.TrimEnd(','), dbc); + slc.ExecuteNonQuery(); + } + if (md5sha1query != "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES") + { + slc = new SqliteCommand(md5sha1query.TrimEnd(','), dbc); + slc.ExecuteNonQuery(); + } + + // Now that we've added the information, we get to remove all of the hashes that we want to + query = @"DELETE FROM sha1 JOIN crcsha1 - ON sha1.sha1=crcsha1.sha1 + ON sha1.sha1=crcsha1.sha1 JOIN md5sha1 - ON sha1.sha1=md5sha1.sha1 + ON sha1.sha1=md5sha1.sha1 JOIN crc - ON crcsha1.crc=crc.crc + ON crcsha1.crc=crc.crc JOIN md5 - ON md5sha1.md5=md5.md5 + ON md5sha1.md5=md5.md5 WHERE sha1.sha1 IN (""" + String.Join("\",\"", hashes) + "\")"; - slc = new SqliteCommand(query, dbc); - slc.ExecuteNonQuery(); + slc = new SqliteCommand(query, dbc); + 
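+                // NOTE: SQLite's DELETE statement does not support JOIN clauses, so
+                // the query assembled above will fail to prepare as written. Assuming
+                // the intent is to drop the SHA-1 rows that were not re-found in the
+                // depot, an equivalent join-free form would be:
+                //   DELETE FROM sha1 WHERE sha1 IN ("<hash>", "<hash>", ...)
+                // built from the same String.Join over `hashes` as above.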
slc.ExecuteNonQuery(); - // Dispose of the database connection - slc.Dispose(); - dbc.Dispose(); - } - } + // Dispose of the database connection + slc.Dispose(); + dbc.Dispose(); + } + } - /// - /// Wrap gracefully shutting down the server - /// - /// TODO: Implement - private static void InitShutdown() - { - Globals.Logger.User("This feature is not yet implemented: shutdown"); - } + /// + /// Wrap gracefully shutting down the server + /// + /// TODO: Implement + private static void InitShutdown() + { + Globals.Logger.User("This feature is not yet implemented: shutdown"); + } - /// - /// Wrap printing the version - /// - private static void InitVersion() - { - Globals.Logger.User("RombaSharp version: {0}", Constants.Version); - } + /// + /// Wrap printing the version + /// + private static void InitVersion() + { + Globals.Logger.User("RombaSharp version: {0}", Constants.Version); + } - #endregion - } + #endregion + } } diff --git a/RombaSharp/RombaSharp.cs b/RombaSharp/RombaSharp.cs index 148b0a2d..56daafb3 100644 --- a/RombaSharp/RombaSharp.cs +++ b/RombaSharp/RombaSharp.cs @@ -14,389 +14,389 @@ using Alphaleonis.Win32.Filesystem; namespace RombaSharp { - /// - /// Entry class for the RombaSharp application - /// - /// - /// In the database, we want to enable "offline mode". That is, when a user does an operation - /// that needs to read from the depot themselves, if the depot folder cannot be found, the - /// user is prompted to reconnect the depot OR skip that depot entirely. - /// - public partial class RombaSharp - { - // General settings - private static string _logdir; // Log folder location - private static string _tmpdir; // Temp folder location - private static string _webdir; // Web frontend location - private static string _baddir; // Fail-to-unpack file folder location - private static int _verbosity; // Verbosity of the output - private static int _cores; // Forced CPU cores + /// + /// Entry class for the RombaSharp application + /// + /// + /// In the database, we want to enable "offline mode". That is, when a user does an operation + /// that needs to read from the depot themselves, if the depot folder cannot be found, the + /// user is prompted to reconnect the depot OR skip that depot entirely. 
+ /// + public partial class RombaSharp + { + // General settings + private static string _logdir; // Log folder location + private static string _tmpdir; // Temp folder location + private static string _webdir; // Web frontend location + private static string _baddir; // Fail-to-unpack file folder location + private static int _verbosity; // Verbosity of the output + private static int _cores; // Forced CPU cores - // DatRoot settings - private static string _dats; // DatRoot folder location - private static string _db; // Database name + // DatRoot settings + private static string _dats; // DatRoot folder location + private static string _db; // Database name - // Depot settings - private static Dictionary> _depots; // Folder location, Max size + // Depot settings + private static Dictionary> _depots; // Folder location, Max size - // Server settings - private static int _port; // Web server port + // Server settings + private static int _port; // Web server port - // Other private variables - private static string _config = "config.xml"; - private static string _dbSchema = "rombasharp"; - private static string _connectionString; - private static Help _help; + // Other private variables + private static string _config = "config.xml"; + private static string _dbSchema = "rombasharp"; + private static string _connectionString; + private static Help _help; - /// - /// Entry class for the RombaSharp application - /// - public static void Main(string[] args) - { - // Perform initial setup and verification - Globals.Logger = new Logger(true, "romba.log"); + /// + /// Entry class for the RombaSharp application + /// + public static void Main(string[] args) + { + // Perform initial setup and verification + Globals.Logger = new Logger(true, "romba.log"); - InitializeConfiguration(); - DatabaseTools.EnsureDatabase(_dbSchema, _db, _connectionString); + InitializeConfiguration(); + DatabaseTools.EnsureDatabase(_dbSchema, _db, _connectionString); - // Create a new Help object for this program - _help = RombaSharp.RetrieveHelp(); + // Create a new Help object for this program + _help = RombaSharp.RetrieveHelp(); - // Get the location of the script tag, if it exists - int scriptLocation = (new List(args)).IndexOf("--script"); + // Get the location of the script tag, if it exists + int scriptLocation = (new List(args)).IndexOf("--script"); - // If output is being redirected or we are in script mode, don't allow clear screens - if (!Console.IsOutputRedirected && scriptLocation == -1) - { - Console.Clear(); - Build.PrepareConsole("RombaSharp"); - } + // If output is being redirected or we are in script mode, don't allow clear screens + if (!Console.IsOutputRedirected && scriptLocation == -1) + { + Console.Clear(); + Build.PrepareConsole("RombaSharp"); + } - // Now we remove the script tag because it messes things up - if (scriptLocation > -1) - { - List newargs = new List(args); - newargs.RemoveAt(scriptLocation); - args = newargs.ToArray(); - } + // Now we remove the script tag because it messes things up + if (scriptLocation > -1) + { + List newargs = new List(args); + newargs.RemoveAt(scriptLocation); + args = newargs.ToArray(); + } - // Credits take precidence over all - if ((new List(args)).Contains("--credits")) - { - _help.OutputCredits(); - Globals.Logger.Close(); - return; - } + // Credits take precidence over all + if ((new List(args)).Contains("--credits")) + { + _help.OutputCredits(); + Globals.Logger.Close(); + return; + } - // If there's no arguments, show help - if (args.Length == 0) - { - 
_help.OutputGenericHelp(); - Globals.Logger.Close(); - return; - } + // If there's no arguments, show help + if (args.Length == 0) + { + _help.OutputGenericHelp(); + Globals.Logger.Close(); + return; + } - // User flags - bool copy = false, - fixdatOnly = false, - logOnly = false, - noDb = false, - onlyNeeded = false, - skipInitialScan = false, - useGolangZip = false; + // User flags + bool copy = false, + fixdatOnly = false, + logOnly = false, + noDb = false, + onlyNeeded = false, + skipInitialScan = false, + useGolangZip = false; - // User inputs - string backup = "", - description = "", - missingSha1s = "", - name = "", - newdat = "", - old = "", - outdat = "", - resume = "", - source = ""; - int include7Zips = 1, - includeGZips = 1, - includeZips = 1, - subworkers = 0, - workers = 0; - long size = -1; - List dats = new List(); - List depot = new List(); - List inputs = new List(); + // User inputs + string backup = "", + description = "", + missingSha1s = "", + name = "", + newdat = "", + old = "", + outdat = "", + resume = "", + source = ""; + int include7Zips = 1, + includeGZips = 1, + includeZips = 1, + subworkers = 0, + workers = 0; + long size = -1; + List dats = new List(); + List depot = new List(); + List inputs = new List(); - // Get the first argument as a feature flag - string feature = args[0]; + // Get the first argument as a feature flag + string feature = args[0]; - // Verify that the flag is valid - if (!_help.TopLevelFlag(feature)) - { - Globals.Logger.User("'{0}' is not valid feature flag", feature); - _help.OutputIndividualFeature(feature); - Globals.Logger.Close(); - return; - } + // Verify that the flag is valid + if (!_help.TopLevelFlag(feature)) + { + Globals.Logger.User("'{0}' is not valid feature flag", feature); + _help.OutputIndividualFeature(feature); + Globals.Logger.Close(); + return; + } - // Now get the proper name for the feature - feature = _help.GetFeatureName(feature); + // Now get the proper name for the feature + feature = _help.GetFeatureName(feature); - // If we had the help feature first - if (feature == "Help") - { - // If we had something else after help - if (args.Length > 1) - { - _help.OutputIndividualFeature(args[1]); - Globals.Logger.Close(); - return; - } - // Otherwise, show generic help - else - { - _help.OutputGenericHelp(); - Globals.Logger.Close(); - return; - } - } + // If we had the help feature first + if (feature == "Help") + { + // If we had something else after help + if (args.Length > 1) + { + _help.OutputIndividualFeature(args[1]); + Globals.Logger.Close(); + return; + } + // Otherwise, show generic help + else + { + _help.OutputGenericHelp(); + Globals.Logger.Close(); + return; + } + } - // Now verify that all other flags are valid - for (int i = 1; i < args.Length; i++) - { - // Verify that the current flag is proper for the feature - if (!_help[feature].ValidateInput(args[i])) - { - // Everything else is treated as a generic input - inputs.Add(args[i]); - } - } + // Now verify that all other flags are valid + for (int i = 1; i < args.Length; i++) + { + // Verify that the current flag is proper for the feature + if (!_help[feature].ValidateInput(args[i])) + { + // Everything else is treated as a generic input + inputs.Add(args[i]); + } + } - // Now loop through all inputs - Dictionary features = _help.GetEnabledFeatures(); - foreach (KeyValuePair feat in features) - { - // Check all of the flag names and translate to arguments - switch (feat.Key) - { - #region User Flags + // Now loop through all inputs + Dictionary 
- // Now loop through all inputs
- Dictionary<string, Feature> features = _help.GetEnabledFeatures();
- foreach (KeyValuePair<string, Feature> feat in features)
- {
- // Check all of the flag names and translate to arguments
- switch (feat.Key)
- {
- #region User Flags
+ // Now loop through all enabled features
+ Dictionary<string, Feature> features = _help.GetEnabledFeatures();
+ foreach (KeyValuePair<string, Feature> feat in features)
+ {
+ // Check all of the flag names and translate to arguments
+ switch (feat.Key)
+ {
+ #region User Flags

- case "copy":
- copy = true;
- break;
- case "fixdatOnly":
- fixdatOnly = true;
- break;
- case "log-only":
- logOnly = true;
- break;
- case "no-db":
- noDb = true;
- break;
- case "only-needed":
- onlyNeeded = true;
- break;
- case "skip-initial-scan":
- skipInitialScan = true;
- break;
- case "use-golang-zip":
- useGolangZip = true;
- break;
+ case "copy":
+ copy = true;
+ break;
+ case "fixdatOnly":
+ fixdatOnly = true;
+ break;
+ case "log-only":
+ logOnly = true;
+ break;
+ case "no-db":
+ noDb = true;
+ break;
+ case "only-needed":
+ onlyNeeded = true;
+ break;
+ case "skip-initial-scan":
+ skipInitialScan = true;
+ break;
+ case "use-golang-zip":
+ useGolangZip = true;
+ break;

- #endregion
+ #endregion

- #region User Int32 Inputs
+ #region User Int32 Inputs

- case "include-7zips":
- include7Zips = (int)feat.Value.GetValue() == Int32.MinValue ? (int)feat.Value.GetValue() : 0;
- break;
- case "include-gzips":
- includeGZips = (int)feat.Value.GetValue() == Int32.MinValue ? (int)feat.Value.GetValue() : 0;
- break;
- case "include-zips":
- includeZips = (int)feat.Value.GetValue() == Int32.MinValue ? (int)feat.Value.GetValue() : 0;
- break;
- case "subworkers":
- subworkers = (int)feat.Value.GetValue() == Int32.MinValue ? (int)feat.Value.GetValue() : _cores;
- break;
- case "workers":
- workers = (int)feat.Value.GetValue() == Int32.MinValue ? (int)feat.Value.GetValue() : _cores;
- break;
+ // Keep the preset default when the sentinel is returned; otherwise use the parsed value
+ case "include-7zips":
+ include7Zips = (int)feat.Value.GetValue() == Int32.MinValue ? include7Zips : (int)feat.Value.GetValue();
+ break;
+ case "include-gzips":
+ includeGZips = (int)feat.Value.GetValue() == Int32.MinValue ? includeGZips : (int)feat.Value.GetValue();
+ break;
+ case "include-zips":
+ includeZips = (int)feat.Value.GetValue() == Int32.MinValue ? includeZips : (int)feat.Value.GetValue();
+ break;
+ case "subworkers":
+ subworkers = (int)feat.Value.GetValue() == Int32.MinValue ? _cores : (int)feat.Value.GetValue();
+ break;
+ case "workers":
+ workers = (int)feat.Value.GetValue() == Int32.MinValue ? _cores : (int)feat.Value.GetValue();
+ break;

- #endregion
+ #endregion
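+ // Note on the pattern above: GetValue() is assumed to return Int32.MinValue
+ // (or Int64.MinValue below) when a numeric flag was never supplied, e.g.:
+ //   "-workers 4"  -> GetValue() == 4              -> workers = 4
+ //   no "-workers" -> GetValue() == Int32.MinValue -> workers falls back to _cores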
- #region User Int64 Inputs
+ #region User Int64 Inputs

- case "size":
- size = (long)feat.Value.GetValue() == Int64.MinValue ? (long)feat.Value.GetValue() : 0;
- break;
+ case "size":
+ size = (long)feat.Value.GetValue() == Int64.MinValue ? size : (long)feat.Value.GetValue();
+ break;

- #endregion
+ #endregion

- #region User List Inputs
+ #region User List Inputs

- case "dats":
- dats.AddRange((List<string>)feat.Value.GetValue());
- break;
- case "depot":
- depot.AddRange((List<string>)feat.Value.GetValue());
- break;
+ case "dats":
+ dats.AddRange((List<string>)feat.Value.GetValue());
+ break;
+ case "depot":
+ depot.AddRange((List<string>)feat.Value.GetValue());
+ break;

- #endregion
+ #endregion

- #region User String Inputs
+ #region User String Inputs

- case "backup":
- backup = (string)feat.Value.GetValue();
- break;
- case "description":
- description = (string)feat.Value.GetValue();
- break;
- case "missingSha1s":
- missingSha1s = (string)feat.Value.GetValue();
- break;
- case "name":
- name = (string)feat.Value.GetValue();
- break;
- case "new":
- newdat = (string)feat.Value.GetValue();
- break;
- case "old":
- old = (string)feat.Value.GetValue();
- break;
- case "out":
- outdat = (string)feat.Value.GetValue();
- break;
- case "resume":
- resume = (string)feat.Value.GetValue();
- break;
- case "source":
- source = (string)feat.Value.GetValue();
- break;
+ case "backup":
+ backup = (string)feat.Value.GetValue();
+ break;
+ case "description":
+ description = (string)feat.Value.GetValue();
+ break;
+ case "missingSha1s":
+ missingSha1s = (string)feat.Value.GetValue();
+ break;
+ case "name":
+ name = (string)feat.Value.GetValue();
+ break;
+ case "new":
+ newdat = (string)feat.Value.GetValue();
+ break;
+ case "old":
+ old = (string)feat.Value.GetValue();
+ break;
+ case "out":
+ outdat = (string)feat.Value.GetValue();
+ break;
+ case "resume":
+ resume = (string)feat.Value.GetValue();
+ break;
+ case "source":
+ source = (string)feat.Value.GetValue();
+ break;

- #endregion
- }
- }
+ #endregion
+ }
+ }
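+ // By this point a hypothetical "RombaSharp.exe build -out C:\fix -workers 4 game.dat"
+ // would have left feature == "Build", outdat == "C:\fix", workers == 4,
+ // and inputs == { "game.dat" }.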
- // Now take care of each mode in succesion
- switch(feature)
- {
- case "Help":
- // No-op as this should be caught
- break;
- // Adds ROM files from the specified directories to the ROM archive
- case "Archive":
- VerifyInputs(inputs, feature);
- InitArchive(inputs, onlyNeeded, resume, includeZips, workers, includeGZips, include7Zips, skipInitialScan, useGolangZip, noDb);
- break;
- // For each specified DAT file it creates the torrentzip files
- case "Build":
- VerifyInputs(inputs, feature);
- InitBuild(inputs, outdat, fixdatOnly, copy, workers, subworkers);
- break;
- // Cancels current long-running job
- case "Cancel":
- InitCancel();
- break;
- // Prints dat stats
- case "DatStats":
- VerifyInputs(inputs, feature);
- InitDatStats(inputs);
- break;
- // Prints db stats
- case "DbStats":
- InitDbStats();
- break;
- // Creates a DAT file with those entries that are in -new DAT
- case "Diffdat":
- InitDiffDat(outdat, old, newdat, name, description);
- break;
- // Creates a DAT file for the specified input directory and saves it to the -out filename
- case "Dir2Dat":
- InitDir2Dat(outdat, source, name, description);
- break;
- // Creates a DAT file with those entries that are in -new DAT
- case "EDiffdat":
- InitEDiffDat(outdat, old, newdat);
- break;
- // Exports db to export.csv
- case "Export":
- InitExport();
- break;
- // For each specified DAT file it creates a fix DAT
- case "Fixdat":
- VerifyInputs(inputs, feature);
- InitFixdat(inputs, outdat, fixdatOnly, workers, subworkers);
- break;
- // Import a database from a formatted CSV file
- case "Import":
- VerifyInputs(inputs, feature);
- InitImport(inputs);
- break;
- // For each specified hash it looks up any available information
- case "Lookup":
- VerifyInputs(inputs, feature);
- InitLookup(inputs, size, outdat);
- break;
- // Prints memory stats
- case "Memstats":
- InitMemstats();
- break;
- // Merges depot
- case "Merge":
- VerifyInputs(inputs, feature);
- InitMerge(inputs, onlyNeeded, resume, workers, skipInitialScan);
- break;
- // Create miss and have file
- case "Miss":
- VerifyInputs(inputs, feature);
- InitMiss(inputs);
- break;
- // Shows progress of the currently running command
- case "Progress":
- InitProgress();
- break;
- // Moves DAT index entries for orphaned DATs
- case "Purge Backup":
- InitPurgeBackup(backup, workers, depot, dats, logOnly);
- break;
- // Deletes DAT index entries for orphaned DATs
- case "Purge Delete":
- InitPurgeDelete(workers, depot, dats, logOnly);
- break;
- // Refreshes the DAT index from the files in the DAT master directory tree
- case "Refresh DATs":
- InitRefreshDats(workers, missingSha1s);
- break;
- // Rescan a specific depot
- case "Rescan Depots":
- VerifyInputs(inputs, feature);
- InitRescanDepots(inputs);
- break;
- // Gracefully shuts down server
- case "Shutdown":
- InitShutdown();
- break;
- // Prints version
- case "Version":
- InitVersion();
- break;
- // If nothing is set, show the help
- default:
- _help.OutputGenericHelp();
- break;
- }
+ // Now take care of each mode in succession
+ switch (feature)
+ {
+ case "Help":
+ // No-op as this should be caught
+ break;
+ // Adds ROM files from the specified directories to the ROM archive
+ case "Archive":
+ VerifyInputs(inputs, feature);
+ InitArchive(inputs, onlyNeeded, resume, includeZips, workers, includeGZips, include7Zips, skipInitialScan, useGolangZip, noDb);
+ break;
+ // For each specified DAT file it creates the torrentzip files
+ case "Build":
+ VerifyInputs(inputs, feature);
+ InitBuild(inputs, outdat, fixdatOnly, copy, workers, subworkers);
+ break;
+ // Cancels current long-running job
+ case "Cancel":
+ InitCancel();
+ break;
+ // Prints dat stats
+ case "DatStats":
+ VerifyInputs(inputs, feature);
+ InitDatStats(inputs);
+ break;
+ // Prints db stats
+ case "DbStats":
+ InitDbStats();
+ break;
+ // Creates a DAT file with those entries that are in -new DAT
+ case "Diffdat":
+ InitDiffDat(outdat, old, newdat, name, description);
+ break;
+ // Creates a DAT file for the specified input directory and saves it to the -out filename
+ case "Dir2Dat":
+ InitDir2Dat(outdat, source, name, description);
+ break;
+ // Creates a DAT file with those entries that are in -new DAT
+ case "EDiffdat":
+ InitEDiffDat(outdat, old, newdat);
+ break;
+ // Exports db to export.csv
+ case "Export":
+ InitExport();
+ break;
+ // For each specified DAT file it creates a fix DAT
+ case "Fixdat":
+ VerifyInputs(inputs, feature);
+ InitFixdat(inputs, outdat, fixdatOnly, workers, subworkers);
+ break;
+ // Import a database from a formatted CSV file
+ case "Import":
+ VerifyInputs(inputs, feature);
+ InitImport(inputs);
+ break;
+ // For each specified hash it looks up any available information
+ case "Lookup":
+ VerifyInputs(inputs, feature);
+ InitLookup(inputs, size, outdat);
+ break;
+ // Prints memory stats
+ case "Memstats":
+ InitMemstats();
+ break;
+ // Merges depot
+ case "Merge":
+ VerifyInputs(inputs, feature);
+ InitMerge(inputs, onlyNeeded, resume, workers, skipInitialScan);
+ break;
+ // Create miss and have file
+ case "Miss":
+ VerifyInputs(inputs, feature);
+ InitMiss(inputs);
+ break;
+ // Shows progress of the currently running command
+ case "Progress":
+ InitProgress();
+ break;
+ // Moves DAT index entries for orphaned DATs
+ case "Purge Backup":
+ InitPurgeBackup(backup, workers, depot, dats, logOnly);
+ break;
+ // Deletes DAT index entries for orphaned DATs
+ case "Purge Delete":
+ InitPurgeDelete(workers, depot, dats, logOnly);
+ break;
+ // Refreshes the DAT index from the files in the DAT master directory tree
+ case "Refresh DATs":
+ InitRefreshDats(workers, missingSha1s);
+ break;
+ // Rescan a specific depot
+ case "Rescan Depots":
+ VerifyInputs(inputs, feature);
+ InitRescanDepots(inputs);
+ break;
+ // Gracefully shuts down server
+ case "Shutdown":
+ InitShutdown();
+ break;
+ // Prints version
+ case "Version":
+ InitVersion();
+ break;
+ // If nothing is set, show the help
+ default:
+ _help.OutputGenericHelp();
+ break;
+ }
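+ // The case labels above are the display names produced by GetFeatureName
+ // (e.g. "Purge Backup"), not the raw command-line flags.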
- Globals.Logger.Close();
- return;
- }
+ Globals.Logger.Close();
+ return;
+ }

- private static void VerifyInputs(List<string> inputs, string feature)
- {
- if (inputs.Count == 0)
- {
- Globals.Logger.Error("This feature requires at least one input");
- _help.OutputIndividualFeature(feature);
- Environment.Exit(0);
- }
- }
- }
+ private static void VerifyInputs(List<string> inputs, string feature)
+ {
+ if (inputs.Count == 0)
+ {
+ Globals.Logger.Error("This feature requires at least one input");
+ _help.OutputIndividualFeature(feature);
+ Environment.Exit(0);
+ }
+ }
+ }
}