[Globals] Add global variables to be used everywhere

Matt Nadareski
2017-03-01 21:26:27 -08:00
parent 9c29fb0dbe
commit 89fd9f2237
22 changed files with 672 additions and 780 deletions
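At a glance, the change replaces the `Logger logger` and `int maxDegreeOfParallelism` parameters that were threaded through nearly every helper with reads of the new static `Globals` holder (added below in `SabreTools.Helper.Data`). A minimal before/after sketch of the calling pattern, using only members visible in this diff; the `ProcessKeys` method and its key list are illustrative and not part of the commit:

```csharp
using System.Collections.Generic;
using System.Threading.Tasks;
using SabreTools.Helper.Data; // assumed location of Globals, per the new file below

public class GlobalsUsageSketch
{
    // Before this commit, callers had to pass both values explicitly:
    // public void ProcessKeys(List<string> keys, int maxDegreeOfParallelism, Logger logger)

    // After this commit, helpers read the shared state directly.
    public void ProcessKeys(List<string> keys)
    {
        Globals.Logger.User("Processing " + keys.Count + " keys");

        Parallel.ForEach(keys,
            new ParallelOptions() { MaxDegreeOfParallelism = Globals.MaxDegreeOfParallelism },
            key =>
            {
                // Per-key work would go here; the pattern mirrors DatFile.BucketBy.
                Globals.Logger.Verbose("Handled key '" + key + "'");
            });
    }
}
```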

View File

@@ -37,22 +37,22 @@ namespace RombaSharp
// Total number of CRCs
string query = "SELECT COUNT(*) FROM crc";
SqliteCommand slc = new SqliteCommand(query, dbc);
-_logger.User("Total CRCs: " + (long)slc.ExecuteScalar());
+Globals.Logger.User("Total CRCs: " + (long)slc.ExecuteScalar());
// Total number of MD5s
query = "SELECT COUNT(*) FROM md5";
slc = new SqliteCommand(query, dbc);
-_logger.User("Total MD5s: " + (long)slc.ExecuteScalar());
+Globals.Logger.User("Total MD5s: " + (long)slc.ExecuteScalar());
// Total number of SHA1s
query = "SELECT COUNT(*) FROM sha1";
slc = new SqliteCommand(query, dbc);
-_logger.User("Total SHA1s: " + (long)slc.ExecuteScalar());
+Globals.Logger.User("Total SHA1s: " + (long)slc.ExecuteScalar());
// Total number of DATs
query = "SELECT COUNT(*) FROM dat";
slc = new SqliteCommand(query, dbc);
-_logger.User("Total DATs: " + (long)slc.ExecuteScalar());
+Globals.Logger.User("Total DATs: " + (long)slc.ExecuteScalar());
slc.Dispose();
dbc.Dispose();
@@ -65,16 +65,16 @@ namespace RombaSharp
{
Process proc = Process.GetCurrentProcess();
-_logger.User("Current Nonpaged Memory: " + Style.GetBytesReadable(proc.NonpagedSystemMemorySize64));
+Globals.Logger.User("Current Nonpaged Memory: " + Style.GetBytesReadable(proc.NonpagedSystemMemorySize64));
-_logger.User("Current Paged Memory: " + Style.GetBytesReadable(proc.PagedMemorySize64));
+Globals.Logger.User("Current Paged Memory: " + Style.GetBytesReadable(proc.PagedMemorySize64));
-_logger.User("Peak Paged Memory: " + Style.GetBytesReadable(proc.PeakPagedMemorySize64));
+Globals.Logger.User("Peak Paged Memory: " + Style.GetBytesReadable(proc.PeakPagedMemorySize64));
-_logger.User("Peak Virtual Memory: " + Style.GetBytesReadable(proc.PeakVirtualMemorySize64));
+Globals.Logger.User("Peak Virtual Memory: " + Style.GetBytesReadable(proc.PeakVirtualMemorySize64));
-_logger.User("Peak Working Memory: " + Style.GetBytesReadable(proc.PeakWorkingSet64));
+Globals.Logger.User("Peak Working Memory: " + Style.GetBytesReadable(proc.PeakWorkingSet64));
-_logger.User("Private Memory: " + Style.GetBytesReadable(proc.PrivateMemorySize64));
+Globals.Logger.User("Private Memory: " + Style.GetBytesReadable(proc.PrivateMemorySize64));
-_logger.User("Virtual Memory: " + Style.GetBytesReadable(proc.VirtualMemorySize64));
+Globals.Logger.User("Virtual Memory: " + Style.GetBytesReadable(proc.VirtualMemorySize64));
-_logger.User("Working Memory: " + Style.GetBytesReadable(proc.WorkingSet64));
+Globals.Logger.User("Working Memory: " + Style.GetBytesReadable(proc.WorkingSet64));
-_logger.User("Total Processor Time: " + proc.TotalProcessorTime);
+Globals.Logger.User("Total Processor Time: " + proc.TotalProcessorTime);
-_logger.User("User Processor Time: " + proc.UserProcessorTime);
+Globals.Logger.User("User Processor Time: " + proc.UserProcessorTime);
}
/// <summary>
@@ -130,12 +130,12 @@ namespace RombaSharp
if (lowerCaseDats.Contains(input.ToLowerInvariant()))
{
string fullpath = Path.GetFullPath(datRootDats[lowerCaseDats.IndexOf(input.ToLowerInvariant())]);
-string sha1 = FileTools.GetFileInfo(fullpath, _logger).SHA1;
+string sha1 = FileTools.GetFileInfo(fullpath).SHA1;
foundDats.Add(sha1, fullpath);
}
else
{
-_logger.Warning("The file '" + input + "' could not be found in the DAT root");
+Globals.Logger.Warning("The file '" + input + "' could not be found in the DAT root");
}
}
@@ -162,7 +162,7 @@ namespace RombaSharp
Dictionary<string, Tuple<long, bool>> depots = new Dictionary<string, Tuple<long, bool>>();
// Get the XML text reader for the configuration file, if possible
-XmlReader xtr = FileTools.GetXmlTextReader(_config, _logger);
+XmlReader xtr = FileTools.GetXmlTextReader(_config);
// Now parse the XML file for settings
if (xtr != null)
@@ -339,7 +339,7 @@ namespace RombaSharp
}
// Finally set all of the fields
-_workers = workers;
+Globals.MaxDegreeOfParallelism = workers;
_logdir = logdir;
_tmpdir = tmpdir;
_webdir = webdir;
@@ -359,7 +359,7 @@ namespace RombaSharp
/// <param name="logOnly">Only write out actions to log</param>
private static void PurgeBackup(bool logOnly)
{
-_logger.User("This feature is not yet implemented: purge-backup");
+Globals.Logger.User("This feature is not yet implemented: purge-backup");
}
/// <summary>
@@ -368,7 +368,7 @@ namespace RombaSharp
/// <param name="logOnly">Only write out actions to log</param>
private static void PurgeDelete(bool logOnly)
{
-_logger.User("This feature is not yet implemented: purge-delete");
+Globals.Logger.User("This feature is not yet implemented: purge-delete");
}
/// <summary>
@@ -404,8 +404,8 @@ namespace RombaSharp
// First get a list of SHA-1's from the input DATs
DatFile datroot = new DatFile { Type = "SuperDAT", };
-datroot.PopulateFromDir(_dats, Hash.SHA256 & Hash.SHA384 & Hash.SHA512, false, false, false, false, false, _tmpdir, false, null, 4, _logger);
+datroot.PopulateFromDir(_dats, Hash.SHA256 & Hash.SHA384 & Hash.SHA512, false, false, false, false, false, _tmpdir, false, null);
-datroot.BucketBy(SortedBy.SHA1, false /* mergeroms */, 4 /* maxDegreeOfParallelism */, _logger);
+datroot.BucketBy(SortedBy.SHA1, false /* mergeroms */);
// Create a List of dat hashes in the database (SHA-1)
List<string> databaseDats = new List<string>();
@@ -415,7 +415,7 @@ namespace RombaSharp
dbc.Open();
// Populate the List from the database
-_logger.User("Populating the list of existing DATs");
+Globals.Logger.User("Populating the list of existing DATs");
DateTime start = DateTime.Now;
string query = "SELECT DISTINCT hash FROM dat";
@@ -435,15 +435,15 @@ namespace RombaSharp
unneeded.Add(hash);
}
}
-datroot.BucketBy(SortedBy.Game, false /* mergeroms */, 4 /* maxDegreeOfParallelism */, _logger, norename: true);
+datroot.BucketBy(SortedBy.Game, false /* mergeroms */, norename: true);
-_logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
+Globals.Logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
slc.Dispose();
sldr.Dispose();
// Loop through the Dictionary and add all data
-_logger.User("Adding new DAT information");
+Globals.Logger.User("Adding new DAT information");
start = DateTime.Now;
foreach (string key in datroot.Keys)
{
@@ -453,10 +453,10 @@ namespace RombaSharp
}
}
-_logger.User("Adding complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
+Globals.Logger.User("Adding complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
// Now loop through and remove all references to old Dats
-_logger.User("Removing unmatched DAT information");
+Globals.Logger.User("Removing unmatched DAT information");
start = DateTime.Now;
foreach (string dathash in unneeded)
@@ -466,7 +466,7 @@ namespace RombaSharp
slc.ExecuteNonQuery();
slc.Dispose();
}
-_logger.User("Removing complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
+Globals.Logger.User("Removing complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
dbc.Dispose();
}
@@ -477,9 +477,9 @@ namespace RombaSharp
string fullpath = Path.Combine(_dats, (dat.Machine.Name == "dats" ? "" : dat.Machine.Name), dat.Name);
// Parse the Dat if possible
-_logger.User("Adding from '" + dat.Name + "'");
+Globals.Logger.User("Adding from '" + dat.Name + "'");
DatFile tempdat = new DatFile();
-tempdat.Parse(fullpath, 0, 0, _logger);
+tempdat.Parse(fullpath, 0, 0);
// If the Dat wasn't empty, add the information
SqliteCommand slc = new SqliteCommand();
@@ -496,7 +496,7 @@ namespace RombaSharp
{
foreach (DatItem datItem in tempdat[romkey])
{
-_logger.Verbose("Checking and adding file '" + datItem.Name);
+Globals.Logger.Verbose("Checking and adding file '" + datItem.Name);
if (datItem.Type == ItemType.Rom)
{
@@ -588,14 +588,14 @@ namespace RombaSharp
// Check that it's a valid depot first
if (!_depots.ContainsKey(depotname))
{
-_logger.User("'" + depotname + "' is not a recognized depot. Please add it to your configuration file and try again");
+Globals.Logger.User("'" + depotname + "' is not a recognized depot. Please add it to your configuration file and try again");
return;
}
// Then check that the depot is online
if (!Directory.Exists(depotname))
{
-_logger.User("'" + depotname + "' does not appear to be online. Please check its status and try again");
+Globals.Logger.User("'" + depotname + "' does not appear to be online. Please check its status and try again");
return;
}
@@ -618,8 +618,8 @@ namespace RombaSharp
// Now rescan the depot itself
DatFile depot = new DatFile();
-depot.PopulateFromDir(depotname, Hash.SHA256 & Hash.SHA384 & Hash.SHA512, false, false, true, false, false, _tmpdir, false, null, _workers, _logger);
+depot.PopulateFromDir(depotname, Hash.SHA256 & Hash.SHA384 & Hash.SHA512, false, false, true, false, false, _tmpdir, false, null);
-depot.BucketBy(SortedBy.SHA1, false /* mergeroms */, 4 /* maxDegreeOfParallelism */, _logger);
+depot.BucketBy(SortedBy.SHA1, false /* mergeroms */);
// Set the base queries to use
string crcquery = "INSERT OR IGNORE INTO crc (crc) VALUES";

View File

@@ -41,12 +41,12 @@ namespace RombaSharp
DatFile df = new DatFile();
foreach (string dir in onlyDirs)
{
-df.PopulateFromDir(dir, Hash.SHA256 & Hash.SHA384 & Hash.SHA512, false, false, true, false, false, _tmpdir, false, null, _workers, _logger);
+df.PopulateFromDir(dir, Hash.SHA256 & Hash.SHA384 & Hash.SHA512, false, false, true, false, false, _tmpdir, false, null);
// If we're looking for only needed, consider the zipfiles themselves too
if (onlyNeeded)
{
-df.PopulateFromDir(dir, Hash.SHA256 & Hash.SHA384 & Hash.SHA512, false, true, true, false, false, _tmpdir, false, null, _workers, _logger);
+df.PopulateFromDir(dir, Hash.SHA256 & Hash.SHA384 & Hash.SHA512, false, true, true, false, false, _tmpdir, false, null);
}
}
@@ -176,7 +176,7 @@ namespace RombaSharp
ArchiveScanLevel asl = ArchiveTools.GetArchiveScanLevelFromNumbers((onlyNeeded ? 0 : 1), (onlyNeeded ? 0 : 1), (onlyNeeded ? 0 : 1), (onlyNeeded ? 0 : 1));
need.RebuildGeneric(onlyDirs, _depots.Keys.ToList()[0], _tmpdir, false /*quickScan*/, false /*date*/,
false /*delete*/, false /*inverse*/, OutputFormat.TorrentGzip, true /*romba*/, asl, false /*updateDat*/,
-null /*headerToCheckAgainst*/, _workers /*maxDegreeOfParallelism*/, _logger);
+null /*headerToCheckAgainst*/);
}
/// <summary>
@@ -200,7 +200,7 @@ namespace RombaSharp
{
// Get the DAT file associated with the key
DatFile datFile = new DatFile();
-datFile.Parse(Path.Combine(_dats, foundDats[key]), 0, 0, _logger);
+datFile.Parse(Path.Combine(_dats, foundDats[key]), 0, 0);
// Create the new output directory if it doesn't exist
string outputFolder = Path.Combine("out", Path.GetFileNameWithoutExtension(foundDats[key]));
@@ -216,7 +216,7 @@ namespace RombaSharp
ArchiveScanLevel asl = ArchiveTools.GetArchiveScanLevelFromNumbers(1, 1, 1, 1);
datFile.RebuildDepot(onlineDepots, outputFolder, _tmpdir, false /*date*/,
false /*delete*/, false /*inverse*/, (copy ? OutputFormat.TorrentGzip : OutputFormat.TorrentZip), copy,
-false /*updateDat*/, null /*headerToCheckAgainst*/, _workers /*maxDegreeOfParallelism*/, _logger);
+false /*updateDat*/, null /*headerToCheckAgainst*/);
}
}
@@ -226,7 +226,7 @@ namespace RombaSharp
/// <param name="newdat"></param>
private static void InitDiffDat(string newdat)
{
-_logger.User("This feature is not yet implemented: diffdat");
+Globals.Logger.User("This feature is not yet implemented: diffdat");
// First, we want to read in the DAT. Then for each file listed in the DAT, we check if it's in there or not.
// If it is in there, we add it to an output DAT. If it's not, we skip. Then we output the DAT.
@@ -247,15 +247,13 @@ namespace RombaSharp
DatFormat = DatFormat.Logiqx,
};
-Logger logger = new Logger();
foreach (string input in inputs)
{
datdata.PopulateFromDir(input, Hash.SHA256 & Hash.SHA384 & Hash.SHA512 /* omitFromScan */, true /* bare */, false /* archivesAsFiles */,
true /* enableGzip */, false /* addBlanks */, false /* addDate */, _tmpdir /* tempDir */, false /* copyFiles */,
-null /* headerToCheckAgainst */, _workers /* maxDegreeOfParallelism */, _logger);
+null /* headerToCheckAgainst */);
-datdata.WriteToFile("", _workers, logger);
+datdata.WriteToFile("");
}
-logger.Close();
}
/// <summary>
@@ -264,7 +262,7 @@ namespace RombaSharp
/// <param name="inputs"></param>
private static void InitFixdat(List<string> inputs)
{
-_logger.User("This feature is not yet implemented: fixdat");
+Globals.Logger.User("This feature is not yet implemented: fixdat");
// Verify the filenames
Dictionary<string, string> foundDats = GetValidDats(inputs);
@@ -281,7 +279,7 @@ namespace RombaSharp
/// <param name="inputs"></param>
private static void InitImport(List<string> inputs)
{
-_logger.User("This feature is not yet implemented: import");
+Globals.Logger.User("This feature is not yet implemented: import");
}
/// <summary>
@@ -334,11 +332,11 @@ namespace RombaSharp
SqliteDataReader sldr = slc.ExecuteReader();
if (sldr.HasRows)
{
-_logger.User("For hash '" + input + "' there were " + sldr.RecordsAffected + " matches in the database");
+Globals.Logger.User("For hash '" + input + "' there were " + sldr.RecordsAffected + " matches in the database");
}
else
{
-_logger.User("Hash '" + input + "' had no matches in the database");
+Globals.Logger.User("Hash '" + input + "' had no matches in the database");
}
sldr.Dispose();
@@ -351,11 +349,11 @@ namespace RombaSharp
SqliteDataReader sldr = slc.ExecuteReader();
if (sldr.HasRows)
{
-_logger.User("For hash '" + input + "' there were " + sldr.RecordsAffected + " matches in the database");
+Globals.Logger.User("For hash '" + input + "' there were " + sldr.RecordsAffected + " matches in the database");
}
else
{
-_logger.User("Hash '" + input + "' had no matches in the database");
+Globals.Logger.User("Hash '" + input + "' had no matches in the database");
}
sldr.Dispose();
@@ -368,11 +366,11 @@ namespace RombaSharp
SqliteDataReader sldr = slc.ExecuteReader();
if (sldr.HasRows)
{
-_logger.User("For hash '" + input + "' there were " + sldr.RecordsAffected + " matches in the database");
+Globals.Logger.User("For hash '" + input + "' there were " + sldr.RecordsAffected + " matches in the database");
}
else
{
-_logger.User("Hash '" + input + "' had no matches in the database");
+Globals.Logger.User("Hash '" + input + "' had no matches in the database");
}
sldr.Dispose();
@@ -390,7 +388,7 @@ namespace RombaSharp
/// <param name="onlyNeeded"></param>
private static void InitMerge(List<string> inputs, string depotPath, bool onlyNeeded)
{
-_logger.User("This feature is not yet implemented: merge");
+Globals.Logger.User("This feature is not yet implemented: merge");
}
/// <summary>
@@ -399,7 +397,7 @@ namespace RombaSharp
/// <param name="inputs"></param>
private static void InitMiss(List<string> inputs)
{
-_logger.User("This feature is not yet implemented: miss");
+Globals.Logger.User("This feature is not yet implemented: miss");
// Verify the filenames
Dictionary<string, string> foundDats = GetValidDats(inputs);

View File

@@ -19,7 +19,6 @@ namespace RombaSharp
public partial class RombaSharp
{
// General settings
-private static int _workers; // Number of parallel threads
private static string _logdir; // Log folder location
private static string _tmpdir; // Temp folder location
private static string _webdir; // Web frontend location
@@ -41,7 +40,6 @@ namespace RombaSharp
private static string _config = "config.xml";
private static string _dbSchema = "rombasharp";
private static string _connectionString;
-private static Logger _logger;
private static Help _help;
/// <summary>
@@ -50,7 +48,7 @@ namespace RombaSharp
public static void Main(string[] args)
{
// Perform initial setup and verification
-_logger = new Logger(true, "romba.log");
+Globals.Logger = new Logger(true, "romba.log");
InitializeConfiguration();
DatabaseTools.EnsureDatabase(_dbSchema, _db, _connectionString);
@@ -68,7 +66,7 @@ namespace RombaSharp
if ((new List<string>(args)).Contains("--credits"))
{
_help.OutputCredits();
-_logger.Close();
+Globals.Logger.Close();
return;
}
@@ -76,7 +74,7 @@ namespace RombaSharp
if (args.Length == 0)
{
_help.OutputGenericHelp();
-_logger.Close();
+Globals.Logger.Close();
return;
}
@@ -258,9 +256,9 @@ namespace RombaSharp
if (!(archive ^ build ^ dbstats ^ depotRescan ^ diffdat ^ dir2dat ^ export ^ fixdat ^ import ^ lookup ^
memstats ^ merge ^ miss ^ progress ^ purgeBackup ^ purgeDelete ^ refreshDats ^ shutdown))
{
-_logger.Error("Only one feature switch is allowed at a time");
+Globals.Logger.Error("Only one feature switch is allowed at a time");
_help.OutputGenericHelp();
-_logger.Close();
+Globals.Logger.Close();
return;
}
@@ -268,9 +266,9 @@ namespace RombaSharp
if (inputs.Count == 0 && (archive || build || depotRescan || dir2dat || fixdat ||
import || lookup || merge || miss))
{
-_logger.Error("This feature requires at least one input");
+Globals.Logger.Error("This feature requires at least one input");
_help.OutputGenericHelp();
-_logger.Close();
+Globals.Logger.Close();
return;
}
@@ -360,7 +358,7 @@ namespace RombaSharp
// Shows progress of the currently running command
else if (progress)
{
-_logger.User("This feature is not used in RombaSharp: progress");
+Globals.Logger.User("This feature is not used in RombaSharp: progress");
}
// Moves DAT index entries for orphaned DATs
@@ -384,7 +382,7 @@ namespace RombaSharp
// Gracefully shuts down server
else if (shutdown)
{
-_logger.User("This feature is not used in RombaSharp: shutdown");
+Globals.Logger.User("This feature is not used in RombaSharp: shutdown");
}
// If nothing is set, show the help
@@ -393,7 +391,7 @@ namespace RombaSharp
_help.OutputGenericHelp();
}
-_logger.Close();
+Globals.Logger.Close();
return;
}
}

View File

@@ -0,0 +1,34 @@
namespace SabreTools.Helper.Data
{
public class Globals
{
#region Private implementations
private static Logger _logger = null;
private static int _maxDegreeOfParallelism = 4;
#endregion
#region Public accessors
public static Logger Logger
{
get
{
if (_logger == null)
{
_logger = new Logger();
}
return _logger;
}
set { _logger = value; }
}
public static int MaxDegreeOfParallelism
{
get { return _maxDegreeOfParallelism; }
set { _maxDegreeOfParallelism = value; }
}
#endregion
}
}
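The `Logger` accessor above lazily constructs a default instance the first time it is read, so library code can log before the host application has configured anything; an entry point can still assign its own instance, as `RombaSharp.Main` does later in this commit. A short sketch of both paths, assuming `Logger` resolves from the same namespace/usings as in the new `Globals.cs` and using only constructor overloads seen elsewhere in the diff; the `Bootstrap` wrapper is illustrative only:

```csharp
using SabreTools.Helper.Data; // assumed namespace of the new Globals class

public static class Bootstrap
{
    public static void Run()
    {
        // First read: the getter creates a default Logger on demand.
        Globals.Logger.Verbose("Using the lazily-created default logger");

        // An application can replace it with a file-backed logger,
        // mirroring the RombaSharp.Main change in this commit.
        Globals.Logger = new Logger(true, "romba.log");

        // Parallelism is configured once and picked up by every
        // Parallel.ForEach that reads Globals.MaxDegreeOfParallelism.
        Globals.MaxDegreeOfParallelism = 8;
    }
}
```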

View File

@@ -172,9 +172,8 @@ namespace SabreTools.Helper.Dats
/// Determine if an item is a duplicate using partial matching logic
/// </summary>
/// <param name="lastItem">DatItem to use as a baseline</param>
-/// <param name="logger">Logger object for console and/or file output</param>
/// <returns>True if the roms are duplicates, false otherwise</returns>
-public bool IsDuplicate(DatItem lastItem, Logger logger)
+public bool IsDuplicate(DatItem lastItem)
{
bool dupefound = this.Equals(lastItem);
@@ -183,7 +182,7 @@ namespace SabreTools.Helper.Dats
{
if (!String.IsNullOrEmpty(((Rom)this).SHA1) && ((Rom)this).SHA1 == ((Rom)lastItem).SHA1 && ((Rom)this).Size != ((Rom)lastItem).Size)
{
-logger.User("SHA-1 mismatch - Hash: " + ((Rom)this).SHA1);
+Globals.Logger.User("SHA-1 mismatch - Hash: " + ((Rom)this).SHA1);
}
}
@@ -194,14 +193,13 @@ namespace SabreTools.Helper.Dats
/// Return the duplicate status of two items
/// </summary>
/// <param name="lastItem">DatItem to check against</param>
-/// <param name="logger">Logger object for console and/or file output</param>
/// <returns>The DupeType corresponding to the relationship between the two</returns>
-public DupeType GetDuplicateStatus(DatItem lastItem, Logger logger)
+public DupeType GetDuplicateStatus(DatItem lastItem)
{
DupeType output = 0x00;
// If we don't have a duplicate at all, return none
-if (!this.IsDuplicate(lastItem, logger))
+if (!this.IsDuplicate(lastItem))
{
return output;
}
@@ -243,10 +241,8 @@ namespace SabreTools.Helper.Dats
/// Check if a DAT contains the given rom
/// </summary>
/// <param name="datdata">Dat to match against</param>
-/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
-/// <param name="logger">Logger object for console and/or file output</param>
/// <returns>True if it contains the rom, false otherwise</returns>
-public bool HasDuplicates(DatFile datdata, int maxDegreeOfParallelism, Logger logger)
+public bool HasDuplicates(DatFile datdata)
{
// Check for an empty rom list first
if (datdata.Count == 0)
@@ -255,7 +251,7 @@ namespace SabreTools.Helper.Dats
}
// We want to get the proper key for the DatItem
-string key = SortAndGetKey(datdata, maxDegreeOfParallelism, logger);
+string key = SortAndGetKey(datdata);
// If the key doesn't exist, return the empty list
if (!datdata.ContainsKey(key))
@@ -268,7 +264,7 @@ namespace SabreTools.Helper.Dats
foreach (DatItem rom in roms)
{
-if (IsDuplicate(rom, logger))
+if (IsDuplicate(rom))
{
return true;
}
@@ -281,11 +277,9 @@ namespace SabreTools.Helper.Dats
/// List all duplicates found in a DAT based on a rom
/// </summary>
/// <param name="datdata">Dat to match against</param>
-/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
-/// <param name="logger">Logger object for console and/or file output</param>
/// <param name="remove">True to remove matched roms from the input, false otherwise (default)</param>
/// <returns>List of matched DatItem objects</returns>
-public List<DatItem> GetDuplicates(DatFile datdata, int maxDegreeOfParallelism, Logger logger, bool remove = false)
+public List<DatItem> GetDuplicates(DatFile datdata, bool remove = false)
{
List<DatItem> output = new List<DatItem>();
@@ -296,7 +290,7 @@ namespace SabreTools.Helper.Dats
}
// We want to get the proper key for the DatItem
-string key = SortAndGetKey(datdata, maxDegreeOfParallelism, logger);
+string key = SortAndGetKey(datdata);
// If the key doesn't exist, return the empty list
if (!datdata.ContainsKey(key))
@@ -310,7 +304,7 @@ namespace SabreTools.Helper.Dats
foreach (DatItem rom in roms)
{
-if (IsDuplicate(rom, logger))
+if (IsDuplicate(rom))
{
output.Add(rom);
}
@@ -333,9 +327,8 @@ namespace SabreTools.Helper.Dats
/// Sort the input DAT and get the key to be used by the item
/// </summary>
/// <param name="datdata">Dat to match against</param>
-/// <param name="logger">Logger object for console and/or file output</param>
/// <returns>Key to try to use</returns>
-private string SortAndGetKey(DatFile datdata, int maxDegreeOfParallelism, Logger logger)
+private string SortAndGetKey(DatFile datdata)
{
string key = null;
@@ -347,12 +340,12 @@ namespace SabreTools.Helper.Dats
if (_itemType == ItemType.Rom)
{
key = ((Rom)this).SHA512;
-datdata.BucketBy(SortedBy.SHA512, false /* mergeroms */, maxDegreeOfParallelism, logger);
+datdata.BucketBy(SortedBy.SHA512, false /* mergeroms */);
}
else
{
key = ((Disk)this).SHA512;
-datdata.BucketBy(SortedBy.SHA512, false /* mergeroms */, maxDegreeOfParallelism, logger);
+datdata.BucketBy(SortedBy.SHA512, false /* mergeroms */);
}
}
@@ -364,12 +357,12 @@ namespace SabreTools.Helper.Dats
if (_itemType == ItemType.Rom)
{
key = ((Rom)this).SHA384;
-datdata.BucketBy(SortedBy.SHA384, false /* mergeroms */, maxDegreeOfParallelism, logger);
+datdata.BucketBy(SortedBy.SHA384, false /* mergeroms */);
}
else
{
key = ((Disk)this).SHA384;
-datdata.BucketBy(SortedBy.SHA384, false /* mergeroms */, maxDegreeOfParallelism, logger);
+datdata.BucketBy(SortedBy.SHA384, false /* mergeroms */);
}
}
@@ -381,12 +374,12 @@ namespace SabreTools.Helper.Dats
if (_itemType == ItemType.Rom)
{
key = ((Rom)this).SHA256;
-datdata.BucketBy(SortedBy.SHA256, false /* mergeroms */, maxDegreeOfParallelism, logger);
+datdata.BucketBy(SortedBy.SHA256, false /* mergeroms */);
}
else
{
key = ((Disk)this).SHA256;
-datdata.BucketBy(SortedBy.SHA256, false /* mergeroms */, maxDegreeOfParallelism, logger);
+datdata.BucketBy(SortedBy.SHA256, false /* mergeroms */);
}
}
@@ -398,12 +391,12 @@ namespace SabreTools.Helper.Dats
if (_itemType == ItemType.Rom)
{
key = ((Rom)this).SHA1;
-datdata.BucketBy(SortedBy.SHA1, false /* mergeroms */, maxDegreeOfParallelism, logger);
+datdata.BucketBy(SortedBy.SHA1, false /* mergeroms */);
}
else
{
key = ((Disk)this).SHA1;
-datdata.BucketBy(SortedBy.SHA1, false /* mergeroms */, maxDegreeOfParallelism, logger);
+datdata.BucketBy(SortedBy.SHA1, false /* mergeroms */);
}
}
@@ -415,12 +408,12 @@ namespace SabreTools.Helper.Dats
if (_itemType == ItemType.Rom)
{
key = ((Rom)this).MD5;
-datdata.BucketBy(SortedBy.MD5, false /* mergeroms */, maxDegreeOfParallelism, logger);
+datdata.BucketBy(SortedBy.MD5, false /* mergeroms */);
}
else
{
key = ((Disk)this).MD5;
-datdata.BucketBy(SortedBy.MD5, false /* mergeroms */, maxDegreeOfParallelism, logger);
+datdata.BucketBy(SortedBy.MD5, false /* mergeroms */);
}
}
@@ -428,21 +421,21 @@ namespace SabreTools.Helper.Dats
else if (_itemType == ItemType.Disk)
{
key = ((Disk)this).MD5;
-datdata.BucketBy(SortedBy.MD5, false /* mergeroms */, maxDegreeOfParallelism, logger);
+datdata.BucketBy(SortedBy.MD5, false /* mergeroms */);
}
// If we've gotten here and we have a Rom, sort by CRC
else if (_itemType == ItemType.Rom)
{
key = ((Rom)this).CRC;
-datdata.BucketBy(SortedBy.CRC, false /* mergeroms */, maxDegreeOfParallelism, logger);
+datdata.BucketBy(SortedBy.CRC, false /* mergeroms */);
}
// Otherwise, we use -1 as the key
else
{
key = "-1";
-datdata.BucketBy(SortedBy.Size, false /* mergeroms */, maxDegreeOfParallelism, logger);
+datdata.BucketBy(SortedBy.Size, false /* mergeroms */);
}
return key;
@@ -460,9 +453,8 @@ namespace SabreTools.Helper.Dats
/// Merge an arbitrary set of ROMs based on the supplied information
/// </summary>
/// <param name="infiles">List of File objects representing the roms to be merged</param>
-/// <param name="logger">Logger object for console and/or file output</param>
/// <returns>A List of RomData objects representing the merged roms</returns>
-public static List<DatItem> Merge(List<DatItem> infiles, Logger logger)
+public static List<DatItem> Merge(List<DatItem> infiles)
{
// Check for null or blank roms first
if (infiles == null || infiles.Count == 0)
@@ -500,7 +492,7 @@ namespace SabreTools.Helper.Dats
DatItem lastrom = outfiles[i];
// Get the duplicate status
-dupetype = file.GetDuplicateStatus(lastrom, logger);
+dupetype = file.GetDuplicateStatus(lastrom);
// If it's a duplicate, skip adding it to the output but add any missing information
if (dupetype != 0x00)
@@ -588,12 +580,11 @@ namespace SabreTools.Helper.Dats
/// Resolve name duplicates in an arbitrary set of ROMs based on the supplied information
/// </summary>
/// <param name="infiles">List of File objects representing the roms to be merged</param>
-/// <param name="logger">Logger object for console and/or file output</param>
/// <returns>A List of RomData objects representing the renamed roms</returns>
/// <remarks>
/// Eventually, we want this to use the CRC/MD5/SHA-1 of relavent items instead of just _1
/// </remarks>
-public static List<DatItem> ResolveNames(List<DatItem> infiles, Logger logger)
+public static List<DatItem> ResolveNames(List<DatItem> infiles)
{
// Create the output list
List<DatItem> output = new List<DatItem>();
@@ -618,16 +609,16 @@ namespace SabreTools.Helper.Dats
}
// If the current item exactly matches the last item, then we don't add it
-if ((datItem.GetDuplicateStatus(lastItem, logger) & DupeType.All) != 0)
+if ((datItem.GetDuplicateStatus(lastItem) & DupeType.All) != 0)
{
-logger.Verbose("Exact duplicate found for '" + datItem.Name + "'");
+Globals.Logger.Verbose("Exact duplicate found for '" + datItem.Name + "'");
continue;
}
// If the current name matches the previous name, rename the current item
else if (datItem.Name == lastItem.Name)
{
-logger.Verbose("Name duplicate found for '" + datItem.Name + "'");
+Globals.Logger.Verbose("Name duplicate found for '" + datItem.Name + "'");
if (datItem.Type == ItemType.Disk)
{
@@ -666,7 +657,7 @@ namespace SabreTools.Helper.Dats
// Otherwise, we say that we have a valid named file
else
{
-logger.Verbose("Adding unmatched file '" + datItem.Name + "'");
+Globals.Logger.Verbose("Adding unmatched file '" + datItem.Name + "'");
output.Add(datItem);
lastItem = datItem;
lastrenamed = null;

View File

@@ -257,9 +257,8 @@ namespace SabreTools.Helper.Dats
/// Check to see if a DatItem passes the filter
/// </summary>
/// <param name="item">DatItem to check</param>
-/// <param name="logger">Logger object for file and console output</param>
/// <returns>True if the file passed the filter, false otherwise</returns>
-public bool ItemPasses(DatItem item, Logger logger)
+public bool ItemPasses(DatItem item)
{
// If the item is null, we automatically fail it
if (item == null)
@@ -650,9 +649,8 @@ namespace SabreTools.Helper.Dats
/// Get the machine type from a string
/// </summary>
/// <param name="gametype">Machine type as a string</param>
-/// <param name="logger">Logger object for file and console output</param>
/// <returns>A machine type based on the input</returns>
-public static MachineType GetMachineTypeFromString(string gametype, Logger logger)
+public static MachineType GetMachineTypeFromString(string gametype)
{
MachineType machineType = MachineType.NULL;
switch (gametype.ToLowerInvariant())
@@ -672,7 +670,7 @@ namespace SabreTools.Helper.Dats
machineType |= MachineType.Mechanical;
break;
default:
-logger.Warning(gametype + " is not a valid type");
+Globals.Logger.Warning(gametype + " is not a valid type");
break;
}
@@ -683,9 +681,8 @@ namespace SabreTools.Helper.Dats
/// Get the item status from a string
/// </summary>
/// <param name="status">Item status as a string</param>
-/// <param name="logger">Logger object for file and console output</param>
/// <returns>An item status based on the input</returns>
-public static ItemStatus GetStatusFromString(string status, Logger logger)
+public static ItemStatus GetStatusFromString(string status)
{
ItemStatus itemStatus = ItemStatus.NULL;
switch (status.ToLowerInvariant())
@@ -706,7 +703,7 @@ namespace SabreTools.Helper.Dats
itemStatus |= ItemStatus.Verified;
break;
default:
-logger.Warning(status + " is not a valid status");
+Globals.Logger.Warning(status + " is not a valid status");
break;
}

View File

@@ -19,11 +19,9 @@ namespace SabreTools.Helper.Dats
/// </summary> /// </summary>
/// <param name="bucketBy">SortedBy enum representing how to sort the individual items</param> /// <param name="bucketBy">SortedBy enum representing how to sort the individual items</param>
/// <param name="mergeroms">True if roms should be deduped, false otherwise</param> /// <param name="mergeroms">True if roms should be deduped, false otherwise</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logger object for file and console output</param>
/// <param name="lower">True if the key should be lowercased (default), false otherwise</param> /// <param name="lower">True if the key should be lowercased (default), false otherwise</param>
/// <param name="norename">True if games should only be compared on game and file name, false if system and source are counted</param> /// <param name="norename">True if games should only be compared on game and file name, false if system and source are counted</param>
public void BucketBy(SortedBy bucketBy, bool mergeroms, int maxDegreeOfParallelism, Logger logger, bool lower = true, bool norename = true) public void BucketBy(SortedBy bucketBy, bool mergeroms, bool lower = true, bool norename = true)
{ {
// If we already have the right sorting, trust it // If we already have the right sorting, trust it
if (_sortedBy == bucketBy) if (_sortedBy == bucketBy)
@@ -37,12 +35,12 @@ namespace SabreTools.Helper.Dats
// Create the temporary dictionary to sort into // Create the temporary dictionary to sort into
SortedDictionary<string, List<DatItem>> sortable = new SortedDictionary<string, List<DatItem>>(); SortedDictionary<string, List<DatItem>> sortable = new SortedDictionary<string, List<DatItem>>();
logger.User("Organizing " + (mergeroms ? "and merging " : "") + "roms by " + bucketBy); Globals.Logger.User("Organizing " + (mergeroms ? "and merging " : "") + "roms by " + bucketBy);
// First do the initial sort of all of the roms // First do the initial sort of all of the roms
List<string> keys = Keys.ToList(); List<string> keys = Keys.ToList();
Parallel.ForEach(keys, Parallel.ForEach(keys,
new ParallelOptions() { MaxDegreeOfParallelism = maxDegreeOfParallelism }, new ParallelOptions() { MaxDegreeOfParallelism = Globals.MaxDegreeOfParallelism },
key => key =>
{ {
List<DatItem> roms = this[key]; List<DatItem> roms = this[key];
@@ -50,7 +48,7 @@ namespace SabreTools.Helper.Dats
// If we're merging the roms, do so // If we're merging the roms, do so
if (mergeroms) if (mergeroms)
{ {
roms = DatItem.Merge(roms, logger); roms = DatItem.Merge(roms);
} }
// Now add each of the roms to their respective games // Now add each of the roms to their respective games
@@ -131,7 +129,7 @@ namespace SabreTools.Helper.Dats
// Now go through and sort all of the individual lists // Now go through and sort all of the individual lists
keys = sortable.Keys.ToList(); keys = sortable.Keys.ToList();
Parallel.ForEach(keys, Parallel.ForEach(keys,
new ParallelOptions() { MaxDegreeOfParallelism = maxDegreeOfParallelism }, new ParallelOptions() { MaxDegreeOfParallelism = Globals.MaxDegreeOfParallelism },
key => key =>
{ {
List<DatItem> sortedlist = sortable[key]; List<DatItem> sortedlist = sortable[key];
@@ -155,25 +153,23 @@ namespace SabreTools.Helper.Dats
/// Use cloneof tags to create non-merged sets and remove the tags plus using the device_ref tags to get full sets /// Use cloneof tags to create non-merged sets and remove the tags plus using the device_ref tags to get full sets
/// </summary> /// </summary>
/// <param name="mergeroms">True if roms should be deduped, false otherwise</param> /// <param name="mergeroms">True if roms should be deduped, false otherwise</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param> public void CreateFullyNonMergedSets(bool mergeroms)
/// <param name="logger">Logger object for file and console output</param>
public void CreateFullyNonMergedSets(bool mergeroms, int maxDegreeOfParallelism, Logger logger)
{ {
logger.User("Creating fully non-merged sets from the DAT"); Globals.Logger.User("Creating fully non-merged sets from the DAT");
// For sake of ease, the first thing we want to do is sort by game // For sake of ease, the first thing we want to do is sort by game
BucketBy(SortedBy.Game, mergeroms, maxDegreeOfParallelism, logger, norename: true); BucketBy(SortedBy.Game, mergeroms, norename: true);
_sortedBy = SortedBy.Default; _sortedBy = SortedBy.Default;
// Now we want to loop through all of the games and set the correct information // Now we want to loop through all of the games and set the correct information
AddRomsFromDevices(logger); AddRomsFromDevices();
AddRomsFromParent(logger); AddRomsFromParent();
// Now that we have looped through the cloneof tags, we loop through the romof tags // Now that we have looped through the cloneof tags, we loop through the romof tags
AddRomsFromBios(logger); AddRomsFromBios();
// Then, remove the romof and cloneof tags so it's not picked up by the manager // Then, remove the romof and cloneof tags so it's not picked up by the manager
RemoveTagsFromChild(logger); RemoveTagsFromChild();
// Finally, remove all sets that are labeled as bios or device // Finally, remove all sets that are labeled as bios or device
//RemoveBiosAndDeviceSets(logger); //RemoveBiosAndDeviceSets(logger);
@@ -183,72 +179,66 @@ namespace SabreTools.Helper.Dats
/// Use cloneof tags to create merged sets and remove the tags /// Use cloneof tags to create merged sets and remove the tags
/// </summary> /// </summary>
/// <param name="mergeroms">True if roms should be deduped, false otherwise</param> /// <param name="mergeroms">True if roms should be deduped, false otherwise</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param> public void CreateMergedSets(bool mergeroms)
/// <param name="logger">Logger object for file and console output</param>
public void CreateMergedSets(bool mergeroms, int maxDegreeOfParallelism, Logger logger)
{ {
logger.User("Creating merged sets from the DAT"); Globals.Logger.User("Creating merged sets from the DAT");
// For sake of ease, the first thing we want to do is sort by game // For sake of ease, the first thing we want to do is sort by game
BucketBy(SortedBy.Game, mergeroms, maxDegreeOfParallelism, logger, norename: true); BucketBy(SortedBy.Game, mergeroms, norename: true);
_sortedBy = SortedBy.Default; _sortedBy = SortedBy.Default;
// Now we want to loop through all of the games and set the correct information // Now we want to loop through all of the games and set the correct information
AddRomsToParent(logger); AddRomsToParent();
// Now that we have looped through the cloneof tags, we loop through the romof tags // Now that we have looped through the cloneof tags, we loop through the romof tags
RemoveBiosRomsFromChild(logger); RemoveBiosRomsFromChild();
// Finally, remove the romof and cloneof tags so it's not picked up by the manager // Finally, remove the romof and cloneof tags so it's not picked up by the manager
RemoveTagsFromChild(logger); RemoveTagsFromChild();
} }
/// <summary> /// <summary>
/// Use cloneof tags to create non-merged sets and remove the tags /// Use cloneof tags to create non-merged sets and remove the tags
/// </summary> /// </summary>
/// <param name="mergeroms">True if roms should be deduped, false otherwise</param> /// <param name="mergeroms">True if roms should be deduped, false otherwise</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param> public void CreateNonMergedSets(bool mergeroms)
/// <param name="logger">Logger object for file and console output</param>
public void CreateNonMergedSets(bool mergeroms, int maxDegreeOfParallelism, Logger logger)
{ {
logger.User("Creating non-merged sets from the DAT"); Globals.Logger.User("Creating non-merged sets from the DAT");
// For sake of ease, the first thing we want to do is sort by game // For sake of ease, the first thing we want to do is sort by game
BucketBy(SortedBy.Game, mergeroms, maxDegreeOfParallelism, logger, norename: true); BucketBy(SortedBy.Game, mergeroms, norename: true);
_sortedBy = SortedBy.Default; _sortedBy = SortedBy.Default;
// Now we want to loop through all of the games and set the correct information // Now we want to loop through all of the games and set the correct information
AddRomsFromParent(logger); AddRomsFromParent();
// Now that we have looped through the cloneof tags, we loop through the romof tags // Now that we have looped through the cloneof tags, we loop through the romof tags
RemoveBiosRomsFromChild(logger); RemoveBiosRomsFromChild();
// Finally, remove the romof and cloneof tags so it's not picked up by the manager // Finally, remove the romof and cloneof tags so it's not picked up by the manager
RemoveTagsFromChild(logger); RemoveTagsFromChild();
} }
/// <summary> /// <summary>
/// Use cloneof and romof tags to create split sets and remove the tags /// Use cloneof and romof tags to create split sets and remove the tags
/// </summary> /// </summary>
/// <param name="mergeroms">True if roms should be deduped, false otherwise</param> /// <param name="mergeroms">True if roms should be deduped, false otherwise</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param> public void CreateSplitSets(bool mergeroms)
/// <param name="logger">Logger object for file and console output</param>
public void CreateSplitSets(bool mergeroms, int maxDegreeOfParallelism, Logger logger)
{ {
logger.User("Creating split sets from the DAT"); Globals.Logger.User("Creating split sets from the DAT");
// For sake of ease, the first thing we want to do is sort by game // For sake of ease, the first thing we want to do is sort by game
BucketBy(SortedBy.Game, mergeroms, maxDegreeOfParallelism, logger, norename: true); BucketBy(SortedBy.Game, mergeroms, norename: true);
_sortedBy = SortedBy.Default; _sortedBy = SortedBy.Default;
// Now we want to loop through all of the games and set the correct information // Now we want to loop through all of the games and set the correct information
RemoveRomsFromChild(logger); RemoveRomsFromChild();
// Now that we have looped through the cloneof tags, we loop through the romof tags // Now that we have looped through the cloneof tags, we loop through the romof tags
RemoveBiosRomsFromChild(logger); RemoveBiosRomsFromChild();
// Finally, remove the romof and cloneof tags so it's not picked up by the manager // Finally, remove the romof and cloneof tags so it's not picked up by the manager
RemoveTagsFromChild(logger); RemoveTagsFromChild();
} }
#endregion #endregion
@@ -258,8 +248,7 @@ namespace SabreTools.Helper.Dats
/// <summary> /// <summary>
/// Use romof tags to add roms to the children /// Use romof tags to add roms to the children
/// </summary> /// </summary>
/// <param name="logger">Logger object for file and console output</param> private void AddRomsFromBios()
private void AddRomsFromBios(Logger logger)
{ {
List<string> games = Keys.ToList(); List<string> games = Keys.ToList();
foreach (string game in games) foreach (string game in games)
@@ -359,8 +348,7 @@ namespace SabreTools.Helper.Dats
/// <summary> /// <summary>
/// Use device_ref tags to add roms to the children /// Use device_ref tags to add roms to the children
/// </summary> /// </summary>
/// <param name="logger">Logger object for file and console output</param> private void AddRomsFromDevices()
private void AddRomsFromDevices(Logger logger)
{ {
List<string> games = Keys.ToList(); List<string> games = Keys.ToList();
foreach (string game in games) foreach (string game in games)
@@ -452,8 +440,7 @@ namespace SabreTools.Helper.Dats
/// <summary> /// <summary>
/// Use cloneof tags to add roms to the children, setting the new romof tag in the process /// Use cloneof tags to add roms to the children, setting the new romof tag in the process
/// </summary> /// </summary>
/// <param name="logger">Logger object for file and console output</param> private void AddRomsFromParent()
private void AddRomsFromParent(Logger logger)
{ {
List<string> games = Keys.ToList(); List<string> games = Keys.ToList();
foreach (string game in games) foreach (string game in games)
@@ -561,8 +548,7 @@ namespace SabreTools.Helper.Dats
/// <summary> /// <summary>
/// Use cloneof tags to add roms to the parents, removing the child sets in the process /// Use cloneof tags to add roms to the parents, removing the child sets in the process
/// </summary> /// </summary>
/// <param name="logger"></param> private void AddRomsToParent()
private void AddRomsToParent(Logger logger)
{ {
List<string> games = Keys.ToList(); List<string> games = Keys.ToList();
foreach (string game in games) foreach (string game in games)
@@ -610,8 +596,7 @@ namespace SabreTools.Helper.Dats
/// <summary> /// <summary>
/// Remove all BIOS and device sets /// Remove all BIOS and device sets
/// </summary> /// </summary>
/// <param name="logger"></param> private void RemoveBiosAndDeviceSets()
private void RemoveBiosAndDeviceSets(Logger logger)
{ {
List<string> games = Keys.ToList(); List<string> games = Keys.ToList();
foreach (string game in games) foreach (string game in games)
@@ -628,8 +613,7 @@ namespace SabreTools.Helper.Dats
/// <summary> /// <summary>
/// Use romof tags to remove roms from the children /// Use romof tags to remove roms from the children
/// </summary> /// </summary>
/// <param name="logger">Logger object for file and console output</param> private void RemoveBiosRomsFromChild()
private void RemoveBiosRomsFromChild(Logger logger)
{ {
// Loop through the romof tags // Loop through the romof tags
List<string> games = Keys.ToList(); List<string> games = Keys.ToList();
@@ -724,8 +708,7 @@ namespace SabreTools.Helper.Dats
/// <summary> /// <summary>
/// Use cloneof tags to remove roms from the children /// Use cloneof tags to remove roms from the children
/// </summary> /// </summary>
/// <param name="logger">Logger object for file and console output</param> private void RemoveRomsFromChild()
private void RemoveRomsFromChild(Logger logger)
{ {
List<string> games = Keys.ToList(); List<string> games = Keys.ToList();
foreach (string game in games) foreach (string game in games)
@@ -827,8 +810,7 @@ namespace SabreTools.Helper.Dats
/// <summary> /// <summary>
/// Remove all romof and cloneof tags from all games /// Remove all romof and cloneof tags from all games
/// </summary> /// </summary>
/// <param name="logger"></param> private void RemoveTagsFromChild()
private void RemoveTagsFromChild(Logger logger)
{ {
List<string> games = Keys.ToList(); List<string> games = Keys.ToList();
foreach (string game in games) foreach (string game in games)
@@ -856,12 +838,11 @@ namespace SabreTools.Helper.Dats
/// <param name="list">Input unsorted list</param> /// <param name="list">Input unsorted list</param>
/// <param name="mergeroms">True if roms should be deduped, false otherwise</param> /// <param name="mergeroms">True if roms should be deduped, false otherwise</param>
/// <param name="norename">True if games should only be compared on game and file name, false if system and source are counted</param> /// <param name="norename">True if games should only be compared on game and file name, false if system and source are counted</param>
/// <param name="logger">Logger object for file and console output</param>
/// <param name="output">True if the number of hashes counted is to be output (default), false otherwise</param> /// <param name="output">True if the number of hashes counted is to be output (default), false otherwise</param>
/// <returns>SortedDictionary bucketed by game name</returns> /// <returns>SortedDictionary bucketed by game name</returns>
public static SortedDictionary<string, List<DatItem>> BucketListByGame(List<DatItem> list, bool mergeroms, bool norename, Logger logger, bool output = true) public static SortedDictionary<string, List<DatItem>> BucketListByGame(List<DatItem> list, bool mergeroms, bool norename, bool output = true)
{ {
logger.User("Organizing " + (mergeroms ? "and merging " : "") + "roms for output"); Globals.Logger.User("Organizing " + (mergeroms ? "and merging " : "") + "roms for output");
SortedDictionary<string, List<DatItem>> sortable = new SortedDictionary<string, List<DatItem>>(); SortedDictionary<string, List<DatItem>> sortable = new SortedDictionary<string, List<DatItem>>();
long count = 0; long count = 0;
@@ -875,7 +856,7 @@ namespace SabreTools.Helper.Dats
// If we're merging the roms, do so // If we're merging the roms, do so
if (mergeroms) if (mergeroms)
{ {
list = DatItem.Merge(list, logger); list = DatItem.Merge(list);
} }
// Now add each of the roms to their respective games // Now add each of the roms to their respective games
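The changes above drop the Logger parameter that used to be threaded through every helper and write through a shared Globals.Logger instead. The holder type itself is not shown in this hunk; below is a minimal sketch of a Globals class consistent with these call sites. Only the member names Logger and MaxDegreeOfParallelism are taken from the diff; the namespace, defaults, and everything else are assumptions.

using System;

namespace SabreTools.Helper
{
    /// <summary>
    /// Static holder for values shared across the whole application so they
    /// no longer have to be passed through every method signature.
    /// </summary>
    public static class Globals
    {
        /// <summary>
        /// Shared logger used by all helpers; assumed to be assigned once at startup.
        /// </summary>
        public static Logger Logger { get; set; }

        /// <summary>
        /// Default cap for Parallel.For / Parallel.ForEach loops.
        /// </summary>
        public static int MaxDegreeOfParallelism { get; set; } = Environment.ProcessorCount;
    }
}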
View File
@@ -38,20 +38,17 @@ namespace SabreTools.Helper.Dats
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param> /// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
/// <param name="single">True if all games should be replaced by '!', false otherwise</param> /// <param name="single">True if all games should be replaced by '!', false otherwise</param>
/// <param name="root">String representing root directory to compare against for length calculation</param> /// <param name="root">String representing root directory to compare against for length calculation</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logging object for console and file output</param>
/// <remarks> /// <remarks>
/// TODO: Folder-wise output for update (similar to level-split) /// TODO: Folder-wise output for update (similar to level-split)
/// </remarks> /// </remarks>
public void DetermineUpdateType(List<string> inputPaths, string outDir, bool merge, DiffMode diff, bool inplace, bool skip, public void DetermineUpdateType(List<string> inputPaths, string outDir, bool merge, DiffMode diff, bool inplace, bool skip,
bool bare, bool clean, bool descAsName, Filter filter, SplitType splitType, bool trim, bool single, string root, bool bare, bool clean, bool descAsName, Filter filter, SplitType splitType, bool trim, bool single, string root)
int maxDegreeOfParallelism, Logger logger)
{ {
// If we're in merging or diffing mode, use the full list of inputs // If we're in merging or diffing mode, use the full list of inputs
if (merge || diff != 0) if (merge || diff != 0)
{ {
// Make sure there are no folders in inputs // Make sure there are no folders in inputs
List<string> newInputFileNames = FileTools.GetOnlyFilesFromInputs(inputPaths, maxDegreeOfParallelism, logger, appendparent: true); List<string> newInputFileNames = FileTools.GetOnlyFilesFromInputs(inputPaths, appendparent: true);
// If we're in inverse cascade, reverse the list // If we're in inverse cascade, reverse the list
if ((diff & DiffMode.ReverseCascade) != 0) if ((diff & DiffMode.ReverseCascade) != 0)
@@ -61,28 +58,28 @@ namespace SabreTools.Helper.Dats
// Create a dictionary of all ROMs from the input DATs // Create a dictionary of all ROMs from the input DATs
List<DatFile> datHeaders = PopulateUserData(newInputFileNames, inplace, clean, descAsName, List<DatFile> datHeaders = PopulateUserData(newInputFileNames, inplace, clean, descAsName,
outDir, filter, splitType, trim, single, root, maxDegreeOfParallelism, logger); outDir, filter, splitType, trim, single, root);
// Modify the Dictionary if necessary and output the results // Modify the Dictionary if necessary and output the results
if (diff != 0 && diff < DiffMode.Cascade) if (diff != 0 && diff < DiffMode.Cascade)
{ {
DiffNoCascade(diff, outDir, newInputFileNames, maxDegreeOfParallelism, logger); DiffNoCascade(diff, outDir, newInputFileNames);
} }
// If we're in cascade and diff, output only cascaded diffs // If we're in cascade and diff, output only cascaded diffs
else if (diff != 0 && diff >= DiffMode.Cascade) else if (diff != 0 && diff >= DiffMode.Cascade)
{ {
DiffCascade(outDir, inplace, newInputFileNames, datHeaders, skip, maxDegreeOfParallelism, logger); DiffCascade(outDir, inplace, newInputFileNames, datHeaders, skip);
} }
// Output all entries with user-defined merge // Output all entries with user-defined merge
else else
{ {
MergeNoDiff(outDir, newInputFileNames, datHeaders, maxDegreeOfParallelism, logger); MergeNoDiff(outDir, newInputFileNames, datHeaders);
} }
} }
// Otherwise, loop through all of the inputs individually // Otherwise, loop through all of the inputs individually
else else
{ {
Update(inputPaths, outDir, clean, descAsName, filter, splitType, trim, single, root, maxDegreeOfParallelism, logger); Update(inputPaths, outDir, clean, descAsName, filter, splitType, trim, single, root);
} }
return; return;
} }
@@ -95,25 +92,23 @@ namespace SabreTools.Helper.Dats
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param> /// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
/// <param name="single">True if all games should be replaced by '!', false otherwise</param> /// <param name="single">True if all games should be replaced by '!', false otherwise</param>
/// <param name="root">String representing root directory to compare against for length calculation</param> /// <param name="root">String representing root directory to compare against for length calculation</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logging object for console and file output</param>
/// <returns>List of DatData objects representing headers</returns> /// <returns>List of DatData objects representing headers</returns>
private List<DatFile> PopulateUserData(List<string> inputs, bool inplace, bool clean, bool descAsName, string outDir, private List<DatFile> PopulateUserData(List<string> inputs, bool inplace, bool clean, bool descAsName, string outDir,
Filter filter, SplitType splitType, bool trim, bool single, string root, int maxDegreeOfParallelism, Logger logger) Filter filter, SplitType splitType, bool trim, bool single, string root)
{ {
DatFile[] datHeaders = new DatFile[inputs.Count]; DatFile[] datHeaders = new DatFile[inputs.Count];
DateTime start = DateTime.Now; DateTime start = DateTime.Now;
logger.User("Processing individual DATs"); Globals.Logger.User("Processing individual DATs");
// TODO: Can parsing headers be separated from parsing content? // TODO: Can parsing headers be separated from parsing content?
// TODO: Can all DATs be parsed into the same structure in one loop? // TODO: Can all DATs be parsed into the same structure in one loop?
Parallel.For(0, Parallel.For(0,
inputs.Count, inputs.Count,
new ParallelOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism }, new ParallelOptions { MaxDegreeOfParallelism = Globals.MaxDegreeOfParallelism },
i => i =>
{ {
string input = inputs[i]; string input = inputs[i];
logger.User("Adding DAT: " + input.Split('¬')[0]); Globals.Logger.User("Adding DAT: " + input.Split('¬')[0]);
datHeaders[i] = new DatFile datHeaders[i] = new DatFile
{ {
DatFormat = (DatFormat != 0 ? DatFormat : 0), DatFormat = (DatFormat != 0 ? DatFormat : 0),
@@ -121,12 +116,12 @@ namespace SabreTools.Helper.Dats
}; };
datHeaders[i].Parse(input.Split('¬')[0], i, 0, filter, splitType, trim, single, datHeaders[i].Parse(input.Split('¬')[0], i, 0, filter, splitType, trim, single,
root, maxDegreeOfParallelism, logger, true, clean, descAsName); root, true, clean, descAsName);
}); });
logger.User("Processing complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff")); Globals.Logger.User("Processing complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
logger.User("Populating internal DAT"); Globals.Logger.User("Populating internal DAT");
for (int i = 0; i < inputs.Count; i++) for (int i = 0; i < inputs.Count; i++)
{ {
List<string> keys = datHeaders[i].Keys.ToList(); List<string> keys = datHeaders[i].Keys.ToList();
@@ -138,7 +133,7 @@ namespace SabreTools.Helper.Dats
datHeaders[i].Delete(); datHeaders[i].Delete();
} }
logger.User("Processing and populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff")); Globals.Logger.User("Processing and populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
return datHeaders.ToList(); return datHeaders.ToList();
} }
@@ -149,12 +144,10 @@ namespace SabreTools.Helper.Dats
/// <param name="diff">Non-zero flag for diffing mode, zero otherwise</param> /// <param name="diff">Non-zero flag for diffing mode, zero otherwise</param>
/// <param name="outDir">Output directory to write the DATs to</param> /// <param name="outDir">Output directory to write the DATs to</param>
/// <param name="inputs">List of inputs to write out from</param> /// <param name="inputs">List of inputs to write out from</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param> public void DiffNoCascade(DiffMode diff, string outDir, List<string> inputs)
/// <param name="logger">Logging object for console and file output</param>
public void DiffNoCascade(DiffMode diff, string outDir, List<string> inputs, int maxDegreeOfParallelism, Logger logger)
{ {
DateTime start = DateTime.Now; DateTime start = DateTime.Now;
logger.User("Initializing all output DATs"); Globals.Logger.User("Initializing all output DATs");
// Default vars for use // Default vars for use
string post = ""; string post = "";
@@ -218,15 +211,15 @@ namespace SabreTools.Helper.Dats
outDats = outDatsArray.ToList(); outDats = outDatsArray.ToList();
} }
logger.User("Initializing complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff")); Globals.Logger.User("Initializing complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
// Now, loop through the dictionary and populate the correct DATs // Now, loop through the dictionary and populate the correct DATs
start = DateTime.Now; start = DateTime.Now;
logger.User("Populating all output DATs"); Globals.Logger.User("Populating all output DATs");
List<string> keys = Keys.ToList(); List<string> keys = Keys.ToList();
foreach (string key in keys) foreach (string key in keys)
{ {
List<DatItem> roms = DatItem.Merge(this[key], logger); List<DatItem> roms = DatItem.Merge(this[key]);
if (roms != null && roms.Count > 0) if (roms != null && roms.Count > 0)
{ {
@@ -268,22 +261,22 @@ namespace SabreTools.Helper.Dats
} }
} }
} }
logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff")); Globals.Logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
// Finally, loop through and output each of the DATs // Finally, loop through and output each of the DATs
start = DateTime.Now; start = DateTime.Now;
logger.User("Outputting all created DATs"); Globals.Logger.User("Outputting all created DATs");
// Output the difflist (a-b)+(b-a) diff // Output the difflist (a-b)+(b-a) diff
if ((diff & DiffMode.NoDupes) != 0) if ((diff & DiffMode.NoDupes) != 0)
{ {
outerDiffData.WriteToFile(outDir, maxDegreeOfParallelism, logger); outerDiffData.WriteToFile(outDir);
} }
// Output the (ab) diff // Output the (ab) diff
if ((diff & DiffMode.Dupes) != 0) if ((diff & DiffMode.Dupes) != 0)
{ {
dupeData.WriteToFile(outDir, maxDegreeOfParallelism, logger); dupeData.WriteToFile(outDir);
} }
// Output the individual (a-b) DATs // Output the individual (a-b) DATs
@@ -298,10 +291,10 @@ namespace SabreTools.Helper.Dats
: (Path.GetDirectoryName(split[0]).Remove(0, split[1].Length))); : (Path.GetDirectoryName(split[0]).Remove(0, split[1].Length)));
// Try to output the file // Try to output the file
outDats[j].WriteToFile(path, maxDegreeOfParallelism, logger); outDats[j].WriteToFile(path);
} }
} }
logger.User("Outputting complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff")); Globals.Logger.User("Outputting complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
} }
/// <summary> /// <summary>
@@ -312,9 +305,7 @@ namespace SabreTools.Helper.Dats
/// <param name="inputs">List of inputs to write out from</param> /// <param name="inputs">List of inputs to write out from</param>
/// <param name="datHeaders">Dat headers used optionally</param> /// <param name="datHeaders">Dat headers used optionally</param>
/// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param> /// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param> public void DiffCascade(string outDir, bool inplace, List<string> inputs, List<DatFile> datHeaders, bool skip)
/// <param name="logger">Logging object for console and file output</param>
public void DiffCascade(string outDir, bool inplace, List<string> inputs, List<DatFile> datHeaders, bool skip, int maxDegreeOfParallelism, Logger logger)
{ {
string post = ""; string post = "";
@@ -323,7 +314,7 @@ namespace SabreTools.Helper.Dats
// Loop through each of the inputs and get or create a new DatData object // Loop through each of the inputs and get or create a new DatData object
DateTime start = DateTime.Now; DateTime start = DateTime.Now;
logger.User("Initializing all output DATs"); Globals.Logger.User("Initializing all output DATs");
DatFile[] outDatsArray = new DatFile[inputs.Count]; DatFile[] outDatsArray = new DatFile[inputs.Count];
@@ -350,16 +341,16 @@ namespace SabreTools.Helper.Dats
}); });
outDats = outDatsArray.ToList(); outDats = outDatsArray.ToList();
logger.User("Initializing complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff")); Globals.Logger.User("Initializing complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
// Now, loop through the dictionary and populate the correct DATs // Now, loop through the dictionary and populate the correct DATs
start = DateTime.Now; start = DateTime.Now;
logger.User("Populating all output DATs"); Globals.Logger.User("Populating all output DATs");
List<string> keys = Keys.ToList(); List<string> keys = Keys.ToList();
foreach (string key in keys) foreach (string key in keys)
{ {
List<DatItem> roms = DatItem.Merge(this[key], logger); List<DatItem> roms = DatItem.Merge(this[key]);
if (roms != null && roms.Count > 0) if (roms != null && roms.Count > 0)
{ {
@@ -368,7 +359,7 @@ namespace SabreTools.Helper.Dats
// There are odd cases where there are items with System ID < 0. Skip them for now // There are odd cases where there are items with System ID < 0. Skip them for now
if (rom.SystemID < 0) if (rom.SystemID < 0)
{ {
logger.Warning("Item found with a <0 SystemID: " + rom.Name); Globals.Logger.Warning("Item found with a <0 SystemID: " + rom.Name);
continue; continue;
} }
@@ -376,11 +367,11 @@ namespace SabreTools.Helper.Dats
} }
} }
} }
logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff")); Globals.Logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
// Finally, loop through and output each of the DATs // Finally, loop through and output each of the DATs
start = DateTime.Now; start = DateTime.Now;
logger.User("Outputting all created DATs"); Globals.Logger.User("Outputting all created DATs");
for (int j = (skip ? 1 : 0); j < inputs.Count; j++) for (int j = (skip ? 1 : 0); j < inputs.Count; j++)
{ {
// If we have an output directory set, replace the path // If we have an output directory set, replace the path
@@ -398,9 +389,9 @@ namespace SabreTools.Helper.Dats
} }
// Try to output the file // Try to output the file
outDats[j].WriteToFile(path, maxDegreeOfParallelism, logger); outDats[j].WriteToFile(path);
} }
logger.User("Outputting complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff")); Globals.Logger.User("Outputting complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
} }
/// <summary> /// <summary>
@@ -409,9 +400,7 @@ namespace SabreTools.Helper.Dats
/// <param name="outDir">Output directory to write the DATs to</param> /// <param name="outDir">Output directory to write the DATs to</param>
/// <param name="inputs">List of inputs to write out from</param> /// <param name="inputs">List of inputs to write out from</param>
/// <param name="datHeaders">Dat headers used optionally</param> /// <param name="datHeaders">Dat headers used optionally</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param> public void MergeNoDiff(string outDir, List<string> inputs, List<DatFile> datHeaders)
/// <param name="logger">Logging object for console and file output</param>
public void MergeNoDiff(string outDir, List<string> inputs, List<DatFile> datHeaders, int maxDegreeOfParallelism, Logger logger)
{ {
// If we're in SuperDAT mode, prefix all games with their respective DATs // If we're in SuperDAT mode, prefix all games with their respective DATs
if (Type == "SuperDAT") if (Type == "SuperDAT")
@@ -438,17 +427,16 @@ namespace SabreTools.Helper.Dats
} }
// Try to output the file // Try to output the file
WriteToFile(outDir, maxDegreeOfParallelism, logger); WriteToFile(outDir);
} }
/// <summary> /// <summary>
/// Strip the given hash types from the DAT /// Strip the given hash types from the DAT
/// </summary> /// </summary>
/// <param name="logger">Logging object for console and file output</param> private void StripHashesFromItems()
private void StripHashesFromItems(Logger logger)
{ {
// Output the logging statement // Output the logging statement
logger.User("Stripping requested hashes"); Globals.Logger.User("Stripping requested hashes");
// Now process all of the roms // Now process all of the roms
List<string> keys = Keys.ToList(); List<string> keys = Keys.ToList();
@@ -532,13 +520,11 @@ namespace SabreTools.Helper.Dats
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param> /// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
/// <param name="single">True if all games should be replaced by '!', false otherwise</param> /// <param name="single">True if all games should be replaced by '!', false otherwise</param>
/// <param name="root">String representing root directory to compare against for length calculation</param> /// <param name="root">String representing root directory to compare against for length calculation</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logging object for console and file output</param>
public void Update(List<string> inputFileNames, string outDir, bool clean, bool descAsName, Filter filter, public void Update(List<string> inputFileNames, string outDir, bool clean, bool descAsName, Filter filter,
SplitType splitType, bool trim, bool single, string root, int maxDegreeOfParallelism, Logger logger) SplitType splitType, bool trim, bool single, string root)
{ {
Parallel.ForEach(inputFileNames, Parallel.ForEach(inputFileNames,
new ParallelOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism }, new ParallelOptions { MaxDegreeOfParallelism = Globals.MaxDegreeOfParallelism },
inputFileName => inputFileName =>
{ {
// Clean the input string // Clean the input string
@@ -550,36 +536,36 @@ namespace SabreTools.Helper.Dats
if (File.Exists(inputFileName)) if (File.Exists(inputFileName))
{ {
DatFile innerDatdata = new DatFile(this); DatFile innerDatdata = new DatFile(this);
logger.User("Processing \"" + Path.GetFileName(inputFileName) + "\""); Globals.Logger.User("Processing \"" + Path.GetFileName(inputFileName) + "\"");
innerDatdata.Parse(inputFileName, 0, 0, filter, splitType, trim, single, innerDatdata.Parse(inputFileName, 0, 0, filter, splitType, trim, single,
root, maxDegreeOfParallelism, logger, true, clean, descAsName, root, true, clean, descAsName,
keepext: ((innerDatdata.DatFormat & DatFormat.TSV) != 0 || (innerDatdata.DatFormat & DatFormat.CSV) != 0)); keepext: ((innerDatdata.DatFormat & DatFormat.TSV) != 0 || (innerDatdata.DatFormat & DatFormat.CSV) != 0));
// Try to output the file // Try to output the file
innerDatdata.WriteToFile((outDir == "" ? Path.GetDirectoryName(inputFileName) : outDir), maxDegreeOfParallelism, logger, overwrite: (outDir != "")); innerDatdata.WriteToFile((outDir == "" ? Path.GetDirectoryName(inputFileName) : outDir), overwrite: (outDir != ""));
} }
else if (Directory.Exists(inputFileName)) else if (Directory.Exists(inputFileName))
{ {
inputFileName = Path.GetFullPath(inputFileName) + Path.DirectorySeparatorChar; inputFileName = Path.GetFullPath(inputFileName) + Path.DirectorySeparatorChar;
Parallel.ForEach(Directory.EnumerateFiles(inputFileName, "*", SearchOption.AllDirectories), Parallel.ForEach(Directory.EnumerateFiles(inputFileName, "*", SearchOption.AllDirectories),
new ParallelOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism }, new ParallelOptions { MaxDegreeOfParallelism = Globals.MaxDegreeOfParallelism },
file => file =>
{ {
logger.User("Processing \"" + Path.GetFullPath(file).Remove(0, inputFileName.Length) + "\""); Globals.Logger.User("Processing \"" + Path.GetFullPath(file).Remove(0, inputFileName.Length) + "\"");
DatFile innerDatdata = new DatFile(this); DatFile innerDatdata = new DatFile(this);
innerDatdata.Parse(file, 0, 0, filter, splitType, innerDatdata.Parse(file, 0, 0, filter, splitType,
trim, single, root, maxDegreeOfParallelism, logger, true, clean, descAsName, trim, single, root, true, clean, descAsName,
keepext: ((innerDatdata.DatFormat & DatFormat.TSV) != 0 || (innerDatdata.DatFormat & DatFormat.CSV) != 0)); keepext: ((innerDatdata.DatFormat & DatFormat.TSV) != 0 || (innerDatdata.DatFormat & DatFormat.CSV) != 0));
// Try to output the file // Try to output the file
innerDatdata.WriteToFile((outDir == "" ? Path.GetDirectoryName(file) : outDir + Path.GetDirectoryName(file).Remove(0, inputFileName.Length - 1)), innerDatdata.WriteToFile((outDir == "" ? Path.GetDirectoryName(file) : outDir + Path.GetDirectoryName(file).Remove(0, inputFileName.Length - 1)),
maxDegreeOfParallelism, logger, overwrite: (outDir != "")); overwrite: (outDir != ""));
}); });
} }
else else
{ {
logger.Error("I'm sorry but " + inputFileName + " doesn't exist!"); Globals.Logger.Error("I'm sorry but " + inputFileName + " doesn't exist!");
return; return;
} }
}); });
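Every Parallel.For and Parallel.ForEach in this file now reads its degree of parallelism from Globals.MaxDegreeOfParallelism rather than from a maxDegreeOfParallelism parameter. The following self-contained sketch shows the same loop shape, with a local property standing in for the global and Console standing in for the project's Logger (which these loops assume can be called concurrently).

using System;
using System.Threading.Tasks;

class ParallelLoopSketch
{
    // Stand-in for Globals.MaxDegreeOfParallelism
    static int MaxDegreeOfParallelism { get; } = Environment.ProcessorCount;

    static void Main()
    {
        string[] inputs = { "a.dat", "b.dat", "c.dat" };

        // Same pattern as the Update() loop above: the options object is
        // rebuilt at each call site from the shared value.
        Parallel.ForEach(inputs,
            new ParallelOptions { MaxDegreeOfParallelism = MaxDegreeOfParallelism },
            input =>
            {
                // Globals.Logger.User(...) in the real code
                Console.WriteLine("Processing \"" + input + "\"");
            });
    }
}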
View File
@@ -36,11 +36,8 @@ namespace SabreTools.Helper.Dats
/// <param name="outDir">Output directory to </param> /// <param name="outDir">Output directory to </param>
/// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param> /// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param> /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logger object for console and file output</param>
public bool PopulateFromDir(string basePath, Hash omitFromScan, bool bare, bool archivesAsFiles, public bool PopulateFromDir(string basePath, Hash omitFromScan, bool bare, bool archivesAsFiles,
bool enableGzip, bool addBlanks, bool addDate, string tempDir, bool copyFiles, string headerToCheckAgainst, bool enableGzip, bool addBlanks, bool addDate, string tempDir, bool copyFiles, string headerToCheckAgainst)
int maxDegreeOfParallelism, Logger logger)
{ {
// If the description is defined but not the name, set the name from the description // If the description is defined but not the name, set the name from the description
if (String.IsNullOrEmpty(Name) && !String.IsNullOrEmpty(Description)) if (String.IsNullOrEmpty(Name) && !String.IsNullOrEmpty(Description))
@@ -64,31 +61,31 @@ namespace SabreTools.Helper.Dats
// Process the input // Process the input
if (Directory.Exists(basePath)) if (Directory.Exists(basePath))
{ {
logger.Verbose("Folder found: " + basePath); Globals.Logger.Verbose("Folder found: " + basePath);
// Process the files in the main folder // Process the files in the main folder
List<string> files = Directory.EnumerateFiles(basePath, "*", SearchOption.TopDirectoryOnly).ToList(); List<string> files = Directory.EnumerateFiles(basePath, "*", SearchOption.TopDirectoryOnly).ToList();
Parallel.ForEach(files, Parallel.ForEach(files,
new ParallelOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism }, new ParallelOptions { MaxDegreeOfParallelism = Globals.MaxDegreeOfParallelism },
item => item =>
{ {
PopulateFromDirCheckFile(item, basePath, omitFromScan, bare, archivesAsFiles, enableGzip, addBlanks, addDate, PopulateFromDirCheckFile(item, basePath, omitFromScan, bare, archivesAsFiles, enableGzip, addBlanks, addDate,
tempDir, copyFiles, headerToCheckAgainst, maxDegreeOfParallelism, logger); tempDir, copyFiles, headerToCheckAgainst);
}); });
// Find all top-level subfolders // Find all top-level subfolders
files = Directory.EnumerateDirectories(basePath, "*", SearchOption.TopDirectoryOnly).ToList(); files = Directory.EnumerateDirectories(basePath, "*", SearchOption.TopDirectoryOnly).ToList();
Parallel.ForEach(files, Parallel.ForEach(files,
new ParallelOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism }, new ParallelOptions { MaxDegreeOfParallelism = Globals.MaxDegreeOfParallelism },
item => item =>
{ {
List<string> subfiles = Directory.EnumerateFiles(item, "*", SearchOption.AllDirectories).ToList(); List<string> subfiles = Directory.EnumerateFiles(item, "*", SearchOption.AllDirectories).ToList();
Parallel.ForEach(subfiles, Parallel.ForEach(subfiles,
new ParallelOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism }, new ParallelOptions { MaxDegreeOfParallelism = Globals.MaxDegreeOfParallelism },
subitem => subitem =>
{ {
PopulateFromDirCheckFile(subitem, basePath, omitFromScan, bare, archivesAsFiles, enableGzip, addBlanks, addDate, PopulateFromDirCheckFile(subitem, basePath, omitFromScan, bare, archivesAsFiles, enableGzip, addBlanks, addDate,
tempDir, copyFiles, headerToCheckAgainst, maxDegreeOfParallelism, logger); tempDir, copyFiles, headerToCheckAgainst);
}); });
}); });
@@ -97,7 +94,7 @@ namespace SabreTools.Helper.Dats
{ {
List<string> empties = Directory.EnumerateDirectories(basePath, "*", SearchOption.AllDirectories).ToList(); List<string> empties = Directory.EnumerateDirectories(basePath, "*", SearchOption.AllDirectories).ToList();
Parallel.ForEach(empties, Parallel.ForEach(empties,
new ParallelOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism }, new ParallelOptions { MaxDegreeOfParallelism = Globals.MaxDegreeOfParallelism },
dir => dir =>
{ {
if (Directory.EnumerateFiles(dir, "*", SearchOption.TopDirectoryOnly).Count() == 0) if (Directory.EnumerateFiles(dir, "*", SearchOption.TopDirectoryOnly).Count() == 0)
@@ -141,7 +138,7 @@ namespace SabreTools.Helper.Dats
romname = romname.Substring(0, romname.Length - 1); romname = romname.Substring(0, romname.Length - 1);
} }
logger.Verbose("Adding blank empty folder: " + gamename); Globals.Logger.Verbose("Adding blank empty folder: " + gamename);
this["null"].Add(new Rom(romname, gamename)); this["null"].Add(new Rom(romname, gamename));
} }
}); });
@@ -150,11 +147,11 @@ namespace SabreTools.Helper.Dats
else if (File.Exists(basePath)) else if (File.Exists(basePath))
{ {
PopulateFromDirCheckFile(basePath, Path.GetDirectoryName(Path.GetDirectoryName(basePath)), omitFromScan, bare, archivesAsFiles, enableGzip, addBlanks, addDate, PopulateFromDirCheckFile(basePath, Path.GetDirectoryName(Path.GetDirectoryName(basePath)), omitFromScan, bare, archivesAsFiles, enableGzip, addBlanks, addDate,
tempDir, copyFiles, headerToCheckAgainst, maxDegreeOfParallelism, logger); tempDir, copyFiles, headerToCheckAgainst);
} }
// Now that we're done, delete the temp folder (if it's not the default) // Now that we're done, delete the temp folder (if it's not the default)
logger.User("Cleaning temp folder"); Globals.Logger.User("Cleaning temp folder");
try try
{ {
if (tempDir != Path.GetTempPath()) if (tempDir != Path.GetTempPath())
@@ -184,11 +181,8 @@ namespace SabreTools.Helper.Dats
/// <param name="tempDir">Name of the directory to create a temp folder in (blank is current directory)</param> /// <param name="tempDir">Name of the directory to create a temp folder in (blank is current directory)</param>
/// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param> /// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param> /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logger object for console and file output</param>
private void PopulateFromDirCheckFile(string item, string basePath, Hash omitFromScan, bool bare, bool archivesAsFiles, private void PopulateFromDirCheckFile(string item, string basePath, Hash omitFromScan, bool bare, bool archivesAsFiles,
bool enableGzip, bool addBlanks, bool addDate, string tempDir, bool copyFiles, string headerToCheckAgainst, bool enableGzip, bool addBlanks, bool addDate, string tempDir, bool copyFiles, string headerToCheckAgainst)
int maxDegreeOfParallelism, Logger logger)
{ {
// Define the temporary directory // Define the temporary directory
string tempSubDir = Path.GetFullPath(Path.Combine(tempDir, Path.GetRandomFileName())) + Path.DirectorySeparatorChar; string tempSubDir = Path.GetFullPath(Path.Combine(tempDir, Path.GetRandomFileName())) + Path.DirectorySeparatorChar;
@@ -196,18 +190,18 @@ namespace SabreTools.Helper.Dats
// Special case for if we are in Romba mode (all names are supposed to be SHA-1 hashes) // Special case for if we are in Romba mode (all names are supposed to be SHA-1 hashes)
if (Romba) if (Romba)
{ {
Rom rom = ArchiveTools.GetTorrentGZFileInfo(item, logger); Rom rom = ArchiveTools.GetTorrentGZFileInfo(item);
// If the rom is valid, write it out // If the rom is valid, write it out
if (rom != null && rom.Name != null) if (rom != null && rom.Name != null)
{ {
// Add the list if it doesn't exist already // Add the list if it doesn't exist already
Add(rom.Size + "-" + rom.CRC, rom); Add(rom.Size + "-" + rom.CRC, rom);
logger.User("File added: " + Path.GetFileNameWithoutExtension(item) + Environment.NewLine); Globals.Logger.User("File added: " + Path.GetFileNameWithoutExtension(item) + Environment.NewLine);
} }
else else
{ {
logger.User("File not added: " + Path.GetFileNameWithoutExtension(item) + Environment.NewLine); Globals.Logger.User("File not added: " + Path.GetFileNameWithoutExtension(item) + Environment.NewLine);
return; return;
} }
@@ -228,26 +222,25 @@ namespace SabreTools.Helper.Dats
// If all deep hash skip flags are set, do a quickscan // If all deep hash skip flags are set, do a quickscan
if (omitFromScan == Hash.SecureHashes) if (omitFromScan == Hash.SecureHashes)
{ {
ArchiveType? type = ArchiveTools.GetCurrentArchiveType(newItem, logger); ArchiveType? type = ArchiveTools.GetCurrentArchiveType(newItem);
// If we have an archive, scan it // If we have an archive, scan it
if (type != null && !archivesAsFiles) if (type != null && !archivesAsFiles)
{ {
List<Rom> extracted = ArchiveTools.GetArchiveFileInfo(newItem, logger); List<Rom> extracted = ArchiveTools.GetArchiveFileInfo(newItem);
foreach (Rom rom in extracted) foreach (Rom rom in extracted)
{ {
PopulateFromDirProcessFileHelper(newItem, PopulateFromDirProcessFileHelper(newItem,
rom, rom,
basePath, basePath,
(Path.GetDirectoryName(Path.GetFullPath(item)) + Path.DirectorySeparatorChar).Remove(0, basePath.Length) + Path.GetFileNameWithoutExtension(item), (Path.GetDirectoryName(Path.GetFullPath(item)) + Path.DirectorySeparatorChar).Remove(0, basePath.Length) + Path.GetFileNameWithoutExtension(item));
logger);
} }
} }
// Otherwise, just get the info on the file itself // Otherwise, just get the info on the file itself
else if (File.Exists(newItem)) else if (File.Exists(newItem))
{ {
PopulateFromDirProcessFile(newItem, "", newBasePath, omitFromScan, addDate, headerToCheckAgainst, logger); PopulateFromDirProcessFile(newItem, "", newBasePath, omitFromScan, addDate, headerToCheckAgainst);
} }
} }
// Otherwise, attempt to extract the files to the temporary directory // Otherwise, attempt to extract the files to the temporary directory
@@ -258,15 +251,15 @@ namespace SabreTools.Helper.Dats
| (archivesAsFiles ? ArchiveScanLevel.RarExternal : ArchiveScanLevel.RarInternal) | (archivesAsFiles ? ArchiveScanLevel.RarExternal : ArchiveScanLevel.RarInternal)
| (archivesAsFiles ? ArchiveScanLevel.ZipExternal : ArchiveScanLevel.ZipInternal); | (archivesAsFiles ? ArchiveScanLevel.ZipExternal : ArchiveScanLevel.ZipInternal);
bool encounteredErrors = ArchiveTools.ExtractArchive(newItem, tempSubDir, asl, logger); bool encounteredErrors = ArchiveTools.ExtractArchive(newItem, tempSubDir, asl);
// If the file was an archive and was extracted successfully, check it // If the file was an archive and was extracted successfully, check it
if (!encounteredErrors) if (!encounteredErrors)
{ {
logger.Verbose(Path.GetFileName(item) + " treated like an archive"); Globals.Logger.Verbose(Path.GetFileName(item) + " treated like an archive");
List<string> extracted = Directory.EnumerateFiles(tempSubDir, "*", SearchOption.AllDirectories).ToList(); List<string> extracted = Directory.EnumerateFiles(tempSubDir, "*", SearchOption.AllDirectories).ToList();
Parallel.ForEach(extracted, Parallel.ForEach(extracted,
new ParallelOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism }, new ParallelOptions { MaxDegreeOfParallelism = Globals.MaxDegreeOfParallelism },
entry => entry =>
{ {
PopulateFromDirProcessFile(entry, PopulateFromDirProcessFile(entry,
@@ -277,14 +270,13 @@ namespace SabreTools.Helper.Dats
tempSubDir, tempSubDir,
omitFromScan, omitFromScan,
addDate, addDate,
headerToCheckAgainst, headerToCheckAgainst);
logger);
}); });
} }
// Otherwise, just get the info on the file itself // Otherwise, just get the info on the file itself
else if (File.Exists(newItem)) else if (File.Exists(newItem))
{ {
PopulateFromDirProcessFile(newItem, "", newBasePath, omitFromScan, addDate, headerToCheckAgainst, logger); PopulateFromDirProcessFile(newItem, "", newBasePath, omitFromScan, addDate, headerToCheckAgainst);
} }
} }
@@ -314,14 +306,13 @@ namespace SabreTools.Helper.Dats
/// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param> /// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
/// <param name="addDate">True if dates should be archived for all files, false otherwise</param> /// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param> /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="logger">Logger object for console and file output</param>
private void PopulateFromDirProcessFile(string item, string parent, string basePath, Hash omitFromScan, private void PopulateFromDirProcessFile(string item, string parent, string basePath, Hash omitFromScan,
bool addDate, string headerToCheckAgainst, Logger logger) bool addDate, string headerToCheckAgainst)
{ {
logger.Verbose(Path.GetFileName(item) + " treated like a file"); Globals.Logger.Verbose(Path.GetFileName(item) + " treated like a file");
Rom rom = FileTools.GetFileInfo(item, logger, omitFromScan: omitFromScan, date: addDate, header: headerToCheckAgainst); Rom rom = FileTools.GetFileInfo(item, omitFromScan: omitFromScan, date: addDate, header: headerToCheckAgainst);
PopulateFromDirProcessFileHelper(item, rom, basePath, parent, logger); PopulateFromDirProcessFileHelper(item, rom, basePath, parent);
} }
/// <summary> /// <summary>
@@ -331,7 +322,7 @@ namespace SabreTools.Helper.Dats
/// <param name="item">Rom data to be used to write to file</param> /// <param name="item">Rom data to be used to write to file</param>
/// <param name="basepath">Path the represents the parent directory</param> /// <param name="basepath">Path the represents the parent directory</param>
/// <param name="parent">Parent game to be used</param> /// <param name="parent">Parent game to be used</param>
private void PopulateFromDirProcessFileHelper(string item, DatItem datItem, string basepath, string parent, Logger logger) private void PopulateFromDirProcessFileHelper(string item, DatItem datItem, string basepath, string parent)
{ {
// If the datItem isn't a Rom or Disk, return // If the datItem isn't a Rom or Disk, return
if (datItem.Type != ItemType.Rom && datItem.Type != ItemType.Disk) if (datItem.Type != ItemType.Rom && datItem.Type != ItemType.Disk)
@@ -445,11 +436,11 @@ namespace SabreTools.Helper.Dats
// Add the file information to the DAT // Add the file information to the DAT
Add(key, datItem); Add(key, datItem);
logger.User("File added: " + romname + Environment.NewLine); Globals.Logger.User("File added: " + romname + Environment.NewLine);
} }
catch (IOException ex) catch (IOException ex)
{ {
logger.Error(ex.ToString()); Globals.Logger.Error(ex.ToString());
return; return;
} }
} }
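With the per-call Logger parameter gone, the worker lambdas in PopulateFromDir all write through the single Globals.Logger from multiple threads at once, so that logger has to tolerate concurrent calls. Whether the project's Logger already serializes its writes is not shown in this commit; the sketch below is one illustrative way a shared logger could guard its file output. The class name, constructor, and method are hypothetical, not the project's API.

using System.IO;

public class SynchronizedFileLogger
{
    private readonly object _lock = new object();
    private readonly StreamWriter _writer;

    public SynchronizedFileLogger(string path)
    {
        // AutoFlush so messages from worker threads appear promptly
        _writer = new StreamWriter(path, append: true) { AutoFlush = true };
    }

    public void User(string message)
    {
        // Serialize writes so parallel lambdas cannot interleave output
        lock (_lock)
        {
            _writer.WriteLine(message);
        }
    }
}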
View File
@@ -29,16 +29,15 @@ namespace SabreTools.Helper.Dats
/// <param name="sysid">System ID for the DAT</param> /// <param name="sysid">System ID for the DAT</param>
/// <param name="srcid">Source ID for the DAT</param> /// <param name="srcid">Source ID for the DAT</param>
/// <param name="datdata">The DatData object representing found roms to this point</param> /// <param name="datdata">The DatData object representing found roms to this point</param>
/// <param name="logger">Logger object for console and/or file output</param>
/// <param name="keep">True if full pathnames are to be kept, false otherwise (default)</param> /// <param name="keep">True if full pathnames are to be kept, false otherwise (default)</param>
/// <param name="clean">True if game names are sanitized, false otherwise (default)</param> /// <param name="clean">True if game names are sanitized, false otherwise (default)</param>
/// <param name="descAsName">True if descriptions should be used as names, false otherwise (default)</param> /// <param name="descAsName">True if descriptions should be used as names, false otherwise (default)</param>
/// <param name="keepext">True if original extension should be kept, false otherwise (default)</param> /// <param name="keepext">True if original extension should be kept, false otherwise (default)</param>
/// <param name="useTags">True if tags from the DAT should be used to merge the output, false otherwise (default)</param> /// <param name="useTags">True if tags from the DAT should be used to merge the output, false otherwise (default)</param>
public void Parse(string filename, int sysid, int srcid, Logger logger, public void Parse(string filename, int sysid, int srcid,
bool keep = false, bool clean = false, bool descAsName = false, bool keepext = false, bool useTags = false) bool keep = false, bool clean = false, bool descAsName = false, bool keepext = false, bool useTags = false)
{ {
Parse(filename, sysid, srcid, new Filter(), SplitType.None, false, false, "", 4, logger, Parse(filename, sysid, srcid, new Filter(), SplitType.None, false, false, "",
keep: keep, clean: clean, descAsName: descAsName, keepext: keepext, useTags: useTags); keep: keep, clean: clean, descAsName: descAsName, keepext: keepext, useTags: useTags);
} }
@@ -53,8 +52,6 @@ namespace SabreTools.Helper.Dats
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param> /// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
/// <param name="single">True if all games should be replaced by '!', false otherwise</param> /// <param name="single">True if all games should be replaced by '!', false otherwise</param>
/// <param name="root">String representing root directory to compare against for length calculation</param> /// <param name="root">String representing root directory to compare against for length calculation</param>
/// <param name="logger">Logger object for console and/or file output</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="keep">True if full pathnames are to be kept, false otherwise (default)</param> /// <param name="keep">True if full pathnames are to be kept, false otherwise (default)</param>
/// <param name="clean">True if game names are sanitized, false otherwise (default)</param> /// <param name="clean">True if game names are sanitized, false otherwise (default)</param>
/// <param name="descAsName">True if descriptions should be used as names, false otherwise (default)</param> /// <param name="descAsName">True if descriptions should be used as names, false otherwise (default)</param>
@@ -76,8 +73,6 @@ namespace SabreTools.Helper.Dats
string root, string root,
// Miscellaneous // Miscellaneous
int maxDegreeOfParallelism,
Logger logger,
bool keep = false, bool keep = false,
bool clean = false, bool clean = false,
bool descAsName = false, bool descAsName = false,
@@ -100,52 +95,52 @@ namespace SabreTools.Helper.Dats
FileName = (String.IsNullOrEmpty(FileName) ? (keepext ? Path.GetFileName(filename) : Path.GetFileNameWithoutExtension(filename)) : FileName); FileName = (String.IsNullOrEmpty(FileName) ? (keepext ? Path.GetFileName(filename) : Path.GetFileNameWithoutExtension(filename)) : FileName);
// If the output type isn't set already, get the internal output type // If the output type isn't set already, get the internal output type
DatFormat = (DatFormat == 0 ? FileTools.GetDatFormat(filename, logger) : DatFormat); DatFormat = (DatFormat == 0 ? FileTools.GetDatFormat(filename) : DatFormat);
// Now parse the correct type of DAT // Now parse the correct type of DAT
try try
{ {
switch (FileTools.GetDatFormat(filename, logger)) switch (FileTools.GetDatFormat(filename))
{ {
case DatFormat.AttractMode: case DatFormat.AttractMode:
ParseAttractMode(filename, sysid, srcid, filter, trim, single, root, logger, keep, clean, descAsName); ParseAttractMode(filename, sysid, srcid, filter, trim, single, root, keep, clean, descAsName);
break; break;
case DatFormat.ClrMamePro: case DatFormat.ClrMamePro:
case DatFormat.DOSCenter: case DatFormat.DOSCenter:
ParseCMP(filename, sysid, srcid, filter, trim, single, root, logger, keep, clean, descAsName); ParseCMP(filename, sysid, srcid, filter, trim, single, root, keep, clean, descAsName);
break; break;
case DatFormat.CSV: case DatFormat.CSV:
ParseCSVTSV(filename, sysid, srcid, ',', filter, trim, single, root, logger, keep, clean, descAsName); ParseCSVTSV(filename, sysid, srcid, ',', filter, trim, single, root, keep, clean, descAsName);
break; break;
case DatFormat.Logiqx: case DatFormat.Logiqx:
case DatFormat.OfflineList: case DatFormat.OfflineList:
case DatFormat.SabreDat: case DatFormat.SabreDat:
case DatFormat.SoftwareList: case DatFormat.SoftwareList:
ParseGenericXML(filename, sysid, srcid, filter, trim, single, root, logger, keep, clean, descAsName); ParseGenericXML(filename, sysid, srcid, filter, trim, single, root, keep, clean, descAsName);
break; break;
case DatFormat.RedumpMD5: case DatFormat.RedumpMD5:
ParseRedumpMD5(filename, sysid, srcid, filter, trim, single, root, logger, clean); ParseRedumpMD5(filename, sysid, srcid, filter, trim, single, root, clean);
break; break;
case DatFormat.RedumpSFV: case DatFormat.RedumpSFV:
ParseRedumpSFV(filename, sysid, srcid, filter, trim, single, root, logger, clean); ParseRedumpSFV(filename, sysid, srcid, filter, trim, single, root, clean);
break; break;
case DatFormat.RedumpSHA1: case DatFormat.RedumpSHA1:
ParseRedumpSHA1(filename, sysid, srcid, filter, trim, single, root, logger, clean); ParseRedumpSHA1(filename, sysid, srcid, filter, trim, single, root, clean);
break; break;
case DatFormat.RedumpSHA256: case DatFormat.RedumpSHA256:
ParseRedumpSHA256(filename, sysid, srcid, filter, trim, single, root, logger, clean); ParseRedumpSHA256(filename, sysid, srcid, filter, trim, single, root, clean);
break; break;
case DatFormat.RedumpSHA384: case DatFormat.RedumpSHA384:
ParseRedumpSHA384(filename, sysid, srcid, filter, trim, single, root, logger, clean); ParseRedumpSHA384(filename, sysid, srcid, filter, trim, single, root, clean);
break; break;
case DatFormat.RedumpSHA512: case DatFormat.RedumpSHA512:
ParseRedumpSHA512(filename, sysid, srcid, filter, trim, single, root, logger, clean); ParseRedumpSHA512(filename, sysid, srcid, filter, trim, single, root, clean);
break; break;
case DatFormat.RomCenter: case DatFormat.RomCenter:
ParseRC(filename, sysid, srcid, filter, trim, single, root, logger, clean, descAsName); ParseRC(filename, sysid, srcid, filter, trim, single, root, clean, descAsName);
break; break;
case DatFormat.TSV: case DatFormat.TSV:
ParseCSVTSV(filename, sysid, srcid, '\t', filter, trim, single, root, logger, keep, clean, descAsName); ParseCSVTSV(filename, sysid, srcid, '\t', filter, trim, single, root, keep, clean, descAsName);
break; break;
default: default:
return; return;
@@ -153,7 +148,7 @@ namespace SabreTools.Helper.Dats
} }
catch (Exception ex) catch (Exception ex)
{ {
logger.Error("Error with file '" + filename + "': " + ex.ToString()); Globals.Logger.Error("Error with file '" + filename + "': " + ex.ToString());
} }
// If we are using tags from the DAT, set the proper input for split type unless overridden // If we are using tags from the DAT, set the proper input for split type unless overridden
@@ -183,16 +178,16 @@ namespace SabreTools.Helper.Dats
switch (splitType) switch (splitType)
{ {
case SplitType.FullNonMerged: case SplitType.FullNonMerged:
CreateFullyNonMergedSets(false, maxDegreeOfParallelism, logger); CreateFullyNonMergedSets(false);
break; break;
case SplitType.NonMerged: case SplitType.NonMerged:
CreateNonMergedSets(false, maxDegreeOfParallelism, logger); CreateNonMergedSets(false);
break; break;
case SplitType.Merged: case SplitType.Merged:
CreateMergedSets(false, maxDegreeOfParallelism, logger); CreateMergedSets(false);
break; break;
case SplitType.Split: case SplitType.Split:
CreateSplitSets(false, maxDegreeOfParallelism, logger); CreateSplitSets(false);
break; break;
} }
} }
@@ -207,7 +202,6 @@ namespace SabreTools.Helper.Dats
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param> /// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
/// <param name="single">True if all games should be replaced by '!', false otherwise</param> /// <param name="single">True if all games should be replaced by '!', false otherwise</param>
/// <param name="root">String representing root directory to compare against for length calculation</param> /// <param name="root">String representing root directory to compare against for length calculation</param>
/// <param name="logger">Logger object for console and/or file output</param>
/// <param name="keep">True if full pathnames are to be kept, false otherwise (default)</param> /// <param name="keep">True if full pathnames are to be kept, false otherwise (default)</param>
/// <param name="clean">True if game names are sanitized, false otherwise (default)</param> /// <param name="clean">True if game names are sanitized, false otherwise (default)</param>
/// <param name="descAsName">True if descriptions should be used as names, false otherwise (default)</param> /// <param name="descAsName">True if descriptions should be used as names, false otherwise (default)</param>
@@ -226,7 +220,6 @@ namespace SabreTools.Helper.Dats
string root, string root,
// Miscellaneous // Miscellaneous
Logger logger,
bool keep, bool keep,
bool clean, bool clean,
bool descAsName) bool descAsName)
@@ -284,7 +277,7 @@ namespace SabreTools.Helper.Dats
}; };
// Now process and add the rom // Now process and add the rom
ParseAddHelper(rom, filter, trim, single, root, clean, logger, out string key); ParseAddHelper(rom, filter, trim, single, root, clean, out string key);
} }
sr.Dispose(); sr.Dispose();
@@ -300,7 +293,6 @@ namespace SabreTools.Helper.Dats
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param> /// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
/// <param name="single">True if all games should be replaced by '!', false otherwise</param> /// <param name="single">True if all games should be replaced by '!', false otherwise</param>
/// <param name="root">String representing root directory to compare against for length calculation</param> /// <param name="root">String representing root directory to compare against for length calculation</param>
/// <param name="logger">Logger object for console and/or file output</param>
/// <param name="keep">True if full pathnames are to be kept, false otherwise (default)</param> /// <param name="keep">True if full pathnames are to be kept, false otherwise (default)</param>
/// <param name="clean">True if game names are sanitized, false otherwise (default)</param> /// <param name="clean">True if game names are sanitized, false otherwise (default)</param>
/// <param name="descAsName">True if descriptions should be used as names, false otherwise (default)</param> /// <param name="descAsName">True if descriptions should be used as names, false otherwise (default)</param>
@@ -319,7 +311,6 @@ namespace SabreTools.Helper.Dats
string root, string root,
// Miscellaneous // Miscellaneous
Logger logger,
bool keep, bool keep,
bool clean, bool clean,
bool descAsName) bool descAsName)
@@ -430,7 +421,7 @@ namespace SabreTools.Helper.Dats
// Now process and add the sample // Now process and add the sample
key = ""; key = "";
ParseAddHelper(item, filter, trim, single, root, clean, logger, out key); ParseAddHelper(item, filter, trim, single, root, clean, out key);
continue; continue;
} }
@@ -515,7 +506,7 @@ namespace SabreTools.Helper.Dats
// Now process and add the rom // Now process and add the rom
key = ""; key = "";
ParseAddHelper(item, filter, trim, single, root, clean, logger, out key); ParseAddHelper(item, filter, trim, single, root, clean, out key);
continue; continue;
} }
@@ -728,7 +719,7 @@ namespace SabreTools.Helper.Dats
// Now process and add the rom // Now process and add the rom
key = ""; key = "";
ParseAddHelper(item, filter, trim, single, root, clean, logger, out key); ParseAddHelper(item, filter, trim, single, root, clean, out key);
} }
// If the line is anything but a rom or disk and we're in a block // If the line is anything but a rom or disk and we're in a block
@@ -917,7 +908,6 @@ namespace SabreTools.Helper.Dats
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param> /// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
/// <param name="single">True if all games should be replaced by '!', false otherwise</param> /// <param name="single">True if all games should be replaced by '!', false otherwise</param>
/// <param name="root">String representing root directory to compare against for length calculation</param> /// <param name="root">String representing root directory to compare against for length calculation</param>
/// <param name="logger">Logger object for console and/or file output</param>
/// <param name="keep">True if full pathnames are to be kept, false otherwise (default)</param> /// <param name="keep">True if full pathnames are to be kept, false otherwise (default)</param>
/// <param name="clean">True if game names are sanitized, false otherwise (default)</param> /// <param name="clean">True if game names are sanitized, false otherwise (default)</param>
/// <param name="descAsName">True if SL XML names should be kept, false otherwise (default)</param> /// <param name="descAsName">True if SL XML names should be kept, false otherwise (default)</param>
@@ -937,7 +927,6 @@ namespace SabreTools.Helper.Dats
string root, string root,
// Miscellaneous // Miscellaneous
Logger logger,
bool keep, bool keep,
bool clean, bool clean,
bool descAsName) bool descAsName)
@@ -1050,7 +1039,7 @@ namespace SabreTools.Helper.Dats
// If the line doesn't have the correct number of columns, we log and skip // If the line doesn't have the correct number of columns, we log and skip
if (parsedLine.Length != columns.Count) if (parsedLine.Length != columns.Count)
{ {
logger.Warning("Malformed line found in '" + filename + " at line " + linenum); Globals.Logger.Warning("Malformed line found in '" + filename + " at line " + linenum);
continue; continue;
} }
@@ -1175,7 +1164,7 @@ namespace SabreTools.Helper.Dats
}, },
}; };
ParseAddHelper(archive, filter, trim, single, root, clean, logger, out key); ParseAddHelper(archive, filter, trim, single, root, clean, out key);
break; break;
case ItemType.BiosSet: case ItemType.BiosSet:
BiosSet biosset = new BiosSet() BiosSet biosset = new BiosSet()
@@ -1189,7 +1178,7 @@ namespace SabreTools.Helper.Dats
}, },
}; };
ParseAddHelper(biosset, filter, trim, single, root, clean, logger, out key); ParseAddHelper(biosset, filter, trim, single, root, clean, out key);
break; break;
case ItemType.Disk: case ItemType.Disk:
Disk disk = new Disk() Disk disk = new Disk()
@@ -1210,7 +1199,7 @@ namespace SabreTools.Helper.Dats
ItemStatus = status, ItemStatus = status,
}; };
ParseAddHelper(disk, filter, trim, single, root, clean, logger, out key); ParseAddHelper(disk, filter, trim, single, root, clean, out key);
break; break;
case ItemType.Release: case ItemType.Release:
Release release = new Release() Release release = new Release()
@@ -1224,7 +1213,7 @@ namespace SabreTools.Helper.Dats
}, },
}; };
ParseAddHelper(release, filter, trim, single, root, clean, logger, out key); ParseAddHelper(release, filter, trim, single, root, clean, out key);
break; break;
case ItemType.Rom: case ItemType.Rom:
Rom rom = new Rom() Rom rom = new Rom()
@@ -1247,7 +1236,7 @@ namespace SabreTools.Helper.Dats
ItemStatus = status, ItemStatus = status,
}; };
ParseAddHelper(rom, filter, trim, single, root, clean, logger, out key); ParseAddHelper(rom, filter, trim, single, root, clean, out key);
break; break;
case ItemType.Sample: case ItemType.Sample:
Sample sample = new Sample() Sample sample = new Sample()
@@ -1261,7 +1250,7 @@ namespace SabreTools.Helper.Dats
}, },
}; };
ParseAddHelper(sample, filter, trim, single, root, clean, logger, out key); ParseAddHelper(sample, filter, trim, single, root, clean, out key);
break; break;
} }
} }
@@ -1277,7 +1266,6 @@ namespace SabreTools.Helper.Dats
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param> /// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
/// <param name="single">True if all games should be replaced by '!', false otherwise</param> /// <param name="single">True if all games should be replaced by '!', false otherwise</param>
/// <param name="root">String representing root directory to compare against for length calculation</param> /// <param name="root">String representing root directory to compare against for length calculation</param>
/// <param name="logger">Logger object for console and/or file output</param>
/// <param name="keep">True if full pathnames are to be kept, false otherwise (default)</param> /// <param name="keep">True if full pathnames are to be kept, false otherwise (default)</param>
/// <param name="clean">True if game names are sanitized, false otherwise (default)</param> /// <param name="clean">True if game names are sanitized, false otherwise (default)</param>
/// <param name="descAsName">True if SL XML names should be kept, false otherwise (default)</param> /// <param name="descAsName">True if SL XML names should be kept, false otherwise (default)</param>
@@ -1300,7 +1288,6 @@ namespace SabreTools.Helper.Dats
string root, string root,
// Miscellaneous // Miscellaneous
Logger logger,
bool keep, bool keep,
bool clean, bool clean,
bool descAsName) bool descAsName)
@@ -1314,7 +1301,7 @@ namespace SabreTools.Helper.Dats
List<string> parent = new List<string>(); List<string> parent = new List<string>();
Encoding enc = Style.GetEncoding(filename); Encoding enc = Style.GetEncoding(filename);
XmlReader xtr = FileTools.GetXmlTextReader(filename, logger); XmlReader xtr = FileTools.GetXmlTextReader(filename);
// If we got a null reader, just return // If we got a null reader, just return
if (xtr == null) if (xtr == null)
@@ -1338,14 +1325,14 @@ namespace SabreTools.Helper.Dats
Rom rom = new Rom("null", tempgame); Rom rom = new Rom("null", tempgame);
// Now process and add the rom // Now process and add the rom
ParseAddHelper(rom, filter, trim, single, root, clean, logger, out key); ParseAddHelper(rom, filter, trim, single, root, clean, out key);
} }
// Regardless, end the current folder // Regardless, end the current folder
int parentcount = parent.Count; int parentcount = parent.Count;
if (parentcount == 0) if (parentcount == 0)
{ {
logger.Verbose("Empty parent: " + String.Join("\\", parent)); Globals.Logger.Verbose("Empty parent: " + String.Join("\\", parent));
empty = true; empty = true;
} }
@@ -1884,7 +1871,7 @@ namespace SabreTools.Helper.Dats
}; };
// Now process and add the rom // Now process and add the rom
ParseAddHelper(olrom, filter, trim, single, root, clean, logger, out key); ParseAddHelper(olrom, filter, trim, single, root, clean, out key);
break; break;
// For Software List and MAME listxml only // For Software List and MAME listxml only
@@ -1986,7 +1973,7 @@ namespace SabreTools.Helper.Dats
}; };
// Now process and add the rom // Now process and add the rom
ParseAddHelper(relrom, filter, trim, single, root, clean, logger, out key); ParseAddHelper(relrom, filter, trim, single, root, clean, out key);
subreader.Read(); subreader.Read();
break; break;
@@ -2029,7 +2016,7 @@ namespace SabreTools.Helper.Dats
}; };
// Now process and add the rom // Now process and add the rom
ParseAddHelper(biosrom, filter, trim, single, root, clean, logger, out key); ParseAddHelper(biosrom, filter, trim, single, root, clean, out key);
subreader.Read(); subreader.Read();
break; break;
@@ -2057,7 +2044,7 @@ namespace SabreTools.Helper.Dats
}; };
// Now process and add the rom // Now process and add the rom
ParseAddHelper(archiverom, filter, trim, single, root, clean, logger, out key); ParseAddHelper(archiverom, filter, trim, single, root, clean, out key);
subreader.Read(); subreader.Read();
break; break;
@@ -2085,7 +2072,7 @@ namespace SabreTools.Helper.Dats
}; };
// Now process and add the rom // Now process and add the rom
ParseAddHelper(samplerom, filter, trim, single, root, clean, logger, out key); ParseAddHelper(samplerom, filter, trim, single, root, clean, out key);
subreader.Read(); subreader.Read();
break; break;
@@ -2104,13 +2091,13 @@ namespace SabreTools.Helper.Dats
} }
if (subreader.GetAttribute("flags") == "baddump" || subreader.GetAttribute("status") == "baddump") if (subreader.GetAttribute("flags") == "baddump" || subreader.GetAttribute("status") == "baddump")
{ {
logger.Verbose("Bad dump detected: " + Globals.Logger.Verbose("Bad dump detected: " +
(subreader.GetAttribute("name") != null && subreader.GetAttribute("name") != "" ? "\"" + xtr.GetAttribute("name") + "\"" : "ROM NAME NOT FOUND")); (subreader.GetAttribute("name") != null && subreader.GetAttribute("name") != "" ? "\"" + xtr.GetAttribute("name") + "\"" : "ROM NAME NOT FOUND"));
its = ItemStatus.BadDump; its = ItemStatus.BadDump;
} }
if (subreader.GetAttribute("flags") == "nodump" || subreader.GetAttribute("status") == "nodump") if (subreader.GetAttribute("flags") == "nodump" || subreader.GetAttribute("status") == "nodump")
{ {
logger.Verbose("Nodump detected: " + Globals.Logger.Verbose("Nodump detected: " +
(subreader.GetAttribute("name") != null && subreader.GetAttribute("name") != "" ? "\"" + xtr.GetAttribute("name") + "\"" : "ROM NAME NOT FOUND")); (subreader.GetAttribute("name") != null && subreader.GetAttribute("name") != "" ? "\"" + xtr.GetAttribute("name") + "\"" : "ROM NAME NOT FOUND"));
its = ItemStatus.Nodump; its = ItemStatus.Nodump;
} }
@@ -2232,7 +2219,7 @@ namespace SabreTools.Helper.Dats
} }
// Now process and add the rom // Now process and add the rom
ParseAddHelper(inrom, filter, trim, single, root, clean, logger, out key); ParseAddHelper(inrom, filter, trim, single, root, clean, out key);
subreader.Read(); subreader.Read();
break; break;
@@ -2297,12 +2284,12 @@ namespace SabreTools.Helper.Dats
its = ItemStatus.Good; its = ItemStatus.Good;
break; break;
case "baddump": case "baddump":
logger.Verbose("Bad dump detected: " + (xtr.GetAttribute("name") != null && xtr.GetAttribute("name") != "" ? Globals.Logger.Verbose("Bad dump detected: " + (xtr.GetAttribute("name") != null && xtr.GetAttribute("name") != "" ?
"\"" + xtr.GetAttribute("name") + "\"" : "ROM NAME NOT FOUND")); "\"" + xtr.GetAttribute("name") + "\"" : "ROM NAME NOT FOUND"));
its = ItemStatus.BadDump; its = ItemStatus.BadDump;
break; break;
case "nodump": case "nodump":
logger.Verbose("Nodump detected: " + (xtr.GetAttribute("name") != null && xtr.GetAttribute("name") != "" ? Globals.Logger.Verbose("Nodump detected: " + (xtr.GetAttribute("name") != null && xtr.GetAttribute("name") != "" ?
"\"" + xtr.GetAttribute("name") + "\"" : "ROM NAME NOT FOUND")); "\"" + xtr.GetAttribute("name") + "\"" : "ROM NAME NOT FOUND"));
its = ItemStatus.Nodump; its = ItemStatus.Nodump;
break; break;
@@ -2411,7 +2398,7 @@ namespace SabreTools.Helper.Dats
} }
// Now process and add the rom // Now process and add the rom
ParseAddHelper(rom, filter, trim, single, root, clean, logger, out key); ParseAddHelper(rom, filter, trim, single, root, clean, out key);
xtr.Read(); xtr.Read();
break; break;
@@ -2423,7 +2410,7 @@ namespace SabreTools.Helper.Dats
} }
catch (Exception ex) catch (Exception ex)
{ {
logger.Warning(ex.ToString()); Globals.Logger.Warning(ex.ToString());
// For XML errors, just skip the affected node // For XML errors, just skip the affected node
xtr?.Read(); xtr?.Read();
@@ -2442,7 +2429,6 @@ namespace SabreTools.Helper.Dats
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param> /// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
/// <param name="single">True if all games should be replaced by '!', false otherwise</param> /// <param name="single">True if all games should be replaced by '!', false otherwise</param>
/// <param name="root">String representing root directory to compare against for length calculation</param> /// <param name="root">String representing root directory to compare against for length calculation</param>
/// <param name="logger">Logger object for console and/or file output</param>
/// <param name="clean">True if game names are sanitized, false otherwise (default)</param> /// <param name="clean">True if game names are sanitized, false otherwise (default)</param>
private void ParseRedumpMD5( private void ParseRedumpMD5(
// Standard Dat parsing // Standard Dat parsing
@@ -2459,7 +2445,6 @@ namespace SabreTools.Helper.Dats
string root, string root,
// Miscellaneous // Miscellaneous
Logger logger,
bool clean) bool clean)
{ {
// Open a file reader // Open a file reader
@@ -2487,7 +2472,7 @@ namespace SabreTools.Helper.Dats
}; };
// Now process and add the rom // Now process and add the rom
ParseAddHelper(rom, filter, trim, single, root, clean, logger, out string key); ParseAddHelper(rom, filter, trim, single, root, clean, out string key);
} }
sr.Dispose(); sr.Dispose();
@@ -2503,7 +2488,6 @@ namespace SabreTools.Helper.Dats
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param> /// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
/// <param name="single">True if all games should be replaced by '!', false otherwise</param> /// <param name="single">True if all games should be replaced by '!', false otherwise</param>
/// <param name="root">String representing root directory to compare against for length calculation</param> /// <param name="root">String representing root directory to compare against for length calculation</param>
/// <param name="logger">Logger object for console and/or file output</param>
/// <param name="clean">True if game names are sanitized, false otherwise (default)</param> /// <param name="clean">True if game names are sanitized, false otherwise (default)</param>
private void ParseRedumpSFV( private void ParseRedumpSFV(
// Standard Dat parsing // Standard Dat parsing
@@ -2520,7 +2504,6 @@ namespace SabreTools.Helper.Dats
string root, string root,
// Miscellaneous // Miscellaneous
Logger logger,
bool clean) bool clean)
{ {
// Open a file reader // Open a file reader
@@ -2548,7 +2531,7 @@ namespace SabreTools.Helper.Dats
}; };
// Now process and add the rom // Now process and add the rom
ParseAddHelper(rom, filter, trim, single, root, clean, logger, out string key); ParseAddHelper(rom, filter, trim, single, root, clean, out string key);
} }
sr.Dispose(); sr.Dispose();
@@ -2564,7 +2547,6 @@ namespace SabreTools.Helper.Dats
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param> /// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
/// <param name="single">True if all games should be replaced by '!', false otherwise</param> /// <param name="single">True if all games should be replaced by '!', false otherwise</param>
/// <param name="root">String representing root directory to compare against for length calculation</param> /// <param name="root">String representing root directory to compare against for length calculation</param>
/// <param name="logger">Logger object for console and/or file output</param>
/// <param name="clean">True if game names are sanitized, false otherwise (default)</param> /// <param name="clean">True if game names are sanitized, false otherwise (default)</param>
private void ParseRedumpSHA1( private void ParseRedumpSHA1(
// Standard Dat parsing // Standard Dat parsing
@@ -2581,7 +2563,6 @@ namespace SabreTools.Helper.Dats
string root, string root,
// Miscellaneous // Miscellaneous
Logger logger,
bool clean) bool clean)
{ {
// Open a file reader // Open a file reader
@@ -2609,7 +2590,7 @@ namespace SabreTools.Helper.Dats
}; };
// Now process and add the rom // Now process and add the rom
ParseAddHelper(rom, filter, trim, single, root, clean, logger, out string key); ParseAddHelper(rom, filter, trim, single, root, clean, out string key);
} }
sr.Dispose(); sr.Dispose();
@@ -2625,7 +2606,6 @@ namespace SabreTools.Helper.Dats
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param> /// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
/// <param name="single">True if all games should be replaced by '!', false otherwise</param> /// <param name="single">True if all games should be replaced by '!', false otherwise</param>
/// <param name="root">String representing root directory to compare against for length calculation</param> /// <param name="root">String representing root directory to compare against for length calculation</param>
/// <param name="logger">Logger object for console and/or file output</param>
/// <param name="clean">True if game names are sanitized, false otherwise (default)</param> /// <param name="clean">True if game names are sanitized, false otherwise (default)</param>
private void ParseRedumpSHA256( private void ParseRedumpSHA256(
// Standard Dat parsing // Standard Dat parsing
@@ -2642,7 +2622,6 @@ namespace SabreTools.Helper.Dats
string root, string root,
// Miscellaneous // Miscellaneous
Logger logger,
bool clean) bool clean)
{ {
// Open a file reader // Open a file reader
@@ -2670,7 +2649,7 @@ namespace SabreTools.Helper.Dats
}; };
// Now process and add the rom // Now process and add the rom
ParseAddHelper(rom, filter, trim, single, root, clean, logger, out string key); ParseAddHelper(rom, filter, trim, single, root, clean, out string key);
} }
sr.Dispose(); sr.Dispose();
@@ -2686,7 +2665,6 @@ namespace SabreTools.Helper.Dats
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param> /// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
/// <param name="single">True if all games should be replaced by '!', false otherwise</param> /// <param name="single">True if all games should be replaced by '!', false otherwise</param>
/// <param name="root">String representing root directory to compare against for length calculation</param> /// <param name="root">String representing root directory to compare against for length calculation</param>
/// <param name="logger">Logger object for console and/or file output</param>
/// <param name="clean">True if game names are sanitized, false otherwise (default)</param> /// <param name="clean">True if game names are sanitized, false otherwise (default)</param>
private void ParseRedumpSHA384( private void ParseRedumpSHA384(
// Standard Dat parsing // Standard Dat parsing
@@ -2703,7 +2681,6 @@ namespace SabreTools.Helper.Dats
string root, string root,
// Miscellaneous // Miscellaneous
Logger logger,
bool clean) bool clean)
{ {
// Open a file reader // Open a file reader
@@ -2731,7 +2708,7 @@ namespace SabreTools.Helper.Dats
}; };
// Now process and add the rom // Now process and add the rom
ParseAddHelper(rom, filter, trim, single, root, clean, logger, out string key); ParseAddHelper(rom, filter, trim, single, root, clean, out string key);
} }
sr.Dispose(); sr.Dispose();
@@ -2747,7 +2724,6 @@ namespace SabreTools.Helper.Dats
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param> /// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
/// <param name="single">True if all games should be replaced by '!', false otherwise</param> /// <param name="single">True if all games should be replaced by '!', false otherwise</param>
/// <param name="root">String representing root directory to compare against for length calculation</param> /// <param name="root">String representing root directory to compare against for length calculation</param>
/// <param name="logger">Logger object for console and/or file output</param>
/// <param name="clean">True if game names are sanitized, false otherwise (default)</param> /// <param name="clean">True if game names are sanitized, false otherwise (default)</param>
private void ParseRedumpSHA512( private void ParseRedumpSHA512(
// Standard Dat parsing // Standard Dat parsing
@@ -2764,7 +2740,6 @@ namespace SabreTools.Helper.Dats
string root, string root,
// Miscellaneous // Miscellaneous
Logger logger,
bool clean) bool clean)
{ {
// Open a file reader // Open a file reader
@@ -2792,7 +2767,7 @@ namespace SabreTools.Helper.Dats
}; };
// Now process and add the rom // Now process and add the rom
ParseAddHelper(rom, filter, trim, single, root, clean, logger, out string key); ParseAddHelper(rom, filter, trim, single, root, clean, out string key);
} }
sr.Dispose(); sr.Dispose();
@@ -2808,7 +2783,6 @@ namespace SabreTools.Helper.Dats
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param> /// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
/// <param name="single">True if all games should be replaced by '!', false otherwise</param> /// <param name="single">True if all games should be replaced by '!', false otherwise</param>
/// <param name="root">String representing root directory to compare against for length calculation</param> /// <param name="root">String representing root directory to compare against for length calculation</param>
/// <param name="logger">Logger object for console and/or file output</param>
/// <param name="clean">True if game names are sanitized, false otherwise (default)</param> /// <param name="clean">True if game names are sanitized, false otherwise (default)</param>
/// <param name="descAsName">True if descriptions should be used as names, false otherwise (default)</param> /// <param name="descAsName">True if descriptions should be used as names, false otherwise (default)</param>
private void ParseRC( private void ParseRC(
@@ -2826,7 +2800,6 @@ namespace SabreTools.Helper.Dats
string root, string root,
// Miscellaneous // Miscellaneous
Logger logger,
bool clean, bool clean,
bool descAsName) bool descAsName)
{ {
@@ -2966,7 +2939,7 @@ namespace SabreTools.Helper.Dats
}; };
// Now process and add the rom // Now process and add the rom
ParseAddHelper(rom, filter, trim, single, root, clean, logger, out string key); ParseAddHelper(rom, filter, trim, single, root, clean, out string key);
} }
} }
} }
@@ -2982,22 +2955,21 @@ namespace SabreTools.Helper.Dats
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param> /// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
/// <param name="single">True if all games should be replaced by '!', false otherwise</param> /// <param name="single">True if all games should be replaced by '!', false otherwise</param>
/// <param name="root">String representing root directory to compare against for length calculation</param> /// <param name="root">String representing root directory to compare against for length calculation</param>
/// <param name="logger">Logger object for console and/or file output</param> private void ParseAddHelper(DatItem item, Filter filter, bool trim, bool single, string root, bool clean, out string key)
private void ParseAddHelper(DatItem item, Filter filter, bool trim, bool single, string root, bool clean, Logger logger, out string key)
{ {
key = ""; key = "";
// If there's no name in the rom, we log and skip it // If there's no name in the rom, we log and skip it
if (item.Name == null) if (item.Name == null)
{ {
logger.Warning("Rom with no name found! Skipping..."); Globals.Logger.Warning("Rom with no name found! Skipping...");
return; return;
} }
// If the name ends with a directory separator, we log and skip it (DOSCenter only?) // If the name ends with a directory separator, we log and skip it (DOSCenter only?)
if (item.Name.EndsWith("/") || item.Name.EndsWith("\\")) if (item.Name.EndsWith("/") || item.Name.EndsWith("\\"))
{ {
logger.Warning("Rom with directory separator found: '" + item.Name + "'. Skipping..."); Globals.Logger.Warning("Rom with directory separator found: '" + item.Name + "'. Skipping...");
return; return;
} }
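The hunk above is the core of this change: ParseAddHelper loses its Logger parameter and every parser logs through Globals.Logger instead. The Globals type itself is not part of this file's diff, so the following is only a minimal sketch of the shared-logger pattern it implies; the Logger stand-in and its console output are illustrative, not SabreTools' actual implementation (only the method names User, Verbose, Warning, and Error are taken from the call sites in this commit).

using System;

// Illustrative stand-in for the logger the call sites expect.
public class Logger
{
    public void User(string message)    { Console.WriteLine("[USER]    " + message); }
    public void Verbose(string message) { Console.WriteLine("[VERBOSE] " + message); }
    public void Warning(string message) { Console.WriteLine("[WARNING] " + message); }
    public void Error(string message)   { Console.WriteLine("[ERROR]   " + message); }
}

// Minimal sketch of the global holder implied by the Globals.Logger call sites.
public static class Globals
{
    public static Logger Logger { get; set; } = new Logger();
}

public static class Demo
{
    public static void Main()
    {
        // Usage mirrors the rewritten call sites: no Logger argument is threaded through.
        Globals.Logger.Warning("Rom with no name found! Skipping...");
    }
}

With a single shared instance, helpers drop both the Logger parameter and its documentation, which is why the <param name="logger"> lines disappear throughout this diff.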
@@ -3037,7 +3009,7 @@ namespace SabreTools.Helper.Dats
// If the file has no size and it's not the above case, skip and log // If the file has no size and it's not the above case, skip and log
else if (itemRom.ItemStatus != ItemStatus.Nodump && (itemRom.Size == 0 || itemRom.Size == -1)) else if (itemRom.ItemStatus != ItemStatus.Nodump && (itemRom.Size == 0 || itemRom.Size == -1))
{ {
logger.Verbose("Incomplete entry for \"" + itemRom.Name + "\" will be output as nodump"); Globals.Logger.Verbose("Incomplete entry for \"" + itemRom.Name + "\" will be output as nodump");
itemRom.ItemStatus = ItemStatus.Nodump; itemRom.ItemStatus = ItemStatus.Nodump;
} }
// If the file has a size but absolutely no hashes, skip and log	// If the file has a size but absolutely no hashes, skip and log
@@ -3050,7 +3022,7 @@ namespace SabreTools.Helper.Dats
&& String.IsNullOrEmpty(itemRom.SHA384) && String.IsNullOrEmpty(itemRom.SHA384)
&& String.IsNullOrEmpty(itemRom.SHA512)) && String.IsNullOrEmpty(itemRom.SHA512))
{ {
logger.Verbose("Incomplete entry for \"" + itemRom.Name + "\" will be output as nodump"); Globals.Logger.Verbose("Incomplete entry for \"" + itemRom.Name + "\" will be output as nodump");
itemRom.ItemStatus = ItemStatus.Nodump; itemRom.ItemStatus = ItemStatus.Nodump;
} }
@@ -3075,7 +3047,7 @@ namespace SabreTools.Helper.Dats
&& String.IsNullOrEmpty(itemDisk.SHA384) && String.IsNullOrEmpty(itemDisk.SHA384)
&& String.IsNullOrEmpty(itemDisk.SHA512)) && String.IsNullOrEmpty(itemDisk.SHA512))
{ {
logger.Verbose("Incomplete entry for \"" + itemDisk.Name + "\" will be output as nodump"); Globals.Logger.Verbose("Incomplete entry for \"" + itemDisk.Name + "\" will be output as nodump");
itemDisk.ItemStatus = ItemStatus.Nodump; itemDisk.ItemStatus = ItemStatus.Nodump;
} }
@@ -3083,7 +3055,7 @@ namespace SabreTools.Helper.Dats
} }
// If the rom passes the filter, include it // If the rom passes the filter, include it
if (filter.ItemPasses(item, logger)) if (filter.ItemPasses(item))
{ {
// If we are in single game mode, rename all games // If we are in single game mode, rename all games
if (single) if (single)

View File

@@ -36,18 +36,16 @@ namespace SabreTools.Helper.Dats
/// <param name="romba">True if files should be output in Romba depot folders, false otherwise</param> /// <param name="romba">True if files should be output in Romba depot folders, false otherwise</param>
/// <param name="updateDat">True if the updated DAT should be output, false otherwise</param> /// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param> /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="logger">Logger object for file and console output</param>
/// <returns>True if rebuilding was a success, false otherwise</returns> /// <returns>True if rebuilding was a success, false otherwise</returns>
public bool RebuildDepot(List<string> inputs, string outDir, string tempDir, bool date, bool delete, public bool RebuildDepot(List<string> inputs, string outDir, string tempDir, bool date, bool delete,
bool inverse, OutputFormat outputFormat, bool romba, bool updateDat, string headerToCheckAgainst, bool inverse, OutputFormat outputFormat, bool romba, bool updateDat, string headerToCheckAgainst)
int maxDegreeOfParallelism, Logger logger)
{ {
#region Perform setup #region Perform setup
// If the DAT is not populated and inverse is not set, inform the user and quit // If the DAT is not populated and inverse is not set, inform the user and quit
if (Count == 0 && !inverse) if (Count == 0 && !inverse)
{ {
logger.User("No entries were found to rebuild, exiting..."); Globals.Logger.User("No entries were found to rebuild, exiting...");
return false; return false;
} }
@@ -100,28 +98,28 @@ namespace SabreTools.Helper.Dats
switch (outputFormat) switch (outputFormat)
{ {
case OutputFormat.Folder: case OutputFormat.Folder:
logger.User("Rebuilding all files to directory"); Globals.Logger.User("Rebuilding all files to directory");
break; break;
case OutputFormat.TapeArchive: case OutputFormat.TapeArchive:
logger.User("Rebuilding all files to TAR"); Globals.Logger.User("Rebuilding all files to TAR");
break; break;
case OutputFormat.Torrent7Zip: case OutputFormat.Torrent7Zip:
logger.User("Rebuilding all files to Torrent7Z"); Globals.Logger.User("Rebuilding all files to Torrent7Z");
break; break;
case OutputFormat.TorrentGzip: case OutputFormat.TorrentGzip:
logger.User("Rebuilding all files to TorrentGZ"); Globals.Logger.User("Rebuilding all files to TorrentGZ");
break; break;
case OutputFormat.TorrentLrzip: case OutputFormat.TorrentLrzip:
logger.User("Rebuilding all files to TorrentLRZ"); Globals.Logger.User("Rebuilding all files to TorrentLRZ");
break; break;
case OutputFormat.TorrentRar: case OutputFormat.TorrentRar:
logger.User("Rebuilding all files to TorrentRAR"); Globals.Logger.User("Rebuilding all files to TorrentRAR");
break; break;
case OutputFormat.TorrentXZ: case OutputFormat.TorrentXZ:
logger.User("Rebuilding all files to TorrentXZ"); Globals.Logger.User("Rebuilding all files to TorrentXZ");
break; break;
case OutputFormat.TorrentZip: case OutputFormat.TorrentZip:
logger.User("Rebuilding all files to TorrentZip"); Globals.Logger.User("Rebuilding all files to TorrentZip");
break; break;
} }
DateTime start = DateTime.Now; DateTime start = DateTime.Now;
@@ -133,7 +131,7 @@ namespace SabreTools.Helper.Dats
// Add to the list if the input is a directory // Add to the list if the input is a directory
if (Directory.Exists(input)) if (Directory.Exists(input))
{ {
logger.Verbose("Adding depot: '" + input + "'"); Globals.Logger.Verbose("Adding depot: '" + input + "'");
directories.Add(input); directories.Add(input);
} }
} }
@@ -145,7 +143,7 @@ namespace SabreTools.Helper.Dats
} }
// Now that we have a list of depots, we want to sort the input DAT by SHA-1 // Now that we have a list of depots, we want to sort the input DAT by SHA-1
BucketBy(SortedBy.SHA1, false /* mergeroms */, maxDegreeOfParallelism, logger); BucketBy(SortedBy.SHA1, false /* mergeroms */);
// Then we want to loop through each of the hashes and see if we can rebuild // Then we want to loop through each of the hashes and see if we can rebuild
List<string> hashes = Keys.ToList(); List<string> hashes = Keys.ToList();
@@ -157,7 +155,7 @@ namespace SabreTools.Helper.Dats
continue; continue;
} }
logger.User("Checking hash '" + hash + "'"); Globals.Logger.User("Checking hash '" + hash + "'");
// Get the extension path for the hash // Get the extension path for the hash
string subpath = Style.GetRombaPath(hash); string subpath = Style.GetRombaPath(hash);
@@ -180,7 +178,7 @@ namespace SabreTools.Helper.Dats
} }
// If we have a path, we want to try to get the rom information // If we have a path, we want to try to get the rom information
Rom fileinfo = ArchiveTools.GetTorrentGZFileInfo(foundpath, logger); Rom fileinfo = ArchiveTools.GetTorrentGZFileInfo(foundpath);
// If the file information is null, then we continue // If the file information is null, then we continue
if (fileinfo == null) if (fileinfo == null)
@@ -190,10 +188,10 @@ namespace SabreTools.Helper.Dats
// Otherwise, we rebuild that file to all locations that we need to // Otherwise, we rebuild that file to all locations that we need to
RebuildIndividualFile(fileinfo, foundpath, outDir, tempDir, date, inverse, outputFormat, romba, RebuildIndividualFile(fileinfo, foundpath, outDir, tempDir, date, inverse, outputFormat, romba,
updateDat, true /*isZip*/, headerToCheckAgainst, maxDegreeOfParallelism, logger); updateDat, true /*isZip*/, headerToCheckAgainst);
} }
logger.User("Rebuilding complete in: " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff")); Globals.Logger.User("Rebuilding complete in: " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
#endregion #endregion
@@ -203,7 +201,7 @@ namespace SabreTools.Helper.Dats
_fileName = "fixDAT_" + _fileName; _fileName = "fixDAT_" + _fileName;
_name = "fixDAT_" + _name; _name = "fixDAT_" + _name;
_description = "fixDAT_" + _description; _description = "fixDAT_" + _description;
WriteToFile(outDir, maxDegreeOfParallelism, logger); WriteToFile(outDir);
} }
return success; return success;
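Alongside the logger, RebuildDepot also stops taking maxDegreeOfParallelism, and calls such as BucketBy and WriteToFile lose that argument as well. The Globals class presumably carries the parallelism setting too; the property name below is an assumption (it does not appear in this hunk), so treat this as a sketch of the pattern rather than the project's actual API.

using System;
using System.Threading.Tasks;

public static class Globals
{
    // Assumed name for the shared setting that replaces the removed parameter.
    public static int MaxDegreeOfParallelism { get; set; } = Environment.ProcessorCount;
}

public static class ParallelSketch
{
    public static void Main()
    {
        string[] hashes = { "00", "01", "02", "03" };

        // A helper like BucketBy can build its ParallelOptions from the shared
        // setting instead of receiving maxDegreeOfParallelism from every caller.
        Parallel.ForEach(hashes,
            new ParallelOptions { MaxDegreeOfParallelism = Globals.MaxDegreeOfParallelism },
            hash => Console.WriteLine("Checking hash '" + hash + "'"));
    }
}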
@@ -224,18 +222,17 @@ namespace SabreTools.Helper.Dats
/// <param name="archiveScanLevel">ArchiveScanLevel representing the archive handling levels</param> /// <param name="archiveScanLevel">ArchiveScanLevel representing the archive handling levels</param>
/// <param name="updateDat">True if the updated DAT should be output, false otherwise</param> /// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param> /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="logger">Logger object for file and console output</param>
/// <returns>True if rebuilding was a success, false otherwise</returns> /// <returns>True if rebuilding was a success, false otherwise</returns>
public bool RebuildGeneric(List<string> inputs, string outDir, string tempDir, bool quickScan, bool date, public bool RebuildGeneric(List<string> inputs, string outDir, string tempDir, bool quickScan, bool date,
bool delete, bool inverse, OutputFormat outputFormat, bool romba, ArchiveScanLevel archiveScanLevel, bool updateDat, bool delete, bool inverse, OutputFormat outputFormat, bool romba, ArchiveScanLevel archiveScanLevel, bool updateDat,
string headerToCheckAgainst, int maxDegreeOfParallelism, Logger logger) string headerToCheckAgainst)
{ {
#region Perform setup #region Perform setup
// If the DAT is not populated and inverse is not set, inform the user and quit // If the DAT is not populated and inverse is not set, inform the user and quit
if (Count == 0 && !inverse) if (Count == 0 && !inverse)
{ {
logger.User("No entries were found to rebuild, exiting..."); Globals.Logger.User("No entries were found to rebuild, exiting...");
return false; return false;
} }
@@ -288,28 +285,28 @@ namespace SabreTools.Helper.Dats
switch (outputFormat) switch (outputFormat)
{ {
case OutputFormat.Folder: case OutputFormat.Folder:
logger.User("Rebuilding all files to directory"); Globals.Logger.User("Rebuilding all files to directory");
break; break;
case OutputFormat.TapeArchive: case OutputFormat.TapeArchive:
logger.User("Rebuilding all files to TAR"); Globals.Logger.User("Rebuilding all files to TAR");
break; break;
case OutputFormat.Torrent7Zip: case OutputFormat.Torrent7Zip:
logger.User("Rebuilding all files to Torrent7Z"); Globals.Logger.User("Rebuilding all files to Torrent7Z");
break; break;
case OutputFormat.TorrentGzip: case OutputFormat.TorrentGzip:
logger.User("Rebuilding all files to TorrentGZ"); Globals.Logger.User("Rebuilding all files to TorrentGZ");
break; break;
case OutputFormat.TorrentLrzip: case OutputFormat.TorrentLrzip:
logger.User("Rebuilding all files to TorrentLRZ"); Globals.Logger.User("Rebuilding all files to TorrentLRZ");
break; break;
case OutputFormat.TorrentRar: case OutputFormat.TorrentRar:
logger.User("Rebuilding all files to TorrentRAR"); Globals.Logger.User("Rebuilding all files to TorrentRAR");
break; break;
case OutputFormat.TorrentXZ: case OutputFormat.TorrentXZ:
logger.User("Rebuilding all files to TorrentXZ"); Globals.Logger.User("Rebuilding all files to TorrentXZ");
break; break;
case OutputFormat.TorrentZip: case OutputFormat.TorrentZip:
logger.User("Rebuilding all files to TorrentZip"); Globals.Logger.User("Rebuilding all files to TorrentZip");
break; break;
} }
DateTime start = DateTime.Now; DateTime start = DateTime.Now;
@@ -320,25 +317,25 @@ namespace SabreTools.Helper.Dats
// If the input is a file // If the input is a file
if (File.Exists(input)) if (File.Exists(input))
{ {
logger.User("Checking file: '" + input + "'"); Globals.Logger.User("Checking file: '" + input + "'");
RebuildGenericHelper(input, outDir, tempDir, quickScan, date, delete, inverse, RebuildGenericHelper(input, outDir, tempDir, quickScan, date, delete, inverse,
outputFormat, romba, archiveScanLevel, updateDat, headerToCheckAgainst, maxDegreeOfParallelism, logger); outputFormat, romba, archiveScanLevel, updateDat, headerToCheckAgainst);
} }
// If the input is a directory // If the input is a directory
else if (Directory.Exists(input)) else if (Directory.Exists(input))
{ {
logger.Verbose("Checking directory: '" + input + "'"); Globals.Logger.Verbose("Checking directory: '" + input + "'");
foreach (string file in Directory.EnumerateFiles(input, "*", SearchOption.AllDirectories)) foreach (string file in Directory.EnumerateFiles(input, "*", SearchOption.AllDirectories))
{ {
logger.User("Checking file: '" + file + "'"); Globals.Logger.User("Checking file: '" + file + "'");
RebuildGenericHelper(file, outDir, tempDir, quickScan, date, delete, inverse, RebuildGenericHelper(file, outDir, tempDir, quickScan, date, delete, inverse,
outputFormat, romba, archiveScanLevel, updateDat, headerToCheckAgainst, maxDegreeOfParallelism, logger); outputFormat, romba, archiveScanLevel, updateDat, headerToCheckAgainst);
} }
} }
} }
logger.User("Rebuilding complete in: " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff")); Globals.Logger.User("Rebuilding complete in: " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
#endregion #endregion
@@ -348,7 +345,7 @@ namespace SabreTools.Helper.Dats
_fileName = "fixDAT_" + _fileName; _fileName = "fixDAT_" + _fileName;
_name = "fixDAT_" + _name; _name = "fixDAT_" + _name;
_description = "fixDAT_" + _description; _description = "fixDAT_" + _description;
WriteToFile(outDir, maxDegreeOfParallelism, logger); WriteToFile(outDir);
} }
return success; return success;
@@ -369,10 +366,9 @@ namespace SabreTools.Helper.Dats
/// <param name="archiveScanLevel">ArchiveScanLevel representing the archive handling levels</param> /// <param name="archiveScanLevel">ArchiveScanLevel representing the archive handling levels</param>
/// <param name="updateDat">True if the updated DAT should be output, false otherwise</param> /// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param> /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="logger">Logger object for file and console output</param>
private void RebuildGenericHelper(string file, string outDir, string tempDir, bool quickScan, bool date, private void RebuildGenericHelper(string file, string outDir, string tempDir, bool quickScan, bool date,
bool delete, bool inverse, OutputFormat outputFormat, bool romba, ArchiveScanLevel archiveScanLevel, bool updateDat, bool delete, bool inverse, OutputFormat outputFormat, bool romba, ArchiveScanLevel archiveScanLevel, bool updateDat,
string headerToCheckAgainst, int maxDegreeOfParallelism, Logger logger) string headerToCheckAgainst)
{ {
// If we somehow have a null filename, return // If we somehow have a null filename, return
if (file == null) if (file == null)
@@ -388,15 +384,15 @@ namespace SabreTools.Helper.Dats
bool usedInternally = false; bool usedInternally = false;
// Get the required scanning level for the file // Get the required scanning level for the file
ArchiveTools.GetInternalExternalProcess(file, archiveScanLevel, logger, out bool shouldExternalProcess, out bool shouldInternalProcess); ArchiveTools.GetInternalExternalProcess(file, archiveScanLevel, out bool shouldExternalProcess, out bool shouldInternalProcess);
// If we're supposed to scan the file externally // If we're supposed to scan the file externally
if (shouldExternalProcess) if (shouldExternalProcess)
{ {
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
Rom rom = FileTools.GetFileInfo(file, logger, omitFromScan: (quickScan ? Hash.SecureHashes : Hash.DeepHashes), header: headerToCheckAgainst); Rom rom = FileTools.GetFileInfo(file, omitFromScan: (quickScan ? Hash.SecureHashes : Hash.DeepHashes), header: headerToCheckAgainst);
usedExternally = RebuildIndividualFile(rom, file, outDir, tempSubDir, date, inverse, outputFormat, usedExternally = RebuildIndividualFile(rom, file, outDir, tempSubDir, date, inverse, outputFormat,
romba, updateDat, false /* isZip */, headerToCheckAgainst, maxDegreeOfParallelism, logger); romba, updateDat, false /* isZip */, headerToCheckAgainst);
} }
// If we're supposed to scan the file internally // If we're supposed to scan the file internally
@@ -405,42 +401,42 @@ namespace SabreTools.Helper.Dats
// If quickscan is set, do so // If quickscan is set, do so
if (quickScan) if (quickScan)
{ {
List<Rom> extracted = ArchiveTools.GetArchiveFileInfo(file, logger); List<Rom> extracted = ArchiveTools.GetArchiveFileInfo(file);
usedInternally = true; usedInternally = true;
foreach (Rom rom in extracted) foreach (Rom rom in extracted)
{ {
usedInternally &= RebuildIndividualFile(rom, file, outDir, tempSubDir, date, inverse, outputFormat, usedInternally &= RebuildIndividualFile(rom, file, outDir, tempSubDir, date, inverse, outputFormat,
romba, updateDat, true /* isZip */, headerToCheckAgainst, maxDegreeOfParallelism, logger); romba, updateDat, true /* isZip */, headerToCheckAgainst);
} }
} }
// Otherwise, attempt to extract the files to the temporary directory // Otherwise, attempt to extract the files to the temporary directory
else else
{ {
bool encounteredErrors = ArchiveTools.ExtractArchive(file, tempSubDir, archiveScanLevel, logger); bool encounteredErrors = ArchiveTools.ExtractArchive(file, tempSubDir, archiveScanLevel);
// If the file was an archive and was extracted successfully, check it // If the file was an archive and was extracted successfully, check it
if (!encounteredErrors) if (!encounteredErrors)
{ {
usedInternally = true; usedInternally = true;
logger.Verbose(Path.GetFileName(file) + " treated like an archive"); Globals.Logger.Verbose(Path.GetFileName(file) + " treated like an archive");
List<string> extracted = Directory.EnumerateFiles(tempSubDir, "*", SearchOption.AllDirectories).ToList(); List<string> extracted = Directory.EnumerateFiles(tempSubDir, "*", SearchOption.AllDirectories).ToList();
foreach (string entry in extracted) foreach (string entry in extracted)
{ {
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
Rom rom = FileTools.GetFileInfo(entry, logger, omitFromScan: (quickScan ? Hash.SecureHashes : Hash.DeepHashes)); Rom rom = FileTools.GetFileInfo(entry, omitFromScan: (quickScan ? Hash.SecureHashes : Hash.DeepHashes));
usedInternally &= RebuildIndividualFile(rom, entry, outDir, tempSubDir, date, inverse, outputFormat, usedInternally &= RebuildIndividualFile(rom, entry, outDir, tempSubDir, date, inverse, outputFormat,
romba, updateDat, false /* isZip */, headerToCheckAgainst, maxDegreeOfParallelism, logger); romba, updateDat, false /* isZip */, headerToCheckAgainst);
} }
} }
// Otherwise, just get the info on the file itself // Otherwise, just get the info on the file itself
else if (File.Exists(file)) else if (File.Exists(file))
{ {
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
Rom rom = FileTools.GetFileInfo(file, logger, omitFromScan: (quickScan ? Hash.SecureHashes : Hash.DeepHashes)); Rom rom = FileTools.GetFileInfo(file, omitFromScan: (quickScan ? Hash.SecureHashes : Hash.DeepHashes));
usedExternally = RebuildIndividualFile(rom, file, outDir, tempSubDir, date, inverse, outputFormat, usedExternally = RebuildIndividualFile(rom, file, outDir, tempSubDir, date, inverse, outputFormat,
romba, updateDat, false /* isZip */, headerToCheckAgainst, maxDegreeOfParallelism, logger); romba, updateDat, false /* isZip */, headerToCheckAgainst);
} }
} }
} }
@@ -450,13 +446,13 @@ namespace SabreTools.Helper.Dats
{ {
try try
{ {
logger.Verbose("Attempting to delete input file '" + file + "'"); Globals.Logger.Verbose("Attempting to delete input file '" + file + "'");
File.Delete(file); File.Delete(file);
logger.Verbose("File '" + file + "' deleted"); Globals.Logger.Verbose("File '" + file + "' deleted");
} }
catch (Exception ex) catch (Exception ex)
{ {
logger.Error("An error occurred while trying to delete '" + file + "' " + ex.ToString()); Globals.Logger.Error("An error occurred while trying to delete '" + file + "' " + ex.ToString());
} }
} }
@@ -482,27 +478,24 @@ namespace SabreTools.Helper.Dats
/// <param name="updateDat">True if the updated DAT should be output, false otherwise</param> /// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
/// <param name="isZip">True if the input file is an archive, false otherwise</param> /// <param name="isZip">True if the input file is an archive, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param> /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logger object for file and console output</param>
/// <returns>True if the file was able to be rebuilt, false otherwise</returns> /// <returns>True if the file was able to be rebuilt, false otherwise</returns>
/// <remarks> /// <remarks>
/// TODO: If going from a TGZ file to a TGZ file, don't extract, just copy /// TODO: If going from a TGZ file to a TGZ file, don't extract, just copy
/// </remarks> /// </remarks>
private bool RebuildIndividualFile(Rom rom, string file, string outDir, string tempDir, bool date, private bool RebuildIndividualFile(Rom rom, string file, string outDir, string tempDir, bool date,
bool inverse, OutputFormat outputFormat, bool romba, bool updateDat, bool isZip, string headerToCheckAgainst, bool inverse, OutputFormat outputFormat, bool romba, bool updateDat, bool isZip, string headerToCheckAgainst)
int maxDegreeOfParallelism, Logger logger)
{ {
// Set the output value // Set the output value
bool rebuilt = false; bool rebuilt = false;
// Find if the file has duplicates in the DAT // Find if the file has duplicates in the DAT
bool hasDuplicates = rom.HasDuplicates(this, maxDegreeOfParallelism, logger); bool hasDuplicates = rom.HasDuplicates(this);
// If it has duplicates and we're not filtering, rebuild it // If it has duplicates and we're not filtering, rebuild it
if (hasDuplicates && !inverse) if (hasDuplicates && !inverse)
{ {
// Get the list of duplicates to rebuild to // Get the list of duplicates to rebuild to
List<DatItem> dupes = rom.GetDuplicates(this, maxDegreeOfParallelism, logger, remove: updateDat); List<DatItem> dupes = rom.GetDuplicates(this, remove: updateDat);
// If we don't have any duplicates, continue // If we don't have any duplicates, continue
if (dupes.Count == 0) if (dupes.Count == 0)
@@ -514,7 +507,7 @@ namespace SabreTools.Helper.Dats
if (isZip) if (isZip)
{ {
// Otherwise, extract the file to the temp folder // Otherwise, extract the file to the temp folder
file = ArchiveTools.ExtractItem(file, rom.Name, tempDir, logger); file = ArchiveTools.ExtractItem(file, rom.Name, tempDir);
} }
// If we couldn't extract the file, then continue, // If we couldn't extract the file, then continue,
@@ -523,7 +516,7 @@ namespace SabreTools.Helper.Dats
return rebuilt; return rebuilt;
} }
logger.User("Matches found for '" + Style.GetFileName(file) + "', rebuilding accordingly..."); Globals.Logger.User("Matches found for '" + Style.GetFileName(file) + "', rebuilding accordingly...");
rebuilt = true; rebuilt = true;
// Now loop through the list and rebuild accordingly // Now loop through the list and rebuild accordingly
@@ -555,13 +548,13 @@ namespace SabreTools.Helper.Dats
break; break;
case OutputFormat.TapeArchive: case OutputFormat.TapeArchive:
rebuilt &= ArchiveTools.WriteTAR(file, outDir, item, logger, date: date); rebuilt &= ArchiveTools.WriteTAR(file, outDir, item, date: date);
break; break;
case OutputFormat.Torrent7Zip: case OutputFormat.Torrent7Zip:
rebuilt &= ArchiveTools.WriteTorrent7Zip(file, outDir, item, logger, date: date); rebuilt &= ArchiveTools.WriteTorrent7Zip(file, outDir, item, date: date);
break; break;
case OutputFormat.TorrentGzip: case OutputFormat.TorrentGzip:
rebuilt &= ArchiveTools.WriteTorrentGZ(file, outDir, romba, logger); rebuilt &= ArchiveTools.WriteTorrentGZ(file, outDir, romba);
break; break;
case OutputFormat.TorrentLrzip: case OutputFormat.TorrentLrzip:
break; break;
@@ -570,7 +563,7 @@ namespace SabreTools.Helper.Dats
case OutputFormat.TorrentXZ: case OutputFormat.TorrentXZ:
break; break;
case OutputFormat.TorrentZip: case OutputFormat.TorrentZip:
rebuilt &= ArchiveTools.WriteTorrentZip(file, outDir, item, logger, date: date); rebuilt &= ArchiveTools.WriteTorrentZip(file, outDir, item, date: date);
break; break;
} }
} }
@@ -586,7 +579,7 @@ namespace SabreTools.Helper.Dats
{ {
// Otherwise, extract the file to the temp folder // Otherwise, extract the file to the temp folder
machinename = Style.GetFileNameWithoutExtension(file); machinename = Style.GetFileNameWithoutExtension(file);
file = ArchiveTools.ExtractItem(file, rom.Name, tempDir, logger); file = ArchiveTools.ExtractItem(file, rom.Name, tempDir);
} }
// If we couldn't extract the file, then continue, // If we couldn't extract the file, then continue,
@@ -596,7 +589,7 @@ namespace SabreTools.Helper.Dats
} }
// Get the item from the current file // Get the item from the current file
Rom item = FileTools.GetFileInfo(file, logger); Rom item = FileTools.GetFileInfo(file);
item.Machine = new Machine() item.Machine = new Machine()
{ {
Name = Style.GetFileNameWithoutExtension(item.Name), Name = Style.GetFileNameWithoutExtension(item.Name),
@@ -610,7 +603,7 @@ namespace SabreTools.Helper.Dats
item.Machine.Description = machinename; item.Machine.Description = machinename;
} }
logger.User("No matches found for '" + Style.GetFileName(file) + "', rebuilding accordingly from inverse flag..."); Globals.Logger.User("No matches found for '" + Style.GetFileName(file) + "', rebuilding accordingly from inverse flag...");
// Now rebuild to the output file // Now rebuild to the output file
switch (outputFormat) switch (outputFormat)
@@ -639,13 +632,13 @@ namespace SabreTools.Helper.Dats
break; break;
case OutputFormat.TapeArchive: case OutputFormat.TapeArchive:
rebuilt &= ArchiveTools.WriteTAR(file, outDir, item, logger, date: date); rebuilt &= ArchiveTools.WriteTAR(file, outDir, item, date: date);
break; break;
case OutputFormat.Torrent7Zip: case OutputFormat.Torrent7Zip:
rebuilt &= ArchiveTools.WriteTorrent7Zip(file, outDir, item, logger, date: date); rebuilt &= ArchiveTools.WriteTorrent7Zip(file, outDir, item, date: date);
break; break;
case OutputFormat.TorrentGzip: case OutputFormat.TorrentGzip:
rebuilt &= ArchiveTools.WriteTorrentGZ(file, outDir, romba, logger); rebuilt &= ArchiveTools.WriteTorrentGZ(file, outDir, romba);
break; break;
case OutputFormat.TorrentLrzip: case OutputFormat.TorrentLrzip:
break; break;
@@ -654,7 +647,7 @@ namespace SabreTools.Helper.Dats
case OutputFormat.TorrentXZ: case OutputFormat.TorrentXZ:
break; break;
case OutputFormat.TorrentZip: case OutputFormat.TorrentZip:
rebuilt &= ArchiveTools.WriteTorrentZip(file, outDir, item, logger, date: date); rebuilt &= ArchiveTools.WriteTorrentZip(file, outDir, item, date: date);
break; break;
} }
} }
@@ -663,25 +656,25 @@ namespace SabreTools.Helper.Dats
if (headerToCheckAgainst != null) if (headerToCheckAgainst != null)
{ {
// Check to see if we have a matching header first // Check to see if we have a matching header first
SkipperRule rule = Skipper.GetMatchingRule(file, Path.GetFileNameWithoutExtension(headerToCheckAgainst), logger); SkipperRule rule = Skipper.GetMatchingRule(file, Path.GetFileNameWithoutExtension(headerToCheckAgainst));
// If there's a match, create the new file to write // If there's a match, create the new file to write
if (rule.Tests != null && rule.Tests.Count != 0) if (rule.Tests != null && rule.Tests.Count != 0)
{ {
// If the file could be transformed correctly // If the file could be transformed correctly
if (rule.TransformFile(file, file + ".new", logger)) if (rule.TransformFile(file, file + ".new"))
{ {
// Get the file information that we will be using	// Get the file information that we will be using
Rom headerless = FileTools.GetFileInfo(file + ".new", logger); Rom headerless = FileTools.GetFileInfo(file + ".new");
// Find if the file has duplicates in the DAT // Find if the file has duplicates in the DAT
hasDuplicates = headerless.HasDuplicates(this, maxDegreeOfParallelism, logger); hasDuplicates = headerless.HasDuplicates(this);
// If it has duplicates and we're not filtering, rebuild it // If it has duplicates and we're not filtering, rebuild it
if (hasDuplicates && !inverse) if (hasDuplicates && !inverse)
{ {
// Get the list of duplicates to rebuild to // Get the list of duplicates to rebuild to
List<DatItem> dupes = headerless.GetDuplicates(this, maxDegreeOfParallelism, logger, remove: updateDat); List<DatItem> dupes = headerless.GetDuplicates(this, remove: updateDat);
// If we don't have any duplicates, continue // If we don't have any duplicates, continue
if (dupes.Count == 0) if (dupes.Count == 0)
@@ -689,7 +682,7 @@ namespace SabreTools.Helper.Dats
return rebuilt; return rebuilt;
} }
logger.User("Headerless matches found for '" + Style.GetFileName(file) + "', rebuilding accordingly..."); Globals.Logger.User("Headerless matches found for '" + Style.GetFileName(file) + "', rebuilding accordingly...");
rebuilt = true; rebuilt = true;
// Now loop through the list and rebuild accordingly // Now loop through the list and rebuild accordingly
@@ -740,16 +733,16 @@ namespace SabreTools.Helper.Dats
break; break;
case OutputFormat.TapeArchive: case OutputFormat.TapeArchive:
rebuilt &= ArchiveTools.WriteTAR(file + ".new", outDir, item, logger, date: date); rebuilt &= ArchiveTools.WriteTAR(file + ".new", outDir, item, date: date);
rebuilt &= ArchiveTools.WriteTAR(file, outDir, rom, logger, date: date); rebuilt &= ArchiveTools.WriteTAR(file, outDir, rom, date: date);
break; break;
case OutputFormat.Torrent7Zip: case OutputFormat.Torrent7Zip:
rebuilt &= ArchiveTools.WriteTorrent7Zip(file + ".new", outDir, item, logger, date: date); rebuilt &= ArchiveTools.WriteTorrent7Zip(file + ".new", outDir, item, date: date);
rebuilt &= ArchiveTools.WriteTorrent7Zip(file, outDir, rom, logger, date: date); rebuilt &= ArchiveTools.WriteTorrent7Zip(file, outDir, rom, date: date);
break; break;
case OutputFormat.TorrentGzip: case OutputFormat.TorrentGzip:
rebuilt &= ArchiveTools.WriteTorrentGZ(file + ".new", outDir, romba, logger); rebuilt &= ArchiveTools.WriteTorrentGZ(file + ".new", outDir, romba);
rebuilt &= ArchiveTools.WriteTorrentGZ(file, outDir, romba, logger); rebuilt &= ArchiveTools.WriteTorrentGZ(file, outDir, romba);
break; break;
case OutputFormat.TorrentLrzip: case OutputFormat.TorrentLrzip:
break; break;
@@ -758,8 +751,8 @@ namespace SabreTools.Helper.Dats
case OutputFormat.TorrentXZ: case OutputFormat.TorrentXZ:
break; break;
case OutputFormat.TorrentZip: case OutputFormat.TorrentZip:
rebuilt &= ArchiveTools.WriteTorrentZip(file + ".new", outDir, item, logger, date: date); rebuilt &= ArchiveTools.WriteTorrentZip(file + ".new", outDir, item, date: date);
rebuilt &= ArchiveTools.WriteTorrentZip(file, outDir, rom, logger, date: date); rebuilt &= ArchiveTools.WriteTorrentZip(file, outDir, rom, date: date);
break; break;
} }
} }
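The headerToCheckAgainst branch above works on two copies of the same input: TransformFile writes a headerless copy to file + ".new", the headerless hashes are matched against the DAT, and both the headered and headerless files are rebuilt. SabreTools' Skipper/SkipperRule types define the real header tests; the snippet below is only a generic, self-contained illustration of stripping a fixed-size header and hashing both variants (the 16-byte header size and example.bin path are hypothetical).

using System;
using System.IO;
using System.Security.Cryptography;

public static class HeaderStripSketch
{
    // Hypothetical fixed header size; real skipper rules test magic bytes per format.
    private const int HeaderSize = 16;

    public static void Main()
    {
        string file = "example.bin"; // hypothetical input file
        byte[] headered = File.ReadAllBytes(file);

        // Write the headerless copy, mirroring the file + ".new" produced above.
        int headerSize = Math.Min(HeaderSize, headered.Length);
        byte[] headerless = new byte[headered.Length - headerSize];
        Array.Copy(headered, headerSize, headerless, 0, headerless.Length);
        File.WriteAllBytes(file + ".new", headerless);

        // Hash both variants so each can be looked up in the DAT.
        using (SHA1 sha1 = SHA1.Create())
        {
            Console.WriteLine("Headered SHA-1:   " + BitConverter.ToString(sha1.ComputeHash(headered)));
            Console.WriteLine("Headerless SHA-1: " + BitConverter.ToString(sha1.ComputeHash(headerless)));
        }
    }
}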
@@ -789,10 +782,8 @@ namespace SabreTools.Helper.Dats
/// <param name="hashOnly">True if only hashes should be checked, false for full file information</param> /// <param name="hashOnly">True if only hashes should be checked, false for full file information</param>
/// <param name="quickScan">True to enable external scanning of archives, false otherwise</param> /// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param> /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logger object for file and console output</param>
/// <returns>True if verification was a success, false otherwise</returns> /// <returns>True if verification was a success, false otherwise</returns>
public bool VerifyDepot(List<string> inputs, string tempDir, string headerToCheckAgainst, int maxDegreeOfParallelism, Logger logger) public bool VerifyDepot(List<string> inputs, string tempDir, string headerToCheckAgainst)
{ {
// Check the temp directory // Check the temp directory
if (String.IsNullOrEmpty(tempDir)) if (String.IsNullOrEmpty(tempDir))
@@ -812,7 +803,7 @@ namespace SabreTools.Helper.Dats
bool success = true; bool success = true;
logger.User("Verifying all from supplied depots"); Globals.Logger.User("Verifying all from supplied depots");
DateTime start = DateTime.Now; DateTime start = DateTime.Now;
// Now loop through and get only directories from the input paths // Now loop through and get only directories from the input paths
@@ -822,7 +813,7 @@ namespace SabreTools.Helper.Dats
// Add to the list if the input is a directory // Add to the list if the input is a directory
if (Directory.Exists(input)) if (Directory.Exists(input))
{ {
logger.Verbose("Adding depot: '" + input + "'"); Globals.Logger.Verbose("Adding depot: '" + input + "'");
directories.Add(input); directories.Add(input);
} }
} }
@@ -834,7 +825,7 @@ namespace SabreTools.Helper.Dats
} }
// Now that we have a list of depots, we want to sort the input DAT by SHA-1 // Now that we have a list of depots, we want to sort the input DAT by SHA-1
BucketBy(SortedBy.SHA1, false /* mergeroms */, maxDegreeOfParallelism, logger); BucketBy(SortedBy.SHA1, false /* mergeroms */);
// Then we want to loop through each of the hashes and see if we can rebuild // Then we want to loop through each of the hashes and see if we can rebuild
List<string> hashes = Keys.ToList(); List<string> hashes = Keys.ToList();
@@ -846,7 +837,7 @@ namespace SabreTools.Helper.Dats
continue; continue;
} }
logger.User("Checking hash '" + hash + "'"); Globals.Logger.User("Checking hash '" + hash + "'");
// Get the extension path for the hash // Get the extension path for the hash
string subpath = Style.GetRombaPath(hash); string subpath = Style.GetRombaPath(hash);
@@ -869,7 +860,7 @@ namespace SabreTools.Helper.Dats
} }
// If we have a path, we want to try to get the rom information // If we have a path, we want to try to get the rom information
Rom fileinfo = ArchiveTools.GetTorrentGZFileInfo(foundpath, logger); Rom fileinfo = ArchiveTools.GetTorrentGZFileInfo(foundpath);
// If the file information is null, then we continue // If the file information is null, then we continue
if (fileinfo == null) if (fileinfo == null)
@@ -878,16 +869,16 @@ namespace SabreTools.Helper.Dats
} }
// Now we want to remove all duplicates from the DAT // Now we want to remove all duplicates from the DAT
fileinfo.GetDuplicates(this, maxDegreeOfParallelism, logger, remove: true); fileinfo.GetDuplicates(this, remove: true);
} }
logger.User("Verifying complete in: " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff")); Globals.Logger.User("Verifying complete in: " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
// If there are any entries in the DAT, output to the rebuild directory // If there are any entries in the DAT, output to the rebuild directory
_fileName = "fixDAT_" + _fileName; _fileName = "fixDAT_" + _fileName;
_name = "fixDAT_" + _name; _name = "fixDAT_" + _name;
_description = "fixDAT_" + _description; _description = "fixDAT_" + _description;
WriteToFile(null, maxDegreeOfParallelism, logger); WriteToFile(null);
return success; return success;
} }
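
Note on call sites: with the Logger and maxDegreeOfParallelism parameters gone, verification calls shrink to the data arguments only. A minimal before/after sketch, assuming a populated DatFile instance named datFile (the variable name is illustrative, not from this commit):

    // Before this commit: parallelism and a Logger had to be passed explicitly
    datFile.VerifyDepot(inputs, tempDir, headerToCheckAgainst, 4 /* maxDegreeOfParallelism */, logger);

    // After this commit: logging goes through the shared Globals.Logger
    datFile.VerifyDepot(inputs, tempDir, headerToCheckAgainst);
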
@@ -900,11 +891,9 @@ namespace SabreTools.Helper.Dats
/// <param name="hashOnly">True if only hashes should be checked, false for full file information</param> /// <param name="hashOnly">True if only hashes should be checked, false for full file information</param>
/// <param name="quickScan">True to enable external scanning of archives, false otherwise</param> /// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param> /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logger object for file and console output</param>
/// <returns>True if verification was a success, false otherwise</returns> /// <returns>True if verification was a success, false otherwise</returns>
public bool VerifyGeneric(List<string> inputs, string tempDir, bool hashOnly, bool quickScan, public bool VerifyGeneric(List<string> inputs, string tempDir, bool hashOnly, bool quickScan,
string headerToCheckAgainst, int maxDegreeOfParallelism, Logger logger) string headerToCheckAgainst)
{ {
// Check the temp directory exists // Check the temp directory exists
if (String.IsNullOrEmpty(tempDir)) if (String.IsNullOrEmpty(tempDir))
@@ -926,13 +915,13 @@ namespace SabreTools.Helper.Dats
bool success = true; bool success = true;
// Then, loop through and check each of the inputs // Then, loop through and check each of the inputs
logger.User("Processing files:\n"); Globals.Logger.User("Processing files:\n");
foreach (string input in inputs) foreach (string input in inputs)
{ {
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
PopulateFromDir(input, (quickScan ? Hash.SecureHashes : Hash.DeepHashes) /* omitFromScan */, true /* bare */, false /* archivesAsFiles */, PopulateFromDir(input, (quickScan ? Hash.SecureHashes : Hash.DeepHashes) /* omitFromScan */, true /* bare */, false /* archivesAsFiles */,
true /* enableGzip */, false /* addBlanks */, false /* addDate */, tempDir /* tempDir */, false /* copyFiles */, true /* enableGzip */, false /* addBlanks */, false /* addDate */, tempDir /* tempDir */, false /* copyFiles */,
headerToCheckAgainst, 4 /* maxDegreeOfParallelism */, logger); headerToCheckAgainst);
} }
// Setup the fixdat // Setup the fixdat
@@ -947,7 +936,7 @@ namespace SabreTools.Helper.Dats
if (hashOnly) if (hashOnly)
{ {
// First we need to sort by hash to get duplicates // First we need to sort by hash to get duplicates
BucketBy(SortedBy.SHA1, false /* mergeroms */, maxDegreeOfParallelism, logger); BucketBy(SortedBy.SHA1, false /* mergeroms */);
// Then follow the same tactics as before // Then follow the same tactics as before
foreach (string key in Keys) foreach (string key in Keys)
@@ -971,7 +960,7 @@ namespace SabreTools.Helper.Dats
foreach (string key in Keys) foreach (string key in Keys)
{ {
List<DatItem> roms = this[key]; List<DatItem> roms = this[key];
List<DatItem> newroms = DatItem.Merge(roms, logger); List<DatItem> newroms = DatItem.Merge(roms);
foreach (Rom rom in newroms) foreach (Rom rom in newroms)
{ {
if (rom.SourceID == 99) if (rom.SourceID == 99)
@@ -983,7 +972,7 @@ namespace SabreTools.Helper.Dats
} }
// Now output the fixdat to the main folder // Now output the fixdat to the main folder
success &= matched.WriteToFile("", maxDegreeOfParallelism, logger, stats: true); success &= matched.WriteToFile("", stats: true);
return success; return success;
} }
View File
@@ -26,10 +26,8 @@ namespace SabreTools.Helper.Dats
/// <param name="basepath">Parent path for replacement</param> /// <param name="basepath">Parent path for replacement</param>
/// <param name="extA">List of extensions to split on (first DAT)</param> /// <param name="extA">List of extensions to split on (first DAT)</param>
/// <param name="extB">List of extensions to split on (second DAT)</param> /// <param name="extB">List of extensions to split on (second DAT)</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logger object for console and file writing</param>
/// <returns>True if split succeeded, false otherwise</returns> /// <returns>True if split succeeded, false otherwise</returns>
public bool SplitByExt(string outDir, string basepath, List<string> extA, List<string> extB, int maxDegreeOfParallelism, Logger logger) public bool SplitByExt(string outDir, string basepath, List<string> extA, List<string> extB)
{ {
// Make sure all of the extensions have a dot at the beginning // Make sure all of the extensions have a dot at the beginning
List<string> newExtA = new List<string>(); List<string> newExtA = new List<string>();
@@ -116,8 +114,8 @@ namespace SabreTools.Helper.Dats
} }
// Then write out both files // Then write out both files
bool success = datdataA.WriteToFile(outDir, maxDegreeOfParallelism, logger); bool success = datdataA.WriteToFile(outDir);
success &= datdataB.WriteToFile(outDir, maxDegreeOfParallelism, logger); success &= datdataB.WriteToFile(outDir);
return success; return success;
} }
@@ -127,16 +125,14 @@ namespace SabreTools.Helper.Dats
/// </summary> /// </summary>
/// <param name="outDir">Name of the directory to write the DATs out to</param> /// <param name="outDir">Name of the directory to write the DATs out to</param>
/// <param name="basepath">Parent path for replacement</param> /// <param name="basepath">Parent path for replacement</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logger object for console and file writing</param>
/// <returns>True if split succeeded, false otherwise</returns> /// <returns>True if split succeeded, false otherwise</returns>
public bool SplitByHash(string outDir, string basepath, int maxDegreeOfParallelism, Logger logger) public bool SplitByHash(string outDir, string basepath)
{ {
// Sanitize the basepath to be more predictable // Sanitize the basepath to be more predictable
basepath = (basepath.EndsWith(Path.DirectorySeparatorChar.ToString()) ? basepath : basepath + Path.DirectorySeparatorChar); basepath = (basepath.EndsWith(Path.DirectorySeparatorChar.ToString()) ? basepath : basepath + Path.DirectorySeparatorChar);
// Create each of the respective output DATs // Create each of the respective output DATs
logger.User("Creating and populating new DATs"); Globals.Logger.User("Creating and populating new DATs");
DatFile nodump = new DatFile DatFile nodump = new DatFile
{ {
FileName = this.FileName + " (Nodump)", FileName = this.FileName + " (Nodump)",
@@ -299,12 +295,12 @@ namespace SabreTools.Helper.Dats
} }
// Now, output all of the files to the output directory // Now, output all of the files to the output directory
logger.User("DAT information created, outputting new files"); Globals.Logger.User("DAT information created, outputting new files");
bool success = true; bool success = true;
success &= nodump.WriteToFile(outDir, maxDegreeOfParallelism, logger); success &= nodump.WriteToFile(outDir);
success &= sha1.WriteToFile(outDir, maxDegreeOfParallelism, logger); success &= sha1.WriteToFile(outDir);
success &= md5.WriteToFile(outDir, maxDegreeOfParallelism, logger); success &= md5.WriteToFile(outDir);
success &= crc.WriteToFile(outDir, maxDegreeOfParallelism, logger); success &= crc.WriteToFile(outDir);
return success; return success;
} }
@@ -316,16 +312,14 @@ namespace SabreTools.Helper.Dats
/// <param name="basepath">Parent path for replacement</param> /// <param name="basepath">Parent path for replacement</param>
/// <param name="shortname">True if short names should be used, false otherwise</param> /// <param name="shortname">True if short names should be used, false otherwise</param>
/// <param name="basedat">True if original filenames should be used as the base for output filename, false otherwise</param> /// <param name="basedat">True if original filenames should be used as the base for output filename, false otherwise</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logger object for console and file writing</param>
/// <returns>True if split succeeded, false otherwise</returns> /// <returns>True if split succeeded, false otherwise</returns>
public bool SplitByLevel(string outDir, string basepath, bool shortname, bool basedat, int maxDegreeOfParallelism, Logger logger) public bool SplitByLevel(string outDir, string basepath, bool shortname, bool basedat)
{ {
// Sanitize the basepath to be more predictable // Sanitize the basepath to be more predictable
basepath = (basepath.EndsWith(Path.DirectorySeparatorChar.ToString()) ? basepath : basepath + Path.DirectorySeparatorChar); basepath = (basepath.EndsWith(Path.DirectorySeparatorChar.ToString()) ? basepath : basepath + Path.DirectorySeparatorChar);
// First, organize by games so that we can do the right thing // First, organize by games so that we can do the right thing
BucketBy(SortedBy.Game, false /* mergeroms */, maxDegreeOfParallelism, logger, lower: false, norename: true); BucketBy(SortedBy.Game, false /* mergeroms */, lower: false, norename: true);
// Create a temporary DAT to add things to // Create a temporary DAT to add things to
DatFile tempDat = new DatFile(this) DatFile tempDat = new DatFile(this)
@@ -344,7 +338,7 @@ namespace SabreTools.Helper.Dats
if (tempDat.Name != null && tempDat.Name != Style.GetDirectoryName(key)) if (tempDat.Name != null && tempDat.Name != Style.GetDirectoryName(key))
{ {
// Process and output the DAT // Process and output the DAT
SplitByLevelHelper(tempDat, outDir, shortname, basedat, maxDegreeOfParallelism, logger); SplitByLevelHelper(tempDat, outDir, shortname, basedat);
// Reset the DAT for the next items // Reset the DAT for the next items
tempDat = new DatFile(this) tempDat = new DatFile(this)
@@ -366,7 +360,7 @@ namespace SabreTools.Helper.Dats
} }
// Then we write the last DAT out since it would be skipped otherwise // Then we write the last DAT out since it would be skipped otherwise
SplitByLevelHelper(tempDat, outDir, shortname, basedat, maxDegreeOfParallelism, logger); SplitByLevelHelper(tempDat, outDir, shortname, basedat);
return true; return true;
} }
@@ -397,9 +391,7 @@ namespace SabreTools.Helper.Dats
/// <param name="outDir">Directory to write out to</param> /// <param name="outDir">Directory to write out to</param>
/// <param name="shortname">True if short naming scheme should be used, false otherwise</param> /// <param name="shortname">True if short naming scheme should be used, false otherwise</param>
/// <param name="restore">True if original filenames should be used as the base for output filename, false otherwise</param> /// <param name="restore">True if original filenames should be used as the base for output filename, false otherwise</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param> private void SplitByLevelHelper(DatFile datFile, string outDir, bool shortname, bool restore)
/// <param name="logger">Logger object for file and console output</param>
private void SplitByLevelHelper(DatFile datFile, string outDir, bool shortname, bool restore, int maxDegreeOfParallelism, Logger logger)
{ {
// Get the name from the DAT to use separately // Get the name from the DAT to use separately
string name = datFile.Name; string name = datFile.Name;
@@ -424,7 +416,7 @@ namespace SabreTools.Helper.Dats
datFile.Type = null; datFile.Type = null;
// Write out the temporary DAT to the proper directory // Write out the temporary DAT to the proper directory
datFile.WriteToFile(path, maxDegreeOfParallelism, logger); datFile.WriteToFile(path);
} }
/// <summary> /// <summary>
@@ -432,16 +424,14 @@ namespace SabreTools.Helper.Dats
/// </summary> /// </summary>
/// <param name="outDir">Name of the directory to write the DATs out to</param> /// <param name="outDir">Name of the directory to write the DATs out to</param>
/// <param name="basepath">Parent path for replacement</param> /// <param name="basepath">Parent path for replacement</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logger object for console and file writing</param>
/// <returns>True if split succeeded, false otherwise</returns> /// <returns>True if split succeeded, false otherwise</returns>
public bool SplitByType(string outDir, string basepath, int maxDegreeOfParallelism, Logger logger) public bool SplitByType(string outDir, string basepath)
{ {
// Sanitize the basepath to be more predictable // Sanitize the basepath to be more predictable
basepath = (basepath.EndsWith(Path.DirectorySeparatorChar.ToString()) ? basepath : basepath + Path.DirectorySeparatorChar); basepath = (basepath.EndsWith(Path.DirectorySeparatorChar.ToString()) ? basepath : basepath + Path.DirectorySeparatorChar);
// Create each of the respective output DATs // Create each of the respective output DATs
logger.User("Creating and populating new DATs"); Globals.Logger.User("Creating and populating new DATs");
DatFile romdat = new DatFile DatFile romdat = new DatFile
{ {
FileName = this.FileName + " (ROM)", FileName = this.FileName + " (ROM)",
@@ -542,11 +532,11 @@ namespace SabreTools.Helper.Dats
} }
// Now, output all of the files to the output directory // Now, output all of the files to the output directory
logger.User("DAT information created, outputting new files"); Globals.Logger.User("DAT information created, outputting new files");
bool success = true; bool success = true;
success &= romdat.WriteToFile(outDir, maxDegreeOfParallelism, logger); success &= romdat.WriteToFile(outDir);
success &= diskdat.WriteToFile(outDir, maxDegreeOfParallelism, logger); success &= diskdat.WriteToFile(outDir);
success &= sampledat.WriteToFile(outDir, maxDegreeOfParallelism, logger); success &= sampledat.WriteToFile(outDir);
return success; return success;
} }
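
The split helpers follow the same pattern: only the data-related parameters survive. A sketch of the new call shapes, under the same assumption of a DatFile instance named datFile:

    datFile.SplitByExt(outDir, basepath, extA, extB);
    datFile.SplitByHash(outDir, basepath);
    datFile.SplitByLevel(outDir, basepath, shortname: false, basedat: false);
    datFile.SplitByType(outDir, basepath);
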
View File
@@ -102,13 +102,11 @@ namespace SabreTools.Helper.Dats
/// </summary> /// </summary>
/// <param name="outputs">Dictionary representing the outputs</param> /// <param name="outputs">Dictionary representing the outputs</param>
/// <param name="statDatFormat">Set the statistics output format to use</param> /// <param name="statDatFormat">Set the statistics output format to use</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logger object for file and console writing</param>
/// <param name="recalculate">True if numbers should be recalculated for the DAT, false otherwise (default)</param> /// <param name="recalculate">True if numbers should be recalculated for the DAT, false otherwise (default)</param>
/// <param name="game">Number of games to use, -1 means recalculate games (default)</param> /// <param name="game">Number of games to use, -1 means recalculate games (default)</param>
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise (default)</param> /// <param name="baddumpCol">True if baddumps should be included in output, false otherwise (default)</param>
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise (default)</param> /// <param name="nodumpCol">True if nodumps should be included in output, false otherwise (default)</param>
public void OutputStats(Dictionary<StatDatFormat, StreamWriter> outputs, StatDatFormat statDatFormat, int maxDegreeOfParallelism, Logger logger, public void OutputStats(Dictionary<StatDatFormat, StreamWriter> outputs, StatDatFormat statDatFormat,
bool recalculate = false, long game = -1, bool baddumpCol = false, bool nodumpCol = false) bool recalculate = false, long game = -1, bool baddumpCol = false, bool nodumpCol = false)
{ {
// If we're supposed to recalculate the statistics, do so // If we're supposed to recalculate the statistics, do so
@@ -117,7 +115,7 @@ namespace SabreTools.Helper.Dats
RecalculateStats(); RecalculateStats();
} }
BucketBy(SortedBy.Game, false /* mergeroms */, maxDegreeOfParallelism, logger, norename: true); BucketBy(SortedBy.Game, false /* mergeroms */, norename: true);
if (TotalSize < 0) if (TotalSize < 0)
{ {
TotalSize = Int64.MaxValue + TotalSize; TotalSize = Int64.MaxValue + TotalSize;
@@ -144,7 +142,7 @@ namespace SabreTools.Helper.Dats
results += " Roms with Nodump status: " + NodumpCount + "\n"; results += " Roms with Nodump status: " + NodumpCount + "\n";
} }
logger.User(results); Globals.Logger.User(results);
// Now write it out to file as well // Now write it out to file as well
string line = ""; string line = "";
@@ -263,10 +261,8 @@ namespace SabreTools.Helper.Dats
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param> /// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param> /// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
/// <param name="statDatFormat" > Set the statistics output format to use</param> /// <param name="statDatFormat" > Set the statistics output format to use</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logger object for file and console output</param>
public static void OutputStats(List<string> inputs, string reportName, string outDir, bool single, public static void OutputStats(List<string> inputs, string reportName, string outDir, bool single,
bool baddumpCol, bool nodumpCol, StatDatFormat statDatFormat, int maxDegreeOfParallelism, Logger logger) bool baddumpCol, bool nodumpCol, StatDatFormat statDatFormat)
{ {
// If there's no output format, set the default // If there's no output format, set the default
if (statDatFormat == 0x0) if (statDatFormat == 0x0)
@@ -364,7 +360,7 @@ namespace SabreTools.Helper.Dats
BaddumpCount = dirBaddump, BaddumpCount = dirBaddump,
NodumpCount = dirNodump, NodumpCount = dirNodump,
}; };
lastdirdat.OutputStats(outputs, statDatFormat, maxDegreeOfParallelism, logger, lastdirdat.OutputStats(outputs, statDatFormat,
game: dirGame, baddumpCol: baddumpCol, nodumpCol: nodumpCol); game: dirGame, baddumpCol: baddumpCol, nodumpCol: nodumpCol);
// Write the mid-footer, if any // Write the mid-footer, if any
@@ -386,17 +382,17 @@ namespace SabreTools.Helper.Dats
dirNodump = 0; dirNodump = 0;
} }
logger.Verbose("Beginning stat collection for '" + filename.Item1 + "'", false); Globals.Logger.Verbose("Beginning stat collection for '" + filename.Item1 + "'", false);
List<string> games = new List<string>(); List<string> games = new List<string>();
DatFile datdata = new DatFile(); DatFile datdata = new DatFile();
datdata.Parse(filename.Item1, 0, 0, logger); datdata.Parse(filename.Item1, 0, 0);
datdata.BucketBy(SortedBy.Game, false /* mergeroms */, maxDegreeOfParallelism, logger, norename: true); datdata.BucketBy(SortedBy.Game, false /* mergeroms */, norename: true);
// Output single DAT stats (if asked) // Output single DAT stats (if asked)
logger.User("Adding stats for file '" + filename.Item1 + "'\n", false); Globals.Logger.User("Adding stats for file '" + filename.Item1 + "'\n", false);
if (single) if (single)
{ {
datdata.OutputStats(outputs, statDatFormat, maxDegreeOfParallelism, logger, datdata.OutputStats(outputs, statDatFormat,
baddumpCol: baddumpCol, nodumpCol: nodumpCol); baddumpCol: baddumpCol, nodumpCol: nodumpCol);
} }
@@ -446,7 +442,7 @@ namespace SabreTools.Helper.Dats
BaddumpCount = dirBaddump, BaddumpCount = dirBaddump,
NodumpCount = dirNodump, NodumpCount = dirNodump,
}; };
dirdat.OutputStats(outputs, statDatFormat, maxDegreeOfParallelism, logger, dirdat.OutputStats(outputs, statDatFormat,
game: dirGame, baddumpCol: baddumpCol, nodumpCol: nodumpCol); game: dirGame, baddumpCol: baddumpCol, nodumpCol: nodumpCol);
} }
@@ -481,7 +477,7 @@ namespace SabreTools.Helper.Dats
BaddumpCount = totalBaddump, BaddumpCount = totalBaddump,
NodumpCount = totalNodump, NodumpCount = totalNodump,
}; };
totaldata.OutputStats(outputs, statDatFormat, maxDegreeOfParallelism, logger, totaldata.OutputStats(outputs, statDatFormat,
game: totalGame, baddumpCol: baddumpCol, nodumpCol: nodumpCol); game: totalGame, baddumpCol: baddumpCol, nodumpCol: nodumpCol);
// Output footer if needed // Output footer if needed
@@ -494,7 +490,7 @@ namespace SabreTools.Helper.Dats
outputs[format].Dispose(); outputs[format].Dispose();
} }
logger.User(@" Globals.Logger.User(@"
Please check the log folder if the stats scrolled offscreen", false); Please check the log folder if the stats scrolled offscreen", false);
} }
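
Statistics output loses the same two trailing parameters on both the instance and the static overload. A hedged example of the static entry point after this change (it is assumed to live on DatFile, as the instance overload does; the input values are placeholders):

    DatFile.OutputStats(inputs, "report", outDir, single: true,
        baddumpCol: true, nodumpCol: false, statDatFormat: StatDatFormat.None);
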
View File
@@ -29,8 +29,6 @@ namespace SabreTools.Helper.Dats
/// </summary> /// </summary>
/// <param name="datdata">All information for creating the datfile header</param> /// <param name="datdata">All information for creating the datfile header</param>
/// <param name="outDir">Set the output directory</param> /// <param name="outDir">Set the output directory</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logger object for console and/or file output</param>
/// <param name="norename">True if games should only be compared on game and file name (default), false if system and source are counted</param> /// <param name="norename">True if games should only be compared on game and file name (default), false if system and source are counted</param>
/// <param name="stats">True if DAT statistics should be output on write, false otherwise (default)</param> /// <param name="stats">True if DAT statistics should be output on write, false otherwise (default)</param>
/// <param name="ignoreblanks">True if blank roms should be skipped on output, false otherwise (default)</param> /// <param name="ignoreblanks">True if blank roms should be skipped on output, false otherwise (default)</param>
@@ -40,20 +38,20 @@ namespace SabreTools.Helper.Dats
/// The following features have been requested for file output: /// The following features have been requested for file output:
/// - Have the ability to strip special (non-ASCII) characters from rom information /// - Have the ability to strip special (non-ASCII) characters from rom information
/// </remarks> /// </remarks>
public bool WriteToFile(string outDir, int maxDegreeOfParallelism, Logger logger, public bool WriteToFile(string outDir,
bool norename = true, bool stats = false, bool ignoreblanks = false, bool overwrite = true) bool norename = true, bool stats = false, bool ignoreblanks = false, bool overwrite = true)
{ {
// If there's nothing there, abort // If there's nothing there, abort
if (Count == 0) if (Count == 0)
{ {
logger.User("There were no items to write out!"); Globals.Logger.User("There were no items to write out!");
return false; return false;
} }
// If output directory is empty, use the current folder // If output directory is empty, use the current folder
if (outDir == null || outDir.Trim() == "") if (outDir == null || outDir.Trim() == "")
{ {
logger.Verbose("No output directory defined, defaulting to curent folder"); Globals.Logger.Verbose("No output directory defined, defaulting to curent folder");
outDir = Environment.CurrentDirectory; outDir = Environment.CurrentDirectory;
} }
@@ -66,7 +64,7 @@ namespace SabreTools.Helper.Dats
// If the DAT has no output format, default to XML // If the DAT has no output format, default to XML
if (DatFormat == 0) if (DatFormat == 0)
{ {
logger.Verbose("No DAT format defined, defaulting to XML"); Globals.Logger.Verbose("No DAT format defined, defaulting to XML");
DatFormat = DatFormat.Logiqx; DatFormat = DatFormat.Logiqx;
} }
@@ -107,15 +105,15 @@ namespace SabreTools.Helper.Dats
// Output initial statistics, for kicks // Output initial statistics, for kicks
if (stats) if (stats)
{ {
OutputStats(new Dictionary<StatDatFormat, StreamWriter>(), StatDatFormat.None, maxDegreeOfParallelism, logger, OutputStats(new Dictionary<StatDatFormat, StreamWriter>(), StatDatFormat.None,
recalculate: (RomCount + DiskCount == 0), baddumpCol: true, nodumpCol: true); recalculate: (RomCount + DiskCount == 0), baddumpCol: true, nodumpCol: true);
} }
// Bucket roms by game name and optionally dedupe // Bucket roms by game name and optionally dedupe
BucketBy(SortedBy.Game, MergeRoms, maxDegreeOfParallelism, logger, norename: norename); BucketBy(SortedBy.Game, MergeRoms, norename: norename);
// Output the number of items we're going to be writing // Output the number of items we're going to be writing
logger.User("A total of " + Count + " items will be written out to file"); Globals.Logger.User("A total of " + Count + " items will be written out to file");
// Filter the DAT by 1G1R rules, if we're supposed to // Filter the DAT by 1G1R rules, if we're supposed to
// TODO: Create 1G1R logic before write // TODO: Create 1G1R logic before write
@@ -123,7 +121,7 @@ namespace SabreTools.Helper.Dats
// If we are removing hashes, do that now // If we are removing hashes, do that now
if (_stripHash != 0x0) if (_stripHash != 0x0)
{ {
StripHashesFromItems(logger); StripHashesFromItems();
} }
// Get the outfile names // Get the outfile names
@@ -139,12 +137,12 @@ namespace SabreTools.Helper.Dats
{ {
string outfile = outfiles[datFormat]; string outfile = outfiles[datFormat];
logger.User("Opening file for writing: " + outfile); Globals.Logger.User("Opening file for writing: " + outfile);
FileStream fs = File.Create(outfile); FileStream fs = File.Create(outfile);
StreamWriter sw = new StreamWriter(fs, new UTF8Encoding(true)); StreamWriter sw = new StreamWriter(fs, new UTF8Encoding(true));
// Write out the header // Write out the header
WriteHeader(sw, datFormat, logger); WriteHeader(sw, datFormat);
// Write out each of the machines and roms // Write out each of the machines and roms
int depth = 2, last = -1; int depth = 2, last = -1;
@@ -156,7 +154,7 @@ namespace SabreTools.Helper.Dats
List<DatItem> roms = this[key]; List<DatItem> roms = this[key];
// Resolve the names in the block // Resolve the names in the block
roms = DatItem.ResolveNames(roms, logger); roms = DatItem.ResolveNames(roms);
for (int index = 0; index < roms.Count; index++) for (int index = 0; index < roms.Count; index++)
{ {
@@ -165,7 +163,7 @@ namespace SabreTools.Helper.Dats
// There are apparently times when a null rom can skip by, skip them // There are apparently times when a null rom can skip by, skip them
if (rom.Name == null || rom.Machine.Name == null) if (rom.Name == null || rom.Machine.Name == null)
{ {
logger.Warning("Null rom found!"); Globals.Logger.Warning("Null rom found!");
continue; continue;
} }
@@ -174,13 +172,13 @@ namespace SabreTools.Helper.Dats
// If we have a different game and we're not at the start of the list, output the end of last item // If we have a different game and we're not at the start of the list, output the end of last item
if (lastgame != null && lastgame.ToLowerInvariant() != rom.Machine.Name.ToLowerInvariant()) if (lastgame != null && lastgame.ToLowerInvariant() != rom.Machine.Name.ToLowerInvariant())
{ {
depth = WriteEndGame(sw, datFormat, rom, splitpath, newsplit, lastgame, depth, out last, logger); depth = WriteEndGame(sw, datFormat, rom, splitpath, newsplit, lastgame, depth, out last);
} }
// If we have a new game, output the beginning of the new item // If we have a new game, output the beginning of the new item
if (lastgame == null || lastgame.ToLowerInvariant() != rom.Machine.Name.ToLowerInvariant()) if (lastgame == null || lastgame.ToLowerInvariant() != rom.Machine.Name.ToLowerInvariant())
{ {
depth = WriteStartGame(sw, datFormat, rom, newsplit, lastgame, depth, last, logger); depth = WriteStartGame(sw, datFormat, rom, newsplit, lastgame, depth, last);
} }
// If we have a "null" game (created by DATFromDir or something similar), log it to file // If we have a "null" game (created by DATFromDir or something similar), log it to file
@@ -193,7 +191,7 @@ namespace SabreTools.Helper.Dats
&& ((Rom)rom).SHA384 == "null" && ((Rom)rom).SHA384 == "null"
&& ((Rom)rom).SHA512 == "null") && ((Rom)rom).SHA512 == "null")
{ {
logger.Verbose("Empty folder found: " + rom.Machine.Name); Globals.Logger.Verbose("Empty folder found: " + rom.Machine.Name);
// If we're in a mode that doesn't allow for actual empty folders, add the blank info // If we're in a mode that doesn't allow for actual empty folders, add the blank info
if (datFormat != DatFormat.CSV if (datFormat != DatFormat.CSV
@@ -221,7 +219,7 @@ namespace SabreTools.Helper.Dats
} }
// Now, output the rom data // Now, output the rom data
WriteRomData(sw, datFormat, rom, lastgame, depth, logger, ignoreblanks); WriteRomData(sw, datFormat, rom, lastgame, depth, ignoreblanks);
// Set the new data to compare against // Set the new data to compare against
splitpath = newsplit; splitpath = newsplit;
@@ -230,16 +228,16 @@ namespace SabreTools.Helper.Dats
} }
// Write the file footer out // Write the file footer out
WriteFooter(sw, datFormat, depth, logger); WriteFooter(sw, datFormat, depth);
logger.Verbose("File written!" + Environment.NewLine); Globals.Logger.Verbose("File written!" + Environment.NewLine);
sw.Dispose(); sw.Dispose();
fs.Dispose(); fs.Dispose();
} }
} }
catch (Exception ex) catch (Exception ex)
{ {
logger.Error(ex.ToString()); Globals.Logger.Error(ex.ToString());
return false; return false;
} }
@@ -251,9 +249,8 @@ namespace SabreTools.Helper.Dats
/// </summary> /// </summary>
/// <param name="sw">StreamWriter to output to</param> /// <param name="sw">StreamWriter to output to</param>
/// <param name="datFormat">Output format to write to</param> /// <param name="datFormat">Output format to write to</param>
/// <param name="logger">Logger object for file and console output</param>
/// <returns>True if the data was written, false on error</returns> /// <returns>True if the data was written, false on error</returns>
private bool WriteHeader(StreamWriter sw, DatFormat datFormat, Logger logger) private bool WriteHeader(StreamWriter sw, DatFormat datFormat)
{ {
try try
{ {
@@ -446,7 +443,7 @@ namespace SabreTools.Helper.Dats
} }
catch (Exception ex) catch (Exception ex)
{ {
logger.Error(ex.ToString()); Globals.Logger.Error(ex.ToString());
return false; return false;
} }
@@ -463,9 +460,8 @@ namespace SabreTools.Helper.Dats
/// <param name="lastgame">The name of the last game to be output</param> /// <param name="lastgame">The name of the last game to be output</param>
/// <param name="depth">Current depth to output file at (SabreDAT only)</param> /// <param name="depth">Current depth to output file at (SabreDAT only)</param>
/// <param name="last">Last known depth to cycle back from (SabreDAT only)</param> /// <param name="last">Last known depth to cycle back from (SabreDAT only)</param>
/// <param name="logger">Logger object for file and console output</param>
/// <returns>The new depth of the tag</returns> /// <returns>The new depth of the tag</returns>
private int WriteStartGame(StreamWriter sw, DatFormat datFormat, DatItem rom, List<string> newsplit, string lastgame, int depth, int last, Logger logger) private int WriteStartGame(StreamWriter sw, DatFormat datFormat, DatItem rom, List<string> newsplit, string lastgame, int depth, int last)
{ {
try try
{ {
@@ -576,7 +572,7 @@ namespace SabreTools.Helper.Dats
} }
catch (Exception ex) catch (Exception ex)
{ {
logger.Error(ex.ToString()); Globals.Logger.Error(ex.ToString());
return depth; return depth;
} }
@@ -594,9 +590,8 @@ namespace SabreTools.Helper.Dats
/// <param name="lastgame">The name of the last game to be output</param> /// <param name="lastgame">The name of the last game to be output</param>
/// <param name="depth">Current depth to output file at (SabreDAT only)</param> /// <param name="depth">Current depth to output file at (SabreDAT only)</param>
/// <param name="last">Last known depth to cycle back from (SabreDAT only)</param> /// <param name="last">Last known depth to cycle back from (SabreDAT only)</param>
/// <param name="logger">Logger object for file and console output</param>
/// <returns>The new depth of the tag</returns> /// <returns>The new depth of the tag</returns>
private int WriteEndGame(StreamWriter sw, DatFormat datFormat, DatItem rom, List<string> splitpath, List<string> newsplit, string lastgame, int depth, out int last, Logger logger) private int WriteEndGame(StreamWriter sw, DatFormat datFormat, DatItem rom, List<string> splitpath, List<string> newsplit, string lastgame, int depth, out int last)
{ {
last = 0; last = 0;
@@ -656,7 +651,7 @@ namespace SabreTools.Helper.Dats
} }
catch (Exception ex) catch (Exception ex)
{ {
logger.Error(ex.ToString()); Globals.Logger.Error(ex.ToString());
return depth; return depth;
} }
@@ -671,10 +666,9 @@ namespace SabreTools.Helper.Dats
/// <param name="rom">RomData object to be output</param> /// <param name="rom">RomData object to be output</param>
/// <param name="lastgame">The name of the last game to be output</param> /// <param name="lastgame">The name of the last game to be output</param>
/// <param name="depth">Current depth to output file at (SabreDAT only)</param> /// <param name="depth">Current depth to output file at (SabreDAT only)</param>
/// <param name="logger">Logger object for file and console output</param>
/// <param name="ignoreblanks">True if blank roms should be skipped on output, false otherwise (default)</param> /// <param name="ignoreblanks">True if blank roms should be skipped on output, false otherwise (default)</param>
/// <returns>True if the data was written, false on error</returns> /// <returns>True if the data was written, false on error</returns>
private bool WriteRomData(StreamWriter sw, DatFormat datFormat, DatItem rom, string lastgame, int depth, Logger logger, bool ignoreblanks = false) private bool WriteRomData(StreamWriter sw, DatFormat datFormat, DatItem rom, string lastgame, int depth, bool ignoreblanks = false)
{ {
// If we are in ignore blanks mode AND we have a blank (0-size) rom, skip // If we are in ignore blanks mode AND we have a blank (0-size) rom, skip
if (ignoreblanks if (ignoreblanks
@@ -1473,7 +1467,7 @@ namespace SabreTools.Helper.Dats
} }
catch (Exception ex) catch (Exception ex)
{ {
logger.Error(ex.ToString()); Globals.Logger.Error(ex.ToString());
return false; return false;
} }
@@ -1486,9 +1480,8 @@ namespace SabreTools.Helper.Dats
/// <param name="sw">StreamWriter to output to</param> /// <param name="sw">StreamWriter to output to</param>
/// <param name="datFormat">Output format to write to</param> /// <param name="datFormat">Output format to write to</param>
/// <param name="depth">Current depth to output file at (SabreDAT only)</param> /// <param name="depth">Current depth to output file at (SabreDAT only)</param>
/// <param name="logger">Logger object for file and console output</param>
/// <returns>True if the data was written, false on error</returns> /// <returns>True if the data was written, false on error</returns>
private bool WriteFooter(StreamWriter sw, DatFormat datFormat, int depth, Logger logger) private bool WriteFooter(StreamWriter sw, DatFormat datFormat, int depth)
{ {
try try
{ {
@@ -1566,7 +1559,7 @@ namespace SabreTools.Helper.Dats
} }
catch (Exception ex) catch (Exception ex)
{ {
logger.Error(ex.ToString()); Globals.Logger.Error(ex.ToString());
return false; return false;
} }
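
WriteToFile keeps its optional flags, so call sites reduce to the output directory plus any named options. A small sketch (datFile is again an illustrative variable):

    // Write to the current directory, printing statistics first
    datFile.WriteToFile(null, stats: true);

    // Write to a specific directory, skipping blank roms
    datFile.WriteToFile(outDir, ignoreblanks: true);
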
View File
@@ -110,6 +110,7 @@
<ItemGroup> <ItemGroup>
<Compile Include="Data\Constants.cs" /> <Compile Include="Data\Constants.cs" />
<Compile Include="Data\Flags.cs" /> <Compile Include="Data\Flags.cs" />
<Compile Include="Data\Globals.cs" />
<Compile Include="Dats\Partials\DatFile.Bucketing.cs" /> <Compile Include="Dats\Partials\DatFile.Bucketing.cs" />
<Compile Include="Dats\Partials\DatFile.ConvertUpdate.cs" /> <Compile Include="Dats\Partials\DatFile.ConvertUpdate.cs" />
<Compile Include="Dats\Partials\DatFile.DFD.cs" /> <Compile Include="Dats\Partials\DatFile.DFD.cs" />
View File
@@ -72,7 +72,7 @@ namespace SabreTools.Helper.Skippers
SourceFile = Path.GetFileNameWithoutExtension(filename); SourceFile = Path.GetFileNameWithoutExtension(filename);
Logger logger = new Logger(); Logger logger = new Logger();
XmlReader xtr = FileTools.GetXmlTextReader(filename, logger); XmlReader xtr = FileTools.GetXmlTextReader(filename);
if (xtr == null) if (xtr == null)
{ {
@@ -338,16 +338,16 @@ namespace SabreTools.Helper.Skippers
/// <param name="skipperName">Name of the skipper to be used, blank to find a matching skipper</param> /// <param name="skipperName">Name of the skipper to be used, blank to find a matching skipper</param>
/// <param name="logger">Logger object for file and console output</param> /// <param name="logger">Logger object for file and console output</param>
/// <returns>The SkipperRule that matched the file</returns> /// <returns>The SkipperRule that matched the file</returns>
public static SkipperRule GetMatchingRule(string input, string skipperName, Logger logger) public static SkipperRule GetMatchingRule(string input, string skipperName)
{ {
// If the file doesn't exist, return a blank skipper rule // If the file doesn't exist, return a blank skipper rule
if (!File.Exists(input)) if (!File.Exists(input))
{ {
logger.Error("The file '" + input + "' does not exist so it cannot be tested"); Globals.Logger.Error("The file '" + input + "' does not exist so it cannot be tested");
return new SkipperRule(); return new SkipperRule();
} }
return GetMatchingRule(File.OpenRead(input), skipperName, logger); return GetMatchingRule(File.OpenRead(input), skipperName);
} }
/// <summary> /// <summary>
@@ -358,7 +358,7 @@ namespace SabreTools.Helper.Skippers
/// <param name="logger">Logger object for file and console output</param> /// <param name="logger">Logger object for file and console output</param>
/// <param name="keepOpen">True if the underlying stream should be kept open, false otherwise</param> /// <param name="keepOpen">True if the underlying stream should be kept open, false otherwise</param>
/// <returns>The SkipperRule that matched the file</returns> /// <returns>The SkipperRule that matched the file</returns>
public static SkipperRule GetMatchingRule(Stream input, string skipperName, Logger logger, bool keepOpen = false) public static SkipperRule GetMatchingRule(Stream input, string skipperName, bool keepOpen = false)
{ {
SkipperRule skipperRule = new SkipperRule(); SkipperRule skipperRule = new SkipperRule();
@@ -369,7 +369,7 @@ namespace SabreTools.Helper.Skippers
} }
// Loop through and find a Skipper that has the right name // Loop through and find a Skipper that has the right name
logger.Verbose("Beginning search for matching header skip rules"); Globals.Logger.Verbose("Beginning search for matching header skip rules");
List<Skipper> tempList = new List<Skipper>(); List<Skipper> tempList = new List<Skipper>();
tempList.AddRange(List); tempList.AddRange(List);
@@ -519,7 +519,7 @@ namespace SabreTools.Helper.Skippers
input.Dispose(); input.Dispose();
} }
logger.User(" Matching rule found!"); Globals.Logger.User(" Matching rule found!");
return rule; return rule;
} }
} }
@@ -535,7 +535,7 @@ namespace SabreTools.Helper.Skippers
// If we have a blank rule, inform the user // If we have a blank rule, inform the user
if (skipperRule.Tests == null) if (skipperRule.Tests == null)
{ {
logger.Verbose("No matching rule found!"); Globals.Logger.Verbose("No matching rule found!");
} }
return skipperRule; return skipperRule;
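
Header-skipper lookups now log through Globals as well. A hedged usage sketch; the containing static class is assumed to be named Skipper (this hunk does not show it), and inputFile is a placeholder path:

    // A blank skipper name means "find any matching skipper" per the doc comment above
    SkipperRule rule = Skipper.GetMatchingRule(inputFile, string.Empty);
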
View File
@@ -34,16 +34,15 @@ namespace SabreTools.Helper.Skippers
/// </summary> /// </summary>
/// <param name="input">Input file name</param> /// <param name="input">Input file name</param>
/// <param name="output">Output file name</param> /// <param name="output">Output file name</param>
/// <param name="logger">Logger object for file and console output</param>
/// <returns>True if the file was transformed properly, false otherwise</returns> /// <returns>True if the file was transformed properly, false otherwise</returns>
public bool TransformFile(string input, string output, Logger logger) public bool TransformFile(string input, string output)
{ {
bool success = true; bool success = true;
// If the input file doesn't exist, fail // If the input file doesn't exist, fail
if (!File.Exists(input)) if (!File.Exists(input))
{ {
logger.Error("I'm sorry but '" + input + "' doesn't exist!"); Globals.Logger.Error("I'm sorry but '" + input + "' doesn't exist!");
return false; return false;
} }
@@ -53,8 +52,8 @@ namespace SabreTools.Helper.Skippers
Directory.CreateDirectory(Path.GetDirectoryName(output)); Directory.CreateDirectory(Path.GetDirectoryName(output));
} }
logger.User("Attempting to apply rule to '" + input + "'"); Globals.Logger.User("Attempting to apply rule to '" + input + "'");
success = TransformStream(File.Open(input, FileMode.Open, FileAccess.Read, FileShare.ReadWrite), File.OpenWrite(output), logger); success = TransformStream(File.Open(input, FileMode.Open, FileAccess.Read, FileShare.ReadWrite), File.OpenWrite(output));
// If the output file has size 0, delete it // If the output file has size 0, delete it
if (new FileInfo(output).Length == 0) if (new FileInfo(output).Length == 0)
@@ -78,11 +77,10 @@ namespace SabreTools.Helper.Skippers
/// </summary> /// </summary>
/// <param name="input">Input stream</param> /// <param name="input">Input stream</param>
/// <param name="output">Output stream</param> /// <param name="output">Output stream</param>
/// <param name="logger">Logger object for file and console output</param>
/// <param name="keepReadOpen">True if the underlying read stream should be kept open, false otherwise</param> /// <param name="keepReadOpen">True if the underlying read stream should be kept open, false otherwise</param>
/// <param name="keepWriteOpen">True if the underlying write stream should be kept open, false otherwise</param> /// <param name="keepWriteOpen">True if the underlying write stream should be kept open, false otherwise</param>
/// <returns>True if the file was transformed properly, false otherwise</returns> /// <returns>True if the file was transformed properly, false otherwise</returns>
public bool TransformStream(Stream input, Stream output, Logger logger, bool keepReadOpen = false, bool keepWriteOpen = false) public bool TransformStream(Stream input, Stream output, bool keepReadOpen = false, bool keepWriteOpen = false)
{ {
bool success = true; bool success = true;
@@ -92,7 +90,7 @@ namespace SabreTools.Helper.Skippers
|| (Operation > HeaderSkipOperation.Byteswap && (extsize % 4) != 0) || (Operation > HeaderSkipOperation.Byteswap && (extsize % 4) != 0)
|| (Operation > HeaderSkipOperation.Bitswap && (StartOffset == null || StartOffset % 2 == 0))) || (Operation > HeaderSkipOperation.Bitswap && (StartOffset == null || StartOffset % 2 == 0)))
{ {
logger.Error("The stream did not have the correct size to be transformed!"); Globals.Logger.Error("The stream did not have the correct size to be transformed!");
return false; return false;
} }
@@ -101,7 +99,7 @@ namespace SabreTools.Helper.Skippers
BinaryReader br = null; BinaryReader br = null;
try try
{ {
logger.User("Applying found rule to input stream"); Globals.Logger.User("Applying found rule to input stream");
bw = new BinaryWriter(output); bw = new BinaryWriter(output);
br = new BinaryReader(input); br = new BinaryReader(input);
@@ -189,7 +187,7 @@ namespace SabreTools.Helper.Skippers
} }
catch (Exception ex) catch (Exception ex)
{ {
logger.Error(ex.ToString()); Globals.Logger.Error(ex.ToString());
return false; return false;
} }
finally finally
View File
@@ -50,14 +50,13 @@ namespace SabreTools.Helper.Tools
/// <param name="input">Name of the file to be extracted</param> /// <param name="input">Name of the file to be extracted</param>
/// <param name="outDir">Output directory for archive extraction</param> /// <param name="outDir">Output directory for archive extraction</param>
/// <param name="archiveScanLevel">ArchiveScanLevel representing the archive handling levels</param> /// <param name="archiveScanLevel">ArchiveScanLevel representing the archive handling levels</param>
/// <param name="logger">Logger object for file and console output</param>
/// <returns>True if the extraction was a success, false otherwise</returns> /// <returns>True if the extraction was a success, false otherwise</returns>
public static bool ExtractArchive(string input, string outDir, ArchiveScanLevel archiveScanLevel, Logger logger) public static bool ExtractArchive(string input, string outDir, ArchiveScanLevel archiveScanLevel)
{ {
bool encounteredErrors = true; bool encounteredErrors = true;
// First get the archive type // First get the archive type
ArchiveType? at = GetCurrentArchiveType(input, logger); ArchiveType? at = GetCurrentArchiveType(input);
// If we got back null, then it's not an archive, so we return // If we got back null, then it's not an archive, so we return
if (at == null) if (at == null)
@@ -70,7 +69,7 @@ namespace SabreTools.Helper.Tools
// 7-zip // 7-zip
if (at == ArchiveType.SevenZip && (archiveScanLevel & ArchiveScanLevel.SevenZipInternal) != 0) if (at == ArchiveType.SevenZip && (archiveScanLevel & ArchiveScanLevel.SevenZipInternal) != 0)
{ {
logger.Verbose("Found archive of type: " + at); Globals.Logger.Verbose("Found archive of type: " + at);
// Create the temp directory // Create the temp directory
Directory.CreateDirectory(outDir); Directory.CreateDirectory(outDir);
@@ -88,7 +87,7 @@ namespace SabreTools.Helper.Tools
// GZip // GZip
else if (at == ArchiveType.GZip && (archiveScanLevel & ArchiveScanLevel.GZipInternal) != 0) else if (at == ArchiveType.GZip && (archiveScanLevel & ArchiveScanLevel.GZipInternal) != 0)
{ {
logger.Verbose("Found archive of type: " + at); Globals.Logger.Verbose("Found archive of type: " + at);
// Create the temp directory // Create the temp directory
Directory.CreateDirectory(outDir); Directory.CreateDirectory(outDir);
@@ -108,7 +107,7 @@ namespace SabreTools.Helper.Tools
// RAR // RAR
else if (at == ArchiveType.Rar && (archiveScanLevel & ArchiveScanLevel.RarInternal) != 0) else if (at == ArchiveType.Rar && (archiveScanLevel & ArchiveScanLevel.RarInternal) != 0)
{ {
logger.Verbose("Found archive of type: " + at); Globals.Logger.Verbose("Found archive of type: " + at);
// Create the temp directory // Create the temp directory
Directory.CreateDirectory(outDir); Directory.CreateDirectory(outDir);
@@ -126,7 +125,7 @@ namespace SabreTools.Helper.Tools
// TAR // TAR
else if (at == ArchiveType.Tar && (archiveScanLevel & ArchiveScanLevel.TarInternal) != 0) else if (at == ArchiveType.Tar && (archiveScanLevel & ArchiveScanLevel.TarInternal) != 0)
{ {
logger.Verbose("Found archive of type: " + at); Globals.Logger.Verbose("Found archive of type: " + at);
// Create the temp directory // Create the temp directory
Directory.CreateDirectory(outDir); Directory.CreateDirectory(outDir);
@@ -144,7 +143,7 @@ namespace SabreTools.Helper.Tools
// Zip // Zip
else if (at == ArchiveType.Zip && (archiveScanLevel & ArchiveScanLevel.ZipInternal) != 0) else if (at == ArchiveType.Zip && (archiveScanLevel & ArchiveScanLevel.ZipInternal) != 0)
{ {
logger.Verbose("Found archive of type: " + at); Globals.Logger.Verbose("Found archive of type: " + at);
// Create the temp directory // Create the temp directory
Directory.CreateDirectory(outDir); Directory.CreateDirectory(outDir);
@@ -216,9 +215,8 @@ namespace SabreTools.Helper.Tools
/// <param name="input">Name of the archive to be extracted</param> /// <param name="input">Name of the archive to be extracted</param>
/// <param name="entryName">Name of the entry to be extracted</param> /// <param name="entryName">Name of the entry to be extracted</param>
/// <param name="tempDir">Temporary directory for archive extraction</param> /// <param name="tempDir">Temporary directory for archive extraction</param>
/// <param name="logger">Logger object for file and console output</param>
/// <returns>Name of the extracted file, null on error</returns> /// <returns>Name of the extracted file, null on error</returns>
public static string ExtractItem(string input, string entryName, string tempDir, Logger logger) public static string ExtractItem(string input, string entryName, string tempDir)
{ {
string realEntry = ""; string realEntry = "";
@@ -226,7 +224,7 @@ namespace SabreTools.Helper.Tools
realEntry = ""; realEntry = "";
// First get the archive type // First get the archive type
ArchiveType? at = GetCurrentArchiveType(input, logger); ArchiveType? at = GetCurrentArchiveType(input);
// If we got back null, then it's not an archive, so we return // If we got back null, then it's not an archive, so we return
if (at == null) if (at == null)
@@ -242,7 +240,7 @@ namespace SabreTools.Helper.Tools
SevenZipArchive sza = SevenZipArchive.Open(input, new ReaderOptions { LeaveStreamOpen = false, }); SevenZipArchive sza = SevenZipArchive.Open(input, new ReaderOptions { LeaveStreamOpen = false, });
foreach (SevenZipArchiveEntry entry in sza.Entries) foreach (SevenZipArchiveEntry entry in sza.Entries)
{ {
logger.Verbose("Current entry name: '" + entry.Key + "'"); Globals.Logger.Verbose("Current entry name: '" + entry.Key + "'");
if (entry != null && !entry.IsDirectory && entry.Key.Contains(entryName)) if (entry != null && !entry.IsDirectory && entry.Key.Contains(entryName))
{ {
realEntry = entry.Key; realEntry = entry.Key;
@@ -294,7 +292,7 @@ namespace SabreTools.Helper.Tools
RarArchive ra = RarArchive.Open(input, new ReaderOptions { LeaveStreamOpen = false, }); RarArchive ra = RarArchive.Open(input, new ReaderOptions { LeaveStreamOpen = false, });
foreach (RarArchiveEntry entry in ra.Entries) foreach (RarArchiveEntry entry in ra.Entries)
{ {
logger.Verbose("Current entry name: '" + entry.Key + "'"); Globals.Logger.Verbose("Current entry name: '" + entry.Key + "'");
if (entry != null && !entry.IsDirectory && entry.Key.Contains(entryName)) if (entry != null && !entry.IsDirectory && entry.Key.Contains(entryName))
{ {
realEntry = entry.Key; realEntry = entry.Key;
@@ -318,7 +316,7 @@ namespace SabreTools.Helper.Tools
TarArchive ta = TarArchive.Open(input, new ReaderOptions { LeaveStreamOpen = false, }); TarArchive ta = TarArchive.Open(input, new ReaderOptions { LeaveStreamOpen = false, });
foreach (TarArchiveEntry entry in ta.Entries) foreach (TarArchiveEntry entry in ta.Entries)
{ {
logger.Verbose("Current entry name: '" + entry.Key + "'"); Globals.Logger.Verbose("Current entry name: '" + entry.Key + "'");
if (entry != null && !entry.IsDirectory && entry.Key.Contains(entryName)) if (entry != null && !entry.IsDirectory && entry.Key.Contains(entryName))
{ {
realEntry = entry.Key; realEntry = entry.Key;
@@ -348,7 +346,7 @@ namespace SabreTools.Helper.Tools
for (int i = 0; i < zf.EntriesCount && zr == ZipReturn.ZipGood; i++) for (int i = 0; i < zf.EntriesCount && zr == ZipReturn.ZipGood; i++)
{ {
logger.Verbose("Current entry name: '" + zf.Entries[i].FileName + "'"); Globals.Logger.Verbose("Current entry name: '" + zf.Entries[i].FileName + "'");
if (zf.Entries[i].FileName.Contains(entryName)) if (zf.Entries[i].FileName.Contains(entryName))
{ {
realEntry = zf.Entries[i].FileName; realEntry = zf.Entries[i].FileName;
@@ -382,7 +380,7 @@ namespace SabreTools.Helper.Tools
} }
catch (Exception ex) catch (Exception ex)
{ {
logger.Error(ex.ToString()); Globals.Logger.Error(ex.ToString());
realEntry = ""; realEntry = "";
} }
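
The archive helpers drop their Logger parameter the same way. A small sketch of single-entry extraction with the new signature (variable names are illustrative):

    // Returns the path of the extracted file, or null/empty on error
    string extractedPath = ArchiveTools.ExtractItem(archivePath, entryName, tempDir);
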
@@ -397,9 +395,8 @@ namespace SabreTools.Helper.Tools
/// Generate a list of RomData objects from the header values in an archive /// Generate a list of RomData objects from the header values in an archive
/// </summary> /// </summary>
/// <param name="input">Input file to get data from</param> /// <param name="input">Input file to get data from</param>
/// <param name="logger">Logger object for file and console output</param>
/// <returns>List of RomData objects representing the found data</returns> /// <returns>List of RomData objects representing the found data</returns>
public static List<Rom> GetArchiveFileInfo(string input, Logger logger) public static List<Rom> GetArchiveFileInfo(string input)
{ {
List<Rom> roms = new List<Rom>(); List<Rom> roms = new List<Rom>();
string gamename = Path.GetFileNameWithoutExtension(input); string gamename = Path.GetFileNameWithoutExtension(input);
@@ -411,7 +408,7 @@ namespace SabreTools.Helper.Tools
} }
// Next, get the archive type // Next, get the archive type
ArchiveType? at = GetCurrentArchiveType(input, logger); ArchiveType? at = GetCurrentArchiveType(input);
// If we got back null, then it's not an archive, so we return // If we got back null, then it's not an archive, so we return
if (at == null) if (at == null)
@@ -422,7 +419,7 @@ namespace SabreTools.Helper.Tools
// If we got back GZip, try to get TGZ info first // If we got back GZip, try to get TGZ info first
else if (at == ArchiveType.GZip) else if (at == ArchiveType.GZip)
{ {
Rom possibleTgz = GetTorrentGZFileInfo(input, logger); Rom possibleTgz = GetTorrentGZFileInfo(input);
// If it was, then add it to the outputs and continue // If it was, then add it to the outputs and continue
if (possibleTgz != null && possibleTgz.Name != null) if (possibleTgz != null && possibleTgz.Name != null)
@@ -435,7 +432,7 @@ namespace SabreTools.Helper.Tools
IReader reader = null; IReader reader = null;
try try
{ {
logger.Verbose("Found archive of type: " + at); Globals.Logger.Verbose("Found archive of type: " + at);
long size = 0; long size = 0;
string crc = ""; string crc = "";
@@ -447,7 +444,7 @@ namespace SabreTools.Helper.Tools
{ {
if (entry != null && !entry.IsDirectory) if (entry != null && !entry.IsDirectory)
{ {
logger.Verbose("Entry found: '" + entry.Key + "': " Globals.Logger.Verbose("Entry found: '" + entry.Key + "': "
+ (size == 0 ? entry.Size : size) + ", " + (size == 0 ? entry.Size : size) + ", "
+ (crc == "" ? entry.Crc.ToString("X").ToLowerInvariant() : crc)); + (crc == "" ? entry.Crc.ToString("X").ToLowerInvariant() : crc));
@@ -483,7 +480,7 @@ namespace SabreTools.Helper.Tools
{ {
if (entry != null && !entry.IsDirectory) if (entry != null && !entry.IsDirectory)
{ {
logger.Verbose("Entry found: '" + entry.Key + "': " Globals.Logger.Verbose("Entry found: '" + entry.Key + "': "
+ (size == 0 ? entry.Size : size) + ", " + (size == 0 ? entry.Size : size) + ", "
+ (crc == "" ? entry.Crc.ToString("X").ToLowerInvariant() : crc)); + (crc == "" ? entry.Crc.ToString("X").ToLowerInvariant() : crc));
@@ -509,7 +506,7 @@ namespace SabreTools.Helper.Tools
{ {
if (entry != null && !entry.IsDirectory) if (entry != null && !entry.IsDirectory)
{ {
logger.Verbose("Entry found: '" + entry.Key + "': " Globals.Logger.Verbose("Entry found: '" + entry.Key + "': "
+ (size == 0 ? entry.Size : size) + ", " + (size == 0 ? entry.Size : size) + ", "
+ (crc == "" ? entry.Crc.ToString("X").ToLowerInvariant() : crc)); + (crc == "" ? entry.Crc.ToString("X").ToLowerInvariant() : crc));
@@ -543,7 +540,7 @@ namespace SabreTools.Helper.Tools
long newsize = (size == 0 ? (long)zf.Entries[i].UncompressedSize : size); long newsize = (size == 0 ? (long)zf.Entries[i].UncompressedSize : size);
string newcrc = BitConverter.ToString(zf.Entries[i].CRC.Reverse().ToArray(), 0, zf.Entries[i].CRC.Length).Replace("-", string.Empty).ToLowerInvariant(); string newcrc = BitConverter.ToString(zf.Entries[i].CRC.Reverse().ToArray(), 0, zf.Entries[i].CRC.Length).Replace("-", string.Empty).ToLowerInvariant();
logger.Verbose("Entry found: '" + newname + "': " + newsize + ", " + newcrc); Globals.Logger.Verbose("Entry found: '" + newname + "': " + newsize + ", " + newcrc);
roms.Add(new Rom roms.Add(new Rom
{ {
@@ -563,7 +560,7 @@ namespace SabreTools.Helper.Tools
} }
catch (Exception ex) catch (Exception ex)
{ {
logger.Error(ex.ToString()); Globals.Logger.Error(ex.ToString());
} }
finally finally
{ {
@@ -577,9 +574,8 @@ namespace SabreTools.Helper.Tools
/// Retrieve file information for a single torrent GZ file /// Retrieve file information for a single torrent GZ file
/// </summary> /// </summary>
/// <param name="input">Filename to get information from</param> /// <param name="input">Filename to get information from</param>
/// <param name="logger">Logger object for file and console output</param>
/// <returns>Populated RomData object if success, empty one on error</returns> /// <returns>Populated RomData object if success, empty one on error</returns>
public static Rom GetTorrentGZFileInfo(string input, Logger logger) public static Rom GetTorrentGZFileInfo(string input)
{ {
// Check for the file existing first // Check for the file existing first
if (!File.Exists(input)) if (!File.Exists(input))
@@ -593,21 +589,21 @@ namespace SabreTools.Helper.Tools
// If we have the romba depot files, just skip them gracefully // If we have the romba depot files, just skip them gracefully
if (datum == ".romba_size" || datum == ".romba_size.backup") if (datum == ".romba_size" || datum == ".romba_size.backup")
{ {
logger.Verbose("Romba depot file found, skipping: " + input); Globals.Logger.Verbose("Romba depot file found, skipping: " + input);
return null; return null;
} }
// Check if the name is the right length // Check if the name is the right length
if (!Regex.IsMatch(datum, @"^[0-9a-f]{" + Constants.SHA1Length + @"}\.gz")) // TODO: When updating to SHA-256, this needs to update to Constants.SHA256Length if (!Regex.IsMatch(datum, @"^[0-9a-f]{" + Constants.SHA1Length + @"}\.gz")) // TODO: When updating to SHA-256, this needs to update to Constants.SHA256Length
{ {
logger.Warning("Non SHA-1 filename found, skipping: '" + Path.GetFullPath(input) + "'"); Globals.Logger.Warning("Non SHA-1 filename found, skipping: '" + Path.GetFullPath(input) + "'");
return null; return null;
} }
// Check if the file is at least the minimum length // Check if the file is at least the minimum length
if (filesize < 40 /* bytes */) if (filesize < 40 /* bytes */)
{ {
logger.Warning("Possibly corrupt file '" + Path.GetFullPath(input) + "' with size " + Style.GetBytesReadable(filesize)); Globals.Logger.Warning("Possibly corrupt file '" + Path.GetFullPath(input) + "' with size " + Style.GetBytesReadable(filesize));
return null; return null;
} }
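The checks above gate torrent GZ parsing on the depot naming convention: Romba's .romba_size bookkeeping files are skipped, the filename must be a SHA-1 hex string ending in .gz, and anything under 40 bytes is rejected as too small to be a valid entry. A standalone sketch of those three checks, assuming Constants.SHA1Length is 40 (the helper name and the anchored regex are illustrative):

using System.IO;
using System.Text.RegularExpressions;

public static class DepotNameCheck
{
    private const int Sha1HexLength = 40; // assumed value of Constants.SHA1Length

    // Returns true only if the file looks like a valid torrent GZ depot entry.
    public static bool LooksLikeDepotFile(string input)
    {
        string datum = Path.GetFileName(input).ToLowerInvariant();
        long filesize = new FileInfo(input).Length;

        // Romba bookkeeping files are skipped rather than treated as errors
        if (datum == ".romba_size" || datum == ".romba_size.backup")
            return false;

        // The name must be exactly a SHA-1 hex string plus ".gz"
        if (!Regex.IsMatch(datum, @"^[0-9a-f]{" + Sha1HexLength + @"}\.gz$"))
            return false;

        // Anything shorter than 40 bytes cannot be valid
        return filesize >= 40;
    }
}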
@@ -666,9 +662,8 @@ namespace SabreTools.Helper.Tools
/// Returns the archive type of an input file /// Returns the archive type of an input file
/// </summary> /// </summary>
/// <param name="input">Input file to check</param> /// <param name="input">Input file to check</param>
/// <param name="logger">Logger object for file and console output</param>
/// <returns>ArchiveType of inputted file (null on error)</returns> /// <returns>ArchiveType of inputted file (null on error)</returns>
public static ArchiveType? GetCurrentArchiveType(string input, Logger logger) public static ArchiveType? GetCurrentArchiveType(string input)
{ {
ArchiveType? outtype = null; ArchiveType? outtype = null;
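The body of GetCurrentArchiveType is elided by this hunk; detection of this kind is usually done by comparing the file's leading bytes against well-known signatures. A sketch under that assumption, using the standard magic numbers for 7z, GZip, RAR, and Zip (the enum and class are local to the sketch, not SabreTools types):

using System.IO;
using System.Linq;

public enum SniffedArchiveType { SevenZip, GZip, Rar, Zip }

public static class ArchiveSniffer
{
    // Return the archive type implied by the file's magic bytes, or null if none match.
    public static SniffedArchiveType? Sniff(string input)
    {
        byte[] magic = new byte[6];
        using (FileStream fs = File.OpenRead(input))
        {
            fs.Read(magic, 0, magic.Length);
        }

        if (magic.SequenceEqual(new byte[] { 0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C }))
            return SniffedArchiveType.SevenZip;
        if (magic.Take(2).SequenceEqual(new byte[] { 0x1F, 0x8B }))
            return SniffedArchiveType.GZip;
        if (magic.Take(4).SequenceEqual(new byte[] { 0x52, 0x61, 0x72, 0x21 })) // "Rar!"
            return SniffedArchiveType.Rar;
        if (magic.Take(4).SequenceEqual(new byte[] { 0x50, 0x4B, 0x03, 0x04 })) // "PK\x03\x04"
            return SniffedArchiveType.Zip;

        return null;
    }
}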
@@ -743,16 +738,15 @@ namespace SabreTools.Helper.Tools
/// </summary> /// </summary>
/// <param name="input">Name of the input file to check</param> /// <param name="input">Name of the input file to check</param>
/// <param name="archiveScanLevel">ArchiveScanLevel representing the archive handling levels</param> /// <param name="archiveScanLevel">ArchiveScanLevel representing the archive handling levels</param>
/// <param name="logger">Logger object for file and console output</param>
/// <param name="shouldExternalProcess">Output parameter determining if file should be processed externally</param> /// <param name="shouldExternalProcess">Output parameter determining if file should be processed externally</param>
/// <param name="shouldInternalProcess">Output parameter determining if file should be processed internally</param> /// <param name="shouldInternalProcess">Output parameter determining if file should be processed internally</param>
public static void GetInternalExternalProcess(string input, ArchiveScanLevel archiveScanLevel, public static void GetInternalExternalProcess(string input, ArchiveScanLevel archiveScanLevel,
Logger logger, out bool shouldExternalProcess, out bool shouldInternalProcess) out bool shouldExternalProcess, out bool shouldInternalProcess)
{ {
shouldExternalProcess = true; shouldExternalProcess = true;
shouldInternalProcess = true; shouldInternalProcess = true;
ArchiveType? archiveType = GetCurrentArchiveType(input, logger); ArchiveType? archiveType = GetCurrentArchiveType(input);
switch (archiveType) switch (archiveType)
{ {
case null: case null:
@@ -857,8 +851,7 @@ namespace SabreTools.Helper.Tools
/// (INCOMPLETE) Retrieve file information for a RAR file /// (INCOMPLETE) Retrieve file information for a RAR file
/// </summary> /// </summary>
/// <param name="input">Filename to get information from</param> /// <param name="input">Filename to get information from</param>
/// <param name="logger">Logger object for file and console output</param> public static void GetRarFileInfo(string input)
public static void GetRarFileInfo(string input, Logger logger)
{ {
if (!File.Exists(input)) if (!File.Exists(input))
{ {
@@ -1074,9 +1067,8 @@ namespace SabreTools.Helper.Tools
/// (INCOMPLETE) Get the T7Z status of the file /// (INCOMPLETE) Get the T7Z status of the file
/// </summary> /// </summary>
/// <param name="filename">Name of the file to check</param> /// <param name="filename">Name of the file to check</param>
/// <param name="logger">Logger object for file and console output</param>
/// <returns>0 if the file isn't 7z, 1 if the file is t7z, 2 if the file is 7z</returns> /// <returns>0 if the file isn't 7z, 1 if the file is t7z, 2 if the file is 7z</returns>
public static int IsT7z(string filename, Logger logger) public static int IsT7z(string filename)
{ {
int ist7z = 0; int ist7z = 0;
@@ -1106,7 +1098,7 @@ namespace SabreTools.Helper.Tools
} }
catch catch
{ {
logger.Warning("File '" + filename + "' could not be opened"); Globals.Logger.Warning("File '" + filename + "' could not be opened");
ist7z = 0; ist7z = 0;
} }
} }
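Per its doc comment, IsT7z reports 0 when the file is not 7z at all, 1 for a torrent7z file, and 2 for a plain 7z file, with unreadable files logged as a warning and treated as 0. A small caller-side sketch of interpreting that result (the describe helper is illustrative; the call assumes the method stays on ArchiveTools):

public static class T7zCheck
{
    // Map IsT7z's integer result onto a human-readable description.
    public static string Describe(int ist7z)
    {
        switch (ist7z)
        {
            case 1: return "torrent7z archive";
            case 2: return "standard 7z archive";
            default: return "not a 7z archive";
        }
    }
}

// Example: Console.WriteLine(T7zCheck.Describe(ArchiveTools.IsT7z(filename)));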
@@ -1124,16 +1116,15 @@ namespace SabreTools.Helper.Tools
/// <param name="inputFile">Input filename to be moved</param> /// <param name="inputFile">Input filename to be moved</param>
/// <param name="outDir">Output directory to build to</param> /// <param name="outDir">Output directory to build to</param>
/// <param name="rom">RomData representing the new information</param> /// <param name="rom">RomData representing the new information</param>
/// <param name="logger">Logger object for file and console output</param>
/// <param name="date">True if the date from the DAT should be used if available, false otherwise (default)</param> /// <param name="date">True if the date from the DAT should be used if available, false otherwise (default)</param>
/// <returns>True if the archive was written properly, false otherwise</returns> /// <returns>True if the archive was written properly, false otherwise</returns>
public static bool WriteTAR(string inputFile, string outDir, Rom rom, Logger logger, bool date = false) public static bool WriteTAR(string inputFile, string outDir, Rom rom, bool date = false)
{ {
// Wrap the individual inputs into lists // Wrap the individual inputs into lists
List<string> inputFiles = new List<string>() { inputFile }; List<string> inputFiles = new List<string>() { inputFile };
List<Rom> roms = new List<Rom>() { rom }; List<Rom> roms = new List<Rom>() { rom };
return WriteTAR(inputFiles, outDir, roms, logger, date: date); return WriteTAR(inputFiles, outDir, roms, date: date);
} }
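The single-input WriteTAR overload above, like each of the WriteTorrent* overloads that follow, simply wraps its arguments into one-element lists and forwards to the list-based overload, so the real archive-writing logic exists in exactly one place. The same forwarding idiom in isolation (placeholder types, not SabreTools ones):

using System.Collections.Generic;

public static class ForwardingOverloads
{
    // Single-item convenience overload: wrap the arguments and delegate.
    public static bool WriteArchive(string inputFile, string outDir, string item, bool date = false)
    {
        List<string> inputFiles = new List<string>() { inputFile };
        List<string> items = new List<string>() { item };

        return WriteArchive(inputFiles, outDir, items, date: date);
    }

    // List-based overload that does the actual work (stubbed here).
    public static bool WriteArchive(List<string> inputFiles, string outDir, List<string> items, bool date = false)
    {
        // A real implementation would build the archive from inputFiles into outDir
        return inputFiles.Count == items.Count;
    }
}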
/// <summary> /// <summary>
@@ -1142,10 +1133,9 @@ namespace SabreTools.Helper.Tools
/// <param name="inputFile">Input filenames to be moved</param> /// <param name="inputFile">Input filenames to be moved</param>
/// <param name="outDir">Output directory to build to</param> /// <param name="outDir">Output directory to build to</param>
/// <param name="rom">List of Rom representing the new information</param> /// <param name="rom">List of Rom representing the new information</param>
/// <param name="logger">Logger object for file and console output</param>
/// <param name="date">True if the date from the DAT should be used if available, false otherwise (default)</param> /// <param name="date">True if the date from the DAT should be used if available, false otherwise (default)</param>
/// <returns>True if the archive was written properly, false otherwise</returns> /// <returns>True if the archive was written properly, false otherwise</returns>
public static bool WriteTAR(List<string> inputFiles, string outDir, List<Rom> roms, Logger logger, bool date = false) public static bool WriteTAR(List<string> inputFiles, string outDir, List<Rom> roms, bool date = false)
{ {
bool success = false; bool success = false;
string tempFile = Path.Combine(Path.GetTempPath(), "tmp" + Guid.NewGuid().ToString()); string tempFile = Path.Combine(Path.GetTempPath(), "tmp" + Guid.NewGuid().ToString());
@@ -1308,16 +1298,15 @@ namespace SabreTools.Helper.Tools
/// <param name="inputFile">Input filename to be moved</param> /// <param name="inputFile">Input filename to be moved</param>
/// <param name="outDir">Output directory to build to</param> /// <param name="outDir">Output directory to build to</param>
/// <param name="rom">RomData representing the new information</param> /// <param name="rom">RomData representing the new information</param>
/// <param name="logger">Logger object for file and console output</param>
/// <param name="date">True if the date from the DAT should be used if available, false otherwise (default)</param> /// <param name="date">True if the date from the DAT should be used if available, false otherwise (default)</param>
/// <returns>True if the archive was written properly, false otherwise</returns> /// <returns>True if the archive was written properly, false otherwise</returns>
public static bool WriteTorrent7Zip(string inputFile, string outDir, Rom rom, Logger logger, bool date = false) public static bool WriteTorrent7Zip(string inputFile, string outDir, Rom rom, bool date = false)
{ {
// Wrap the individual inputs into lists // Wrap the individual inputs into lists
List<string> inputFiles = new List<string>() { inputFile }; List<string> inputFiles = new List<string>() { inputFile };
List<Rom> roms = new List<Rom>() { rom }; List<Rom> roms = new List<Rom>() { rom };
return WriteTorrent7Zip(inputFiles, outDir, roms, logger, date: date); return WriteTorrent7Zip(inputFiles, outDir, roms, date: date);
} }
/// <summary> /// <summary>
@@ -1326,10 +1315,9 @@ namespace SabreTools.Helper.Tools
/// <param name="inputFile">Input filenames to be moved</param> /// <param name="inputFile">Input filenames to be moved</param>
/// <param name="outDir">Output directory to build to</param> /// <param name="outDir">Output directory to build to</param>
/// <param name="rom">List of Rom representing the new information</param> /// <param name="rom">List of Rom representing the new information</param>
/// <param name="logger">Logger object for file and console output</param>
/// <param name="date">True if the date from the DAT should be used if available, false otherwise (default)</param> /// <param name="date">True if the date from the DAT should be used if available, false otherwise (default)</param>
/// <returns>True if the archive was written properly, false otherwise</returns> /// <returns>True if the archive was written properly, false otherwise</returns>
public static bool WriteTorrent7Zip(List<string> inputFiles, string outDir, List<Rom> roms, Logger logger, bool date = false) public static bool WriteTorrent7Zip(List<string> inputFiles, string outDir, List<Rom> roms, bool date = false)
{ {
bool success = false; bool success = false;
string tempFile = Path.Combine(outDir, "tmp" + Guid.NewGuid().ToString()); string tempFile = Path.Combine(outDir, "tmp" + Guid.NewGuid().ToString());
@@ -1541,15 +1529,14 @@ namespace SabreTools.Helper.Tools
/// <param name="input">File to write from</param> /// <param name="input">File to write from</param>
/// <param name="outDir">Directory to write archive to</param> /// <param name="outDir">Directory to write archive to</param>
/// <param name="romba">True if files should be output in Romba depot folders, false otherwise</param> /// <param name="romba">True if files should be output in Romba depot folders, false otherwise</param>
/// <param name="logger">Logger object for file and console output</param>
/// <returns>True if the write was a success, false otherwise</returns> /// <returns>True if the write was a success, false otherwise</returns>
/// <remarks>This works for now, but it can be sped up by using Ionic.Zip or another zlib wrapper that allows for header values built-in. See edc's code.</remarks> /// <remarks>This works for now, but it can be sped up by using Ionic.Zip or another zlib wrapper that allows for header values built-in. See edc's code.</remarks>
public static bool WriteTorrentGZ(string input, string outDir, bool romba, Logger logger) public static bool WriteTorrentGZ(string input, string outDir, bool romba)
{ {
// Check that the input file exists // Check that the input file exists
if (!File.Exists(input)) if (!File.Exists(input))
{ {
logger.Warning("File " + input + " does not exist!"); Globals.Logger.Warning("File " + input + " does not exist!");
return false; return false;
} }
input = Path.GetFullPath(input); input = Path.GetFullPath(input);
@@ -1562,7 +1549,7 @@ namespace SabreTools.Helper.Tools
outDir = Path.GetFullPath(outDir); outDir = Path.GetFullPath(outDir);
// Now get the Rom info for the file so we have hashes and size // Now get the Rom info for the file so we have hashes and size
Rom rom = FileTools.GetFileInfo(input, logger); Rom rom = FileTools.GetFileInfo(input);
// Get the output file name // Get the output file name
string outfile = null; string outfile = null;
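When romba output is requested, depot files are conventionally sharded under four nested directories taken from the first four byte-pairs of the SHA-1; the hunk above stops just before that output name is built. A sketch of the path construction, assuming the usual aa/bb/cc/dd depot layout:

using System.IO;

public static class DepotPath
{
    // Build "<outDir>/ab/cd/ef/01/<sha1>.gz" from a 40-character SHA-1 string.
    public static string Build(string outDir, string sha1)
    {
        string subfolder = Path.Combine(
            sha1.Substring(0, 2),
            sha1.Substring(2, 2),
            sha1.Substring(4, 2),
            sha1.Substring(6, 2));

        return Path.Combine(outDir, subfolder, sha1 + ".gz");
    }
}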
@@ -1634,16 +1621,15 @@ namespace SabreTools.Helper.Tools
/// <param name="inputFile">Input filename to be moved</param> /// <param name="inputFile">Input filename to be moved</param>
/// <param name="outDir">Output directory to build to</param> /// <param name="outDir">Output directory to build to</param>
/// <param name="rom">RomData representing the new information</param> /// <param name="rom">RomData representing the new information</param>
/// <param name="logger">Logger object for file and console output</param>
/// <param name="date">True if the date from the DAT should be used if available, false otherwise (default)</param> /// <param name="date">True if the date from the DAT should be used if available, false otherwise (default)</param>
/// <returns>True if the archive was written properly, false otherwise</returns> /// <returns>True if the archive was written properly, false otherwise</returns>
public static bool WriteTorrentLRZ(string inputFile, string outDir, Rom rom, Logger logger, bool date = false) public static bool WriteTorrentLRZ(string inputFile, string outDir, Rom rom, bool date = false)
{ {
// Wrap the individual inputs into lists // Wrap the individual inputs into lists
List<string> inputFiles = new List<string>() { inputFile }; List<string> inputFiles = new List<string>() { inputFile };
List<Rom> roms = new List<Rom>() { rom }; List<Rom> roms = new List<Rom>() { rom };
return WriteTorrentLRZ(inputFiles, outDir, roms, logger, date: date); return WriteTorrentLRZ(inputFiles, outDir, roms, date: date);
} }
/// <summary> /// <summary>
@@ -1652,10 +1638,9 @@ namespace SabreTools.Helper.Tools
/// <param name="inputFile">Input filenames to be moved</param> /// <param name="inputFile">Input filenames to be moved</param>
/// <param name="outDir">Output directory to build to</param> /// <param name="outDir">Output directory to build to</param>
/// <param name="rom">List of Rom representing the new information</param> /// <param name="rom">List of Rom representing the new information</param>
/// <param name="logger">Logger object for file and console output</param>
/// <param name="date">True if the date from the DAT should be used if available, false otherwise (default)</param> /// <param name="date">True if the date from the DAT should be used if available, false otherwise (default)</param>
/// <returns>True if the archive was written properly, false otherwise</returns> /// <returns>True if the archive was written properly, false otherwise</returns>
public static bool WriteTorrentLRZ(List<string> inputFiles, string outDir, List<Rom> roms, Logger logger, bool date = false) public static bool WriteTorrentLRZ(List<string> inputFiles, string outDir, List<Rom> roms, bool date = false)
{ {
return false; return false;
} }
@@ -1666,16 +1651,15 @@ namespace SabreTools.Helper.Tools
/// <param name="inputFile">Input filename to be moved</param> /// <param name="inputFile">Input filename to be moved</param>
/// <param name="outDir">Output directory to build to</param> /// <param name="outDir">Output directory to build to</param>
/// <param name="rom">RomData representing the new information</param> /// <param name="rom">RomData representing the new information</param>
/// <param name="logger">Logger object for file and console output</param>
/// <param name="date">True if the date from the DAT should be used if available, false otherwise (default)</param> /// <param name="date">True if the date from the DAT should be used if available, false otherwise (default)</param>
/// <returns>True if the archive was written properly, false otherwise</returns> /// <returns>True if the archive was written properly, false otherwise</returns>
public static bool WriteTorrentRAR(string inputFile, string outDir, Rom rom, Logger logger, bool date = false) public static bool WriteTorrentRAR(string inputFile, string outDir, Rom rom, bool date = false)
{ {
// Wrap the individual inputs into lists // Wrap the individual inputs into lists
List<string> inputFiles = new List<string>() { inputFile }; List<string> inputFiles = new List<string>() { inputFile };
List<Rom> roms = new List<Rom>() { rom }; List<Rom> roms = new List<Rom>() { rom };
return WriteTorrentRAR(inputFiles, outDir, roms, logger, date: date); return WriteTorrentRAR(inputFiles, outDir, roms, date: date);
} }
/// <summary> /// <summary>
@@ -1684,10 +1668,9 @@ namespace SabreTools.Helper.Tools
/// <param name="inputFile">Input filenames to be moved</param> /// <param name="inputFile">Input filenames to be moved</param>
/// <param name="outDir">Output directory to build to</param> /// <param name="outDir">Output directory to build to</param>
/// <param name="rom">List of Rom representing the new information</param> /// <param name="rom">List of Rom representing the new information</param>
/// <param name="logger">Logger object for file and console output</param>
/// <param name="date">True if the date from the DAT should be used if available, false otherwise (default)</param> /// <param name="date">True if the date from the DAT should be used if available, false otherwise (default)</param>
/// <returns>True if the archive was written properly, false otherwise</returns> /// <returns>True if the archive was written properly, false otherwise</returns>
public static bool WriteTorrentRAR(List<string> inputFiles, string outDir, List<Rom> roms, Logger logger, bool date = false) public static bool WriteTorrentRAR(List<string> inputFiles, string outDir, List<Rom> roms, bool date = false)
{ {
return false; return false;
} }
@@ -1698,16 +1681,15 @@ namespace SabreTools.Helper.Tools
/// <param name="inputFile">Input filename to be moved</param> /// <param name="inputFile">Input filename to be moved</param>
/// <param name="outDir">Output directory to build to</param> /// <param name="outDir">Output directory to build to</param>
/// <param name="rom">RomData representing the new information</param> /// <param name="rom">RomData representing the new information</param>
/// <param name="logger">Logger object for file and console output</param>
/// <param name="date">True if the date from the DAT should be used if available, false otherwise (default)</param> /// <param name="date">True if the date from the DAT should be used if available, false otherwise (default)</param>
/// <returns>True if the archive was written properly, false otherwise</returns> /// <returns>True if the archive was written properly, false otherwise</returns>
public static bool WriteTorrentXZ(string inputFile, string outDir, Rom rom, Logger logger, bool date = false) public static bool WriteTorrentXZ(string inputFile, string outDir, Rom rom, bool date = false)
{ {
// Wrap the individual inputs into lists // Wrap the individual inputs into lists
List<string> inputFiles = new List<string>() { inputFile }; List<string> inputFiles = new List<string>() { inputFile };
List<Rom> roms = new List<Rom>() { rom }; List<Rom> roms = new List<Rom>() { rom };
return WriteTorrentXZ(inputFiles, outDir, roms, logger, date: date); return WriteTorrentXZ(inputFiles, outDir, roms, date: date);
} }
/// <summary> /// <summary>
@@ -1716,10 +1698,9 @@ namespace SabreTools.Helper.Tools
/// <param name="inputFile">Input filenames to be moved</param> /// <param name="inputFile">Input filenames to be moved</param>
/// <param name="outDir">Output directory to build to</param> /// <param name="outDir">Output directory to build to</param>
/// <param name="rom">List of Rom representing the new information</param> /// <param name="rom">List of Rom representing the new information</param>
/// <param name="logger">Logger object for file and console output</param>
/// <param name="date">True if the date from the DAT should be used if available, false otherwise (default)</param> /// <param name="date">True if the date from the DAT should be used if available, false otherwise (default)</param>
/// <returns>True if the archive was written properly, false otherwise</returns> /// <returns>True if the archive was written properly, false otherwise</returns>
public static bool WriteTorrentXZ(List<string> inputFiles, string outDir, List<Rom> roms, Logger logger, bool date = false) public static bool WriteTorrentXZ(List<string> inputFiles, string outDir, List<Rom> roms, bool date = false)
{ {
return false; return false;
} }
@@ -1730,16 +1711,15 @@ namespace SabreTools.Helper.Tools
/// <param name="inputFile">Input filename to be moved</param> /// <param name="inputFile">Input filename to be moved</param>
/// <param name="outDir">Output directory to build to</param> /// <param name="outDir">Output directory to build to</param>
/// <param name="rom">RomData representing the new information</param> /// <param name="rom">RomData representing the new information</param>
/// <param name="logger">Logger object for file and console output</param>
/// <param name="date">True if the date from the DAT should be used if available, false otherwise (default)</param> /// <param name="date">True if the date from the DAT should be used if available, false otherwise (default)</param>
/// <returns>True if the archive was written properly, false otherwise</returns> /// <returns>True if the archive was written properly, false otherwise</returns>
public static bool WriteTorrentZip(string inputFile, string outDir, Rom rom, Logger logger, bool date = false) public static bool WriteTorrentZip(string inputFile, string outDir, Rom rom, bool date = false)
{ {
// Wrap the individual inputs into lists // Wrap the individual inputs into lists
List<string> inputFiles = new List<string>() { inputFile }; List<string> inputFiles = new List<string>() { inputFile };
List<Rom> roms = new List<Rom>() { rom }; List<Rom> roms = new List<Rom>() { rom };
return WriteTorrentZip(inputFiles, outDir, roms, logger, date: date); return WriteTorrentZip(inputFiles, outDir, roms, date: date);
} }
/// <summary> /// <summary>
@@ -1748,10 +1728,9 @@ namespace SabreTools.Helper.Tools
/// <param name="inputFile">Input filenames to be moved</param> /// <param name="inputFile">Input filenames to be moved</param>
/// <param name="outDir">Output directory to build to</param> /// <param name="outDir">Output directory to build to</param>
/// <param name="rom">List of Rom representing the new information</param> /// <param name="rom">List of Rom representing the new information</param>
/// <param name="logger">Logger object for file and console output</param>
/// <param name="date">True if the date from the DAT should be used if available, false otherwise (default)</param> /// <param name="date">True if the date from the DAT should be used if available, false otherwise (default)</param>
/// <returns>True if the archive was written properly, false otherwise</returns> /// <returns>True if the archive was written properly, false otherwise</returns>
public static bool WriteTorrentZip(List<string> inputFiles, string outDir, List<Rom> roms, Logger logger, bool date = false) public static bool WriteTorrentZip(List<string> inputFiles, string outDir, List<Rom> roms, bool date = false)
{ {
bool success = false; bool success = false;
string tempFile = Path.Combine(outDir, "tmp" + Guid.NewGuid().ToString()); string tempFile = Path.Combine(outDir, "tmp" + Guid.NewGuid().ToString());

View File

@@ -23,8 +23,7 @@ namespace SabreTools.Helper.Tools
/// <param name="header">String representing the header bytes</param> /// <param name="header">String representing the header bytes</param>
/// <param name="SHA1">SHA-1 of the deheadered file</param> /// <param name="SHA1">SHA-1 of the deheadered file</param>
/// <param name="type">Name of the source skipper file</param> /// <param name="type">Name of the source skipper file</param>
/// <param name="logger">Logger object for console and file output</param> public static void AddHeaderToDatabase(string header, string SHA1, string source)
public static void AddHeaderToDatabase(string header, string SHA1, string source, Logger logger)
{ {
bool exists = false; bool exists = false;
@@ -47,7 +46,7 @@ namespace SabreTools.Helper.Tools
"'" + header + "', " + "'" + header + "', " +
"'" + source + "')"; "'" + source + "')";
slc = new SqliteCommand(query, dbc); slc = new SqliteCommand(query, dbc);
logger.Verbose("Result of inserting header: " + slc.ExecuteNonQuery()); Globals.Logger.Verbose("Result of inserting header: " + slc.ExecuteNonQuery());
} }
// Dispose of database objects // Dispose of database objects
@@ -162,9 +161,8 @@ CREATE TABLE IF NOT EXISTS data (
/// Retrieve headers from the database /// Retrieve headers from the database
/// </summary> /// </summary>
/// <param name="SHA1">SHA-1 of the deheadered file</param> /// <param name="SHA1">SHA-1 of the deheadered file</param>
/// <param name="logger">Logger object for console and file output</param>
/// <returns>List of strings representing the headers to add</returns> /// <returns>List of strings representing the headers to add</returns>
public static List<string> RetrieveHeadersFromDatabase(string SHA1, Logger logger) public static List<string> RetrieveHeadersFromDatabase(string SHA1)
{ {
// Ensure the database exists // Ensure the database exists
EnsureDatabase(Constants.HeadererDbSchema, Constants.HeadererFileName, Constants.HeadererConnectionString); EnsureDatabase(Constants.HeadererDbSchema, Constants.HeadererFileName, Constants.HeadererConnectionString);
@@ -184,13 +182,13 @@ CREATE TABLE IF NOT EXISTS data (
{ {
while (sldr.Read()) while (sldr.Read())
{ {
logger.Verbose("Found match with rom type " + sldr.GetString(1)); Globals.Logger.Verbose("Found match with rom type " + sldr.GetString(1));
headers.Add(sldr.GetString(0)); headers.Add(sldr.GetString(0));
} }
} }
else else
{ {
logger.Warning("No matching header could be found!"); Globals.Logger.Warning("No matching header could be found!");
} }
// Dispose of database objects // Dispose of database objects
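RetrieveHeadersFromDatabase pulls every stored header whose deheadered SHA-1 matches the input, logging one verbose line per match and a warning when nothing is found. A self-contained sketch of that query shape, assuming the Mono.Data.Sqlite provider and with column names inferred from the statements visible above (the real code builds its connection string from Constants):

using System.Collections.Generic;
using Mono.Data.Sqlite;

public static class HeaderLookup
{
    // Return all stored headers recorded for a deheadered SHA-1.
    public static List<string> GetHeaders(string connectionString, string sha1)
    {
        List<string> headers = new List<string>();

        using (SqliteConnection dbc = new SqliteConnection(connectionString))
        {
            dbc.Open();

            // Same shape the hunk above reads: header text plus the source skipper type
            string query = "SELECT header, type FROM data WHERE sha1='" + sha1 + "'";
            using (SqliteCommand slc = new SqliteCommand(query, dbc))
            using (SqliteDataReader sldr = slc.ExecuteReader())
            {
                while (sldr.Read())
                {
                    headers.Add(sldr.GetString(0));
                }
            }
        }

        return headers;
    }
}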

View File

@@ -65,7 +65,7 @@ namespace SabreTools.Helper.Tools
/// <param name="filename">Name of the file to be parsed</param> /// <param name="filename">Name of the file to be parsed</param>
/// <returns>The DatFormat corresponding to the DAT</returns> /// <returns>The DatFormat corresponding to the DAT</returns>
/// <remarks>There is currently no differentiation between XML and SabreDAT here</remarks> /// <remarks>There is currently no differentiation between XML and SabreDAT here</remarks>
public static DatFormat GetDatFormat(string filename, Logger logger) public static DatFormat GetDatFormat(string filename)
{ {
// Limit the output formats based on extension // Limit the output formats based on extension
string ext = Path.GetExtension(filename).ToLowerInvariant(); string ext = Path.GetExtension(filename).ToLowerInvariant();
@@ -80,12 +80,12 @@ namespace SabreTools.Helper.Tools
} }
// Read the input file, if possible // Read the input file, if possible
logger.Verbose("Attempting to read file to get format: \"" + filename + "\""); Globals.Logger.Verbose("Attempting to read file to get format: \"" + filename + "\"");
// Check if file exists // Check if file exists
if (!File.Exists(filename)) if (!File.Exists(filename))
{ {
logger.Warning("File '" + filename + "' could not read from!"); Globals.Logger.Warning("File '" + filename + "' could not read from!");
return 0; return 0;
} }
@@ -196,13 +196,12 @@ namespace SabreTools.Helper.Tools
/// Retrieve file information for a single file /// Retrieve file information for a single file
/// </summary> /// </summary>
/// <param name="input">Filename to get information from</param> /// <param name="input">Filename to get information from</param>
/// <param name="logger">Logger object for console and file output</param>
/// <param name="omitFromScan">Hash flag saying what hashes should not be calculated (defaults to none)</param> /// <param name="omitFromScan">Hash flag saying what hashes should not be calculated (defaults to none)</param>
/// <param name="offset">Set a >0 number for getting hash for part of the file, 0 otherwise (default)</param> /// <param name="offset">Set a >0 number for getting hash for part of the file, 0 otherwise (default)</param>
/// <param name="date">True if the file Date should be included, false otherwise (default)</param> /// <param name="date">True if the file Date should be included, false otherwise (default)</param>
/// <param name="header">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param> /// <param name="header">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <returns>Populated RomData object if success, empty one on error</returns> /// <returns>Populated RomData object if success, empty one on error</returns>
public static Rom GetFileInfo(string input, Logger logger, Hash omitFromScan = 0x0, public static Rom GetFileInfo(string input, Hash omitFromScan = 0x0,
long offset = 0, bool date = false, string header = null) long offset = 0, bool date = false, string header = null)
{ {
// Add safeguard if file doesn't exist // Add safeguard if file doesn't exist
@@ -215,7 +214,7 @@ namespace SabreTools.Helper.Tools
Rom rom = new Rom(); Rom rom = new Rom();
if (header != null) if (header != null)
{ {
SkipperRule rule = Skipper.GetMatchingRule(input, Path.GetFileNameWithoutExtension(header), logger); SkipperRule rule = Skipper.GetMatchingRule(input, Path.GetFileNameWithoutExtension(header));
// If there's a match, get the new information from the stream // If there's a match, get the new information from the stream
if (rule.Tests != null && rule.Tests.Count != 0) if (rule.Tests != null && rule.Tests.Count != 0)
@@ -225,7 +224,7 @@ namespace SabreTools.Helper.Tools
FileStream inputStream = File.OpenRead(input); FileStream inputStream = File.OpenRead(input);
// Transform the stream and get the information from it // Transform the stream and get the information from it
rule.TransformStream(inputStream, outputStream, logger, keepReadOpen: false, keepWriteOpen: true); rule.TransformStream(inputStream, outputStream, keepReadOpen: false, keepWriteOpen: true);
rom = GetStreamInfo(outputStream, outputStream.Length); rom = GetStreamInfo(outputStream, outputStream.Length);
// Dispose of the streams // Dispose of the streams
@@ -331,9 +330,8 @@ namespace SabreTools.Helper.Tools
/// </summary> /// </summary>
/// <param name="file">Name of the file to be parsed</param> /// <param name="file">Name of the file to be parsed</param>
/// <param name="outDir">Output directory to write the file to, empty means the same directory as the input file</param> /// <param name="outDir">Output directory to write the file to, empty means the same directory as the input file</param>
/// <param name="logger">Logger object for console and file output</param>
/// <returns>True if the output file was created, false otherwise</returns> /// <returns>True if the output file was created, false otherwise</returns>
public static bool DetectSkipperAndTransform(string file, string outDir, Logger logger) public static bool DetectSkipperAndTransform(string file, string outDir)
{ {
// Create the output directory if it doesn't exist // Create the output directory if it doesn't exist
if (outDir != "" && !Directory.Exists(outDir)) if (outDir != "" && !Directory.Exists(outDir))
@@ -341,10 +339,10 @@ namespace SabreTools.Helper.Tools
Directory.CreateDirectory(outDir); Directory.CreateDirectory(outDir);
} }
logger.User("\nGetting skipper information for '" + file + "'"); Globals.Logger.User("\nGetting skipper information for '" + file + "'");
// Get the skipper rule that matches the file, if any // Get the skipper rule that matches the file, if any
SkipperRule rule = Skipper.GetMatchingRule(file, "", logger); SkipperRule rule = Skipper.GetMatchingRule(file, "");
// If we have an empty rule, return false // If we have an empty rule, return false
if (rule.Tests == null || rule.Tests.Count == 0 || rule.Operation != HeaderSkipOperation.None) if (rule.Tests == null || rule.Tests.Count == 0 || rule.Operation != HeaderSkipOperation.None)
@@ -352,7 +350,7 @@ namespace SabreTools.Helper.Tools
return false; return false;
} }
logger.User("File has a valid copier header"); Globals.Logger.User("File has a valid copier header");
// Get the header bytes from the file first // Get the header bytes from the file first
string hstr = string.Empty; string hstr = string.Empty;
@@ -368,7 +366,7 @@ namespace SabreTools.Helper.Tools
// Apply the rule to the file // Apply the rule to the file
string newfile = (outDir == "" ? Path.GetFullPath(file) + ".new" : Path.Combine(outDir, Path.GetFileName(file))); string newfile = (outDir == "" ? Path.GetFullPath(file) + ".new" : Path.Combine(outDir, Path.GetFileName(file)));
rule.TransformFile(file, newfile, logger); rule.TransformFile(file, newfile);
// If the output file doesn't exist, return false // If the output file doesn't exist, return false
if (!File.Exists(newfile)) if (!File.Exists(newfile))
@@ -377,8 +375,8 @@ namespace SabreTools.Helper.Tools
} }
// Now add the information to the database if it's not already there // Now add the information to the database if it's not already there
Rom rom = GetFileInfo(newfile, logger); Rom rom = GetFileInfo(newfile);
DatabaseTools.AddHeaderToDatabase(hstr, rom.SHA1, rule.SourceFile, logger); DatabaseTools.AddHeaderToDatabase(hstr, rom.SHA1, rule.SourceFile);
return true; return true;
} }
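DetectSkipperAndTransform captures the copier header as a hex string before stripping it, then records that string against the deheadered file's SHA-1 via AddHeaderToDatabase. A minimal sketch of the header-capture step; in the real code the byte count comes from the matched skipper rule, so it is a plain parameter here:

using System;
using System.IO;

public static class HeaderCapture
{
    // Read the first headerLength bytes of a file as a lower-case hex string.
    public static string ReadHeaderHex(string file, int headerLength)
    {
        byte[] buffer = new byte[headerLength];
        using (FileStream fs = File.OpenRead(file))
        {
            int read = fs.Read(buffer, 0, headerLength);
            Array.Resize(ref buffer, read);
        }

        return BitConverter.ToString(buffer).Replace("-", string.Empty).ToLowerInvariant();
    }
}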
@@ -387,11 +385,9 @@ namespace SabreTools.Helper.Tools
/// Retrieve a list of just files from inputs /// Retrieve a list of just files from inputs
/// </summary> /// </summary>
/// <param name="inputs">List of strings representing directories and files</param> /// <param name="inputs">List of strings representing directories and files</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logger object for file and console output</param>
/// <param name="appendparent">True if the parent name should be appended after the special character "¬", false otherwise</param> /// <param name="appendparent">True if the parent name should be appended after the special character "¬", false otherwise</param>
/// <returns>List of strings representing just files from the inputs</returns> /// <returns>List of strings representing just files from the inputs</returns>
public static List<string> GetOnlyFilesFromInputs(List<string> inputs, int maxDegreeOfParallelism, Logger logger, bool appendparent = false) public static List<string> GetOnlyFilesFromInputs(List<string> inputs, bool appendparent = false)
{ {
List<string> outputs = new List<string>(); List<string> outputs = new List<string>();
foreach (string input in inputs) foreach (string input in inputs)
@@ -410,11 +406,11 @@ namespace SabreTools.Helper.Tools
} }
catch (PathTooLongException) catch (PathTooLongException)
{ {
logger.Warning("The path for " + file + " was too long"); Globals.Logger.Warning("The path for " + file + " was too long");
} }
catch (Exception ex) catch (Exception ex)
{ {
logger.Error(ex.ToString()); Globals.Logger.Error(ex.ToString());
} }
} }
} }
@@ -426,11 +422,11 @@ namespace SabreTools.Helper.Tools
} }
catch (PathTooLongException) catch (PathTooLongException)
{ {
logger.Warning("The path for " + input + " was too long"); Globals.Logger.Warning("The path for " + input + " was too long");
} }
catch (Exception ex) catch (Exception ex)
{ {
logger.Error(ex.ToString()); Globals.Logger.Error(ex.ToString());
} }
} }
} }
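GetOnlyFilesFromInputs flattens a mixed list of files and directories into file paths only, downgrading PathTooLongException to a warning instead of aborting the walk. A standalone sketch of that flattening, with the logging swapped for Console output so the sample stands alone:

using System;
using System.Collections.Generic;
using System.IO;

public static class InputFlattener
{
    // Expand directories recursively; pass existing files through untouched.
    public static List<string> OnlyFiles(List<string> inputs)
    {
        List<string> outputs = new List<string>();

        foreach (string input in inputs)
        {
            if (Directory.Exists(input))
            {
                foreach (string file in Directory.EnumerateFiles(input, "*", SearchOption.AllDirectories))
                {
                    try
                    {
                        outputs.Add(Path.GetFullPath(file));
                    }
                    catch (PathTooLongException)
                    {
                        Console.WriteLine("The path for " + file + " was too long");
                    }
                }
            }
            else if (File.Exists(input))
            {
                try
                {
                    outputs.Add(Path.GetFullPath(input));
                }
                catch (PathTooLongException)
                {
                    Console.WriteLine("The path for " + input + " was too long");
                }
            }
        }

        return outputs;
    }
}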
@@ -442,16 +438,15 @@ namespace SabreTools.Helper.Tools
/// Get the XmlTextReader associated with a file, if possible /// Get the XmlTextReader associated with a file, if possible
/// </summary> /// </summary>
/// <param name="filename">Name of the file to be parsed</param> /// <param name="filename">Name of the file to be parsed</param>
/// <param name="logger">Logger object for console and file output</param>
/// <returns>The XmlTextReader representing the (possibly converted) file, null otherwise</returns> /// <returns>The XmlTextReader representing the (possibly converted) file, null otherwise</returns>
public static XmlReader GetXmlTextReader(string filename, Logger logger) public static XmlReader GetXmlTextReader(string filename)
{ {
logger.Verbose("Attempting to read file: \"" + filename + "\""); Globals.Logger.Verbose("Attempting to read file: \"" + filename + "\"");
// Check if file exists // Check if file exists
if (!File.Exists(filename)) if (!File.Exists(filename))
{ {
logger.Warning("File '" + filename + "' could not read from!"); Globals.Logger.Warning("File '" + filename + "' could not read from!");
return null; return null;
} }
@@ -472,9 +467,8 @@ namespace SabreTools.Helper.Tools
/// </summary> /// </summary>
/// <param name="file">Name of the file to be parsed</param> /// <param name="file">Name of the file to be parsed</param>
/// <param name="outDir">Output directory to write the file to, empty means the same directory as the input file</param> /// <param name="outDir">Output directory to write the file to, empty means the same directory as the input file</param>
/// <param name="logger">Logger object for console and file output</param>
/// <returns>True if a header was found and appended, false otherwise</returns> /// <returns>True if a header was found and appended, false otherwise</returns>
public static bool RestoreHeader(string file, string outDir, Logger logger) public static bool RestoreHeader(string file, string outDir)
{ {
// Create the output directory if it doesn't exist // Create the output directory if it doesn't exist
if (outDir != "" && !Directory.Exists(outDir)) if (outDir != "" && !Directory.Exists(outDir))
@@ -483,10 +477,10 @@ namespace SabreTools.Helper.Tools
} }
// First, get the SHA-1 hash of the file // First, get the SHA-1 hash of the file
Rom rom = GetFileInfo(file, logger); Rom rom = GetFileInfo(file);
// Retrieve a list of all related headers from the database // Retrieve a list of all related headers from the database
List<string> headers = DatabaseTools.RetrieveHeadersFromDatabase(rom.SHA1, logger); List<string> headers = DatabaseTools.RetrieveHeadersFromDatabase(rom.SHA1);
// If we have nothing retrieved, we return false // If we have nothing retrieved, we return false
if (headers.Count == 0) if (headers.Count == 0)
@@ -497,11 +491,11 @@ namespace SabreTools.Helper.Tools
// Now loop through and create the reheadered files, if possible // Now loop through and create the reheadered files, if possible
for (int i = 0; i < headers.Count; i++) for (int i = 0; i < headers.Count; i++)
{ {
logger.User("Creating reheadered file: " + Globals.Logger.User("Creating reheadered file: " +
(outDir == "" ? Path.GetFullPath(file) + ".new" : Path.Combine(outDir, Path.GetFileName(file))) + i); (outDir == "" ? Path.GetFullPath(file) + ".new" : Path.Combine(outDir, Path.GetFileName(file))) + i);
AppendBytesToFile(file, AppendBytesToFile(file,
(outDir == "" ? Path.GetFullPath(file) + ".new" : Path.Combine(outDir, Path.GetFileName(file))) + i, headers[i], string.Empty); (outDir == "" ? Path.GetFullPath(file) + ".new" : Path.Combine(outDir, Path.GetFileName(file))) + i, headers[i], string.Empty);
logger.User("Reheadered file created!"); Globals.Logger.User("Reheadered file created!");
} }
return true; return true;
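RestoreHeader emits one candidate file per stored header, appending the loop index to the output name and writing the header bytes ahead of the original contents. A sketch of that prepend step, assuming the header is stored as a hex string as in the capture sketch above:

using System;
using System.IO;

public static class Reheader
{
    // Write headerHex (decoded to bytes) followed by the original file's bytes.
    public static void PrependHeader(string inputFile, string outputFile, string headerHex)
    {
        byte[] header = new byte[headerHex.Length / 2];
        for (int i = 0; i < header.Length; i++)
        {
            header[i] = Convert.ToByte(headerHex.Substring(i * 2, 2), 16);
        }

        using (FileStream output = File.Create(outputFile))
        using (FileStream input = File.OpenRead(inputFile))
        {
            output.Write(header, 0, header.Length);
            input.CopyTo(output);
        }
    }
}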

View File

@@ -113,12 +113,12 @@ namespace SabreTools
string basePath = Path.GetFullPath(path); string basePath = Path.GetFullPath(path);
bool success = datdata.PopulateFromDir(basePath, omitFromScan, removeDateFromAutomaticName, parseArchivesAsFiles, enableGzip, bool success = datdata.PopulateFromDir(basePath, omitFromScan, removeDateFromAutomaticName, parseArchivesAsFiles, enableGzip,
addBlankFilesForEmptyFolder, addFileDates, tempDir, copyFiles, headerToCheckAgainst, _maxDegreeOfParallelism, _logger); addBlankFilesForEmptyFolder, addFileDates, tempDir, copyFiles, headerToCheckAgainst);
// If it was a success, write the DAT out // If it was a success, write the DAT out
if (success) if (success)
{ {
datdata.WriteToFile(outDir, _maxDegreeOfParallelism, _logger); datdata.WriteToFile(outDir);
} }
// Otherwise, show the help // Otherwise, show the help
@@ -142,13 +142,13 @@ namespace SabreTools
{ {
if (File.Exists(input)) if (File.Exists(input))
{ {
FileTools.DetectSkipperAndTransform(input, outDir, _logger); FileTools.DetectSkipperAndTransform(input, outDir);
} }
else if (Directory.Exists(input)) else if (Directory.Exists(input))
{ {
foreach (string sub in Directory.EnumerateFiles(input, "*", SearchOption.AllDirectories)) foreach (string sub in Directory.EnumerateFiles(input, "*", SearchOption.AllDirectories))
{ {
FileTools.DetectSkipperAndTransform(sub, outDir, _logger); FileTools.DetectSkipperAndTransform(sub, outDir);
} }
} }
} }
@@ -169,22 +169,22 @@ namespace SabreTools
if (File.Exists(input)) if (File.Exists(input))
{ {
DatFile datFile = new DatFile(); DatFile datFile = new DatFile();
datFile.Parse(Path.GetFullPath(input), 0, 0, _logger); datFile.Parse(Path.GetFullPath(input), 0, 0);
datFile.SplitByExt(outDir, Path.GetDirectoryName(input), exta, extb, _maxDegreeOfParallelism, _logger); datFile.SplitByExt(outDir, Path.GetDirectoryName(input), exta, extb);
} }
else if (Directory.Exists(input)) else if (Directory.Exists(input))
{ {
foreach (string file in Directory.EnumerateFiles(input, "*", SearchOption.AllDirectories)) foreach (string file in Directory.EnumerateFiles(input, "*", SearchOption.AllDirectories))
{ {
DatFile datFile = new DatFile(); DatFile datFile = new DatFile();
datFile.Parse(Path.GetFullPath(file), 0, 0, _logger); datFile.Parse(Path.GetFullPath(file), 0, 0);
datFile.SplitByExt(outDir, (input.EndsWith(Path.DirectorySeparatorChar.ToString()) ? input : input + Path.DirectorySeparatorChar), datFile.SplitByExt(outDir, (input.EndsWith(Path.DirectorySeparatorChar.ToString()) ? input : input + Path.DirectorySeparatorChar),
exta, extb, _maxDegreeOfParallelism, _logger); exta, extb);
} }
} }
else else
{ {
_logger.Error(input + " is not a valid file or folder!"); Globals.Logger.Error(input + " is not a valid file or folder!");
Console.WriteLine(); Console.WriteLine();
_help.OutputIndividualFeature("Extension Split"); _help.OutputIndividualFeature("Extension Split");
return; return;
@@ -205,22 +205,21 @@ namespace SabreTools
if (File.Exists(input)) if (File.Exists(input))
{ {
DatFile datFile = new DatFile(); DatFile datFile = new DatFile();
datFile.Parse(Path.GetFullPath(input), 0, 0, _logger); datFile.Parse(Path.GetFullPath(input), 0, 0);
datFile.SplitByHash(outDir, Path.GetDirectoryName(input), _maxDegreeOfParallelism, _logger); datFile.SplitByHash(outDir, Path.GetDirectoryName(input));
} }
else if (Directory.Exists(input)) else if (Directory.Exists(input))
{ {
foreach (string file in Directory.EnumerateFiles(input, "*", SearchOption.AllDirectories)) foreach (string file in Directory.EnumerateFiles(input, "*", SearchOption.AllDirectories))
{ {
DatFile datFile = new DatFile(); DatFile datFile = new DatFile();
datFile.Parse(Path.GetFullPath(file), 0, 0, _logger); datFile.Parse(Path.GetFullPath(file), 0, 0);
datFile.SplitByHash(outDir, (input.EndsWith(Path.DirectorySeparatorChar.ToString()) ? input : input + Path.DirectorySeparatorChar), datFile.SplitByHash(outDir, (input.EndsWith(Path.DirectorySeparatorChar.ToString()) ? input : input + Path.DirectorySeparatorChar));
_maxDegreeOfParallelism, _logger);
} }
} }
else else
{ {
_logger.Error(input + " is not a valid file or folder!"); Globals.Logger.Error(input + " is not a valid file or folder!");
Console.WriteLine(); Console.WriteLine();
_help.OutputIndividualFeature("Hash Split"); _help.OutputIndividualFeature("Hash Split");
return; return;
@@ -239,13 +238,13 @@ namespace SabreTools
{ {
if (File.Exists(input)) if (File.Exists(input))
{ {
FileTools.RestoreHeader(input, outDir, _logger); FileTools.RestoreHeader(input, outDir);
} }
else if (Directory.Exists(input)) else if (Directory.Exists(input))
{ {
foreach (string sub in Directory.EnumerateFiles(input, "*", SearchOption.AllDirectories)) foreach (string sub in Directory.EnumerateFiles(input, "*", SearchOption.AllDirectories))
{ {
FileTools.RestoreHeader(sub, outDir, _logger); FileTools.RestoreHeader(sub, outDir);
} }
} }
} }
@@ -266,22 +265,22 @@ namespace SabreTools
if (File.Exists(input)) if (File.Exists(input))
{ {
DatFile datFile = new DatFile(); DatFile datFile = new DatFile();
datFile.Parse(Path.GetFullPath(input), 0, 0, _logger, keep: true); datFile.Parse(Path.GetFullPath(input), 0, 0, keep: true);
datFile.SplitByLevel(outDir, Path.GetDirectoryName(input), shortname, basedat, _maxDegreeOfParallelism, _logger); datFile.SplitByLevel(outDir, Path.GetDirectoryName(input), shortname, basedat);
} }
else if (Directory.Exists(input)) else if (Directory.Exists(input))
{ {
foreach (string file in Directory.EnumerateFiles(input, "*", SearchOption.AllDirectories)) foreach (string file in Directory.EnumerateFiles(input, "*", SearchOption.AllDirectories))
{ {
DatFile datFile = new DatFile(); DatFile datFile = new DatFile();
datFile.Parse(Path.GetFullPath(file), 0, 0, _logger, keep: true); datFile.Parse(Path.GetFullPath(file), 0, 0, keep: true);
datFile.SplitByLevel(outDir, (input.EndsWith(Path.DirectorySeparatorChar.ToString()) ? input : input + Path.DirectorySeparatorChar), datFile.SplitByLevel(outDir, (input.EndsWith(Path.DirectorySeparatorChar.ToString()) ? input : input + Path.DirectorySeparatorChar),
shortname, basedat, _maxDegreeOfParallelism, _logger); shortname, basedat);
} }
} }
else else
{ {
_logger.Error(input + " is not a valid file or folder!"); Globals.Logger.Error(input + " is not a valid file or folder!");
Console.WriteLine(); Console.WriteLine();
_help.OutputIndividualFeature("Level Split"); _help.OutputIndividualFeature("Level Split");
return; return;
@@ -317,19 +316,18 @@ namespace SabreTools
ArchiveScanLevel asl = ArchiveTools.GetArchiveScanLevelFromNumbers(sevenzip, gz, rar, zip); ArchiveScanLevel asl = ArchiveTools.GetArchiveScanLevelFromNumbers(sevenzip, gz, rar, zip);
DateTime start = DateTime.Now; DateTime start = DateTime.Now;
_logger.User("Populating internal DAT..."); Globals.Logger.User("Populating internal DAT...");
// Add all of the input DATs into one huge internal DAT // Add all of the input DATs into one huge internal DAT
DatFile datdata = new DatFile(); DatFile datdata = new DatFile();
foreach (string datfile in datfiles) foreach (string datfile in datfiles)
{ {
datdata.Parse(datfile, 99, 99, new Filter(), splitType, false /* trim */, false /* single */, null /* root */, datdata.Parse(datfile, 99, 99, new Filter(), splitType, false /* trim */, false /* single */, null /* root */, keep: true, useTags: true);
_maxDegreeOfParallelism, _logger, keep: true, useTags: true);
} }
_logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff")); Globals.Logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
datdata.RebuildGeneric(inputs, outDir, tempDir, quickScan, date, delete, inverse, outputFormat, romba, asl, datdata.RebuildGeneric(inputs, outDir, tempDir, quickScan, date, delete, inverse, outputFormat, romba, asl,
updateDat, headerToCheckAgainst, _maxDegreeOfParallelism, _logger); updateDat, headerToCheckAgainst);
} }
/// <summary> /// <summary>
@@ -351,19 +349,18 @@ namespace SabreTools
bool inverse, OutputFormat outputFormat, bool romba, bool updateDat, string headerToCheckAgainst, SplitType splitType) bool inverse, OutputFormat outputFormat, bool romba, bool updateDat, string headerToCheckAgainst, SplitType splitType)
{ {
DateTime start = DateTime.Now; DateTime start = DateTime.Now;
_logger.User("Populating internal DAT..."); Globals.Logger.User("Populating internal DAT...");
// Add all of the input DATs into one huge internal DAT // Add all of the input DATs into one huge internal DAT
DatFile datdata = new DatFile(); DatFile datdata = new DatFile();
foreach (string datfile in datfiles) foreach (string datfile in datfiles)
{ {
datdata.Parse(datfile, 99, 99, new Filter(), splitType, false /* trim */, false /* single */, null /* root */, datdata.Parse(datfile, 99, 99, new Filter(), splitType, false /* trim */, false /* single */, null /* root */, keep: true, useTags: true);
_maxDegreeOfParallelism, _logger, keep: true, useTags: true);
} }
_logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff")); Globals.Logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
datdata.RebuildDepot(inputs, outDir, tempDir, date, delete, inverse, outputFormat, romba, datdata.RebuildDepot(inputs, outDir, tempDir, date, delete, inverse, outputFormat, romba,
updateDat, headerToCheckAgainst, _maxDegreeOfParallelism, _logger); updateDat, headerToCheckAgainst);
} }
/// <summary> /// <summary>
@@ -379,7 +376,7 @@ namespace SabreTools
private static void InitStats(List<string> inputs, string filename, string outDir, bool single, bool baddumpCol, bool nodumpCol, private static void InitStats(List<string> inputs, string filename, string outDir, bool single, bool baddumpCol, bool nodumpCol,
StatDatFormat statDatFormat) StatDatFormat statDatFormat)
{ {
DatFile.OutputStats(inputs, filename, outDir, single, baddumpCol, nodumpCol, statDatFormat, _maxDegreeOfParallelism, _logger); DatFile.OutputStats(inputs, filename, outDir, single, baddumpCol, nodumpCol, statDatFormat);
} }
/// <summary> /// <summary>
@@ -395,22 +392,21 @@ namespace SabreTools
if (File.Exists(input)) if (File.Exists(input))
{ {
DatFile datFile = new DatFile(); DatFile datFile = new DatFile();
datFile.Parse(Path.GetFullPath(input), 0, 0, _logger); datFile.Parse(Path.GetFullPath(input), 0, 0);
datFile.SplitByType(outDir, Path.GetFullPath(Path.GetDirectoryName(input)), _maxDegreeOfParallelism, _logger); datFile.SplitByType(outDir, Path.GetFullPath(Path.GetDirectoryName(input)));
} }
else if (Directory.Exists(input)) else if (Directory.Exists(input))
{ {
foreach (string file in Directory.EnumerateFiles(input, "*", SearchOption.AllDirectories)) foreach (string file in Directory.EnumerateFiles(input, "*", SearchOption.AllDirectories))
{ {
DatFile datFile = new DatFile(); DatFile datFile = new DatFile();
datFile.Parse(Path.GetFullPath(file), 0, 0, _logger); datFile.Parse(Path.GetFullPath(file), 0, 0);
datFile.SplitByType(outDir, Path.GetFullPath((input.EndsWith(Path.DirectorySeparatorChar.ToString()) ? input : input + Path.DirectorySeparatorChar)), datFile.SplitByType(outDir, Path.GetFullPath((input.EndsWith(Path.DirectorySeparatorChar.ToString()) ? input : input + Path.DirectorySeparatorChar)));
_maxDegreeOfParallelism, _logger);
} }
} }
else else
{ {
_logger.Error(input + " is not a valid file or folder!"); Globals.Logger.Error(input + " is not a valid file or folder!");
Console.WriteLine(); Console.WriteLine();
_help.OutputIndividualFeature("Type Split"); _help.OutputIndividualFeature("Type Split");
return; return;
@@ -547,7 +543,7 @@ namespace SabreTools
fm = ForceMerging.Full; fm = ForceMerging.Full;
break; break;
default: default:
_logger.Warning(forcemerge + " is not a valid merge flag"); Globals.Logger.Warning(forcemerge + " is not a valid merge flag");
break; break;
} }
} }
@@ -570,7 +566,7 @@ namespace SabreTools
fn = ForceNodump.Ignore; fn = ForceNodump.Ignore;
break; break;
default: default:
_logger.Warning(forcend + " is not a valid nodump flag"); Globals.Logger.Warning(forcend + " is not a valid nodump flag");
break; break;
} }
} }
@@ -590,7 +586,7 @@ namespace SabreTools
fp = ForcePacking.Unzip; fp = ForcePacking.Unzip;
break; break;
default: default:
_logger.Warning(forcepack + " is not a valid packing flag"); Globals.Logger.Warning(forcepack + " is not a valid packing flag");
break; break;
} }
} }
@@ -697,7 +693,7 @@ namespace SabreTools
}; };
userInputDat.DetermineUpdateType(inputs, outDir, merge, diffMode, inplace, skip, bare, clean, descAsName, userInputDat.DetermineUpdateType(inputs, outDir, merge, diffMode, inplace, skip, bare, clean, descAsName,
filter, splitType, trim, single, root, _maxDegreeOfParallelism, _logger); filter, splitType, trim, single, root);
} }
/// <summary> /// <summary>
@@ -717,18 +713,17 @@ namespace SabreTools
ArchiveScanLevel asl = ArchiveTools.GetArchiveScanLevelFromNumbers(1, 1, 1, 1); ArchiveScanLevel asl = ArchiveTools.GetArchiveScanLevelFromNumbers(1, 1, 1, 1);
DateTime start = DateTime.Now; DateTime start = DateTime.Now;
_logger.User("Populating internal DAT..."); Globals.Logger.User("Populating internal DAT...");
// Add all of the input DATs into one huge internal DAT // Add all of the input DATs into one huge internal DAT
DatFile datdata = new DatFile(); DatFile datdata = new DatFile();
foreach (string datfile in datfiles) foreach (string datfile in datfiles)
{ {
datdata.Parse(datfile, 99, 99, new Filter(), splitType, false /* trim */, false /* single */, null /* root */, datdata.Parse(datfile, 99, 99, new Filter(), splitType, false /* trim */, false /* single */, null /* root */, keep: true, useTags: true);
_maxDegreeOfParallelism, _logger, keep: true, useTags: true);
} }
_logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff")); Globals.Logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
datdata.VerifyGeneric(inputs, tempDir, hashOnly, quickScan, headerToCheckAgainst, _maxDegreeOfParallelism, _logger); datdata.VerifyGeneric(inputs, tempDir, hashOnly, quickScan, headerToCheckAgainst);
} }
/// <summary> /// <summary>
@@ -743,18 +738,17 @@ namespace SabreTools
string headerToCheckAgainst, SplitType splitType) string headerToCheckAgainst, SplitType splitType)
{ {
DateTime start = DateTime.Now; DateTime start = DateTime.Now;
_logger.User("Populating internal DAT..."); Globals.Logger.User("Populating internal DAT...");
// Add all of the input DATs into one huge internal DAT // Add all of the input DATs into one huge internal DAT
DatFile datdata = new DatFile(); DatFile datdata = new DatFile();
foreach (string datfile in datfiles) foreach (string datfile in datfiles)
{ {
datdata.Parse(datfile, 99, 99, new Filter(), splitType, false /* trim */, false /* single */, null /* root */, datdata.Parse(datfile, 99, 99, new Filter(), splitType, false /* trim */, false /* single */, null /* root */, keep: true, useTags: true);
_maxDegreeOfParallelism, _logger, keep: true, useTags: true);
} }
_logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff")); Globals.Logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
datdata.VerifyDepot(inputs, tempDir, headerToCheckAgainst, _maxDegreeOfParallelism, _logger); datdata.VerifyDepot(inputs, tempDir, headerToCheckAgainst);
} }
#endregion #endregion

View File

@@ -26,9 +26,7 @@ namespace SabreTools
public partial class SabreTools public partial class SabreTools
{ {
// Private required variables // Private required variables
private static Logger _logger;
private static Help _help; private static Help _help;
private static int _maxDegreeOfParallelism;
/// <summary> /// <summary>
/// Start menu or use supplied parameters /// Start menu or use supplied parameters
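For orientation, a minimal sketch of the Globals container implied by the removals above, assuming only the two members this diff actually touches (Logger and MaxDegreeOfParallelism); the real class in the helper library may hold more state and different defaults:

    // Sketch only: inferred from the usages in this commit, not the actual SabreTools.Helper source
    public static class Globals
    {
        // Shared logger, assigned once at startup (see Main below: Globals.Logger = new Logger(...))
        public static Logger Logger { get; set; }

        // Shared degree of parallelism; 4 here mirrors the fallback used by the -mt handler,
        // the real default is an assumption
        public static int MaxDegreeOfParallelism { get; set; } = 4;
    }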
@@ -37,7 +35,7 @@ namespace SabreTools
public static void Main(string[] args) public static void Main(string[] args)
{ {
// Perform initial setup and verification // Perform initial setup and verification
_logger = new Logger(true, "sabretools.log"); Globals.Logger = new Logger(true, "sabretools.log");
// Create a new Help object for this program // Create a new Help object for this program
_help = RetrieveHelp(); _help = RetrieveHelp();
@@ -65,7 +63,7 @@ namespace SabreTools
if ((new List<string>(args)).Contains("--credits")) if ((new List<string>(args)).Contains("--credits"))
{ {
_help.OutputCredits(); _help.OutputCredits();
_logger.Close(); Globals.Logger.Close();
return; return;
} }
@@ -73,7 +71,7 @@ namespace SabreTools
if (args.Length == 0) if (args.Length == 0)
{ {
_help.OutputGenericHelp(); _help.OutputGenericHelp();
_logger.Close(); Globals.Logger.Close();
return; return;
} }
@@ -173,9 +171,9 @@ namespace SabreTools
// Verify that the flag is valid // Verify that the flag is valid
if (!_help.TopLevelFlag(feature)) if (!_help.TopLevelFlag(feature))
{ {
_logger.User("\"" + feature + "\" is not valid feature flag"); Globals.Logger.User("\"" + feature + "\" is not valid feature flag");
_help.OutputIndividualFeature(feature); _help.OutputIndividualFeature(feature);
_logger.Close(); Globals.Logger.Close();
return; return;
} }
@@ -193,7 +191,7 @@ namespace SabreTools
{ {
_help.OutputGenericHelp(); _help.OutputGenericHelp();
} }
_logger.Close(); Globals.Logger.Close();
return; return;
case "-d": case "-d":
case "--d2d": case "--d2d":
@@ -255,7 +253,7 @@ namespace SabreTools
// If we don't have a valid flag, feed it through the help system // If we don't have a valid flag, feed it through the help system
default: default:
_help.OutputIndividualFeature(feature); _help.OutputIndividualFeature(feature);
_logger.Close(); Globals.Logger.Close();
return; return;
} }
@@ -268,9 +266,9 @@ namespace SabreTools
// Verify that the current flag is proper for the feature // Verify that the current flag is proper for the feature
if (!_help[feature].ValidateInput(args[i])) if (!_help[feature].ValidateInput(args[i]))
{ {
_logger.Error("Invalid input detected: " + args[i]); Globals.Logger.Error("Invalid input detected: " + args[i]);
_help.OutputIndividualFeature(feature); _help.OutputIndividualFeature(feature);
_logger.Close(); Globals.Logger.Close();
return; return;
} }
@@ -643,8 +641,8 @@ namespace SabreTools
case "--dat": case "--dat":
if (!File.Exists(args[++i])) if (!File.Exists(args[++i]))
{ {
_logger.Error("DAT must be a valid file: " + args[i]); Globals.Logger.Error("DAT must be a valid file: " + args[i]);
_logger.Close(); Globals.Logger.Close();
return; return;
} }
datfiles.Add(args[i]); datfiles.Add(args[i]);
@@ -687,7 +685,7 @@ namespace SabreTools
break; break;
case "-gt": case "-gt":
case "--game-type": case "--game-type":
filter.MachineTypes |= Filter.GetMachineTypeFromString(args[++i], _logger); filter.MachineTypes |= Filter.GetMachineTypeFromString(args[++i]);
break; break;
case "-gz": case "-gz":
case "--gz": case "--gz":
@@ -706,7 +704,7 @@ namespace SabreTools
break; break;
case "-is": case "-is":
case "--status": case "--status":
filter.ItemStatuses |= Filter.GetStatusFromString(args[++i], _logger); filter.ItemStatuses |= Filter.GetStatusFromString(args[++i]);
break; break;
case "-md5": case "-md5":
case "--md5": case "--md5":
@@ -714,7 +712,14 @@ namespace SabreTools
break; break;
case "-mt": case "-mt":
case "--mt": case "--mt":
Int32.TryParse(args[++i], out _maxDegreeOfParallelism); if (Int32.TryParse(args[++i], out int mdop))
{
Globals.MaxDegreeOfParallelism = mdop;
}
else
{
Globals.MaxDegreeOfParallelism = 4;
}
break; break;
case "-n": case "-n":
case "--name": case "--name":
@@ -730,11 +735,11 @@ namespace SabreTools
break; break;
case "-ngt": case "-ngt":
case "--not-gtype": case "--not-gtype":
filter.NotMachineTypes |= Filter.GetMachineTypeFromString(args[++i], _logger); filter.NotMachineTypes |= Filter.GetMachineTypeFromString(args[++i]);
break; break;
case "-nis": case "-nis":
case "--not-status": case "--not-status":
filter.NotItemStatuses |= Filter.GetStatusFromString(args[++i], _logger); filter.NotItemStatuses |= Filter.GetStatusFromString(args[++i]);
break; break;
case "-nmd5": case "-nmd5":
case "--not-md5": case "--not-md5":
@@ -911,8 +916,8 @@ namespace SabreTools
case "--dat": case "--dat":
if (!File.Exists(split[1])) if (!File.Exists(split[1]))
{ {
_logger.Error("DAT must be a valid file: " + split[1]); Globals.Logger.Error("DAT must be a valid file: " + split[1]);
_logger.Close(); Globals.Logger.Close();
return; return;
} }
datfiles.Add(split[1]); datfiles.Add(split[1]);
@@ -955,7 +960,7 @@ namespace SabreTools
break; break;
case "-gt": case "-gt":
case "--game-type": case "--game-type":
filter.MachineTypes |= Filter.GetMachineTypeFromString(split[1], _logger); filter.MachineTypes |= Filter.GetMachineTypeFromString(split[1]);
break; break;
case "-gz": case "-gz":
case "--gz": case "--gz":
@@ -974,7 +979,7 @@ namespace SabreTools
break; break;
case "-is": case "-is":
case "--status": case "--status":
filter.ItemStatuses |= Filter.GetStatusFromString(split[1], _logger); filter.ItemStatuses |= Filter.GetStatusFromString(split[1]);
break; break;
case "-md5": case "-md5":
case "--md5": case "--md5":
@@ -982,7 +987,14 @@ namespace SabreTools
break; break;
case "-mt": case "-mt":
case "--mt": case "--mt":
Int32.TryParse(split[1], out _maxDegreeOfParallelism); if (Int32.TryParse(split[1], out int odop))
{
Globals.MaxDegreeOfParallelism = odop;
}
else
{
Globals.MaxDegreeOfParallelism = 4;
}
break; break;
case "-n": case "-n":
case "--name": case "--name":
@@ -998,11 +1010,11 @@ namespace SabreTools
break; break;
case "-ngt": case "-ngt":
case "--not-gtype": case "--not-gtype":
filter.NotMachineTypes |= Filter.GetMachineTypeFromString(split[1], _logger); filter.NotMachineTypes |= Filter.GetMachineTypeFromString(split[1]);
break; break;
case "-nis": case "-nis":
case "--not-status": case "--not-status":
filter.NotItemStatuses |= Filter.GetStatusFromString(split[1], _logger); filter.NotItemStatuses |= Filter.GetStatusFromString(split[1]);
break; break;
case "-nmd5": case "-nmd5":
case "--not-md5": case "--not-md5":
@@ -1130,8 +1142,8 @@ namespace SabreTools
} }
else else
{ {
_logger.Error("Invalid input detected: " + args[i]); Globals.Logger.Error("Invalid input detected: " + args[i]);
_logger.Close(); Globals.Logger.Close();
return; return;
} }
break; break;
@@ -1143,8 +1155,8 @@ namespace SabreTools
} }
else else
{ {
_logger.Error("Invalid input detected: " + args[i]); Globals.Logger.Error("Invalid input detected: " + args[i]);
_logger.Close(); Globals.Logger.Close();
return; return;
} }
break; break;
@@ -1154,18 +1166,18 @@ namespace SabreTools
// If none of the feature flags is enabled, show the help screen // If none of the feature flags is enabled, show the help screen
if (!(datFromDir | extract | restore | sort | sortDepot | splitByExt | splitByHash | splitByLevel | splitByType | stats | update | verify | verifyDepot)) if (!(datFromDir | extract | restore | sort | sortDepot | splitByExt | splitByHash | splitByLevel | splitByType | stats | update | verify | verifyDepot))
{ {
_logger.Error("At least one feature switch must be enabled"); Globals.Logger.Error("At least one feature switch must be enabled");
_help.OutputGenericHelp(); _help.OutputGenericHelp();
_logger.Close(); Globals.Logger.Close();
return; return;
} }
// If more than one switch is enabled, show the help screen // If more than one switch is enabled, show the help screen
if (!(datFromDir ^ extract ^ restore ^ sort ^ sortDepot ^ splitByExt ^ splitByHash ^ splitByLevel ^ splitByType ^ stats ^ update ^ verify ^ verifyDepot)) if (!(datFromDir ^ extract ^ restore ^ sort ^ sortDepot ^ splitByExt ^ splitByHash ^ splitByLevel ^ splitByType ^ stats ^ update ^ verify ^ verifyDepot))
{ {
_logger.Error("Only one feature switch is allowed at a time"); Globals.Logger.Error("Only one feature switch is allowed at a time");
_help.OutputGenericHelp(); _help.OutputGenericHelp();
_logger.Close(); Globals.Logger.Close();
return; return;
} }
@@ -1173,9 +1185,9 @@ namespace SabreTools
if (inputs.Count == 0 if (inputs.Count == 0
&& (datFromDir || extract || restore || splitByExt || splitByHash || splitByLevel || splitByType || stats || update || verify || verifyDepot)) && (datFromDir || extract || restore || splitByExt || splitByHash || splitByLevel || splitByType || stats || update || verify || verifyDepot))
{ {
_logger.Error("This feature requires at least one input"); Globals.Logger.Error("This feature requires at least one input");
_help.OutputIndividualFeature(feature); _help.OutputIndividualFeature(feature);
_logger.Close(); Globals.Logger.Close();
return; return;
} }
@@ -1272,7 +1284,7 @@ namespace SabreTools
_help.OutputGenericHelp(); _help.OutputGenericHelp();
} }
_logger.Close(); Globals.Logger.Close();
return; return;
} }
} }
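One note on the exit paths above: every early return calls Globals.Logger.Close() explicitly. A try/finally around the argument handling (illustrative only, not what this commit does) would guarantee the close on all paths, including exceptions:

    // Illustrative alternative, not the commit's approach
    try
    {
        // ... feature dispatch and argument handling ...
    }
    finally
    {
        Globals.Logger.Close();
    }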