[DatFile] Proliferate multithreading a little more

This commit is contained in:
Matt Nadareski
2017-03-01 20:28:32 -08:00
parent 300f608d95
commit 98bbbfa5d9
8 changed files with 119 additions and 103 deletions

View File

@@ -405,7 +405,7 @@ namespace RombaSharp
// First get a list of SHA-1's from the input DATs
DatFile datroot = new DatFile { Type = "SuperDAT", };
datroot.PopulateFromDir(_dats, Hash.SHA256 & Hash.SHA384 & Hash.SHA512, false, false, false, false, false, _tmpdir, false, null, 4, _logger);
datroot.BucketBy(SortedBy.SHA1, false /* mergeroms */, _logger);
datroot.BucketBy(SortedBy.SHA1, false /* mergeroms */, 4 /* maxDegreeOfParallelism */, _logger);
// Create a List of dat hashes in the database (SHA-1)
List<string> databaseDats = new List<string>();
@@ -435,7 +435,7 @@ namespace RombaSharp
unneeded.Add(hash);
}
}
datroot.BucketBy(SortedBy.Game, false /* mergeroms */, _logger, norename: true);
datroot.BucketBy(SortedBy.Game, false /* mergeroms */, 4 /* maxDegreeOfParallelism */, _logger, norename: true);
_logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
@@ -619,7 +619,7 @@ namespace RombaSharp
// Now rescan the depot itself
DatFile depot = new DatFile();
depot.PopulateFromDir(depotname, Hash.SHA256 & Hash.SHA384 & Hash.SHA512, false, false, true, false, false, _tmpdir, false, null, _workers, _logger);
depot.BucketBy(SortedBy.SHA1, false /* mergeroms */, _logger);
depot.BucketBy(SortedBy.SHA1, false /* mergeroms */, 4 /* maxDegreeOfParallelism */, _logger);
// Set the base queries to use
string crcquery = "INSERT OR IGNORE INTO crc (crc) VALUES";

View File

@@ -243,9 +243,10 @@ namespace SabreTools.Helper.Dats
/// Check if a DAT contains the given rom
/// </summary>
/// <param name="datdata">Dat to match against</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logger object for console and/or file output</param>
/// <returns>True if it contains the rom, false otherwise</returns>
public bool HasDuplicates(DatFile datdata, Logger logger)
public bool HasDuplicates(DatFile datdata, int maxDegreeOfParallelism, Logger logger)
{
// Check for an empty rom list first
if (datdata.Count == 0)
@@ -254,7 +255,7 @@ namespace SabreTools.Helper.Dats
}
// We want to get the proper key for the DatItem
string key = SortAndGetKey(datdata, logger);
string key = SortAndGetKey(datdata, maxDegreeOfParallelism, logger);
// If the key doesn't exist, return the empty list
if (!datdata.ContainsKey(key))
@@ -280,10 +281,11 @@ namespace SabreTools.Helper.Dats
/// List all duplicates found in a DAT based on a rom
/// </summary>
/// <param name="datdata">Dat to match against</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logger object for console and/or file output</param>
/// <param name="remove">True to remove matched roms from the input, false otherwise (default)</param>
/// <returns>List of matched DatItem objects</returns>
public List<DatItem> GetDuplicates(DatFile datdata, Logger logger, bool remove = false)
public List<DatItem> GetDuplicates(DatFile datdata, int maxDegreeOfParallelism, Logger logger, bool remove = false)
{
List<DatItem> output = new List<DatItem>();
@@ -294,7 +296,7 @@ namespace SabreTools.Helper.Dats
}
// We want to get the proper key for the DatItem
string key = SortAndGetKey(datdata, logger);
string key = SortAndGetKey(datdata, maxDegreeOfParallelism, logger);
// If the key doesn't exist, return the empty list
if (!datdata.ContainsKey(key))
@@ -333,7 +335,7 @@ namespace SabreTools.Helper.Dats
/// <param name="datdata">Dat to match against</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logger object for console and/or file output</param>
/// <returns>Key to try to use</returns>
private string SortAndGetKey(DatFile datdata, Logger logger)
private string SortAndGetKey(DatFile datdata, int maxDegreeOfParallelism, Logger logger)
{
string key = null;
@@ -345,12 +347,12 @@ namespace SabreTools.Helper.Dats
if (_itemType == ItemType.Rom)
{
key = ((Rom)this).SHA512;
datdata.BucketBy(SortedBy.SHA512, false /* mergeroms */, logger);
datdata.BucketBy(SortedBy.SHA512, false /* mergeroms */, maxDegreeOfParallelism, logger);
}
else
{
key = ((Disk)this).SHA512;
datdata.BucketBy(SortedBy.SHA512, false /* mergeroms */, logger);
datdata.BucketBy(SortedBy.SHA512, false /* mergeroms */, maxDegreeOfParallelism, logger);
}
}
@@ -362,12 +364,12 @@ namespace SabreTools.Helper.Dats
if (_itemType == ItemType.Rom)
{
key = ((Rom)this).SHA384;
datdata.BucketBy(SortedBy.SHA384, false /* mergeroms */, logger);
datdata.BucketBy(SortedBy.SHA384, false /* mergeroms */, maxDegreeOfParallelism, logger);
}
else
{
key = ((Disk)this).SHA384;
datdata.BucketBy(SortedBy.SHA384, false /* mergeroms */, logger);
datdata.BucketBy(SortedBy.SHA384, false /* mergeroms */, maxDegreeOfParallelism, logger);
}
}
@@ -379,12 +381,12 @@ namespace SabreTools.Helper.Dats
if (_itemType == ItemType.Rom)
{
key = ((Rom)this).SHA256;
datdata.BucketBy(SortedBy.SHA256, false /* mergeroms */, logger);
datdata.BucketBy(SortedBy.SHA256, false /* mergeroms */, maxDegreeOfParallelism, logger);
}
else
{
key = ((Disk)this).SHA256;
datdata.BucketBy(SortedBy.SHA256, false /* mergeroms */, logger);
datdata.BucketBy(SortedBy.SHA256, false /* mergeroms */, maxDegreeOfParallelism, logger);
}
}
@@ -396,12 +398,12 @@ namespace SabreTools.Helper.Dats
if (_itemType == ItemType.Rom)
{
key = ((Rom)this).SHA1;
datdata.BucketBy(SortedBy.SHA1, false /* mergeroms */, logger);
datdata.BucketBy(SortedBy.SHA1, false /* mergeroms */, maxDegreeOfParallelism, logger);
}
else
{
key = ((Disk)this).SHA1;
datdata.BucketBy(SortedBy.SHA1, false /* mergeroms */, logger);
datdata.BucketBy(SortedBy.SHA1, false /* mergeroms */, maxDegreeOfParallelism, logger);
}
}
@@ -413,12 +415,12 @@ namespace SabreTools.Helper.Dats
if (_itemType == ItemType.Rom)
{
key = ((Rom)this).MD5;
datdata.BucketBy(SortedBy.MD5, false /* mergeroms */, logger);
datdata.BucketBy(SortedBy.MD5, false /* mergeroms */, maxDegreeOfParallelism, logger);
}
else
{
key = ((Disk)this).MD5;
datdata.BucketBy(SortedBy.MD5, false /* mergeroms */, logger);
datdata.BucketBy(SortedBy.MD5, false /* mergeroms */, maxDegreeOfParallelism, logger);
}
}
@@ -426,21 +428,21 @@ namespace SabreTools.Helper.Dats
else if (_itemType == ItemType.Disk)
{
key = ((Disk)this).MD5;
datdata.BucketBy(SortedBy.MD5, false /* mergeroms */, logger);
datdata.BucketBy(SortedBy.MD5, false /* mergeroms */, maxDegreeOfParallelism, logger);
}
// If we've gotten here and we have a Rom, sort by CRC
else if (_itemType == ItemType.Rom)
{
key = ((Rom)this).CRC;
datdata.BucketBy(SortedBy.CRC, false /* mergeroms */, logger);
datdata.BucketBy(SortedBy.CRC, false /* mergeroms */, maxDegreeOfParallelism, logger);
}
// Otherwise, we use -1 as the key
else
{
key = "-1";
datdata.BucketBy(SortedBy.Size, false /* mergeroms */, logger);
datdata.BucketBy(SortedBy.Size, false /* mergeroms */, maxDegreeOfParallelism, logger);
}
return key;

View File

@@ -19,10 +19,11 @@ namespace SabreTools.Helper.Dats
/// </summary>
/// <param name="bucketBy">SortedBy enum representing how to sort the individual items</param>
/// <param name="mergeroms">True if roms should be deduped, false otherwise</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logger object for file and console output</param>
/// <param name="lower">True if the key should be lowercased (default), false otherwise</param>
/// <param name="norename">True if games should only be compared on game and file name, false if system and source are counted</param>
public void BucketBy(SortedBy bucketBy, bool mergeroms, Logger logger, bool lower = true, bool norename = true)
public void BucketBy(SortedBy bucketBy, bool mergeroms, int maxDegreeOfParallelism, Logger logger, bool lower = true, bool norename = true)
{
// If we already have the right sorting, trust it
if (_sortedBy == bucketBy)
@@ -41,6 +42,7 @@ namespace SabreTools.Helper.Dats
// First do the initial sort of all of the roms
List<string> keys = Keys.ToList();
Parallel.ForEach(keys,
new ParallelOptions() { MaxDegreeOfParallelism = maxDegreeOfParallelism },
key =>
{
List<DatItem> roms = this[key];
@@ -129,6 +131,7 @@ namespace SabreTools.Helper.Dats
// Now go through and sort all of the individual lists
keys = sortable.Keys.ToList();
Parallel.ForEach(keys,
new ParallelOptions() { MaxDegreeOfParallelism = maxDegreeOfParallelism },
key =>
{
List<DatItem> sortedlist = sortable[key];
@@ -152,13 +155,14 @@ namespace SabreTools.Helper.Dats
/// Use cloneof tags to create non-merged sets and remove the tags plus using the device_ref tags to get full sets
/// </summary>
/// <param name="mergeroms">True if roms should be deduped, false otherwise</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logger object for file and console output</param>
public void CreateFullyNonMergedSets(bool mergeroms, Logger logger)
public void CreateFullyNonMergedSets(bool mergeroms, int maxDegreeOfParallelism, Logger logger)
{
logger.User("Creating fully non-merged sets from the DAT");
// For sake of ease, the first thing we want to do is sort by game
BucketBy(SortedBy.Game, mergeroms, logger, norename: true);
BucketBy(SortedBy.Game, mergeroms, maxDegreeOfParallelism, logger, norename: true);
_sortedBy = SortedBy.Default;
// Now we want to loop through all of the games and set the correct information
@@ -179,13 +183,14 @@ namespace SabreTools.Helper.Dats
/// Use cloneof tags to create merged sets and remove the tags
/// </summary>
/// <param name="mergeroms">True if roms should be deduped, false otherwise</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logger object for file and console output</param>
public void CreateMergedSets(bool mergeroms, Logger logger)
public void CreateMergedSets(bool mergeroms, int maxDegreeOfParallelism, Logger logger)
{
logger.User("Creating merged sets from the DAT");
// For sake of ease, the first thing we want to do is sort by game
BucketBy(SortedBy.Game, mergeroms, logger, norename: true);
BucketBy(SortedBy.Game, mergeroms, maxDegreeOfParallelism, logger, norename: true);
_sortedBy = SortedBy.Default;
// Now we want to loop through all of the games and set the correct information
@@ -202,13 +207,14 @@ namespace SabreTools.Helper.Dats
/// Use cloneof tags to create non-merged sets and remove the tags
/// </summary>
/// <param name="mergeroms">True if roms should be deduped, false otherwise</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logger object for file and console output</param>
public void CreateNonMergedSets(bool mergeroms, Logger logger)
public void CreateNonMergedSets(bool mergeroms, int maxDegreeOfParallelism, Logger logger)
{
logger.User("Creating non-merged sets from the DAT");
// For sake of ease, the first thing we want to do is sort by game
BucketBy(SortedBy.Game, mergeroms, logger, norename: true);
BucketBy(SortedBy.Game, mergeroms, maxDegreeOfParallelism, logger, norename: true);
_sortedBy = SortedBy.Default;
// Now we want to loop through all of the games and set the correct information
@@ -225,13 +231,14 @@ namespace SabreTools.Helper.Dats
/// Use cloneof and romof tags to create split sets and remove the tags
/// </summary>
/// <param name="mergeroms">True if roms should be deduped, false otherwise</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logger object for file and console output</param>
public void CreateSplitSets(bool mergeroms, Logger logger)
public void CreateSplitSets(bool mergeroms, int maxDegreeOfParallelism, Logger logger)
{
logger.User("Creating split sets from the DAT");
// For sake of ease, the first thing we want to do is sort by game
BucketBy(SortedBy.Game, mergeroms, logger, norename: true);
BucketBy(SortedBy.Game, mergeroms, maxDegreeOfParallelism, logger, norename: true);
_sortedBy = SortedBy.Default;
// Now we want to loop through all of the games and set the correct information

View File

@@ -66,17 +66,17 @@ namespace SabreTools.Helper.Dats
// Modify the Dictionary if necessary and output the results
if (diff != 0 && diff < DiffMode.Cascade)
{
DiffNoCascade(diff, outDir, newInputFileNames, logger);
DiffNoCascade(diff, outDir, newInputFileNames, maxDegreeOfParallelism, logger);
}
// If we're in cascade and diff, output only cascaded diffs
else if (diff != 0 && diff >= DiffMode.Cascade)
{
DiffCascade(outDir, inplace, newInputFileNames, datHeaders, skip, logger);
DiffCascade(outDir, inplace, newInputFileNames, datHeaders, skip, maxDegreeOfParallelism, logger);
}
// Output all entries with user-defined merge
else
{
MergeNoDiff(outDir, newInputFileNames, datHeaders, logger);
MergeNoDiff(outDir, newInputFileNames, datHeaders, maxDegreeOfParallelism, logger);
}
}
// Otherwise, loop through all of the inputs individually
@@ -120,7 +120,8 @@ namespace SabreTools.Helper.Dats
MergeRoms = MergeRoms,
};
datHeaders[i].Parse(input.Split('¬')[0], i, 0, filter, splitType, trim, single, root, logger, true, clean, descAsName);
datHeaders[i].Parse(input.Split('¬')[0], i, 0, filter, splitType, trim, single,
root, maxDegreeOfParallelism, logger, true, clean, descAsName);
});
logger.User("Processing complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
@@ -148,8 +149,9 @@ namespace SabreTools.Helper.Dats
/// <param name="diff">Non-zero flag for diffing mode, zero otherwise</param>
/// <param name="outDir">Output directory to write the DATs to</param>
/// <param name="inputs">List of inputs to write out from</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logging object for console and file output</param>
public void DiffNoCascade(DiffMode diff, string outDir, List<string> inputs, Logger logger)
public void DiffNoCascade(DiffMode diff, string outDir, List<string> inputs, int maxDegreeOfParallelism, Logger logger)
{
DateTime start = DateTime.Now;
logger.User("Initializing all output DATs");
@@ -275,13 +277,13 @@ namespace SabreTools.Helper.Dats
// Output the difflist (a-b)+(b-a) diff
if ((diff & DiffMode.NoDupes) != 0)
{
outerDiffData.WriteToFile(outDir, logger);
outerDiffData.WriteToFile(outDir, maxDegreeOfParallelism, logger);
}
// Output the (ab) diff
if ((diff & DiffMode.Dupes) != 0)
{
dupeData.WriteToFile(outDir, logger);
dupeData.WriteToFile(outDir, maxDegreeOfParallelism, logger);
}
// Output the individual (a-b) DATs
@@ -296,7 +298,7 @@ namespace SabreTools.Helper.Dats
: (Path.GetDirectoryName(split[0]).Remove(0, split[1].Length)));
// Try to output the file
outDats[j].WriteToFile(path, logger);
outDats[j].WriteToFile(path, maxDegreeOfParallelism, logger);
}
}
logger.User("Outputting complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
@@ -310,8 +312,9 @@ namespace SabreTools.Helper.Dats
/// <param name="inputs">List of inputs to write out from</param>
/// <param name="datHeaders">Dat headers used optionally</param>
/// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logging object for console and file output</param>
public void DiffCascade(string outDir, bool inplace, List<string> inputs, List<DatFile> datHeaders, bool skip, Logger logger)
public void DiffCascade(string outDir, bool inplace, List<string> inputs, List<DatFile> datHeaders, bool skip, int maxDegreeOfParallelism, Logger logger)
{
string post = "";
@@ -395,7 +398,7 @@ namespace SabreTools.Helper.Dats
}
// Try to output the file
outDats[j].WriteToFile(path, logger);
outDats[j].WriteToFile(path, maxDegreeOfParallelism, logger);
}
logger.User("Outputting complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
}
@@ -406,8 +409,9 @@ namespace SabreTools.Helper.Dats
/// <param name="outDir">Output directory to write the DATs to</param>
/// <param name="inputs">List of inputs to write out from</param>
/// <param name="datHeaders">Dat headers used optionally</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logging object for console and file output</param>
public void MergeNoDiff(string outDir, List<string> inputs, List<DatFile> datHeaders, Logger logger)
public void MergeNoDiff(string outDir, List<string> inputs, List<DatFile> datHeaders, int maxDegreeOfParallelism, Logger logger)
{
// If we're in SuperDAT mode, prefix all games with their respective DATs
if (Type == "SuperDAT")
@@ -434,7 +438,7 @@ namespace SabreTools.Helper.Dats
}
// Try to output the file
WriteToFile(outDir, logger);
WriteToFile(outDir, maxDegreeOfParallelism, logger);
}
/// <summary>
@@ -548,11 +552,11 @@ namespace SabreTools.Helper.Dats
DatFile innerDatdata = new DatFile(this);
logger.User("Processing \"" + Path.GetFileName(inputFileName) + "\"");
innerDatdata.Parse(inputFileName, 0, 0, filter, splitType, trim, single,
root, logger, true, clean, descAsName,
root, maxDegreeOfParallelism, logger, true, clean, descAsName,
keepext: ((innerDatdata.DatFormat & DatFormat.TSV) != 0 || (innerDatdata.DatFormat & DatFormat.CSV) != 0));
// Try to output the file
innerDatdata.WriteToFile((outDir == "" ? Path.GetDirectoryName(inputFileName) : outDir), logger, overwrite: (outDir != ""));
innerDatdata.WriteToFile((outDir == "" ? Path.GetDirectoryName(inputFileName) : outDir), maxDegreeOfParallelism, logger, overwrite: (outDir != ""));
}
else if (Directory.Exists(inputFileName))
{
@@ -565,11 +569,12 @@ namespace SabreTools.Helper.Dats
logger.User("Processing \"" + Path.GetFullPath(file).Remove(0, inputFileName.Length) + "\"");
DatFile innerDatdata = new DatFile(this);
innerDatdata.Parse(file, 0, 0, filter, splitType,
trim, single, root, logger, true, clean, descAsName,
trim, single, root, maxDegreeOfParallelism, logger, true, clean, descAsName,
keepext: ((innerDatdata.DatFormat & DatFormat.TSV) != 0 || (innerDatdata.DatFormat & DatFormat.CSV) != 0));
// Try to output the file
innerDatdata.WriteToFile((outDir == "" ? Path.GetDirectoryName(file) : outDir + Path.GetDirectoryName(file).Remove(0, inputFileName.Length - 1)), logger, overwrite: (outDir != ""));
innerDatdata.WriteToFile((outDir == "" ? Path.GetDirectoryName(file) : outDir + Path.GetDirectoryName(file).Remove(0, inputFileName.Length - 1)),
maxDegreeOfParallelism, logger, overwrite: (outDir != ""));
});
}
else

View File

@@ -38,7 +38,7 @@ namespace SabreTools.Helper.Dats
public void Parse(string filename, int sysid, int srcid, Logger logger,
bool keep = false, bool clean = false, bool descAsName = false, bool keepext = false, bool useTags = false)
{
Parse(filename, sysid, srcid, new Filter(), SplitType.None, false, false, "", logger,
Parse(filename, sysid, srcid, new Filter(), SplitType.None, false, false, "", 4, logger,
keep: keep, clean: clean, descAsName: descAsName, keepext: keepext, useTags: useTags);
}
@@ -54,6 +54,7 @@ namespace SabreTools.Helper.Dats
/// <param name="single">True if all games should be replaced by '!', false otherwise</param>
/// <param name="root">String representing root directory to compare against for length calculation</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logger object for console and/or file output</param>
/// <param name="keep">True if full pathnames are to be kept, false otherwise (default)</param>
/// <param name="clean">True if game names are sanitized, false otherwise (default)</param>
/// <param name="descAsName">True if descriptions should be used as names, false otherwise (default)</param>
@@ -75,6 +76,7 @@ namespace SabreTools.Helper.Dats
string root,
// Miscellaneous
int maxDegreeOfParallelism,
Logger logger,
bool keep = false,
bool clean = false,
@@ -181,16 +183,16 @@ namespace SabreTools.Helper.Dats
switch (splitType)
{
case SplitType.FullNonMerged:
CreateFullyNonMergedSets(false, logger);
CreateFullyNonMergedSets(false, maxDegreeOfParallelism, logger);
break;
case SplitType.NonMerged:
CreateNonMergedSets(false, logger);
CreateNonMergedSets(false, maxDegreeOfParallelism, logger);
break;
case SplitType.Merged:
CreateMergedSets(false, logger);
CreateMergedSets(false, maxDegreeOfParallelism, logger);
break;
case SplitType.Split:
CreateSplitSets(false, logger);
CreateSplitSets(false, maxDegreeOfParallelism, logger);
break;
}
}

View File

@@ -145,7 +145,7 @@ namespace SabreTools.Helper.Dats
}
// Now that we have a list of depots, we want to sort the input DAT by SHA-1
BucketBy(SortedBy.SHA1, false /* mergeroms */, logger);
BucketBy(SortedBy.SHA1, false /* mergeroms */, maxDegreeOfParallelism, logger);
// Then we want to loop through each of the hashes and see if we can rebuild
List<string> hashes = Keys.ToList();
@@ -189,7 +189,8 @@ namespace SabreTools.Helper.Dats
}
// Otherwise, we rebuild that file to all locations that we need to
RebuildIndividualFile(fileinfo, foundpath, outDir, tempDir, date, inverse, outputFormat, romba, updateDat, true /*isZip*/, headerToCheckAgainst, logger);
RebuildIndividualFile(fileinfo, foundpath, outDir, tempDir, date, inverse, outputFormat, romba,
updateDat, true /*isZip*/, headerToCheckAgainst, maxDegreeOfParallelism, logger);
}
logger.User("Rebuilding complete in: " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
@@ -202,7 +203,7 @@ namespace SabreTools.Helper.Dats
_fileName = "fixDAT_" + _fileName;
_name = "fixDAT_" + _name;
_description = "fixDAT_" + _description;
WriteToFile(outDir, logger);
WriteToFile(outDir, maxDegreeOfParallelism, logger);
}
return success;
@@ -347,7 +348,7 @@ namespace SabreTools.Helper.Dats
_fileName = "fixDAT_" + _fileName;
_name = "fixDAT_" + _name;
_description = "fixDAT_" + _description;
WriteToFile(outDir, logger);
WriteToFile(outDir, maxDegreeOfParallelism, logger);
}
return success;
@@ -395,7 +396,7 @@ namespace SabreTools.Helper.Dats
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
Rom rom = FileTools.GetFileInfo(file, logger, omitFromScan: (quickScan ? Hash.SecureHashes : Hash.DeepHashes), header: headerToCheckAgainst);
usedExternally = RebuildIndividualFile(rom, file, outDir, tempSubDir, date, inverse, outputFormat,
romba, updateDat, false /* isZip */, headerToCheckAgainst, logger);
romba, updateDat, false /* isZip */, headerToCheckAgainst, maxDegreeOfParallelism, logger);
}
// If we're supposed to scan the file internally
@@ -410,7 +411,7 @@ namespace SabreTools.Helper.Dats
foreach (Rom rom in extracted)
{
usedInternally &= RebuildIndividualFile(rom, file, outDir, tempSubDir, date, inverse, outputFormat,
romba, updateDat, true /* isZip */, headerToCheckAgainst, logger);
romba, updateDat, true /* isZip */, headerToCheckAgainst, maxDegreeOfParallelism, logger);
}
}
// Otherwise, attempt to extract the files to the temporary directory
@@ -430,7 +431,7 @@ namespace SabreTools.Helper.Dats
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
Rom rom = FileTools.GetFileInfo(entry, logger, omitFromScan: (quickScan ? Hash.SecureHashes : Hash.DeepHashes));
usedInternally &= RebuildIndividualFile(rom, entry, outDir, tempSubDir, date, inverse, outputFormat,
romba, updateDat, false /* isZip */, headerToCheckAgainst, logger);
romba, updateDat, false /* isZip */, headerToCheckAgainst, maxDegreeOfParallelism, logger);
}
}
// Otherwise, just get the info on the file itself
@@ -439,7 +440,7 @@ namespace SabreTools.Helper.Dats
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
Rom rom = FileTools.GetFileInfo(file, logger, omitFromScan: (quickScan ? Hash.SecureHashes : Hash.DeepHashes));
usedExternally = RebuildIndividualFile(rom, file, outDir, tempSubDir, date, inverse, outputFormat,
romba, updateDat, false /* isZip */, headerToCheckAgainst, logger);
romba, updateDat, false /* isZip */, headerToCheckAgainst, maxDegreeOfParallelism, logger);
}
}
}
@@ -481,25 +482,27 @@ namespace SabreTools.Helper.Dats
/// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
/// <param name="isZip">True if the input file is an archive, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logger object for file and console output</param>
/// <returns>True if the file was able to be rebuilt, false otherwise</returns>
/// <remarks>
/// TODO: If going from a TGZ file to a TGZ file, don't extract, just copy
/// </remarks>
private bool RebuildIndividualFile(Rom rom, string file, string outDir, string tempDir, bool date,
bool inverse, OutputFormat outputFormat, bool romba, bool updateDat, bool isZip, string headerToCheckAgainst, Logger logger)
bool inverse, OutputFormat outputFormat, bool romba, bool updateDat, bool isZip, string headerToCheckAgainst,
int maxDegreeOfParallelism, Logger logger)
{
// Set the output value
bool rebuilt = false;
// Find if the file has duplicates in the DAT
bool hasDuplicates = rom.HasDuplicates(this, logger);
bool hasDuplicates = rom.HasDuplicates(this, maxDegreeOfParallelism, logger);
// If it has duplicates and we're not filtering, rebuild it
if (hasDuplicates && !inverse)
{
// Get the list of duplicates to rebuild to
List<DatItem> dupes = rom.GetDuplicates(this, logger, remove: updateDat);
List<DatItem> dupes = rom.GetDuplicates(this, maxDegreeOfParallelism, logger, remove: updateDat);
// If we don't have any duplicates, continue
if (dupes.Count == 0)
@@ -672,13 +675,13 @@ namespace SabreTools.Helper.Dats
Rom headerless = FileTools.GetFileInfo(file + ".new", logger);
// Find if the file has duplicates in the DAT
hasDuplicates = headerless.HasDuplicates(this, logger);
hasDuplicates = headerless.HasDuplicates(this, maxDegreeOfParallelism, logger);
// If it has duplicates and we're not filtering, rebuild it
if (hasDuplicates && !inverse)
{
// Get the list of duplicates to rebuild to
List<DatItem> dupes = headerless.GetDuplicates(this, logger, remove: updateDat);
List<DatItem> dupes = headerless.GetDuplicates(this, maxDegreeOfParallelism, logger, remove: updateDat);
// If we don't have any duplicates, continue
if (dupes.Count == 0)
@@ -786,9 +789,10 @@ namespace SabreTools.Helper.Dats
/// <param name="hashOnly">True if only hashes should be checked, false for full file information</param>
/// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logger object for file and console output</param>
/// <returns>True if verification was a success, false otherwise</returns>
public bool VerifyDepot(List<string> inputs, string tempDir, string headerToCheckAgainst, Logger logger)
public bool VerifyDepot(List<string> inputs, string tempDir, string headerToCheckAgainst, int maxDegreeOfParallelism, Logger logger)
{
// Check the temp directory
if (String.IsNullOrEmpty(tempDir))
@@ -830,7 +834,7 @@ namespace SabreTools.Helper.Dats
}
// Now that we have a list of depots, we want to sort the input DAT by SHA-1
BucketBy(SortedBy.SHA1, false /* mergeroms */, logger);
BucketBy(SortedBy.SHA1, false /* mergeroms */, maxDegreeOfParallelism, logger);
// Then we want to loop through each of the hashes and see if we can rebuild
List<string> hashes = Keys.ToList();
@@ -874,7 +878,7 @@ namespace SabreTools.Helper.Dats
}
// Now we want to remove all duplicates from the DAT
fileinfo.GetDuplicates(this, logger, remove: true);
fileinfo.GetDuplicates(this, maxDegreeOfParallelism, logger, remove: true);
}
logger.User("Verifying complete in: " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
@@ -883,7 +887,7 @@ namespace SabreTools.Helper.Dats
_fileName = "fixDAT_" + _fileName;
_name = "fixDAT_" + _name;
_description = "fixDAT_" + _description;
WriteToFile(null, logger);
WriteToFile(null, maxDegreeOfParallelism, logger);
return success;
}
@@ -896,9 +900,11 @@ namespace SabreTools.Helper.Dats
/// <param name="hashOnly">True if only hashes should be checked, false for full file information</param>
/// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logger object for file and console output</param>
/// <returns>True if verification was a success, false otherwise</returns>
public bool VerifyGeneric(List<string> inputs, string tempDir, bool hashOnly, bool quickScan, string headerToCheckAgainst, Logger logger)
public bool VerifyGeneric(List<string> inputs, string tempDir, bool hashOnly, bool quickScan,
string headerToCheckAgainst, int maxDegreeOfParallelism, Logger logger)
{
// Check the temp directory exists
if (String.IsNullOrEmpty(tempDir))
@@ -941,7 +947,7 @@ namespace SabreTools.Helper.Dats
if (hashOnly)
{
// First we need to sort by hash to get duplicates
BucketBy(SortedBy.SHA1, false /* mergeroms */, logger);
BucketBy(SortedBy.SHA1, false /* mergeroms */, maxDegreeOfParallelism, logger);
// Then follow the same tactics as before
foreach (string key in Keys)
@@ -977,7 +983,7 @@ namespace SabreTools.Helper.Dats
}
// Now output the fixdat to the main folder
success &= matched.WriteToFile("", logger, stats: true);
success &= matched.WriteToFile("", maxDegreeOfParallelism, logger, stats: true);
return success;
}

View File

@@ -29,6 +29,7 @@ namespace SabreTools.Helper.Dats
/// </summary>
/// <param name="datdata">All information for creating the datfile header</param>
/// <param name="outDir">Set the output directory</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logger object for console and/or file output</param>
/// <param name="norename">True if games should only be compared on game and file name (default), false if system and source are counted</param>
/// <param name="stats">True if DAT statistics should be output on write, false otherwise (default)</param>
@@ -39,7 +40,8 @@ namespace SabreTools.Helper.Dats
/// The following features have been requested for file output:
/// - Have the ability to strip special (non-ASCII) characters from rom information
/// </remarks>
public bool WriteToFile(string outDir, Logger logger, bool norename = true, bool stats = false, bool ignoreblanks = false, bool overwrite = true)
public bool WriteToFile(string outDir, int maxDegreeOfParallelism, Logger logger,
bool norename = true, bool stats = false, bool ignoreblanks = false, bool overwrite = true)
{
// If there's nothing there, abort
if (Count == 0)
@@ -109,7 +111,7 @@ namespace SabreTools.Helper.Dats
}
// Bucket roms by game name and optionally dedupe
BucketBy(SortedBy.Game, MergeRoms, logger, norename: norename);
BucketBy(SortedBy.Game, MergeRoms, maxDegreeOfParallelism, logger, norename: norename);
// Output the number of items we're going to be writing
logger.User("A total of " + Count + " items will be written out to file");

View File

@@ -1130,10 +1130,8 @@ namespace SabreTools.Helper.Tools
public static bool WriteTAR(string inputFile, string outDir, Rom rom, Logger logger, bool date = false)
{
// Wrap the individual inputs into lists
List<string> inputFiles = new List<string>();
inputFiles.Add(inputFile);
List<Rom> roms = new List<Rom>();
roms.Add(rom);
List<string> inputFiles = new List<string>() { inputFile };
List<Rom> roms = new List<Rom>() { rom };
return WriteTAR(inputFiles, outDir, roms, logger, date: date);
}
@@ -1316,10 +1314,8 @@ namespace SabreTools.Helper.Tools
public static bool WriteTorrent7Zip(string inputFile, string outDir, Rom rom, Logger logger, bool date = false)
{
// Wrap the individual inputs into lists
List<string> inputFiles = new List<string>();
inputFiles.Add(inputFile);
List<Rom> roms = new List<Rom>();
roms.Add(rom);
List<string> inputFiles = new List<string>() { inputFile };
List<Rom> roms = new List<Rom>() { rom };
return WriteTorrent7Zip(inputFiles, outDir, roms, logger, date: date);
}
@@ -1378,9 +1374,11 @@ namespace SabreTools.Helper.Tools
// If the archive doesn't exist, create it and put the single file
if (!File.Exists(archiveFileName))
{
zipFile = new SevenZipCompressor();
zipFile.ArchiveFormat = OutArchiveFormat.SevenZip;
zipFile.CompressionLevel = SevenZip.CompressionLevel.Normal;
zipFile = new SevenZipCompressor()
{
ArchiveFormat = OutArchiveFormat.SevenZip,
CompressionLevel = SevenZip.CompressionLevel.Normal,
};
// Map all inputs to index
Dictionary<string, int> inputIndexMap = new Dictionary<string, int>();
@@ -1452,9 +1450,11 @@ namespace SabreTools.Helper.Tools
}
// Otherwise, process the old zipfile
zipFile = new SevenZipCompressor();
zipFile.ArchiveFormat = OutArchiveFormat.SevenZip;
zipFile.CompressionLevel = SevenZip.CompressionLevel.Normal;
zipFile = new SevenZipCompressor()
{
ArchiveFormat = OutArchiveFormat.SevenZip,
CompressionLevel = SevenZip.CompressionLevel.Normal,
};
Stream zipFileStream = File.OpenWrite(tempFile);
// Get the order for the entries with the new file
@@ -1640,10 +1640,8 @@ namespace SabreTools.Helper.Tools
public static bool WriteTorrentLRZ(string inputFile, string outDir, Rom rom, Logger logger, bool date = false)
{
// Wrap the individual inputs into lists
List<string> inputFiles = new List<string>();
inputFiles.Add(inputFile);
List<Rom> roms = new List<Rom>();
roms.Add(rom);
List<string> inputFiles = new List<string>() { inputFile };
List<Rom> roms = new List<Rom>() { rom };
return WriteTorrentLRZ(inputFiles, outDir, roms, logger, date: date);
}
@@ -1674,10 +1672,8 @@ namespace SabreTools.Helper.Tools
public static bool WriteTorrentRAR(string inputFile, string outDir, Rom rom, Logger logger, bool date = false)
{
// Wrap the individual inputs into lists
List<string> inputFiles = new List<string>();
inputFiles.Add(inputFile);
List<Rom> roms = new List<Rom>();
roms.Add(rom);
List<string> inputFiles = new List<string>() { inputFile };
List<Rom> roms = new List<Rom>() { rom };
return WriteTorrentRAR(inputFiles, outDir, roms, logger, date: date);
}
@@ -1708,10 +1704,8 @@ namespace SabreTools.Helper.Tools
public static bool WriteTorrentXZ(string inputFile, string outDir, Rom rom, Logger logger, bool date = false)
{
// Wrap the individual inputs into lists
List<string> inputFiles = new List<string>();
inputFiles.Add(inputFile);
List<Rom> roms = new List<Rom>();
roms.Add(rom);
List<string> inputFiles = new List<string>() { inputFile };
List<Rom> roms = new List<Rom>() { rom };
return WriteTorrentXZ(inputFiles, outDir, roms, logger, date: date);
}
@@ -1742,10 +1736,8 @@ namespace SabreTools.Helper.Tools
public static bool WriteTorrentZip(string inputFile, string outDir, Rom rom, Logger logger, bool date = false)
{
// Wrap the individual inputs into lists
List<string> inputFiles = new List<string>();
inputFiles.Add(inputFile);
List<Rom> roms = new List<Rom>();
roms.Add(rom);
List<string> inputFiles = new List<string>() { inputFile };
List<Rom> roms = new List<Rom>() { rom };
return WriteTorrentZip(inputFiles, outDir, roms, logger, date: date);
}