diff --git a/RombaSharp/Partials/RombaSharp.Inits.cs b/RombaSharp/Partials/RombaSharp.Inits.cs
index 1cdb2dd3..c1a7087d 100644
--- a/RombaSharp/Partials/RombaSharp.Inits.cs
+++ b/RombaSharp/Partials/RombaSharp.Inits.cs
@@ -176,7 +176,7 @@ namespace RombaSharp
ArchiveScanLevel asl = ArchiveTools.GetArchiveScanLevelFromNumbers((onlyNeeded ? 0 : 1), (onlyNeeded ? 0 : 1), (onlyNeeded ? 0 : 1), (onlyNeeded ? 0 : 1));
need.RebuildGeneric(onlyDirs, _depots.Keys.ToList()[0], _tmpdir, false /*quickScan*/, false /*date*/,
false /*delete*/, false /*inverse*/, OutputFormat.TorrentGzip, true /*romba*/, asl, false /*updateDat*/,
- null /*headerToCheckAgainst*/, 4 /*maxDegreeOfParallelism*/, _logger);
+ null /*headerToCheckAgainst*/, _workers /*maxDegreeOfParallelism*/, _logger);
}
/// <summary>
@@ -216,7 +216,7 @@ namespace RombaSharp
ArchiveScanLevel asl = ArchiveTools.GetArchiveScanLevelFromNumbers(1, 1, 1, 1);
datFile.RebuildDepot(onlineDepots, outputFolder, _tmpdir, false /*date*/,
false /*delete*/, false /*inverse*/, (copy ? OutputFormat.TorrentGzip : OutputFormat.TorrentZip), copy,
- false /*updateDat*/, null /*headerToCheckAgainst*/, 4 /*maxDegreeOfParallelism*/, _logger);
+ false /*updateDat*/, null /*headerToCheckAgainst*/, _workers /*maxDegreeOfParallelism*/, _logger);
}
}
@@ -253,7 +253,7 @@ namespace RombaSharp
datdata.PopulateFromDir(input, Hash.SHA256 & Hash.SHA384 & Hash.SHA512 /* omitFromScan */, true /* bare */, false /* archivesAsFiles */,
true /* enableGzip */, false /* addBlanks */, false /* addDate */, _tmpdir /* tempDir */, false /* copyFiles */,
null /* headerToCheckAgainst */, _workers /* maxDegreeOfParallelism */, _logger);
- datdata.WriteToFile("", logger);
+ datdata.WriteToFile("", _workers, logger);
}
logger.Close();
}
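(Note, not part of the patch: throughout this file the hard-coded worker count of 4 is replaced by the configured _workers value. The diff does not show how the callees consume the argument; the sketch below is only the conventional TPL pattern such a maxDegreeOfParallelism value feeds into. The type and method names are illustrative, not from the SabreTools codebase.)

using System;
using System.Collections.Generic;
using System.Threading.Tasks;

public static class ParallelismSketch
{
    // Illustrative only: a maxDegreeOfParallelism argument is conventionally
    // applied by capping the number of Parallel.ForEach workers.
    public static void ProcessFiles(IEnumerable<string> files, int maxDegreeOfParallelism)
    {
        ParallelOptions options = new ParallelOptions
        {
            // To the TPL, -1 means "unbounded"; any positive value caps the worker count
            MaxDegreeOfParallelism = maxDegreeOfParallelism
        };

        Parallel.ForEach(files, options, file =>
        {
            // Per-file work (hashing, rebuilding, writing) would go here
            Console.WriteLine("Processing " + file);
        });
    }
}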
diff --git a/SabreTools/Partials/SabreTools.Inits.cs b/SabreTools/Partials/SabreTools.Inits.cs
index 4c464582..b0a8b884 100644
--- a/SabreTools/Partials/SabreTools.Inits.cs
+++ b/SabreTools/Partials/SabreTools.Inits.cs
@@ -120,7 +120,7 @@ namespace SabreTools
// If it was a success, write the DAT out
if (success)
{
- datdata.WriteToFile(outDir, _logger);
+ datdata.WriteToFile(outDir, maxDegreeOfParallelism, _logger);
}
// Otherwise, show the help
@@ -163,7 +163,8 @@ namespace SabreTools
/// <param name="exta">First extension to split on</param>
/// <param name="extb">Second extension to split on</param>
/// <param name="outDir">Output directory for the split files</param>
- private static void InitExtSplit(List<string> inputs, List<string> exta, List<string> extb, string outDir)
+ /// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
+ private static void InitExtSplit(List<string> inputs, List<string> exta, List<string> extb, string outDir, int maxDegreeOfParallelism)
{
// Loop over the input files
foreach (string input in inputs)
@@ -172,7 +173,7 @@ namespace SabreTools
{
DatFile datFile = new DatFile();
datFile.Parse(Path.GetFullPath(input), 0, 0, _logger);
- datFile.SplitByExt(outDir, Path.GetDirectoryName(input), exta, extb, _logger);
+ datFile.SplitByExt(outDir, Path.GetDirectoryName(input), exta, extb, maxDegreeOfParallelism, _logger);
}
else if (Directory.Exists(input))
{
@@ -180,7 +181,8 @@ namespace SabreTools
{
DatFile datFile = new DatFile();
datFile.Parse(Path.GetFullPath(file), 0, 0, _logger);
- datFile.SplitByExt(outDir, (input.EndsWith(Path.DirectorySeparatorChar.ToString()) ? input : input + Path.DirectorySeparatorChar), exta, extb, _logger);
+ datFile.SplitByExt(outDir, (input.EndsWith(Path.DirectorySeparatorChar.ToString()) ? input : input + Path.DirectorySeparatorChar),
+ exta, extb, maxDegreeOfParallelism, _logger);
}
}
else
@@ -198,7 +200,8 @@ namespace SabreTools
/// </summary>
/// <param name="inputs">List of inputs to be used</param>
/// <param name="outDir">Output directory for the split files</param>
- private static void InitHashSplit(List<string> inputs, string outDir)
+ /// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
+ private static void InitHashSplit(List<string> inputs, string outDir, int maxDegreeOfParallelism)
{
// Loop over the input files
foreach (string input in inputs)
@@ -207,7 +210,7 @@ namespace SabreTools
{
DatFile datFile = new DatFile();
datFile.Parse(Path.GetFullPath(input), 0, 0, _logger);
- datFile.SplitByHash(outDir, Path.GetDirectoryName(input), _logger);
+ datFile.SplitByHash(outDir, Path.GetDirectoryName(input), maxDegreeOfParallelism, _logger);
}
else if (Directory.Exists(input))
{
@@ -215,7 +218,8 @@ namespace SabreTools
{
DatFile datFile = new DatFile();
datFile.Parse(Path.GetFullPath(file), 0, 0, _logger);
- datFile.SplitByHash(outDir, (input.EndsWith(Path.DirectorySeparatorChar.ToString()) ? input : input + Path.DirectorySeparatorChar), _logger);
+ datFile.SplitByHash(outDir, (input.EndsWith(Path.DirectorySeparatorChar.ToString()) ? input : input + Path.DirectorySeparatorChar),
+ maxDegreeOfParallelism, _logger);
}
}
else
@@ -258,7 +262,8 @@ namespace SabreTools
/// <param name="outDir">Output directory for the split files</param>
/// <param name="shortname">True if short filenames should be used, false otherwise</param>
/// <param name="basedat">True if original filenames should be used as the base for output filename, false otherwise</param>
- private static void InitLevelSplit(List<string> inputs, string outDir, bool shortname, bool basedat)
+ /// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
+ private static void InitLevelSplit(List<string> inputs, string outDir, bool shortname, bool basedat, int maxDegreeOfParallelism)
{
// Loop over the input files
foreach (string input in inputs)
@@ -267,7 +272,7 @@ namespace SabreTools
{
DatFile datFile = new DatFile();
datFile.Parse(Path.GetFullPath(input), 0, 0, _logger, keep: true);
- datFile.SplitByLevel(outDir, Path.GetDirectoryName(input), shortname, basedat, _logger);
+ datFile.SplitByLevel(outDir, Path.GetDirectoryName(input), shortname, basedat, maxDegreeOfParallelism, _logger);
}
else if (Directory.Exists(input))
{
@@ -275,7 +280,8 @@ namespace SabreTools
{
DatFile datFile = new DatFile();
datFile.Parse(Path.GetFullPath(file), 0, 0, _logger, keep: true);
- datFile.SplitByLevel(outDir, (input.EndsWith(Path.DirectorySeparatorChar.ToString()) ? input : input + Path.DirectorySeparatorChar), shortname, basedat, _logger);
+ datFile.SplitByLevel(outDir, (input.EndsWith(Path.DirectorySeparatorChar.ToString()) ? input : input + Path.DirectorySeparatorChar),
+ shortname, basedat, maxDegreeOfParallelism, _logger);
}
}
else
@@ -323,8 +329,8 @@ namespace SabreTools
DatFile datdata = new DatFile();
foreach (string datfile in datfiles)
{
- datdata.Parse(datfile, 99, 99, new Filter(), splitType, false /* trim */, false /* single */, null /* root */, _logger,
- keep: true, useTags: true);
+ datdata.Parse(datfile, 99, 99, new Filter(), splitType, false /* trim */, false /* single */, null /* root */,
+ maxDegreeOfParallelism, _logger, keep: true, useTags: true);
}
_logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
@@ -358,8 +364,8 @@ namespace SabreTools
DatFile datdata = new DatFile();
foreach (string datfile in datfiles)
{
- datdata.Parse(datfile, 99, 99, new Filter(), splitType, false /* trim */, false /* single */, null /* root */, _logger,
- keep: true, useTags: true);
+ datdata.Parse(datfile, 99, 99, new Filter(), splitType, false /* trim */, false /* single */, null /* root */,
+ maxDegreeOfParallelism, _logger, keep: true, useTags: true);
}
_logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
@@ -377,9 +383,11 @@ namespace SabreTools
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
/// <param name="statDatFormat">Set the statistics output format to use</param>
- private static void InitStats(List<string> inputs, string filename, string outDir, bool single, bool baddumpCol, bool nodumpCol, StatDatFormat statDatFormat)
+ /// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
+ private static void InitStats(List<string> inputs, string filename, string outDir, bool single, bool baddumpCol, bool nodumpCol,
+ StatDatFormat statDatFormat, int maxDegreeOfParallelism)
{
- DatFile.OutputStats(inputs, filename, outDir, single, baddumpCol, nodumpCol, statDatFormat, _logger);
+ DatFile.OutputStats(inputs, filename, outDir, single, baddumpCol, nodumpCol, statDatFormat, maxDegreeOfParallelism, _logger);
}
/// <summary>
@@ -387,7 +395,8 @@ namespace SabreTools
/// </summary>
/// <param name="inputs">List of inputs to be used</param>
/// <param name="outDir">Output directory for the split files</param>
- private static void InitTypeSplit(List<string> inputs, string outDir)
+ /// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
+ private static void InitTypeSplit(List<string> inputs, string outDir, int maxDegreeOfParallelism)
{
// Loop over the input files
foreach (string input in inputs)
@@ -396,7 +405,7 @@ namespace SabreTools
{
DatFile datFile = new DatFile();
datFile.Parse(Path.GetFullPath(input), 0, 0, _logger);
- datFile.SplitByType(outDir, Path.GetFullPath(Path.GetDirectoryName(input)), _logger);
+ datFile.SplitByType(outDir, Path.GetFullPath(Path.GetDirectoryName(input)), maxDegreeOfParallelism, _logger);
}
else if (Directory.Exists(input))
{
@@ -404,7 +413,8 @@ namespace SabreTools
{
DatFile datFile = new DatFile();
datFile.Parse(Path.GetFullPath(file), 0, 0, _logger);
- datFile.SplitByType(outDir, Path.GetFullPath((input.EndsWith(Path.DirectorySeparatorChar.ToString()) ? input : input + Path.DirectorySeparatorChar)), _logger);
+ datFile.SplitByType(outDir, Path.GetFullPath((input.EndsWith(Path.DirectorySeparatorChar.ToString()) ? input : input + Path.DirectorySeparatorChar)),
+ maxDegreeOfParallelism, _logger);
}
}
else
@@ -714,8 +724,9 @@ namespace SabreTools
/// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
+ /// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
private static void InitVerify(List<string> datfiles, List<string> inputs, string tempDir,
- bool hashOnly, bool quickScan, string headerToCheckAgainst, SplitType splitType)
+ bool hashOnly, bool quickScan, string headerToCheckAgainst, SplitType splitType, int maxDegreeOfParallelism)
{
// Get the archive scanning level
ArchiveScanLevel asl = ArchiveTools.GetArchiveScanLevelFromNumbers(1, 1, 1, 1);
@@ -727,12 +738,12 @@ namespace SabreTools
DatFile datdata = new DatFile();
foreach (string datfile in datfiles)
{
- datdata.Parse(datfile, 99, 99, new Filter(), splitType, false /* trim */, false /* single */, null /* root */, _logger,
- keep: true, useTags: true);
+ datdata.Parse(datfile, 99, 99, new Filter(), splitType, false /* trim */, false /* single */, null /* root */,
+ maxDegreeOfParallelism, _logger, keep: true, useTags: true);
}
_logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
- datdata.VerifyGeneric(inputs, tempDir, hashOnly, quickScan, headerToCheckAgainst, _logger);
+ datdata.VerifyGeneric(inputs, tempDir, hashOnly, quickScan, headerToCheckAgainst, maxDegreeOfParallelism, _logger);
}
/// <summary>
@@ -743,8 +754,9 @@ namespace SabreTools
/// <param name="tempDir">Temporary directory for archive extraction</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
+ /// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
private static void InitVerifyDepot(List<string> datfiles, List<string> inputs, string tempDir,
- string headerToCheckAgainst, SplitType splitType)
+ string headerToCheckAgainst, SplitType splitType, int maxDegreeOfParallelism)
{
DateTime start = DateTime.Now;
_logger.User("Populating internal DAT...");
@@ -753,12 +765,12 @@ namespace SabreTools
DatFile datdata = new DatFile();
foreach (string datfile in datfiles)
{
- datdata.Parse(datfile, 99, 99, new Filter(), splitType, false /* trim */, false /* single */, null /* root */, _logger,
- keep: true, useTags: true);
+ datdata.Parse(datfile, 99, 99, new Filter(), splitType, false /* trim */, false /* single */, null /* root */,
+ maxDegreeOfParallelism, _logger, keep: true, useTags: true);
}
_logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
- datdata.VerifyDepot(inputs, tempDir, headerToCheckAgainst, _logger);
+ datdata.VerifyDepot(inputs, tempDir, headerToCheckAgainst, maxDegreeOfParallelism, _logger);
}
#endregion
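(Note, not part of the patch: every Init* wrapper above now forwards maxDegreeOfParallelism verbatim, and nothing in this diff validates the value. If a guard were wanted, the usual shape is a small normalizer like the hypothetical helper below; the name and fallback policy are assumptions.)

using System;

public static class ParallelismDefaults
{
    // Hypothetical guard (not in this patch): treat zero or negative
    // requests as "use one worker per logical core".
    public static int Normalize(int requested)
    {
        return requested > 0 ? requested : Environment.ProcessorCount;
    }
}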
diff --git a/SabreTools/SabreTools.cs b/SabreTools/SabreTools.cs
index 7bfae7a2..2b1e5b76 100644
--- a/SabreTools/SabreTools.cs
+++ b/SabreTools/SabreTools.cs
@@ -1218,31 +1218,31 @@ namespace SabreTools
// Split a DAT by extension
else if (splitByExt)
{
- InitExtSplit(inputs, exta, extb, outDir);
+ InitExtSplit(inputs, exta, extb, outDir, maxParallelism);
}
// Split a DAT by available hashes
else if (splitByHash)
{
- InitHashSplit(inputs, outDir);
+ InitHashSplit(inputs, outDir, maxParallelism);
}
// Split a SuperDAT by lowest available level
else if (splitByLevel)
{
- InitLevelSplit(inputs, outDir, shortname, basedat);
+ InitLevelSplit(inputs, outDir, shortname, basedat, maxParallelism);
}
// Split a DAT by item type
else if (splitByType)
{
- InitTypeSplit(inputs, outDir);
+ InitTypeSplit(inputs, outDir, maxParallelism);
}
// Get statistics on input files
else if (stats)
{
- InitStats(inputs, filename, outDir, single, showBaddumpColumn, showNodumpColumn, statDatFormat);
+ InitStats(inputs, filename, outDir, single, showBaddumpColumn, showNodumpColumn, statDatFormat, maxParallelism);
}
// Convert, update, merge, diff, and filter a DAT or folder of DATs
@@ -1257,13 +1257,13 @@ namespace SabreTools
// If we're using the verifier
else if (verify)
{
- InitVerify(datfiles, inputs, tempDir, hashOnly, quickScan, header, splitType);
+ InitVerify(datfiles, inputs, tempDir, hashOnly, quickScan, header, splitType, maxParallelism);
}
// If we're using the depot verifier
else if (verifyDepot)
{
- InitVerifyDepot(datfiles, inputs, tempDir, header, splitType);
+ InitVerifyDepot(datfiles, inputs, tempDir, header, splitType, maxParallelism);
}
// If nothing is set, show the help
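(Note, not part of the patch: the dispatch above assumes maxParallelism has already been populated from the command line, which this diff does not show. A sketch of how such a flag could be wired up follows; the "-mt=" flag name and the fallback of 4, matching the old hard-coded value, are both assumptions.)

// Hypothetical wiring, not shown in this patch: populate maxParallelism
// from an -mt=N style flag, falling back to the previous hard-coded 4.
int maxParallelism = 4;
foreach (string arg in args)
{
    if (arg.StartsWith("-mt="))
    {
        int parsed;
        if (int.TryParse(arg.Substring("-mt=".Length), out parsed) && parsed > 0)
        {
            maxParallelism = parsed;
        }
    }
}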