Mirror of https://github.com/claunia/SabreTools.git, synced 2025-12-16 19:14:27 +00:00
[SabreTools] Add merge flags to sort/sort-depot
@@ -478,6 +478,23 @@ Options:
 			1	Only hash contents of the archive
 			2	Only hash archive itself (treat like a regular file)
 
+  -dm, --dat-merged	Force creating merged sets in the output
+	Preprocess the DAT to have parent sets contain all items from the children based
+	on the cloneof tag. This is incompatible with the other --dat-X flags.
+
+  -ds, --dat-split	Force creating split sets in the output
+	Preprocess the DAT to remove redundant files between parents and children based
+	on the romof and cloneof tags. This is incompatible with the other --dat-X flags.
+
+  -dnm, --dat-nonmerged	Force creating non-merged sets in the output
+	Preprocess the DAT to have child sets contain all items from the parent set based
+	on the cloneof tag. This is incompatible with the other --dat-X flags.
+
+  -df, --dat-fullnonmerged	Force creating fully non-merged sets in the output
+	Preprocess the DAT to have child sets contain all items from the parent sets based
+	on the cloneof and romof tags as well as device references. This is incompatible with
+	the other --dat-X flags.
+
   -mt={4}	Amount of threads to use
 	Optionally, set the number of threads to use for the multithreaded operations.
 	The default is 4 threads; -1 means unlimited threads created. If the user specifies
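
The merged/split/non-merged terminology in these options follows the usual DAT conventions: each set can name a parent via a cloneof tag, and a merged build folds every clone's items into that parent. As a rough illustration only, not SabreTools' actual implementation (Game, Roms, and CreateMergedSets below are invented names), merged-set preprocessing based on cloneof could look roughly like this:

using System.Collections.Generic;
using System.Linq;

// Hypothetical, simplified model of a DAT entry: a set has a name, an optional
// cloneof parent, and a list of rom names. Not the real SabreTools object model.
public class Game
{
    public string Name;
    public string CloneOf;                      // null/empty when the set has no parent
    public List<string> Roms = new List<string>();
}

public static class MergeSketch
{
    // "Merged": every clone's items are moved up into its parent (cloneof), and the
    // clone entries themselves are dropped, so only top-level sets remain.
    public static List<Game> CreateMergedSets(List<Game> games)
    {
        Dictionary<string, Game> byName = games.ToDictionary(g => g.Name);

        foreach (Game game in games)
        {
            if (string.IsNullOrEmpty(game.CloneOf) || !byName.ContainsKey(game.CloneOf))
                continue;

            // The parent receives all of the child's items, keeping one copy of shared roms
            Game parent = byName[game.CloneOf];
            parent.Roms = parent.Roms.Union(game.Roms).ToList();
        }

        return games.Where(g => string.IsNullOrEmpty(g.CloneOf)).ToList();
    }
}
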
@@ -576,6 +593,23 @@ Options:
 	to be hashed without possibly variant information. If a particular header skipper is
 	defined, and that skipper exists, then it will be used instead of trying to find one
 	that matches.
 
+  -dm, --dat-merged	Force creating merged sets in the output
+	Preprocess the DAT to have parent sets contain all items from the children based
+	on the cloneof tag. This is incompatible with the other --dat-X flags.
+
+  -ds, --dat-split	Force creating split sets in the output
+	Preprocess the DAT to remove redundant files between parents and children based
+	on the romof and cloneof tags. This is incompatible with the other --dat-X flags.
+
+  -dnm, --dat-nonmerged	Force creating non-merged sets in the output
+	Preprocess the DAT to have child sets contain all items from the parent set based
+	on the cloneof tag. This is incompatible with the other --dat-X flags.
+
+  -df, --dat-fullnonmerged	Force creating fully non-merged sets in the output
+	Preprocess the DAT to have child sets contain all items from the parent sets based
+	on the cloneof and romof tags as well as device references. This is incompatible with
+	the other --dat-X flags.
+
   -mt={4}	Amount of threads to use
 	Optionally, set the number of threads to use for the multithreaded operations.
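
For contrast with the merged case sketched earlier, a split build keeps the clone sets but removes anything the parent already provides, which is what the --dat-split description above means by removing redundant files. A minimal sketch of that idea, reusing the hypothetical Game type from the previous example and ignoring the romof/BIOS side for brevity:

using System.Collections.Generic;
using System.Linq;

public static class SplitSketch
{
    // "Split": clone sets are kept, but any rom already present in the parent is
    // removed from the clone, so shared items live only in the parent set.
    public static void CreateSplitSets(List<Game> games)
    {
        Dictionary<string, Game> byName = games.ToDictionary(g => g.Name);

        foreach (Game game in games)
        {
            if (string.IsNullOrEmpty(game.CloneOf) || !byName.ContainsKey(game.CloneOf))
                continue;

            Game parent = byName[game.CloneOf];
            game.Roms = game.Roms.Except(parent.Roms).ToList();
        }
    }
}
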
@@ -425,6 +425,26 @@ namespace SabreTools
 				"Set scanning level for ZIP archives (default 1)",
 				FeatureType.String,
 				null));
+			sort.AddFeature("dat-merged", new Feature(
+				new List<string>() { "-dm", "--dat-merged" },
+				"Force creating merged sets",
+				FeatureType.Flag,
+				null));
+			sort.AddFeature("dat-split", new Feature(
+				new List<string>() { "-ds", "--dat-split" },
+				"Force creating split sets",
+				FeatureType.Flag,
+				null));
+			sort.AddFeature("dat-nonmerged", new Feature(
+				new List<string>() { "-dnm", "--dat-nonmerged" },
+				"Force creating non-merged sets",
+				FeatureType.Flag,
+				null));
+			sort.AddFeature("dat-fullnonmerged", new Feature(
+				new List<string>() { "-df", "--dat-fullnonmerged" },
+				"Force creating fully non-merged sets",
+				FeatureType.Flag,
+				null));
 			sort.AddFeature("update-dat", new Feature(
 				new List<string>() { "-ud", "--update-dat" },
 				"Output updated DAT to output directory",
@@ -525,6 +545,26 @@ namespace SabreTools
 				"Set a header skipper to use, blank means all",
 				FeatureType.String,
 				null));
+			sortDepot.AddFeature("dat-merged", new Feature(
+				new List<string>() { "-dm", "--dat-merged" },
+				"Force creating merged sets",
+				FeatureType.Flag,
+				null));
+			sortDepot.AddFeature("dat-split", new Feature(
+				new List<string>() { "-ds", "--dat-split" },
+				"Force creating split sets",
+				FeatureType.Flag,
+				null));
+			sortDepot.AddFeature("dat-nonmerged", new Feature(
+				new List<string>() { "-dnm", "--dat-nonmerged" },
+				"Force creating non-merged sets",
+				FeatureType.Flag,
+				null));
+			sortDepot.AddFeature("dat-fullnonmerged", new Feature(
+				new List<string>() { "-df", "--dat-fullnonmerged" },
+				"Force creating fully non-merged sets",
+				FeatureType.Flag,
+				null));
 			sortDepot.AddFeature("update-dat", new Feature(
 				new List<string>() { "-ud", "--update-dat" },
 				"Output updated DAT to output directory",
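
Both sort and sort-depot register the four options as plain Flag features, the help text says they are mutually exclusive, and the rest of the commit threads a single SplitType value through InitSort/InitSortDepot. The glue between the flags and that value is not part of this diff; a hypothetical sketch of what it could look like, with SplitTypeSketch and GetSplitType as invented names rather than the real SabreTools types:

using System;
using System.Collections.Generic;

// Invented names: the real SabreTools SplitType enum and its members may differ.
public enum SplitTypeSketch { None, Split, Merged, NonMerged, FullNonMerged }

public static class FlagMapping
{
    // Collapse the four mutually exclusive --dat-X flags into one value and reject
    // any attempt to combine two or more of them.
    public static SplitTypeSketch GetSplitType(IDictionary<string, bool> flags)
    {
        var requested = new List<SplitTypeSketch>();

        if (flags.TryGetValue("dat-merged", out bool dm) && dm) requested.Add(SplitTypeSketch.Merged);
        if (flags.TryGetValue("dat-split", out bool ds) && ds) requested.Add(SplitTypeSketch.Split);
        if (flags.TryGetValue("dat-nonmerged", out bool dnm) && dnm) requested.Add(SplitTypeSketch.NonMerged);
        if (flags.TryGetValue("dat-fullnonmerged", out bool df) && df) requested.Add(SplitTypeSketch.FullNonMerged);

        if (requested.Count > 1)
            throw new ArgumentException("The --dat-X flags are incompatible with each other");

        return requested.Count == 1 ? requested[0] : SplitTypeSketch.None;
    }
}
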
@@ -309,10 +309,11 @@ namespace SabreTools
 		/// <param name="zip">Integer representing the archive handling level for Zip</param>
 		/// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
 		/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
+		/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
 		/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
 		private static void InitSort(List<string> datfiles, List<string> inputs, string outDir, string tempDir, bool quickScan, bool date, bool delete,
 			bool inverse, OutputFormat outputFormat, bool romba, int sevenzip, int gz, int rar, int zip, bool updateDat, string headerToCheckAgainst,
-			int maxDegreeOfParallelism)
+			SplitType splitType, int maxDegreeOfParallelism)
 		{
 			// Get the archive scanning level
 			ArchiveScanLevel asl = ArchiveTools.GetArchiveScanLevelFromNumbers(sevenzip, gz, rar, zip);
@@ -324,7 +325,7 @@ namespace SabreTools
 			DatFile datdata = new DatFile();
 			foreach (string datfile in datfiles)
 			{
-				datdata.Parse(datfile, 99, 99, _logger, keep: true, softlist: true);
+				datdata.Parse(datfile, 99, 99, new Filter(), splitType, false /* trim */, false /* single */, null /* root */, _logger, keep: true, softlist: true);
 			}
 			_logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
 
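
With splitType handed to DatFile.Parse, the DAT can be reorganized as it is read, before any rebuilding starts. How Parse uses it is not shown in this diff; one plausible shape for the dispatch, building on the hypothetical types from the sketches above, is:

using System.Collections.Generic;

public static class SplitDispatchSketch
{
    // Hypothetical dispatcher only: the real work happens inside DatFile.Parse,
    // whose internals are not part of this commit.
    public static List<Game> ApplySplitType(List<Game> games, SplitTypeSketch splitType)
    {
        switch (splitType)
        {
            case SplitTypeSketch.Merged:
                // Parents absorb every clone's items and the clone entries are dropped
                return MergeSketch.CreateMergedSets(games);

            case SplitTypeSketch.Split:
                // Clones keep only the items their parent does not already provide
                SplitSketch.CreateSplitSets(games);
                return games;

            case SplitTypeSketch.NonMerged:
            case SplitTypeSketch.FullNonMerged:
                // Clones would instead be filled out with their parents' items (plus
                // BIOS/device items for the fully non-merged case); omitted for brevity
                return games;

            default:
                // None: leave the DAT exactly as parsed
                return games;
        }
    }
}
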
@@ -346,9 +347,10 @@ namespace SabreTools
 		/// <param name="romba">True if files should be output in Romba depot folders, false otherwise</param>
 		/// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
 		/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
+		/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
 		/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
 		private static void InitSortDepot(List<string> datfiles, List<string> inputs, string outDir, string tempDir, bool date, bool delete,
-			bool inverse, OutputFormat outputFormat, bool romba, bool updateDat, string headerToCheckAgainst, int maxDegreeOfParallelism)
+			bool inverse, OutputFormat outputFormat, bool romba, bool updateDat, string headerToCheckAgainst, SplitType splitType, int maxDegreeOfParallelism)
 		{
 			DateTime start = DateTime.Now;
 			_logger.User("Populating internal DAT...");
@@ -357,7 +359,7 @@ namespace SabreTools
 			DatFile datdata = new DatFile();
 			foreach (string datfile in datfiles)
 			{
-				datdata.Parse(datfile, 99, 99, _logger, keep: true, softlist: true);
+				datdata.Parse(datfile, 99, 99, new Filter(), splitType, false /* trim */, false /* single */, null /* root */, _logger, keep: true, softlist: true);
 			}
 			_logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
 
@@ -1122,14 +1122,14 @@ namespace SabreTools
 			else if (sort)
 			{
 				InitSort(datfiles, inputs, outDir, tempDir, quickScan, addFileDates, delete, inverse,
-					outputFormat, romba, sevenzip, gz, rar, zip, updateDat, header, maxParallelism);
+					outputFormat, romba, sevenzip, gz, rar, zip, updateDat, header, splitType, maxParallelism);
 			}
 
 			// If we're using the sorter from depot
 			else if (sortDepot)
 			{
 				InitSortDepot(datfiles, inputs, outDir, tempDir, addFileDates, delete, inverse,
-					outputFormat, romba, updateDat, header, maxParallelism);
+					outputFormat, romba, updateDat, header, splitType, maxParallelism);
 			}
 
 			// Split a DAT by extension
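
Taken together, the change lets sort and sort-depot runs honor a DAT's parent/clone structure: whichever --dat-X flag is chosen on the command line ends up as the splitType value, InitSort/InitSortDepot forward it, and DatFile.Parse reorganizes the DAT accordingly before any files are rebuilt.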