[SabreTools, README.1ST] Depot variants wrapped in
@@ -483,6 +483,11 @@ Options:
  This sets an output folder to be used when the files are created. If
  a path is not defined, the application directory is used instead.

-dep, --depot Assume directories are romba depots
  Normally, input directories will be treated with no special format.
  If this flag is used, all input directories will be assumed to be
  romba-style depots.

-d, --delete Delete fully rebuilt input files
  Optionally, the input files, once processed and fully matched, can
  be deleted. This can be useful when the original file structure is

@@ -649,154 +654,6 @@ Options:
  Once the files that were able to be rebuilt are taken care of, a DAT of
  the files that could not be matched will be output to the output
  directory.

-ssd, --sort-depot Sort input depots by a set of DATs
  This feature allows the user to quickly rebuild based on a supplied DAT
  file(s). By default all files will be rebuilt to uncompressed folders in
  the output directory. This is different from the generic sort above as
  it requires that all inputs are Romba-compatible depots.

-dat= Name of the DAT to be used for the various options
  The user-supplied DAT used to check which files need to be rebuilt.
  Multiple occurrences of this flag are allowed.

-out= Set the name of the output directory
  This sets an output folder to be used when the files are created. If
  a path is not defined, the application directory is used instead.

-d, --delete Delete fully rebuilt input files
  Optionally, the input files, once processed and fully matched, can
  be deleted. This can be useful when the original file structure is
  no longer needed or if there is limited space on the source drive.

-in, --inverse Match files not in the DAT
  Instead of the normal behavior of rebuilding using a DAT, this flag
  allows the user to use the DAT as a filter instead. All files that
  are found in the DAT will be skipped and everything else will be
  output in the selected format.
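As a rough sketch of this inverse mode (illustrative only; the type and method names below are hypothetical stand-ins, not SabreTools APIs), the DAT's hashes act as an exclusion list and everything not listed in it is kept for rebuilding:

    // Hypothetical sketch of inverse matching: keep only files whose SHA-1
    // is NOT present in the DAT, so the DAT acts as a filter rather than a template.
    using System.Collections.Generic;
    using System.Linq;

    class InverseFilterSketch
    {
        static List<string> KeepUnmatched(
            IEnumerable<(string Path, string Sha1)> scannedFiles, // results of scanning the inputs
            ISet<string> datSha1s)                                // SHA-1s parsed from the DAT(s)
        {
            return scannedFiles
                .Where(f => !datSha1s.Contains(f.Sha1)) // skip anything the DAT already covers
                .Select(f => f.Path)
                .ToList();
        }
    }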
-ad, --add-date Write dates for each file parsed, if available
  If this flag is set, the date in the DAT will be used for the
  output file instead of the standard date and time for TorrentZip.
  This will technically invalidate the output files as proper
  TorrentZip files because the date will not match the standard.

-t7z Enable Torrent 7zip output
  Instead of outputting the files to a folder, files will be rebuilt to
  Torrent7Zip (T7Z) files. This format is based on the LZMA container
  format 7zip, but with custom header information. This is currently
  unused by any major application. Currently does not produce proper
  Torrent-compatible outputs.

-tar Enable Tape ARchive output
  Instead of outputting the files to a folder, files will be rebuilt to
  Tape ARchive (TAR) files. This format is a standardized storage
  archive without any compression, usually used with other compression
  formats around it. It is widely used in backup applications and
  source code archives.

-tgz Enable Torrent GZ output
  Instead of outputting the files to a folder, files will be rebuilt to
  TorrentGZ (TGZ) files. This format is based on the GZip archive
  format, but with custom header information and the file name replaced
  by the SHA-1 of the file inside. This is primarily used by the external
  tool Romba (https://github.com/uwedeportivo/romba), but may be used
  more widely in the future.
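A minimal sketch of the naming convention only (this is not the SabreTools writer; the real TorrentGZ output also rewrites the gzip header fields, which is omitted here):

    // Minimal sketch: derive the TorrentGZ-style name for a file, i.e. the
    // lowercase SHA-1 of its contents plus a ".gz" extension.
    using System;
    using System.IO;
    using System.Security.Cryptography;

    class TgzNameSketch
    {
        static string TorrentGzName(string path)
        {
            using (FileStream fs = File.OpenRead(path))
            using (SHA1 sha1 = SHA1.Create())
            {
                byte[] hash = sha1.ComputeHash(fs);
                return BitConverter.ToString(hash).Replace("-", "").ToLowerInvariant() + ".gz";
            }
        }
    }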
-r, --romba Enable Romba depot directory output
  As an extension of the parent flag, this outputs the TGZ files
  into directories based on the structure used by Romba. This uses
  nested folders using the first 4 bytes of the SHA-1, 1 byte for
  each layer of the directory name. It also includes two auxiliary
  files, .romba_size and .romba_size.backup, that have the
  compressed size of the folder inside for use with Romba.
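A small sketch of the directory layout implied above, assuming one two-character (one byte) folder per level taken from the start of the SHA-1; the helper below is hypothetical and only builds the path:

    // Assumed layout, based on the description above: place a TGZ file in nested
    // folders named after the first 4 bytes of its SHA-1, one byte per level,
    // e.g. sha1 "deadbeef..." -> "de/ad/be/ef/deadbeef....gz".
    using System.IO;

    class DepotPathSketch
    {
        static string DepotPath(string depotRoot, string sha1Hex)
        {
            return Path.Combine(depotRoot,
                sha1Hex.Substring(0, 2),
                sha1Hex.Substring(2, 2),
                sha1Hex.Substring(4, 2),
                sha1Hex.Substring(6, 2),
                sha1Hex + ".gz");
        }
    }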
-tlrz Enable Torrent Long-Range Zip output [UNIMPLEMENTED]
  Instead of outputting the files to a folder, files will be rebuilt to
  Torrent Long-Range Zip (TLRZ) files. This format is based on the
  LRZip file format as defined at https://github.com/ckolivas/lrzip
  but with custom header information. This is currently unused by any
  major application.

-tlz4 Enable Torrent LZ4 output [UNIMPLEMENTED]
  Instead of outputting the files to a folder, files will be rebuilt to
  Torrent LZ4 (TLZ4) files. This format is based on the LZ4 file
  format as defined at https://github.com/lz4/lz4 but with custom
  header information. This is currently unused by any major
  application.

-trar Enable Torrent RAR output [UNIMPLEMENTED]
  Instead of outputting files to a folder, files will be rebuilt to
  Torrent RAR (TRAR) files. This format is based on the RAR proprietary
  format but with custom header information. This is currently unused
  by any major application.

-txz Enable Torrent XZ output [UNSUPPORTED]
  Instead of outputting files to a folder, files will be rebuilt to
  Torrent XZ (TXZ) files. This format is based on the LZMA container
  format XZ, but with custom header information. This is currently
  unused by any major application. Currently does not produce proper
  Torrent-compatible outputs.

-tzip Enable Torrent Zip output
  Instead of outputting files to a folder, files will be rebuilt to
  TorrentZip (TZip) files. This format is based on the ZIP archive
  format, but with custom header information. This is primarily used
  by the external tool RomVault (http://www.romvault.com/) and is already
  widely used.

-tzpaq Enable Torrent ZPAQ output [UNIMPLEMENTED]
  Instead of outputting the files to a folder, files will be rebuilt to
  Torrent ZPAQ (TZPAQ) files. This format is based on the ZPAQ file
  format as defined at https://github.com/zpaq/zpaq but with custom
  header information. This is currently unused by any major
  application.

-tzstd Enable Torrent Zstd output [UNIMPLEMENTED]
  Instead of outputting the files to a folder, files will be rebuilt to
  Torrent Zstd (TZstd) files. This format is based on the Zstd file
  format as defined at https://github.com/skbkontur/ZstdNet but with
  custom header information. This is currently unused by any major
  application.

-h=, --header= Remove headers from hash calculations
  If this is set, then all files that have copier headers that are
  detected will have them removed from the hash calculation. This
  allows a headered collection to be hashed without the possibly
  variant header information. If a particular header skipper is defined,
  and that skipper exists, then it will be used instead of trying to
  find one that matches.
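A minimal sketch of the idea, assuming the copier header has already been detected and measured by a skipper (the detection itself is not shown, and the helper below is not the SabreTools implementation):

    // Hash a file while ignoring a detected copier header of `headerSize` bytes
    // at the start, so headered and headerless dumps hash identically.
    using System;
    using System.IO;
    using System.Security.Cryptography;

    class HeaderlessHashSketch
    {
        static string Sha1WithoutHeader(string path, int headerSize)
        {
            using (FileStream fs = File.OpenRead(path))
            using (SHA1 sha1 = SHA1.Create())
            {
                fs.Seek(headerSize, SeekOrigin.Begin);  // skip the copier header
                byte[] hash = sha1.ComputeHash(fs);     // hash only the remaining data
                return BitConverter.ToString(hash).Replace("-", "").ToLowerInvariant();
            }
        }
    }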
-dm, --dat-merged Force creating merged sets
  Preprocess the DAT to have parent sets contain all items from the
  children based on the cloneof tag. This is incompatible with the
  other --dat-X flags.

-ds, --dat-split Force creating split sets
  Preprocess the DAT to remove redundant files between parents and
  children based on the romof and cloneof tags. This is incompatible
  with the other --dat-X flags.

-dnm, --dat-nonmerged Force creating non-merged sets
  Preprocess the DAT to have child sets contain all items from the
  parent set based on the cloneof tag. This is incompatible with the
  other --dat-X flags.

-df, --dat-fullnonmerged Force creating fully non-merged sets
  Preprocess the DAT to have child sets contain all items from the
  parent sets based on the cloneof and romof tags as well as device
  references. This is incompatible with the other --dat-X flags.
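To make the --dat-merged case above concrete, here is a conceptual sketch on a hypothetical in-memory model (not the DatFile type): each set tagged with cloneof has its items folded into its parent, one level deep.

    // Conceptual sketch of "merged" preprocessing: parents absorb the items of
    // their clones based on the cloneof tag. Assumes a single level of cloning.
    using System.Collections.Generic;

    class GameSet
    {
        public string Name;
        public string CloneOf;                       // null when the set has no parent
        public List<string> Items = new List<string>();
    }

    class MergeSketch
    {
        static void MergeIntoParents(Dictionary<string, GameSet> sets)
        {
            foreach (GameSet set in sets.Values)
            {
                if (set.CloneOf != null && sets.TryGetValue(set.CloneOf, out GameSet parent))
                {
                    parent.Items.AddRange(set.Items);  // parent now contains everything
                    set.Items.Clear();                 // clone ends up empty
                }
            }
        }
    }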
-mt={4} Amount of threads to use
  Optionally, set the number of threads to use for the multithreaded
  operations. The default is 4 threads; -1 means an unlimited number of
  threads may be created. If the user specifies that only 1 thread is to
  be used, it defaults to the original, serial implementation of the DFD
  code.
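As a sketch of how such a value can map onto .NET parallelism (assumed wiring, not the actual SabreTools globals), -1 happens to match the framework's own "no limit" convention:

    // Translate the -mt value into a ParallelOptions instance. In .NET,
    // MaxDegreeOfParallelism of -1 means unlimited; 1 effectively serializes work.
    using System.Threading.Tasks;

    class ThreadingSketch
    {
        static ParallelOptions FromFlag(int mtValue = 4)
        {
            return new ParallelOptions
            {
                MaxDegreeOfParallelism = mtValue  // 4 by default, -1 for unlimited
            };
        }
    }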
-upd, --update-dat Output updated DAT
  Once the files that were able to be rebuilt are taken care of, a DAT of
  the files that could not be matched will be output to the output
  directory.

-st, --stats Get statistics on all input DATs
  This will output by default the combined statistics for all input DAT

@@ -1334,7 +1191,12 @@ Options:
-dat= Name of the DAT to be used for the various options
  The user-supplied DAT used to check which files need to be verified.
  Multiple occurrences of this flag are allowed.

-dep, --depot Assume directories are romba depots
  Normally, input directories will be treated with no special format.
  If this flag is used, all input directories will be assumed to be
  romba-style depots.

-ho, --hash-only Check files by hash only
  This sets a mode where files are not checked based on name but
  rather hash alone. This allows verification of (possibly)

@@ -1380,44 +1242,7 @@ Options:
  Preprocess the DAT to have child sets contain all items from the
  parent sets based on the cloneof and romof tags as well as device
  references. This is incompatible with the other --dat-X flags.
-ved, --verify-depot Verify a depot against an input DAT
  When used, this will use an input DAT or set of DATs to blindly check
  against an input depot. This is based on the sort-depot code in which
  the base directories are assumed to be romba-style depots.

-dat= Name of the DAT to be used for the various options
  The user-supplied DAT used to check which files need to be verified.
  Multiple occurrences of this flag are allowed.

-h=, --header= Remove headers from hash calculations
  If this is set, then all files that have copier headers that are
  detected will have them removed from the hash calculation. This
  allows a headered collection to be hashed without the possibly
  variant header information. If a particular header skipper is defined,
  and that skipper exists, then it will be used instead of trying to
  find one that matches.

-dm, --dat-merged Force checking merged sets
  Preprocess the DAT to have parent sets contain all items from the
  children based on the cloneof tag. This is incompatible with the
  other --dat-X flags.

-ds, --dat-split Force checking split sets
  Preprocess the DAT to remove redundant files between parents and
  children based on the romof and cloneof tags. This is incompatible
  with the other --dat-X flags.

-dnm, --dat-nonmerged Force checking non-merged sets
  Preprocess the DAT to have child sets contain all items from the
  parent set based on the cloneof tag. This is incompatible with the
  other --dat-X flags.

-df, --dat-fullnonmerged Force checking fully non-merged sets
  Preprocess the DAT to have child sets contain all items from the
  parent sets based on the cloneof and romof tags as well as device
  references. This is incompatible with the other --dat-X flags.

** Section 3.0 - Examples

Here are a few usage examples based on features that are commonly used by most
@@ -409,6 +409,11 @@ namespace SabreTools
"Output directory",
FeatureType.String,
null));
sort.AddFeature("depot", new Feature(
new List<string>() { "-dep", "--depot" },
"Assume directories are romba depots",
FeatureType.Flag,
null));
sort.AddFeature("delete", new Feature(
new List<string>() { "-del", "--delete" },
"Delete fully rebuilt input files",

@@ -562,140 +567,6 @@ namespace SabreTools
FeatureType.String,
null));
// Create the Sort Depot feature
Feature sortDepot = new Feature(
new List<string>() { "-ssd", "--sort-depot" },
"Sort romba depots by a set of DATs",
FeatureType.Flag,
null);
sortDepot.AddFeature("dat", new Feature(
new List<string>() { "-dat", "--dat" },
"Input DAT to rebuild against",
FeatureType.List,
null));
sortDepot.AddFeature("out", new Feature(
new List<string>() { "-out", "--out" },
"Output directory",
FeatureType.String,
null));
sortDepot.AddFeature("delete", new Feature(
new List<string>() { "-del", "--delete" },
"Delete fully rebuilt input files",
FeatureType.Flag,
null));
sortDepot.AddFeature("inverse", new Feature(
new List<string>() { "-in", "--inverse" },
"Rebuild only files not in DAT",
FeatureType.Flag,
null));
sortDepot.AddFeature("add-date", new Feature(
new List<string>() { "-ad", "--add-date" },
"Add original dates from DAT, if possible",
FeatureType.Flag,
null));
sortDepot.AddFeature("t7z", new Feature(
new List<string>() { "-t7z", "--t7z" },
"Enable Torrent7z output",
FeatureType.Flag,
null));
sortDepot.AddFeature("tar", new Feature(
new List<string>() { "-tar", "--tar" },
"Enable TAR output",
FeatureType.Flag,
null));
sortDepot.AddFeature("tgz", new Feature(
new List<string>() { "-tgz", "--tgz" },
"Enable TorrentGZ output",
FeatureType.Flag,
null));
sortDepot["tgz"].AddFeature("romba", new Feature(
new List<string>() { "-r", "--romba" },
"Enable Romba depot dir output",
FeatureType.Flag,
null));
/*
sortDepot.AddFeature("tlrz", new Feature(
new List<string>() { "-tlrz", "--tlrz" },
"Enable TorrentLRZ output",
FeatureType.Flag,
null));
*/
/*
sortDepot.AddFeature("tlz4", new Feature(
new List<string>() { "-tlz4", "--tlz4" },
"Enable TorrentLZ4 output",
FeatureType.Flag,
null));
*/
/*
sortDepot.AddFeature("trar", new Feature(
new List<string>() { "-trar", "--trar" },
"Enable TorrentRAR output",
FeatureType.Flag,
null));
*/
/*
sortDepot.AddFeature("txz", new Feature(
new List<string>() { "-txz", "--txz" },
"Enable TorrentXZ output",
FeatureType.Flag,
null));
*/
sortDepot.AddFeature("tzip", new Feature(
new List<string>() { "-tzip", "--tzip" },
"Enable TorrentZip output",
FeatureType.Flag,
null));
/*
sortDepot.AddFeature("tzpaq", new Feature(
new List<string>() { "-tzpaq", "--tzpaq" },
"Enable TorrentZPAQ output",
FeatureType.Flag,
null));
*/
/*
sortDepot.AddFeature("tzstd", new Feature(
new List<string>() { "-tzstd", "--tzstd" },
"Enable TorrentZstd output",
FeatureType.Flag,
null));
*/
sortDepot.AddFeature("header", new Feature(
new List<string>() { "-h", "--header" },
"Set a header skipper to use, blank means all",
FeatureType.String,
null));
sortDepot.AddFeature("dat-merged", new Feature(
new List<string>() { "-dm", "--dat-merged" },
"Force creating merged sets",
FeatureType.Flag,
null));
sortDepot.AddFeature("dat-split", new Feature(
new List<string>() { "-ds", "--dat-split" },
"Force creating split sets",
FeatureType.Flag,
null));
sortDepot.AddFeature("dat-nonmerged", new Feature(
new List<string>() { "-dnm", "--dat-nonmerged" },
"Force creating non-merged sets",
FeatureType.Flag,
null));
sortDepot.AddFeature("dat-fullnonmerged", new Feature(
new List<string>() { "-df", "--dat-fullnonmerged" },
"Force creating fully non-merged sets",
FeatureType.Flag,
null));
sortDepot.AddFeature("update-dat", new Feature(
new List<string>() { "-ud", "--update-dat" },
"Output updated DAT to output directory",
FeatureType.Flag,
null));
sortDepot.AddFeature("mt", new Feature(
new List<string>() { "-mt", "--mt" },
"Amount of threads to use (default 4, -1 unlimited)",
FeatureType.String,
null));
// Create the Stats feature
Feature stats = new Feature(
new List<string>() { "-st", "--stats" },

@@ -1484,6 +1355,11 @@ namespace SabreTools
"Input DAT to verify against",
FeatureType.List,
null));
verify.AddFeature("depot", new Feature(
new List<string>() { "-dep", "--depot" },
"Assume directories are romba depots",
FeatureType.Flag,
null));
verify.AddFeature("temp", new Feature(
new List<string>() { "-t", "--temp" },
"Set the temporary directory to use",

@@ -1530,48 +1406,6 @@ namespace SabreTools
FeatureType.Flag,
null));
// Create the Verify Depot feature
Feature verifyDepot = new Feature(
new List<string>() { "-ved", "--verify-depot" },
"Verify a depot against DATs",
FeatureType.Flag,
null);
verifyDepot.AddFeature("dat", new Feature(
new List<string>() { "-dat", "--dat" },
"Input DAT to verify against",
FeatureType.List,
null));
verifyDepot.AddFeature("temp", new Feature(
new List<string>() { "-t", "--temp" },
"Set the temporary directory to use",
FeatureType.String,
null));
verifyDepot.AddFeature("header", new Feature(
new List<string>() { "-h", "--header" },
"Set a header skipper to use, blank means all",
FeatureType.String,
null));
verifyDepot.AddFeature("dat-merged", new Feature(
new List<string>() { "-dm", "--dat-merged" },
"Force checking merged sets",
FeatureType.Flag,
null));
verifyDepot.AddFeature("dat-split", new Feature(
new List<string>() { "-ds", "--dat-split" },
"Force checking split sets",
FeatureType.Flag,
null));
verifyDepot.AddFeature("dat-nonmerged", new Feature(
new List<string>() { "-dnm", "--dat-nonmerged" },
"Force checking non-merged sets",
FeatureType.Flag,
null));
verifyDepot.AddFeature("dat-fullnonmerged", new Feature(
new List<string>() { "-df", "--dat-fullnonmerged" },
"Force checking fully non-merged sets",
FeatureType.Flag,
null));
// Now, add all of the main features to the Help object
help.Add("Help", helpFeature);
help.Add("Script", script);

@@ -1582,12 +1416,10 @@ namespace SabreTools
help.Add("Level Split", levelSplit);
help.Add("Restore", restore);
help.Add("Sort", sort);
help.Add("Sort Depot", sortDepot);
help.Add("Stats", stats);
help.Add("Type Split", typeSplit);
help.Add("Update", update);
help.Add("Verify", verify);
help.Add("Verify Depot", verifyDepot);

return help;
}
@@ -319,6 +319,7 @@ namespace SabreTools
/// <param name="datfiles">Names of the DATs to compare against</param>
/// <param name="inputs">List of input files/folders to check</param>
/// <param name="outDir">Output directory to use to build to</param>
/// <param name="depot">True if the input directories are treated as romba depots, false otherwise</param>
/// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
/// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
/// <param name="delete">True if input files should be deleted, false otherwise</param>

@@ -333,7 +334,7 @@ namespace SabreTools
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
private static void InitSort(List<string> datfiles, List<string> inputs, string outDir, bool quickScan, bool date, bool delete,
private static void InitSort(List<string> datfiles, List<string> inputs, string outDir, bool depot, bool quickScan, bool date, bool delete,
bool inverse, OutputFormat outputFormat, bool romba, int sevenzip, int gz, int rar, int zip, bool updateDat, string headerToCheckAgainst,
SplitType splitType, bool chdsAsFiles)
{
@@ -354,43 +355,17 @@ namespace SabreTools

watch.Stop();

datdata.RebuildGeneric(inputs, outDir, quickScan, date, delete, inverse, outputFormat, romba, asl,
updateDat, headerToCheckAgainst, chdsAsFiles);
}

/// <summary>
/// Wrap sorting files from a depot using an input DAT
/// </summary>
/// <param name="datfiles">Names of the DATs to compare against</param>
/// <param name="inputs">List of input files/folders to check</param>
/// <param name="outDir">Output directory to use to build to</param>
/// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
/// <param name="delete">True if input files should be deleted, false otherwise</param>
/// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
/// <param name="outputFormat">Output format that files should be written to</param>
/// <param name="romba">True if files should be output in Romba depot folders, false otherwise</param>
/// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
private static void InitSortDepot(List<string> datfiles, List<string> inputs, string outDir, bool date, bool delete,
bool inverse, OutputFormat outputFormat, bool romba, bool updateDat, string headerToCheckAgainst, SplitType splitType)
{
InternalStopwatch watch = new InternalStopwatch("Populating internal DAT");

// Get a list of files from the input datfiles
datfiles = Utilities.GetOnlyFilesFromInputs(datfiles);

// Add all of the input DATs into one huge internal DAT
DatFile datdata = new DatFile();
foreach (string datfile in datfiles)
// If we have the depot flag, respect it
if (depot)
{
datdata.Parse(datfile, 99, 99, splitType, keep: true, useTags: true);
}

watch.Stop();

datdata.RebuildDepot(inputs, outDir, date, delete, inverse, outputFormat, romba,
datdata.RebuildDepot(inputs, outDir, date, delete, inverse, outputFormat, romba,
updateDat, headerToCheckAgainst);
}
else
{
datdata.RebuildGeneric(inputs, outDir, quickScan, date, delete, inverse, outputFormat, romba, asl,
updateDat, headerToCheckAgainst, chdsAsFiles);
}
}

/// <summary>
@@ -740,12 +715,13 @@ namespace SabreTools
/// </summary>
/// <param name="datfiles">Names of the DATs to compare against</param>
/// <param name="inputs">Input directories to compare against</param>
/// <param name="depot">True if the input directories are treated as romba depots, false otherwise</param>
/// <param name="hashOnly">True if only hashes should be checked, false for full file information</param>
/// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
private static void InitVerify(List<string> datfiles, List<string> inputs, bool hashOnly, bool quickScan,
private static void InitVerify(List<string> datfiles, List<string> inputs, bool depot, bool hashOnly, bool quickScan,
string headerToCheckAgainst, SplitType splitType, bool chdsAsFiles)
{
// Get the archive scanning level

@@ -765,33 +741,15 @@ namespace SabreTools

watch.Stop();

datdata.VerifyGeneric(inputs, hashOnly, quickScan, headerToCheckAgainst, chdsAsFiles);
}

/// <summary>
/// Wrap verifying files from a depot using an input DAT
/// </summary>
/// <param name="datfiles">Names of the DATs to compare against</param>
/// <param name="inputs">Input directories to compare against</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
private static void InitVerifyDepot(List<string> datfiles, List<string> inputs, string headerToCheckAgainst, SplitType splitType)
{
InternalStopwatch watch = new InternalStopwatch("Populating internal DAT");

// Get a list of files from the input datfiles
datfiles = Utilities.GetOnlyFilesFromInputs(datfiles);

// Add all of the input DATs into one huge internal DAT
DatFile datdata = new DatFile();
foreach (string datfile in datfiles)
// If we have the depot flag, respect it
if (depot)
{
datdata.Parse(datfile, 99, 99, splitType, keep: true, useTags: true);
datdata.VerifyDepot(inputs, headerToCheckAgainst);
}
else
{
datdata.VerifyGeneric(inputs, hashOnly, quickScan, headerToCheckAgainst, chdsAsFiles);
}

watch.Stop();

datdata.VerifyDepot(inputs, headerToCheckAgainst);
}

#endregion
@@ -77,15 +77,13 @@ namespace SabreTools
extract = false,
restore = false,
sort = false,
sortDepot = false,
splitByExt = false,
splitByHash = false,
splitByLevel = false,
splitByType = false,
stats = false,
update = false,
verify = false,
verifyDepot = false;
verify = false;

// User flags
bool addBlankFilesForEmptyFolder = false,

@@ -97,6 +95,7 @@ namespace SabreTools
copyFiles = false,
datPrefix = false,
delete = false,
depot = false,
descAsName = false,
excludeOf = false,
hashOnly = false,

@@ -227,10 +226,6 @@ namespace SabreTools
case "--sort":
sort = true;
break;
case "-ssd":
case "--sort-depot":
sortDepot = true;
break;
case "-st":
case "--stats":
stats = true;

@@ -247,10 +242,6 @@ namespace SabreTools
case "--verify":
verify = true;
break;
case "-ved":
case "--verify-depot":
verifyDepot = true;
break;

// If we don't have a valid flag, feed it through the help system
default:
@@ -341,6 +332,10 @@ namespace SabreTools
case "--delete":
delete = true;
break;
case "-dep":
case "--depot":
depot = true;
break;
case "-df":
case "--dat-fullnonmerged":
splitType = SplitType.FullNonMerged;

@@ -1246,7 +1241,7 @@ namespace SabreTools
}

// If none of the feature flags is enabled, show the help screen
if (!(datFromDir | extract | restore | sort | sortDepot | splitByExt | splitByHash | splitByLevel | splitByType | stats | update | verify | verifyDepot))
if (!(datFromDir | extract | restore | sort | splitByExt | splitByHash | splitByLevel | splitByType | stats | update | verify))
{
Globals.Logger.Error("At least one feature switch must be enabled");
_help.OutputGenericHelp();

@@ -1255,7 +1250,7 @@ namespace SabreTools
}

// If more than one switch is enabled, show the help screen
if (!(datFromDir ^ extract ^ restore ^ sort ^ sortDepot ^ splitByExt ^ splitByHash ^ splitByLevel ^ splitByType ^ stats ^ update ^ verify ^ verifyDepot))
if (!(datFromDir ^ extract ^ restore ^ sort ^ splitByExt ^ splitByHash ^ splitByLevel ^ splitByType ^ stats ^ update ^ verify))
{
Globals.Logger.Error("Only one feature switch is allowed at a time");
_help.OutputGenericHelp();

@@ -1265,7 +1260,7 @@ namespace SabreTools

// If a switch that requires a filename is set and no file is, show the help screen
if (inputs.Count == 0
&& (datFromDir || extract || restore || splitByExt || splitByHash || splitByLevel || splitByType || stats || update || verify || verifyDepot))
&& (datFromDir || extract || restore || splitByExt || splitByHash || splitByLevel || splitByType || stats || update || verify))
{
Globals.Logger.Error("This feature requires at least one input");
_help.OutputIndividualFeature(feature);

@@ -1298,17 +1293,10 @@ namespace SabreTools
// If we're using the sorter
else if (sort)
{
InitSort(datfiles, inputs, outDir, quickScan, addFileDates, delete, inverse,
InitSort(datfiles, inputs, outDir, depot, quickScan, addFileDates, delete, inverse,
outputFormat, romba, sevenzip, gz, rar, zip, updateDat, header, splitType, chdsAsFiles);
}

// If we're using the sorter from depot
else if (sortDepot)
{
InitSortDepot(datfiles, inputs, outDir, addFileDates, delete, inverse,
outputFormat, romba, updateDat, header, splitType);
}

// Split a DAT by extension
else if (splitByExt)
{

@@ -1351,13 +1339,7 @@ namespace SabreTools
// If we're using the verifier
else if (verify)
{
InitVerify(datfiles, inputs, hashOnly, quickScan, header, splitType, chdsAsFiles);
}

// If we're using the depot verifier
else if (verifyDepot)
{
InitVerifyDepot(datfiles, inputs, header, splitType);
InitVerify(datfiles, inputs, depot, hashOnly, quickScan, header, splitType, chdsAsFiles);
}

// If nothing is set, show the help