[ALL] Add filtering support to D2D and Verify

Verify came for free since it uses the D2D code
This commit is contained in:
Matt Nadareski
2019-01-08 17:55:27 -08:00
parent 6be188dc0d
commit 723537f556
6 changed files with 3448 additions and 3055 deletions

View File

@@ -66,8 +66,8 @@ namespace RombaSharp
foreach (string dir in onlyDirs) foreach (string dir in onlyDirs)
{ {
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
df.PopulateFromDir(dir, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, null, true); df.PopulateFromDir(dir, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, null, true, null);
df.PopulateFromDir(dir, Hash.DeepHashes, false, true, SkipFileType.None, false, false, _tmpdir, false, null, true); df.PopulateFromDir(dir, Hash.DeepHashes, false, true, SkipFileType.None, false, false, _tmpdir, false, null, true, null);
} }
// Create an empty Dat for files that need to be rebuilt // Create an empty Dat for files that need to be rebuilt
@@ -390,7 +390,7 @@ namespace RombaSharp
// Now run the D2D on the input and write out // Now run the D2D on the input and write out
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
datfile.PopulateFromDir(source, Hash.DeepHashes, true /* bare */, false /* archivesAsFiles */, SkipFileType.None, false /* addBlanks */, datfile.PopulateFromDir(source, Hash.DeepHashes, true /* bare */, false /* archivesAsFiles */, SkipFileType.None, false /* addBlanks */,
false /* addDate */, _tmpdir, false /* copyFiles */, null /* headerToCheckAgainst */, true /* chdsAsFiles */); false /* addDate */, _tmpdir, false /* copyFiles */, null /* headerToCheckAgainst */, true /* chdsAsFiles */, null /* filter */);
datfile.Write(outDir: outdat); datfile.Write(outDir: outdat);
} }
@@ -897,7 +897,7 @@ namespace RombaSharp
// First get a list of SHA-1's from the input DATs // First get a list of SHA-1's from the input DATs
DatFile datroot = new DatFile { Type = "SuperDAT", }; DatFile datroot = new DatFile { Type = "SuperDAT", };
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
datroot.PopulateFromDir(_dats, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, null, true); datroot.PopulateFromDir(_dats, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, null, true, null);
datroot.BucketBy(SortedBy.SHA1, DedupeType.None); datroot.BucketBy(SortedBy.SHA1, DedupeType.None);
// Create a List of dat hashes in the database (SHA-1) // Create a List of dat hashes in the database (SHA-1)
@@ -1013,7 +1013,7 @@ namespace RombaSharp
// Now rescan the depot itself // Now rescan the depot itself
DatFile depot = new DatFile(); DatFile depot = new DatFile();
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
depot.PopulateFromDir(depotname, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, null, true); depot.PopulateFromDir(depotname, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, null, true, null);
depot.BucketBy(SortedBy.SHA1, DedupeType.None); depot.BucketBy(SortedBy.SHA1, DedupeType.None);
// Set the base queries to use // Set the base queries to use

View File

@@ -3277,8 +3277,9 @@ namespace SabreTools.Library.DatFiles
/// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param> /// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param> /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param> /// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
/// <param name="filter">Filter object to be passed to the DatItem level</param>
public bool PopulateFromDir(string basePath, Hash omitFromScan, bool bare, bool archivesAsFiles, SkipFileType skipFileType, public bool PopulateFromDir(string basePath, Hash omitFromScan, bool bare, bool archivesAsFiles, SkipFileType skipFileType,
bool addBlanks, bool addDate, string tempDir, bool copyFiles, string headerToCheckAgainst, bool chdsAsFiles) bool addBlanks, bool addDate, string tempDir, bool copyFiles, string headerToCheckAgainst, bool chdsAsFiles, Filter filter)
{ {
// If the description is defined but not the name, set the name from the description // If the description is defined but not the name, set the name from the description
if (String.IsNullOrWhiteSpace(Name) && !String.IsNullOrWhiteSpace(Description)) if (String.IsNullOrWhiteSpace(Name) && !String.IsNullOrWhiteSpace(Description))
@@ -3379,6 +3380,12 @@ namespace SabreTools.Library.DatFiles
Utilities.TryDeleteDirectory(tempDir); Utilities.TryDeleteDirectory(tempDir);
} }
// If we have a valid filter, perform the filtering now
if (filter != null && filter != default(Filter))
{
filter.FilterDatFile(this);
}
return true; return true;
} }
@@ -4523,8 +4530,9 @@ namespace SabreTools.Library.DatFiles
/// <param name="quickScan">True to enable external scanning of archives, false otherwise</param> /// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param> /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param> /// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
/// <param name="filter">Filter object to be passed to the DatItem level</param>
/// <returns>True if verification was a success, false otherwise</returns> /// <returns>True if verification was a success, false otherwise</returns>
public bool VerifyGeneric(List<string> inputs, bool hashOnly, bool quickScan, string headerToCheckAgainst, bool chdsAsFiles) public bool VerifyGeneric(List<string> inputs, bool hashOnly, bool quickScan, string headerToCheckAgainst, bool chdsAsFiles, Filter filter)
{ {
// TODO: We want the cross section of what's the folder and what's in the DAT. Right now, it just has what's in the DAT that's not in the folder // TODO: We want the cross section of what's the folder and what's in the DAT. Right now, it just has what's in the DAT that's not in the folder
bool success = true; bool success = true;
@@ -4535,7 +4543,7 @@ namespace SabreTools.Library.DatFiles
{ {
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
PopulateFromDir(input, (quickScan ? Hash.SecureHashes : Hash.DeepHashes) /* omitFromScan */, true /* bare */, false /* archivesAsFiles */, PopulateFromDir(input, (quickScan ? Hash.SecureHashes : Hash.DeepHashes) /* omitFromScan */, true /* bare */, false /* archivesAsFiles */,
SkipFileType.None, false /* addBlanks */, false /* addDate */, "" /* tempDir */, false /* copyFiles */, headerToCheckAgainst, chdsAsFiles); SkipFileType.None, false /* addBlanks */, false /* addDate */, "" /* tempDir */, false /* copyFiles */, headerToCheckAgainst, chdsAsFiles, filter);
} }
// Setup the fixdat // Setup the fixdat

View File

@@ -330,6 +330,165 @@ Options:
compare against the input DATs. This flag forces all CHDs to be compare against the input DATs. This flag forces all CHDs to be
treated like regular files. treated like regular files.
-gn=, --game-name= Filter by game name
Include only items with this game name in the output. Additionally,
the user can specify an exact match or full C#-style regex for
pattern matching. Multiple instances of this flag are allowed.
-ngn=, --not-game-name= Filter by not game name
Include only items without this game name in the output.
Additionally, the user can specify an exact match or full C#-style
regex for pattern matching. Multiple instances of this flag are
allowed.
-gd=, --game-description= Filter by game description
Include only items with this game description in the output.
Additionally, the user can specify an exact match or full C#-style
regex for pattern matching. Multiple instances of this flag are
allowed.
-ngd=, --not-game-description= Filter by not game description
Include only items without this game description in the output.
Additionally, the user can specify an exact match or full C#-style
regex for pattern matching. Multiple instances of this flag are
allowed.
-ofg, --match-of-tags Allow cloneof and romof tags to match game name filters
If filter or exclude by game name is used, this flag will allow those
filters to be checked against the romof and cloneof tags as well.
This can allow for more advanced set-building, especially in
arcade-based sets.
-rn=, --item-name= Filter by item name
Include only items with this item name in the output. Additionally,
the user can specify an exact match or full C#-style regex for
pattern matching. Multiple instances of this flag are allowed.
-nrn=, --not-item-name= Filter by not item name
Include only items without this item name in the output.
Additionally, the user can specify an exact match or full C#-style
regex for pattern matching. Multiple instances of this flag are
allowed.
-rt=, --item-type= Filter by item type
Include only items with this item type in the output. Additionally,
the user can specify an exact match or full C#-style regex for
pattern matching. Multiple instances of this flag are allowed.
-nrt=, --not-item-type= Filter by not item type
Include only items without this item type in the output.
Additionally, the user can specify an exact match or full C#-style
regex for pattern matching. Multiple instances of this flag are
allowed.
-sgt=, --greater= Filter by size >=
Only include items whose size is greater than or equal to this value
in the output DAT. Users can specify either a regular integer number
or a number with a standard postfix. e.g. 8kb => 8000 or 8kib => 8192
-slt=, --less= Filter by size <=
Only include items whose size is less than or equal to this value in
the output DAT. Users can specify either a regular integer number or
a number with a standard postfix. e.g. 8kb => 8000 or 8kib => 8192
-seq=, --equal= Filter by size ==
Only include items of this exact size in the output DAT. Users can
specify either a regular integer number or a number with a standard
postfix. e.g. 8kb => 8000 or 8kib => 8192
-crc=, --crc= Filter by CRC hash
Include only items with this CRC hash in the output. Additionally,
the user can specify an exact match or full C#-style regex for
pattern matching. Multiple instances of this flag are allowed.
-ncrc=, --not-crc= Filter by not CRC hash
Include only items without this CRC hash in the output. Additionally,
the user can specify an exact match or full C#-style regex for
pattern matching. Multiple instances of this flag are allowed.
-md5=, --md5= Filter by MD5 hash
Include only items with this MD5 hash in the output. Additionally,
the user can specify an exact match or full C#-style regex for
pattern matching. Multiple instances of this flag are allowed.
-nmd5=, --not-md5= Filter by not MD5 hash
Include only items without this MD5 hash in the output. Additionally,
the user can specify an exact match or full C#-style regex for
pattern matching. Multiple instances of this flag are allowed.
-sha1=, --sha1= Filter by SHA-1 hash
Include only items with this SHA-1 hash in the output. Additionally,
the user can specify an exact match or full C#-style regex for
pattern matching. Multiple instances of this flag are allowed.
-nsha1=, --not-sha1= Filter by not SHA-1 hash
Include only items without this SHA-1 hash in the output.
Additionally, the user can specify an exact match or full C#-style
regex for pattern matching. Multiple instances of this flag are
allowed.
-sha256=, --sha256= Filter by SHA-256 hash
Include only items with this SHA-256 hash in the output.
Additionally, the user can specify an exact match or full C#-style
regex for pattern matching. Multiple instances of this flag are
allowed.
-nsha256=, --not-sha256= Filter by not SHA-256 hash
Include only items without this SHA-256 hash in the output.
Additionally, the user can specify an exact match or full C#-style
regex for pattern matching. Multiple instances of this flag are
allowed.
-sha384=, --sha384= Filter by SHA-384 hash
Include only items with this SHA-384 hash in the output.
Additionally, the user can specify an exact match or full C#-style
regex for pattern matching. Multiple instances of this flag are
allowed.
-nsha384=, --not-sha384= Filter by not SHA-384 hash
Include only items without this SHA-384 hash in the output.
Additionally, the user can specify an exact match or full C#-style
regex for pattern matching. Multiple instances of this flag are
allowed.
-sha512=, --sha512= Filter by SHA-512 hash
Include only items with this SHA-512 hash in the output.
Additionally, the user can specify an exact match or full C#-style
regex for pattern matching. Multiple instances of this flag are
allowed.
-nsha512=, --not-sha512= Filter by not SHA-512 hash
Include only items without this SHA-512 hash in the output.
Additionally, the user can specify an exact match or full C#-style
regex for pattern matching. Multiple instances of this flag are
allowed.
-is=, --status= Include only items with a given status
Include only items with this item status in the output. Multiple
instances of this flag are allowed.
Possible values are: None, Good, BadDump, Nodump, Verified
-nis=, --not-status= Exclude only items with a given status
Include only items without this item status in the output. Multiple
instances of this flag are allowed.
Possible values are: None, Good, BadDump, Nodump, Verified
-gt=, --game-type= Include only games with a given type
Include only items with this game type in the output. Multiple
instances of this flag are allowed.
Possible values are: None, Bios, Device, Mechanical
-ngt=, --not-game-type= Exclude only games with a given type
Include only items without this game type in the output. Multiple
instances of this flag are allowed.
Possible values are: None, Bios, Device, Mechanical
-run, --runnable Include only items that are marked runnable
This allows users to include only verified runnable games.
-nrun, --not-runnable Include only items that are not marked runnable
This allows users to include only unrunnable games.
-t=, --temp= Set the temporary directory to use -t=, --temp= Set the temporary directory to use
Optionally, a temp folder can be supplied in the case the default Optionally, a temp folder can be supplied in the case the default
temp directory is not preferred. temp directory is not preferred.
@@ -1368,6 +1527,165 @@ Options:
parent sets based on the cloneof and romof tags as well as device parent sets based on the cloneof and romof tags as well as device
references. This is incompatible with the other --dat-X flags. references. This is incompatible with the other --dat-X flags.
-gn=, --game-name= Filter by game name
Include only items with this game name in the output. Additionally,
the user can specify an exact match or full C#-style regex for
pattern matching. Multiple instances of this flag are allowed.
-ngn=, --not-game-name= Filter by not game name
Include only items without this game name in the output.
Additionally, the user can specify an exact match or full C#-style
regex for pattern matching. Multiple instances of this flag are
allowed.
-gd=, --game-description= Filter by game description
Include only items with this game description in the output.
Additionally, the user can specify an exact match or full C#-style
regex for pattern matching. Multiple instances of this flag are
allowed.
-ngd=, --not-game-description= Filter by not game description
Include only items without this game description in the output.
Additionally, the user can specify an exact match or full C#-style
regex for pattern matching. Multiple instances of this flag are
allowed.
-ofg, --match-of-tags Allow cloneof and romof tags to match game name filters
If filter or exclude by game name is used, this flag will allow those
filters to be checked against the romof and cloneof tags as well.
This can allow for more advanced set-building, especially in
arcade-based sets.
-rn=, --item-name= Filter by item name
Include only items with this item name in the output. Additionally,
the user can specify an exact match or full C#-style regex for
pattern matching. Multiple instances of this flag are allowed.
-nrn=, --not-item-name= Filter by not item name
Include only items without this item name in the output.
Additionally, the user can specify an exact match or full C#-style
regex for pattern matching. Multiple instances of this flag are
allowed.
-rt=, --item-type= Filter by item type
Include only items with this item type in the output. Additionally,
the user can specify an exact match or full C#-style regex for
pattern matching. Multiple instances of this flag are allowed.
-nrt=, --not-item-type= Filter by not item type
Include only items without this item type in the output.
Additionally, the user can specify an exact match or full C#-style
regex for pattern matching. Multiple instances of this flag are
allowed.
-sgt=, --greater= Filter by size >=
Only include items whose size is greater than or equal to this value
in the output DAT. Users can specify either a regular integer number
or a number with a standard postfix. e.g. 8kb => 8000 or 8kib => 8192
-slt=, --less= Filter by size <=
Only include items whose size is less than or equal to this value in
the output DAT. Users can specify either a regular integer number or
a number with a standard postfix. e.g. 8kb => 8000 or 8kib => 8192
-seq=, --equal= Filter by size ==
Only include items of this exact size in the output DAT. Users can
specify either a regular integer number or a number with a standard
postfix. e.g. 8kb => 8000 or 8kib => 8192
-crc=, --crc= Filter by CRC hash
Include only items with this CRC hash in the output. Additionally,
the user can specify an exact match or full C#-style regex for
pattern matching. Multiple instances of this flag are allowed.
-ncrc=, --not-crc= Filter by not CRC hash
Include only items without this CRC hash in the output. Additionally,
the user can specify an exact match or full C#-style regex for
pattern matching. Multiple instances of this flag are allowed.
-md5=, --md5= Filter by MD5 hash
Include only items with this MD5 hash in the output. Additionally,
the user can specify an exact match or full C#-style regex for
pattern matching. Multiple instances of this flag are allowed.
-nmd5=, --not-md5= Filter by not MD5 hash
Include only items without this MD5 hash in the output. Additionally,
the user can specify an exact match or full C#-style regex for
pattern matching. Multiple instances of this flag are allowed.
-sha1=, --sha1= Filter by SHA-1 hash
Include only items with this SHA-1 hash in the output. Additionally,
the user can specify an exact match or full C#-style regex for
pattern matching. Multiple instances of this flag are allowed.
-nsha1=, --not-sha1= Filter by not SHA-1 hash
Include only items without this SHA-1 hash in the output.
Additionally, the user can specify an exact match or full C#-style
regex for pattern matching. Multiple instances of this flag are
allowed.
-sha256=, --sha256= Filter by SHA-256 hash
Include only items with this SHA-256 hash in the output.
Additionally, the user can specify an exact match or full C#-style
regex for pattern matching. Multiple instances of this flag are
allowed.
-nsha256=, --not-sha256= Filter by not SHA-256 hash
Include only items without this SHA-256 hash in the output.
Additionally, the user can specify an exact match or full C#-style
regex for pattern matching. Multiple instances of this flag are
allowed.
-sha384=, --sha384= Filter by SHA-384 hash
Include only items with this SHA-384 hash in the output.
Additionally, the user can specify an exact match or full C#-style
regex for pattern matching. Multiple instances of this flag are
allowed.
-nsha384=, --not-sha384= Filter by not SHA-384 hash
Include only items without this SHA-384 hash in the output.
Additionally, the user can specify an exact match or full C#-style
regex for pattern matching. Multiple instances of this flag are
allowed.
-sha512=, --sha512= Filter by SHA-512 hash
Include only items with this SHA-512 hash in the output.
Additionally, the user can specify an exact match or full C#-style
regex for pattern matching. Multiple instances of this flag are
allowed.
-nsha512=, --not-sha512= Filter by not SHA-512 hash
Include only items without this SHA-512 hash in the output.
Additionally, the user can specify an exact match or full C#-style
regex for pattern matching. Multiple instances of this flag are
allowed.
-is=, --status= Include only items with a given status
Include only items with this item status in the output. Multiple
instances of this flag are allowed.
Possible values are: None, Good, BadDump, Nodump, Verified
-nis=, --not-status= Exclude only items with a given status
Include only items without this item status in the output. Multiple
instances of this flag are allowed.
Possible values are: None, Good, BadDump, Nodump, Verified
-gt=, --game-type= Include only games with a given type
Include only items with this game type in the output. Multiple
instances of this flag are allowed.
Possible values are: None, Bios, Device, Mechanical
-ngt=, --not-game-type= Exclude only games with a given type
Include only items without this game type in the output. Multiple
instances of this flag are allowed.
Possible values are: None, Bios, Device, Mechanical
-run, --runnable Include only items that are marked runnable
This allows users to include only verified runnable games.
-nrun, --not-runnable Include only items that are not marked runnable
This allows users to include only unrunnable games.
** Section 3.0 - Examples ** Section 3.0 - Examples
Here are a few usage examples based on features that are commonly used by most Here are a few usage examples based on features that are commonly used by most

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,5 @@
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using SabreTools.Library.Data; using SabreTools.Library.Data;
using SabreTools.Library.DatFiles; using SabreTools.Library.DatFiles;
using SabreTools.Library.Tools; using SabreTools.Library.Tools;
@@ -15,444 +14,452 @@ using SearchOption = System.IO.SearchOption;
namespace SabreTools namespace SabreTools
{ {
public partial class SabreTools public partial class SabreTools
{ {
#region Init Methods #region Init Methods
/// <summary> /// <summary>
/// Wrap creating a DAT file from files or a directory in parallel /// Wrap creating a DAT file from files or a directory in parallel
/// </summary> /// </summary>
/// <param name="inputs">List of input filenames</param> /// <param name="inputs">List of input filenames</param>
/// /* Normal DAT header info */ /// /* Normal DAT header info */
/// <param name="datHeader">All DatHeader info to be used</param> /// <param name="datHeader">All DatHeader info to be used</param>
/// /* Standard DFD info */ /// /* Standard DFD info */
/// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param> /// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
/// <param name="removeDateFromAutomaticName">True if the date should be omitted from the DAT, false otherwise</param> /// <param name="removeDateFromAutomaticName">True if the date should be omitted from the DAT, false otherwise</param>
/// <param name="archivesAsFiles">True if archives should be treated as files, false otherwise</param> /// <param name="archivesAsFiles">True if archives should be treated as files, false otherwise</param>
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param> /// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
/// <param name="skipFileType">Type of files that should be skipped on scan</param> /// <param name="skipFileType">Type of files that should be skipped on scan</param>
/// <param name="addBlankFilesForEmptyFolder">True if blank items should be created for empty folders, false otherwise</param> /// <param name="addBlankFilesForEmptyFolder">True if blank items should be created for empty folders, false otherwise</param>
/// <param name="addFileDates">True if dates should be archived for all files, false otherwise</param> /// <param name="addFileDates">True if dates should be archived for all files, false otherwise</param>
/// /* Output DAT info */ /// <param name="filter">Filter object to be passed to the DatItem level</param>
/// <param name="tempDir">Name of the directory to create a temp folder in (blank is default temp directory)</param> /// /* Output DAT info */
/// <param name="outDir">Name of the directory to output the DAT to (blank is the current directory)</param> /// <param name="tempDir">Name of the directory to create a temp folder in (blank is default temp directory)</param>
/// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param> /// <param name="outDir">Name of the directory to output the DAT to (blank is the current directory)</param>
private static void InitDatFromDir(List<string> inputs, /// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
/* Normal DAT header info */ /// /* Filtering info */
DatHeader datHeader, /// <param name="filter">Filter object to be passed to the DatItem level</param>
private static void InitDatFromDir(List<string> inputs,
/* Normal DAT header info */
DatHeader datHeader,
/* Standard DFD info */ /* Standard DFD info */
Hash omitFromScan, Hash omitFromScan,
bool removeDateFromAutomaticName, bool removeDateFromAutomaticName,
bool archivesAsFiles, bool archivesAsFiles,
bool chdsAsFiles, bool chdsAsFiles,
SkipFileType skipFileType, SkipFileType skipFileType,
bool addBlankFilesForEmptyFolder, bool addBlankFilesForEmptyFolder,
bool addFileDates, bool addFileDates,
/* Output DAT info */ /* Output DAT info */
string tempDir, string tempDir,
string outDir, string outDir,
bool copyFiles) bool copyFiles,
{
// Create a new DATFromDir object and process the inputs /* Filtering info */
DatFile basedat = new DatFile(datHeader) Filter filter)
{ {
Date = DateTime.Now.ToString("yyyy-MM-dd"), // Create a new DATFromDir object and process the inputs
}; DatFile basedat = new DatFile(datHeader)
{
Date = DateTime.Now.ToString("yyyy-MM-dd"),
};
// For each input directory, create a DAT // For each input directory, create a DAT
foreach (string path in inputs) foreach (string path in inputs)
{ {
if (Directory.Exists(path) || File.Exists(path)) if (Directory.Exists(path) || File.Exists(path))
{ {
// Clone the base Dat for information // Clone the base Dat for information
DatFile datdata = new DatFile(basedat); DatFile datdata = new DatFile(basedat);
string basePath = Path.GetFullPath(path); string basePath = Path.GetFullPath(path);
bool success = datdata.PopulateFromDir(basePath, omitFromScan, removeDateFromAutomaticName, archivesAsFiles, bool success = datdata.PopulateFromDir(basePath, omitFromScan, removeDateFromAutomaticName, archivesAsFiles,
skipFileType, addBlankFilesForEmptyFolder, addFileDates, tempDir, copyFiles, datHeader.Header, chdsAsFiles); skipFileType, addBlankFilesForEmptyFolder, addFileDates, tempDir, copyFiles, datHeader.Header, chdsAsFiles, filter);
// If it was a success, write the DAT out // If it was a success, write the DAT out
if (success) if (success)
{ {
datdata.Write(outDir); datdata.Write(outDir);
} }
// Otherwise, show the help // Otherwise, show the help
else else
{ {
Console.WriteLine(); Console.WriteLine();
_help.OutputIndividualFeature("DATFromDir"); _help.OutputIndividualFeature("DATFromDir");
} }
} }
} }
} }
/// <summary> /// <summary>
/// Wrap extracting headers /// Wrap extracting headers
/// </summary> /// </summary>
/// <param name="inputs">Input file or folder names</param> /// <param name="inputs">Input file or folder names</param>
/// <param name="outDir">Output directory to write new files to, blank defaults to rom folder</param> /// <param name="outDir">Output directory to write new files to, blank defaults to rom folder</param>
/// <param name="nostore">True if headers should not be stored in the database, false otherwise</param> /// <param name="nostore">True if headers should not be stored in the database, false otherwise</param>
private static void InitExtractRemoveHeader( private static void InitExtractRemoveHeader(
List<string> inputs, List<string> inputs,
string outDir, string outDir,
bool nostore) bool nostore)
{ {
// Get only files from the inputs // Get only files from the inputs
List<string> files = Utilities.GetOnlyFilesFromInputs(inputs); List<string> files = Utilities.GetOnlyFilesFromInputs(inputs);
foreach (string file in files) foreach (string file in files)
{ {
Utilities.DetectSkipperAndTransform(file, outDir, nostore); Utilities.DetectSkipperAndTransform(file, outDir, nostore);
} }
} }
/// <summary> /// <summary>
/// Wrap replacing headers /// Wrap replacing headers
/// </summary> /// </summary>
/// <param name="inputs">Input file or folder names</param> /// <param name="inputs">Input file or folder names</param>
/// <param name="outDir">Output directory to write new files to, blank defaults to rom folder</param> /// <param name="outDir">Output directory to write new files to, blank defaults to rom folder</param>
private static void InitReplaceHeader( private static void InitReplaceHeader(
List<string> inputs, List<string> inputs,
string outDir) string outDir)
{ {
// Get only files from the inputs // Get only files from the inputs
List<string> files = Utilities.GetOnlyFilesFromInputs(inputs); List<string> files = Utilities.GetOnlyFilesFromInputs(inputs);
foreach (string file in files) foreach (string file in files)
{ {
Utilities.RestoreHeader(file, outDir); Utilities.RestoreHeader(file, outDir);
} }
} }
/// <summary> /// <summary>
/// Wrap sorting files using an input DAT /// Wrap sorting files using an input DAT
/// </summary> /// </summary>
/// <param name="datfiles">Names of the DATs to compare against</param> /// <param name="datfiles">Names of the DATs to compare against</param>
/// <param name="inputs">List of input files/folders to check</param> /// <param name="inputs">List of input files/folders to check</param>
/// <param name="outDir">Output directory to use to build to</param> /// <param name="outDir">Output directory to use to build to</param>
/// <param name="depot">True if the input directories are treated as romba depots, false otherwise</param> /// <param name="depot">True if the input directories are treated as romba depots, false otherwise</param>
/// <param name="quickScan">True to enable external scanning of archives, false otherwise</param> /// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
/// <param name="date">True if the date from the DAT should be used if available, false otherwise</param> /// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
/// <param name="delete">True if input files should be deleted, false otherwise</param> /// <param name="delete">True if input files should be deleted, false otherwise</param>
/// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param> /// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
/// <param name="outputFormat">Output format that files should be written to</param> /// <param name="outputFormat">Output format that files should be written to</param>
/// <param name="romba">True if files should be output in Romba depot folders, false otherwise</param> /// <param name="romba">True if files should be output in Romba depot folders, false otherwise</param>
/// <param name="sevenzip">Integer representing the archive handling level for 7z</param> /// <param name="sevenzip">Integer representing the archive handling level for 7z</param>
/// <param name="gz">Integer representing the archive handling level for GZip</param> /// <param name="gz">Integer representing the archive handling level for GZip</param>
/// <param name="rar">Integer representing the archive handling level for RAR</param> /// <param name="rar">Integer representing the archive handling level for RAR</param>
/// <param name="zip">Integer representing the archive handling level for Zip</param> /// <param name="zip">Integer representing the archive handling level for Zip</param>
/// <param name="updateDat">True if the updated DAT should be output, false otherwise</param> /// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param> /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param> /// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param> /// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
/// <param name="individual">True if DATs should be sorted individually, false if they should be done in bulk</param> /// <param name="individual">True if DATs should be sorted individually, false if they should be done in bulk</param>
private static void InitSort( private static void InitSort(
List<string> datfiles, List<string> datfiles,
List<string> inputs, List<string> inputs,
string outDir, string outDir,
bool depot, bool depot,
bool quickScan, bool quickScan,
bool date, bool date,
bool delete, bool delete,
bool inverse, bool inverse,
OutputFormat outputFormat, OutputFormat outputFormat,
bool romba, bool romba,
int sevenzip, int sevenzip,
int gz, int gz,
int rar, int rar,
int zip, int zip,
bool updateDat, bool updateDat,
string headerToCheckAgainst, string headerToCheckAgainst,
SplitType splitType, SplitType splitType,
bool chdsAsFiles, bool chdsAsFiles,
bool individual) bool individual)
{ {
// Get the archive scanning level // Get the archive scanning level
ArchiveScanLevel asl = Utilities.GetArchiveScanLevelFromNumbers(sevenzip, gz, rar, zip); ArchiveScanLevel asl = Utilities.GetArchiveScanLevelFromNumbers(sevenzip, gz, rar, zip);
// Get a list of files from the input datfiles // Get a list of files from the input datfiles
datfiles = Utilities.GetOnlyFilesFromInputs(datfiles); datfiles = Utilities.GetOnlyFilesFromInputs(datfiles);
// If we are in individual mode, process each DAT on their own, appending the DAT name to the output dir // If we are in individual mode, process each DAT on their own, appending the DAT name to the output dir
if (individual) if (individual)
{ {
foreach (string datfile in datfiles) foreach (string datfile in datfiles)
{ {
DatFile datdata = new DatFile(); DatFile datdata = new DatFile();
datdata.Parse(datfile, 99, 99, splitType, keep: true, useTags: true); datdata.Parse(datfile, 99, 99, splitType, keep: true, useTags: true);
// If we have the depot flag, respect it // If we have the depot flag, respect it
if (depot) if (depot)
{ {
datdata.RebuildDepot(inputs, Path.Combine(outDir, datdata.FileName), date, delete, inverse, outputFormat, romba, datdata.RebuildDepot(inputs, Path.Combine(outDir, datdata.FileName), date, delete, inverse, outputFormat, romba,
updateDat, headerToCheckAgainst); updateDat, headerToCheckAgainst);
} }
else else
{ {
datdata.RebuildGeneric(inputs, Path.Combine(outDir, datdata.FileName), quickScan, date, delete, inverse, outputFormat, romba, asl, datdata.RebuildGeneric(inputs, Path.Combine(outDir, datdata.FileName), quickScan, date, delete, inverse, outputFormat, romba, asl,
updateDat, headerToCheckAgainst, chdsAsFiles); updateDat, headerToCheckAgainst, chdsAsFiles);
} }
} }
} }
// Otherwise, process all DATs into the same output // Otherwise, process all DATs into the same output
else else
{ {
InternalStopwatch watch = new InternalStopwatch("Populating internal DAT"); InternalStopwatch watch = new InternalStopwatch("Populating internal DAT");
// Add all of the input DATs into one huge internal DAT // Add all of the input DATs into one huge internal DAT
DatFile datdata = new DatFile(); DatFile datdata = new DatFile();
foreach (string datfile in datfiles) foreach (string datfile in datfiles)
{ {
datdata.Parse(datfile, 99, 99, splitType, keep: true, useTags: true); datdata.Parse(datfile, 99, 99, splitType, keep: true, useTags: true);
} }
watch.Stop(); watch.Stop();
// If we have the depot flag, respect it // If we have the depot flag, respect it
if (depot) if (depot)
{ {
datdata.RebuildDepot(inputs, outDir, date, delete, inverse, outputFormat, romba, datdata.RebuildDepot(inputs, outDir, date, delete, inverse, outputFormat, romba,
updateDat, headerToCheckAgainst); updateDat, headerToCheckAgainst);
} }
else else
{ {
datdata.RebuildGeneric(inputs, outDir, quickScan, date, delete, inverse, outputFormat, romba, asl, datdata.RebuildGeneric(inputs, outDir, quickScan, date, delete, inverse, outputFormat, romba, asl,
updateDat, headerToCheckAgainst, chdsAsFiles); updateDat, headerToCheckAgainst, chdsAsFiles);
} }
} }
} }
/// <summary> /// <summary>
/// Wrap splitting a DAT by any known type /// Wrap splitting a DAT by any known type
/// </summary> /// </summary>
/// <param name="inputs">List of inputs to be used</param> /// <param name="inputs">List of inputs to be used</param>
/// <param name="outDir">Output directory for the split files</param> /// <param name="outDir">Output directory for the split files</param>
/// <param name="inplace">True if files should be written to the source folders, false otherwise</param> /// <param name="inplace">True if files should be written to the source folders, false otherwise</param>
/// <param name="datFormat">DatFormat to be used for outputting the DAT</param> /// <param name="datFormat">DatFormat to be used for outputting the DAT</param>
/// <param name="splittingMode">Type of split to perform, if any</param> /// <param name="splittingMode">Type of split to perform, if any</param>
/// <param name="exta">First extension to split on (Extension Split only)</param> /// <param name="exta">First extension to split on (Extension Split only)</param>
/// <param name="extb">Second extension to split on (Extension Split only)</param> /// <param name="extb">Second extension to split on (Extension Split only)</param>
/// <param name="shortname">True if short filenames should be used, false otherwise (Level Split only)</param> /// <param name="shortname">True if short filenames should be used, false otherwise (Level Split only)</param>
/// <param name="basedat">True if original filenames should be used as the base for output filename, false otherwise (Level Split only)</param> /// <param name="basedat">True if original filenames should be used as the base for output filename, false otherwise (Level Split only)</param>
/// <param name="radix">Long value representing the split point (Size Split only)</param> /// <param name="radix">Long value representing the split point (Size Split only)</param>
private static void InitSplit( private static void InitSplit(
List<string> inputs, List<string> inputs,
string outDir, string outDir,
bool inplace, bool inplace,
DatFormat datFormat, DatFormat datFormat,
SplittingMode splittingMode, SplittingMode splittingMode,
List<string> exta, List<string> exta,
List<string> extb, List<string> extb,
bool shortname, bool shortname,
bool basedat, bool basedat,
long radix) long radix)
{ {
DatFile datfile = new DatFile(); DatFile datfile = new DatFile();
datfile.DatFormat = datFormat; datfile.DatFormat = datFormat;
datfile.DetermineSplitType(inputs, outDir, inplace, splittingMode, exta, extb, shortname, basedat, radix); datfile.DetermineSplitType(inputs, outDir, inplace, splittingMode, exta, extb, shortname, basedat, radix);
} }
/// <summary> /// <summary>
/// Wrap getting statistics on a DAT or folder of DATs /// Wrap getting statistics on a DAT or folder of DATs
/// </summary> /// </summary>
/// <param name="inputs">List of inputs to be used</param> /// <param name="inputs">List of inputs to be used</param>
/// <param name="filename">Name of the file to output to, blank for default</param> /// <param name="filename">Name of the file to output to, blank for default</param>
/// <param name="outDir">Output directory for the report files</param> /// <param name="outDir">Output directory for the report files</param>
/// <param name="single">True to show individual DAT statistics, false otherwise</param> /// <param name="single">True to show individual DAT statistics, false otherwise</param>
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param> /// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param> /// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
/// <param name="statDatFormat">Set the statistics output format to use</param> /// <param name="statDatFormat">Set the statistics output format to use</param>
private static void InitStats( private static void InitStats(
List<string> inputs, List<string> inputs,
string filename, string filename,
string outDir, string outDir,
bool single, bool single,
bool baddumpCol, bool baddumpCol,
bool nodumpCol, bool nodumpCol,
StatReportFormat statDatFormat) StatReportFormat statDatFormat)
{ {
DatFile.OutputStats(inputs, filename, outDir, single, baddumpCol, nodumpCol, statDatFormat); DatFile.OutputStats(inputs, filename, outDir, single, baddumpCol, nodumpCol, statDatFormat);
} }
/// <summary> /// <summary>
/// Wrap converting and updating DAT file from any format to any format /// Wrap converting and updating DAT file from any format to any format
/// </summary> /// </summary>
/// <param name="inputPaths">List of input filenames</param> /// <param name="inputPaths">List of input filenames</param>
/// <param name="basePaths">List of base filenames</param> /// <param name="basePaths">List of base filenames</param>
/// /* Normal DAT header info */ /// /* Normal DAT header info */
/// <param name="datHeader">All DatHeader info to be used</param> /// <param name="datHeader">All DatHeader info to be used</param>
/// /* Merging and Diffing info */ /// /* Merging and Diffing info */
/// <param name="updateMode">Non-zero flag for diffing mode, zero otherwise</param> /// <param name="updateMode">Non-zero flag for diffing mode, zero otherwise</param>
/// <param name="inplace">True if the cascade-diffed files should overwrite their inputs, false otherwise</param> /// <param name="inplace">True if the cascade-diffed files should overwrite their inputs, false otherwise</param>
/// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param> /// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
/// <param name="bare">True if the date should not be appended to the default name, false otherwise</param> /// <param name="bare">True if the date should not be appended to the default name, false otherwise</param>
/// /* Filtering info */ /// /* Filtering info */
/// <param name="filter">Pre-populated filter object for DAT filtering</param> /// <param name="filter">Pre-populated filter object for DAT filtering</param>
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param> /// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
/// /* Output DAT info */ /// /* Output DAT info */
/// <param name="outDir">Optional param for output directory</param> /// <param name="outDir">Optional param for output directory</param>
/// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param> /// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
/// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param> /// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
/// <param name="descAsName">True if descriptions should be used as names, false otherwise (default)</param> /// <param name="descAsName">True if descriptions should be used as names, false otherwise (default)</param>
/// <param name="replaceMode">ReplaceMode representing what should be updated [only for base replacement]</param> /// <param name="replaceMode">ReplaceMode representing what should be updated [only for base replacement]</param>
/// <param name="onlySame">True if descriptions should only be replaced if the game name is the same, false otherwise [only for base replacement]</param> /// <param name="onlySame">True if descriptions should only be replaced if the game name is the same, false otherwise [only for base replacement]</param>
private static void InitUpdate( private static void InitUpdate(
List<string> inputPaths, List<string> inputPaths,
List<string> basePaths, List<string> basePaths,
/* Normal DAT header info */ /* Normal DAT header info */
DatHeader datHeader, DatHeader datHeader,
/* Merging and Diffing info */ /* Merging and Diffing info */
UpdateMode updateMode, UpdateMode updateMode,
bool inplace, bool inplace,
bool skip, bool skip,
bool bare, bool bare,
/* Filtering info */ /* Filtering info */
Filter filter, Filter filter,
SplitType splitType, SplitType splitType,
/* Output DAT info */ /* Output DAT info */
string outDir, string outDir,
bool clean, bool clean,
bool remUnicode, bool remUnicode,
bool descAsName, bool descAsName,
ReplaceMode replaceMode, ReplaceMode replaceMode,
bool onlySame) bool onlySame)
{ {
// Normalize the extensions // Normalize the extensions
datHeader.AddExtension = (String.IsNullOrWhiteSpace(datHeader.AddExtension) || datHeader.AddExtension.StartsWith(".") datHeader.AddExtension = (String.IsNullOrWhiteSpace(datHeader.AddExtension) || datHeader.AddExtension.StartsWith(".")
? datHeader.AddExtension ? datHeader.AddExtension
: "." + datHeader.AddExtension); : "." + datHeader.AddExtension);
datHeader.ReplaceExtension = (String.IsNullOrWhiteSpace(datHeader.ReplaceExtension) || datHeader.ReplaceExtension.StartsWith(".") datHeader.ReplaceExtension = (String.IsNullOrWhiteSpace(datHeader.ReplaceExtension) || datHeader.ReplaceExtension.StartsWith(".")
? datHeader.ReplaceExtension ? datHeader.ReplaceExtension
: "." + datHeader.ReplaceExtension); : "." + datHeader.ReplaceExtension);
// If we're in a special update mode and the names aren't set, set defaults // If we're in a special update mode and the names aren't set, set defaults
if (updateMode != 0) if (updateMode != 0)
{ {
// Get the values that will be used // Get the values that will be used
if (String.IsNullOrWhiteSpace(datHeader.Date)) if (String.IsNullOrWhiteSpace(datHeader.Date))
{ {
datHeader.Date = DateTime.Now.ToString("yyyy-MM-dd"); datHeader.Date = DateTime.Now.ToString("yyyy-MM-dd");
} }
if (String.IsNullOrWhiteSpace(datHeader.Name)) if (String.IsNullOrWhiteSpace(datHeader.Name))
{ {
datHeader.Name = (updateMode != 0 ? "DiffDAT" : "MergeDAT") datHeader.Name = (updateMode != 0 ? "DiffDAT" : "MergeDAT")
+ (datHeader.Type == "SuperDAT" ? "-SuperDAT" : "") + (datHeader.Type == "SuperDAT" ? "-SuperDAT" : "")
+ (datHeader.DedupeRoms != DedupeType.None ? "-deduped" : ""); + (datHeader.DedupeRoms != DedupeType.None ? "-deduped" : "");
} }
if (String.IsNullOrWhiteSpace(datHeader.Description)) if (String.IsNullOrWhiteSpace(datHeader.Description))
{ {
datHeader.Description = (updateMode != 0 ? "DiffDAT" : "MergeDAT") datHeader.Description = (updateMode != 0 ? "DiffDAT" : "MergeDAT")
+ (datHeader.Type == "SuperDAT" ? "-SuperDAT" : "") + (datHeader.Type == "SuperDAT" ? "-SuperDAT" : "")
+ (datHeader.DedupeRoms != DedupeType.None ? " - deduped" : ""); + (datHeader.DedupeRoms != DedupeType.None ? " - deduped" : "");
if (!bare) if (!bare)
{ {
datHeader.Description += " (" + datHeader.Date + ")"; datHeader.Description += " (" + datHeader.Date + ")";
} }
} }
if (String.IsNullOrWhiteSpace(datHeader.Category) && updateMode != 0) if (String.IsNullOrWhiteSpace(datHeader.Category) && updateMode != 0)
{ {
datHeader.Category = "DiffDAT"; datHeader.Category = "DiffDAT";
} }
if (String.IsNullOrWhiteSpace(datHeader.Author)) if (String.IsNullOrWhiteSpace(datHeader.Author))
{ {
datHeader.Author = "SabreTools"; datHeader.Author = "SabreTools";
} }
} }
// If no replacement mode is set, default to Names // If no replacement mode is set, default to Names
if (replaceMode == ReplaceMode.None) if (replaceMode == ReplaceMode.None)
{ {
replaceMode = ReplaceMode.ItemName; replaceMode = ReplaceMode.ItemName;
} }
// Populate the DatData object // Populate the DatData object
DatFile userInputDat = new DatFile(datHeader); DatFile userInputDat = new DatFile(datHeader);
userInputDat.DetermineUpdateType(inputPaths, basePaths, outDir, updateMode, inplace, skip, clean, userInputDat.DetermineUpdateType(inputPaths, basePaths, outDir, updateMode, inplace, skip, clean,
remUnicode, descAsName, filter, splitType, replaceMode, onlySame); remUnicode, descAsName, filter, splitType, replaceMode, onlySame);
} }
/// <summary> /// <summary>
/// Wrap verifying files using an input DAT /// Wrap verifying files using an input DAT
/// </summary> /// </summary>
/// <param name="datfiles">Names of the DATs to compare against</param> /// <param name="datfiles">Names of the DATs to compare against</param>
/// <param name="inputs">Input directories to compare against</param> /// <param name="inputs">Input directories to compare against</param>
/// <param name="depot">True if the input direcories are treated as romba depots, false otherwise</param> /// <param name="depot">True if the input direcories are treated as romba depots, false otherwise</param>
/// <param name="hashOnly">True if only hashes should be checked, false for full file information</param> /// <param name="hashOnly">True if only hashes should be checked, false for full file information</param>
/// <param name="quickScan">True to enable external scanning of archives, false otherwise</param> /// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param> /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param> /// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param> /// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
/// <param name="individual">True if DATs should be verified individually, false if they should be done in bulk</param> /// <param name="individual">True if DATs should be verified individually, false if they should be done in bulk</param>
private static void InitVerify( /// <param name="filter">Filter object to be passed to the DatItem level</param>
List<string> datfiles, private static void InitVerify(
List<string> inputs, List<string> datfiles,
bool depot, List<string> inputs,
bool hashOnly, bool depot,
bool quickScan, bool hashOnly,
string headerToCheckAgainst, bool quickScan,
SplitType splitType, string headerToCheckAgainst,
bool chdsAsFiles, SplitType splitType,
bool individual) bool chdsAsFiles,
{ bool individual,
// Get the archive scanning level Filter filter)
ArchiveScanLevel asl = Utilities.GetArchiveScanLevelFromNumbers(1, 1, 1, 1); {
// Get the archive scanning level
ArchiveScanLevel asl = Utilities.GetArchiveScanLevelFromNumbers(1, 1, 1, 1);
// Get a list of files from the input datfiles // Get a list of files from the input datfiles
datfiles = Utilities.GetOnlyFilesFromInputs(datfiles); datfiles = Utilities.GetOnlyFilesFromInputs(datfiles);
// If we are in individual mode, process each DAT on their own // If we are in individual mode, process each DAT on their own
if (individual) if (individual)
{ {
foreach (string datfile in datfiles) foreach (string datfile in datfiles)
{ {
DatFile datdata = new DatFile(); DatFile datdata = new DatFile();
datdata.Parse(datfile, 99, 99, splitType, keep: true, useTags: true); datdata.Parse(datfile, 99, 99, splitType, keep: true, useTags: true);
// If we have the depot flag, respect it // If we have the depot flag, respect it
if (depot) if (depot)
{ {
datdata.VerifyDepot(inputs, headerToCheckAgainst); datdata.VerifyDepot(inputs, headerToCheckAgainst);
} }
else else
{ {
datdata.VerifyGeneric(inputs, hashOnly, quickScan, headerToCheckAgainst, chdsAsFiles); datdata.VerifyGeneric(inputs, hashOnly, quickScan, headerToCheckAgainst, chdsAsFiles, filter);
} }
} }
} }
// Otherwise, process all DATs into the same output // Otherwise, process all DATs into the same output
else else
{ {
InternalStopwatch watch = new InternalStopwatch("Populating internal DAT"); InternalStopwatch watch = new InternalStopwatch("Populating internal DAT");
// Add all of the input DATs into one huge internal DAT // Add all of the input DATs into one huge internal DAT
DatFile datdata = new DatFile(); DatFile datdata = new DatFile();
foreach (string datfile in datfiles) foreach (string datfile in datfiles)
{ {
datdata.Parse(datfile, 99, 99, splitType, keep: true, useTags: true); datdata.Parse(datfile, 99, 99, splitType, keep: true, useTags: true);
} }
watch.Stop(); watch.Stop();
// If we have the depot flag, respect it // If we have the depot flag, respect it
if (depot) if (depot)
{ {
datdata.VerifyDepot(inputs, headerToCheckAgainst); datdata.VerifyDepot(inputs, headerToCheckAgainst);
} }
else else
{ {
datdata.VerifyGeneric(inputs, hashOnly, quickScan, headerToCheckAgainst, chdsAsFiles); datdata.VerifyGeneric(inputs, hashOnly, quickScan, headerToCheckAgainst, chdsAsFiles, filter);
} }
} }
} }
#endregion #endregion
} }
} }

View File

@@ -862,7 +862,7 @@ namespace SabreTools
case "DATFromDir": case "DATFromDir":
VerifyInputs(inputs, feature); VerifyInputs(inputs, feature);
InitDatFromDir(inputs, datHeader, omitFromScan, noAutomaticDate, archivesAsFiles, chdsAsFiles, InitDatFromDir(inputs, datHeader, omitFromScan, noAutomaticDate, archivesAsFiles, chdsAsFiles,
skipFileType, addBlankFiles, addFileDates, tempDir, outDir, copyFiles); skipFileType, addBlankFiles, addFileDates, tempDir, outDir, copyFiles, filter);
break; break;
// If we're in header extract and remove mode // If we're in header extract and remove mode
case "Extract": case "Extract":
@@ -902,7 +902,7 @@ namespace SabreTools
// If we're using the verifier // If we're using the verifier
case "Verify": case "Verify":
VerifyInputs(inputs, feature); VerifyInputs(inputs, feature);
InitVerify(datfiles, inputs, depot, hashOnly, quickScan, datHeader.Header, splitType, chdsAsFiles, individual); InitVerify(datfiles, inputs, depot, hashOnly, quickScan, datHeader.Header, splitType, chdsAsFiles, individual, filter);
break; break;
// If nothing is set, show the help // If nothing is set, show the help
default: default: