Mirror of https://github.com/claunia/SabreTools.git (synced 2025-12-16 19:14:27 +00:00)
[ALL] Add filtering support to D2D and Verify
Verify came for free since it uses the D2D code
@@ -66,8 +66,8 @@ namespace RombaSharp
 foreach (string dir in onlyDirs)
 {
     // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
-    df.PopulateFromDir(dir, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, null, true);
-    df.PopulateFromDir(dir, Hash.DeepHashes, false, true, SkipFileType.None, false, false, _tmpdir, false, null, true);
+    df.PopulateFromDir(dir, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, null, true, null);
+    df.PopulateFromDir(dir, Hash.DeepHashes, false, true, SkipFileType.None, false, false, _tmpdir, false, null, true, null);
 }

 // Create an empty Dat for files that need to be rebuilt
@@ -390,7 +390,7 @@ namespace RombaSharp
 // Now run the D2D on the input and write out
 // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
 datfile.PopulateFromDir(source, Hash.DeepHashes, true /* bare */, false /* archivesAsFiles */, SkipFileType.None, false /* addBlanks */,
-    false /* addDate */, _tmpdir, false /* copyFiles */, null /* headerToCheckAgainst */, true /* chdsAsFiles */);
+    false /* addDate */, _tmpdir, false /* copyFiles */, null /* headerToCheckAgainst */, true /* chdsAsFiles */, null /* filter */);
 datfile.Write(outDir: outdat);
 }

@@ -897,7 +897,7 @@ namespace RombaSharp
 // First get a list of SHA-1's from the input DATs
 DatFile datroot = new DatFile { Type = "SuperDAT", };
 // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
-datroot.PopulateFromDir(_dats, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, null, true);
+datroot.PopulateFromDir(_dats, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, null, true, null);
 datroot.BucketBy(SortedBy.SHA1, DedupeType.None);

 // Create a List of dat hashes in the database (SHA-1)
@@ -1013,7 +1013,7 @@ namespace RombaSharp
 // Now rescan the depot itself
 DatFile depot = new DatFile();
 // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
-depot.PopulateFromDir(depotname, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, null, true);
+depot.PopulateFromDir(depotname, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, null, true, null);
 depot.BucketBy(SortedBy.SHA1, DedupeType.None);

 // Set the base queries to use
@@ -3277,8 +3277,9 @@ namespace SabreTools.Library.DatFiles
 /// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
 /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
 /// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
+/// <param name="filter">Filter object to be passed to the DatItem level</param>
 public bool PopulateFromDir(string basePath, Hash omitFromScan, bool bare, bool archivesAsFiles, SkipFileType skipFileType,
-    bool addBlanks, bool addDate, string tempDir, bool copyFiles, string headerToCheckAgainst, bool chdsAsFiles)
+    bool addBlanks, bool addDate, string tempDir, bool copyFiles, string headerToCheckAgainst, bool chdsAsFiles, Filter filter)
 {
     // If the description is defined but not the name, set the name from the description
     if (String.IsNullOrWhiteSpace(Name) && !String.IsNullOrWhiteSpace(Description))
@@ -3379,6 +3380,12 @@ namespace SabreTools.Library.DatFiles
     Utilities.TryDeleteDirectory(tempDir);
 }

+// If we have a valid filter, perform the filtering now
+if (filter != null && filter != default(Filter))
+{
+    filter.FilterDatFile(this);
+}
+
 return true;
 }

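With the filter threaded through, a directory-to-DAT (D2D) scan can now be filtered in the same pass that builds the DAT. The following is a minimal sketch of a caller, not code from this commit: the Filter setup is hypothetical (this diff only shows that Filter exposes FilterDatFile), and passing null for the last argument, as the RombaSharp call sites above do, keeps the old unfiltered behaviour.

    // Minimal sketch of the new call shape; argument comments follow the XML docs above.
    DatFile df = new DatFile();
    Filter filter = new Filter();   // hypothetical setup; Filter's own members are not shown in this diff

    bool success = df.PopulateFromDir(
        @"C:\roms",           // basePath
        Hash.DeepHashes,      // omitFromScan
        false,                // bare
        false,                // archivesAsFiles
        SkipFileType.None,    // skipFileType
        false,                // addBlanks
        false,                // addDate
        "",                   // tempDir
        false,                // copyFiles
        null,                 // headerToCheckAgainst
        true,                 // chdsAsFiles
        filter);              // new: applied via filter.FilterDatFile(this) once the scan completes
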
@@ -4523,8 +4530,9 @@ namespace SabreTools.Library.DatFiles
 /// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
 /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
 /// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
+/// <param name="filter">Filter object to be passed to the DatItem level</param>
 /// <returns>True if verification was a success, false otherwise</returns>
-public bool VerifyGeneric(List<string> inputs, bool hashOnly, bool quickScan, string headerToCheckAgainst, bool chdsAsFiles)
+public bool VerifyGeneric(List<string> inputs, bool hashOnly, bool quickScan, string headerToCheckAgainst, bool chdsAsFiles, Filter filter)
 {
     // TODO: We want the cross section of what's the folder and what's in the DAT. Right now, it just has what's in the DAT that's not in the folder
     bool success = true;
@@ -4535,7 +4543,7 @@ namespace SabreTools.Library.DatFiles
 {
     // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
     PopulateFromDir(input, (quickScan ? Hash.SecureHashes : Hash.DeepHashes) /* omitFromScan */, true /* bare */, false /* archivesAsFiles */,
-        SkipFileType.None, false /* addBlanks */, false /* addDate */, "" /* tempDir */, false /* copyFiles */, headerToCheckAgainst, chdsAsFiles);
+        SkipFileType.None, false /* addBlanks */, false /* addDate */, "" /* tempDir */, false /* copyFiles */, headerToCheckAgainst, chdsAsFiles, filter);
 }

 // Setup the fixdat
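This hunk is also why the commit message says Verify came for free: VerifyGeneric populates itself through the same PopulateFromDir path, so forwarding the filter is enough to restrict which items are checked. A rough usage sketch follows; the Filter setup is again hypothetical and SplitType.None is an assumed enum value.

    // Rough sketch: verify a directory against a parsed DAT, restricted by a filter.
    DatFile datdata = new DatFile();
    datdata.Parse("arcade.dat", 99, 99, SplitType.None, keep: true, useTags: true);

    Filter filter = new Filter();   // hypothetical setup
    bool ok = datdata.VerifyGeneric(
        new List<string> { @"C:\roms" },   // inputs
        false,                             // hashOnly
        true,                              // quickScan
        null,                              // headerToCheckAgainst
        true,                              // chdsAsFiles
        filter);                           // only items that survive the filter are populated and verified
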
@@ -330,6 +330,165 @@ Options:
    compare against the input DATs. This flag forces all CHDs to be
    treated like regular files.

-gn=, --game-name= Filter by game name
    Include only items with this game name in the output. Additionally,
    the user can specify an exact match or full C#-style regex for
    pattern matching. Multiple instances of this flag are allowed.

-ngn=, --not-game-name= Filter by not game name
    Include only items without this game name in the output.
    Additionally, the user can specify an exact match or full C#-style
    regex for pattern matching. Multiple instances of this flag are
    allowed.

-gd=, --game-description= Filter by game description
    Include only items with this game description in the output.
    Additionally, the user can specify an exact match or full C#-style
    regex for pattern matching. Multiple instances of this flag are
    allowed.

-ngd=, --not-game-description= Filter by not game description
    Include only items without this game description in the output.
    Additionally, the user can specify an exact match or full C#-style
    regex for pattern matching. Multiple instances of this flag are
    allowed.

-ofg, --match-of-tags Allow cloneof and romof tags to match game name filters
    If filter or exclude by game name is used, this flag will allow those
    filters to be checked against the romof and cloneof tags as well.
    This can allow for more advanced set-building, especially in
    arcade-based sets.

-rn=, --item-name= Filter by item name
    Include only items with this item name in the output. Additionally,
    the user can specify an exact match or full C#-style regex for
    pattern matching. Multiple instances of this flag are allowed.

-nrn=, --not-item-name= Filter by not item name
    Include only items without this item name in the output.
    Additionally, the user can specify an exact match or full C#-style
    regex for pattern matching. Multiple instances of this flag are
    allowed.

-rt=, --item-type= Filter by item type
    Include only items with this item type in the output. Additionally,
    the user can specify an exact match or full C#-style regex for
    pattern matching. Multiple instances of this flag are allowed.

-nrt=, --not-item-type= Filter by not item type
    Include only items without this item type in the output.
    Additionally, the user can specify an exact match or full C#-style
    regex for pattern matching. Multiple instances of this flag are
    allowed.

-sgt=, --greater= Filter by size >=
    Only include items whose size is greater than or equal to this value
    in the output DAT. Users can specify either a regular integer number
    or a number with a standard postfix. e.g. 8kb => 8000 or 8kib => 8192

-slt=, --less= Filter by size =<
    Only include items whose size is less than or equal to this value in
    the output DAT. Users can specify either a regular integer number or
    a number with a standard postfix. e.g. 8kb => 8000 or 8kib => 8192

-seq=, --equal= Filter by size ==
    Only include items of this exact size in the output DAT. Users can
    specify either a regular integer number or a number with a standard
    postfix. e.g. 8kb => 8000 or 8kib => 8192

-crc=, --crc= Filter by CRC hash
    Include only items with this CRC hash in the output. Additionally,
    the user can specify an exact match or full C#-style regex for
    pattern matching. Multiple instances of this flag are allowed.

-ncrc=, --not-crc= Filter by not CRC hash
    Include only items without this CRC hash in the output. Additionally,
    the user can specify an exact match or full C#-style regex for
    pattern matching. Multiple instances of this flag are allowed.

-md5=, --md5= Filter by MD5 hash
    Include only items with this MD5 hash in the output. Additionally,
    the user can specify an exact match or full C#-style regex for
    pattern matching. Multiple instances of this flag are allowed.

-nmd5=, --not-md5= Filter by not MD5 hash
    Include only items without this MD5 hash in the output. Additionally,
    the user can specify an exact match or full C#-style regex for
    pattern matching. Multiple instances of this flag are allowed.

-sha1=, --sha1= Filter by SHA-1 hash
    Include only items with this SHA-1 hash in the output. Additionally,
    the user can specify an exact match or full C#-style regex for
    pattern matching. Multiple instances of this flag are allowed.

-nsha1=, --not-sha1= Filter by not SHA-1 hash
    Include only items without this SHA-1 hash in the output.
    Additionally, the user can specify an exact match or full C#-style
    regex for pattern matching. Multiple instances of this flag are
    allowed.

-sha256=, --sha256= Filter by SHA-256 hash
    Include only items with this SHA-256 hash in the output.
    Additionally, the user can specify an exact match or full C#-style
    regex for pattern matching. Multiple instances of this flag are
    allowed.

-nsha256=, --not-sha256= Filter by not SHA-256 hash
    Include only items without this SHA-256 hash in the output.
    Additionally, the user can specify an exact match or full C#-style
    regex for pattern matching. Multiple instances of this flag are
    allowed.

-sha384=, --sha384= Filter by SHA-384 hash
    Include only items with this SHA-384 hash in the output.
    Additionally, the user can specify an exact match or full C#-style
    regex for pattern matching. Multiple instances of this flag are
    allowed.

-nsha384=, --not-sha384= Filter by not SHA-384 hash
    Include only items without this SHA-384 hash in the output.
    Additionally, the user can specify an exact match or full C#-style
    regex for pattern matching. Multiple instances of this flag are
    allowed.

-sha512=, --sha512= Filter by SHA-512 hash
    Include only items with this SHA-512 hash in the output.
    Additionally, the user can specify an exact match or full C#-style
    regex for pattern matching. Multiple instances of this flag are
    allowed.

-nsha512=, --not-sha512= Filter by not SHA-512 hash
    Include only items without this SHA-512 hash in the output.
    Additionally, the user can specify an exact match or full C#-style
    regex for pattern matching. Multiple instances of this flag are
    allowed.

-is=, --status= Include only items with a given status
    Include only items with this item status in the output. Multiple
    instances of this flag are allowed.
    Possible values are: None, Good, BadDump, Nodump, Verified

-nis=, --not-status= Exclude only items with a given status
    Include only items without this item status in the output. Multiple
    instances of this flag are allowed.
    Possible values are: None, Good, BadDump, Nodump, Verified

-gt=, --game-type= Include only games with a given type
    Include only items with this game type in the output. Multiple
    instances of this flag are allowed.
    Possible values are: None, Bios, Device, Mechanical

-ngt=, --not-game-type= Exclude only games with a given type
    Include only items without this game type in the output. Multiple
    instances of this flag are allowed.
    Possible values are: None, Bios, Device, Mechanical

-run, --runnable Include only items that are marked runnable
    This allows users to include only verified runnable games.

-nrun, --not-runnable Include only items that are not marked runnable
    This allows users to include only unrunnable games.

-t=, --temp= Set the temporary directory to use
    Optionally, a temp folder can be supplied in the case the default
    temp directory is not preferred.
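Because these are ordinary command-line flags, filtering composes with the rest of a feature's options. A hypothetical invocation is sketched below; the executable name, the feature placeholder, and the concrete values are illustrative only and do not appear in this excerpt.

    SabreTools.exe [feature] "C:\roms" -gn=puckman -nrt=sample -sgt=8kib

Read this as: keep only games named puckman, drop items whose type is sample, and drop anything smaller than 8 KiB (8192 bytes). Filter flags can be stacked, since repeated instances are allowed.
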
@@ -1368,6 +1527,165 @@ Options:
    parent sets based on the cloneof and romof tags as well as device
    references. This is incompatible with the other --dat-X flags.

-gn=, --game-name= Filter by game name
    Include only items with this game name in the output. Additionally,
    the user can specify an exact match or full C#-style regex for
    pattern matching. Multiple instances of this flag are allowed.

-ngn=, --not-game-name= Filter by not game name
    Include only items without this game name in the output.
    Additionally, the user can specify an exact match or full C#-style
    regex for pattern matching. Multiple instances of this flag are
    allowed.

-gd=, --game-description= Filter by game description
    Include only items with this game description in the output.
    Additionally, the user can specify an exact match or full C#-style
    regex for pattern matching. Multiple instances of this flag are
    allowed.

-ngd=, --not-game-description= Filter by not game description
    Include only items without this game description in the output.
    Additionally, the user can specify an exact match or full C#-style
    regex for pattern matching. Multiple instances of this flag are
    allowed.

-ofg, --match-of-tags Allow cloneof and romof tags to match game name filters
    If filter or exclude by game name is used, this flag will allow those
    filters to be checked against the romof and cloneof tags as well.
    This can allow for more advanced set-building, especially in
    arcade-based sets.

-rn=, --item-name= Filter by item name
    Include only items with this item name in the output. Additionally,
    the user can specify an exact match or full C#-style regex for
    pattern matching. Multiple instances of this flag are allowed.

-nrn=, --not-item-name= Filter by not item name
    Include only items without this item name in the output.
    Additionally, the user can specify an exact match or full C#-style
    regex for pattern matching. Multiple instances of this flag are
    allowed.

-rt=, --item-type= Filter by item type
    Include only items with this item type in the output. Additionally,
    the user can specify an exact match or full C#-style regex for
    pattern matching. Multiple instances of this flag are allowed.

-nrt=, --not-item-type= Filter by not item type
    Include only items without this item type in the output.
    Additionally, the user can specify an exact match or full C#-style
    regex for pattern matching. Multiple instances of this flag are
    allowed.

-sgt=, --greater= Filter by size >=
    Only include items whose size is greater than or equal to this value
    in the output DAT. Users can specify either a regular integer number
    or a number with a standard postfix. e.g. 8kb => 8000 or 8kib => 8192

-slt=, --less= Filter by size =<
    Only include items whose size is less than or equal to this value in
    the output DAT. Users can specify either a regular integer number or
    a number with a standard postfix. e.g. 8kb => 8000 or 8kib => 8192

-seq=, --equal= Filter by size ==
    Only include items of this exact size in the output DAT. Users can
    specify either a regular integer number or a number with a standard
    postfix. e.g. 8kb => 8000 or 8kib => 8192

-crc=, --crc= Filter by CRC hash
    Include only items with this CRC hash in the output. Additionally,
    the user can specify an exact match or full C#-style regex for
    pattern matching. Multiple instances of this flag are allowed.

-ncrc=, --not-crc= Filter by not CRC hash
    Include only items without this CRC hash in the output. Additionally,
    the user can specify an exact match or full C#-style regex for
    pattern matching. Multiple instances of this flag are allowed.

-md5=, --md5= Filter by MD5 hash
    Include only items with this MD5 hash in the output. Additionally,
    the user can specify an exact match or full C#-style regex for
    pattern matching. Multiple instances of this flag are allowed.

-nmd5=, --not-md5= Filter by not MD5 hash
    Include only items without this MD5 hash in the output. Additionally,
    the user can specify an exact match or full C#-style regex for
    pattern matching. Multiple instances of this flag are allowed.

-sha1=, --sha1= Filter by SHA-1 hash
    Include only items with this SHA-1 hash in the output. Additionally,
    the user can specify an exact match or full C#-style regex for
    pattern matching. Multiple instances of this flag are allowed.

-nsha1=, --not-sha1= Filter by not SHA-1 hash
    Include only items without this SHA-1 hash in the output.
    Additionally, the user can specify an exact match or full C#-style
    regex for pattern matching. Multiple instances of this flag are
    allowed.

-sha256=, --sha256= Filter by SHA-256 hash
    Include only items with this SHA-256 hash in the output.
    Additionally, the user can specify an exact match or full C#-style
    regex for pattern matching. Multiple instances of this flag are
    allowed.

-nsha256=, --not-sha256= Filter by not SHA-256 hash
    Include only items without this SHA-256 hash in the output.
    Additionally, the user can specify an exact match or full C#-style
    regex for pattern matching. Multiple instances of this flag are
    allowed.

-sha384=, --sha384= Filter by SHA-384 hash
    Include only items with this SHA-384 hash in the output.
    Additionally, the user can specify an exact match or full C#-style
    regex for pattern matching. Multiple instances of this flag are
    allowed.

-nsha384=, --not-sha384= Filter by not SHA-384 hash
    Include only items without this SHA-384 hash in the output.
    Additionally, the user can specify an exact match or full C#-style
    regex for pattern matching. Multiple instances of this flag are
    allowed.

-sha512=, --sha512= Filter by SHA-512 hash
    Include only items with this SHA-512 hash in the output.
    Additionally, the user can specify an exact match or full C#-style
    regex for pattern matching. Multiple instances of this flag are
    allowed.

-nsha512=, --not-sha512= Filter by not SHA-512 hash
    Include only items without this SHA-512 hash in the output.
    Additionally, the user can specify an exact match or full C#-style
    regex for pattern matching. Multiple instances of this flag are
    allowed.

-is=, --status= Include only items with a given status
    Include only items with this item status in the output. Multiple
    instances of this flag are allowed.
    Possible values are: None, Good, BadDump, Nodump, Verified

-nis=, --not-status= Exclude only items with a given status
    Include only items without this item status in the output. Multiple
    instances of this flag are allowed.
    Possible values are: None, Good, BadDump, Nodump, Verified

-gt=, --game-type= Include only games with a given type
    Include only items with this game type in the output. Multiple
    instances of this flag are allowed.
    Possible values are: None, Bios, Device, Mechanical

-ngt=, --not-game-type= Exclude only games with a given type
    Include only items without this game type in the output. Multiple
    instances of this flag are allowed.
    Possible values are: None, Bios, Device, Mechanical

-run, --runnable Include only items that are marked runnable
    This allows users to include only verified runnable games.

-nrun, --not-runnable Include only items that are not marked runnable
    This allows users to include only unrunnable games.

** Section 3.0 - Examples

Here are a few usage examples based on features that are commonly used by most

File diff suppressed because it is too large
@@ -1,6 +1,5 @@
using System;
using System.Collections.Generic;

using SabreTools.Library.Data;
using SabreTools.Library.DatFiles;
using SabreTools.Library.Tools;
@@ -15,444 +14,452 @@ using SearchOption = System.IO.SearchOption;
|
||||
|
||||
namespace SabreTools
|
||||
{
|
||||
public partial class SabreTools
|
||||
{
|
||||
#region Init Methods
|
||||
public partial class SabreTools
|
||||
{
|
||||
#region Init Methods
|
||||
|
||||
/// <summary>
|
||||
/// Wrap creating a DAT file from files or a directory in parallel
|
||||
/// </summary>
|
||||
/// <param name="inputs">List of input filenames</param>
|
||||
/// /* Normal DAT header info */
|
||||
/// <param name="datHeader">All DatHeader info to be used</param>
|
||||
/// /* Standard DFD info */
|
||||
/// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
|
||||
/// <param name="removeDateFromAutomaticName">True if the date should be omitted from the DAT, false otherwise</param>
|
||||
/// <param name="archivesAsFiles">True if archives should be treated as files, false otherwise</param>
|
||||
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
|
||||
/// <param name="skipFileType">Type of files that should be skipped on scan</param>
|
||||
/// <param name="addBlankFilesForEmptyFolder">True if blank items should be created for empty folders, false otherwise</param>
|
||||
/// <param name="addFileDates">True if dates should be archived for all files, false otherwise</param>
|
||||
/// /* Output DAT info */
|
||||
/// <param name="tempDir">Name of the directory to create a temp folder in (blank is default temp directory)</param>
|
||||
/// <param name="outDir">Name of the directory to output the DAT to (blank is the current directory)</param>
|
||||
/// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
|
||||
private static void InitDatFromDir(List<string> inputs,
|
||||
/* Normal DAT header info */
|
||||
DatHeader datHeader,
|
||||
/// <summary>
|
||||
/// Wrap creating a DAT file from files or a directory in parallel
|
||||
/// </summary>
|
||||
/// <param name="inputs">List of input filenames</param>
|
||||
/// /* Normal DAT header info */
|
||||
/// <param name="datHeader">All DatHeader info to be used</param>
|
||||
/// /* Standard DFD info */
|
||||
/// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
|
||||
/// <param name="removeDateFromAutomaticName">True if the date should be omitted from the DAT, false otherwise</param>
|
||||
/// <param name="archivesAsFiles">True if archives should be treated as files, false otherwise</param>
|
||||
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
|
||||
/// <param name="skipFileType">Type of files that should be skipped on scan</param>
|
||||
/// <param name="addBlankFilesForEmptyFolder">True if blank items should be created for empty folders, false otherwise</param>
|
||||
/// <param name="addFileDates">True if dates should be archived for all files, false otherwise</param>
|
||||
/// <param name="filter">Filter object to be passed to the DatItem level</param>
|
||||
/// /* Output DAT info */
|
||||
/// <param name="tempDir">Name of the directory to create a temp folder in (blank is default temp directory)</param>
|
||||
/// <param name="outDir">Name of the directory to output the DAT to (blank is the current directory)</param>
|
||||
/// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
|
||||
/// /* Filtering info */
|
||||
/// <param name="filter">Filter object to be passed to the DatItem level</param>
|
||||
private static void InitDatFromDir(List<string> inputs,
|
||||
/* Normal DAT header info */
|
||||
DatHeader datHeader,
|
||||
|
||||
/* Standard DFD info */
|
||||
Hash omitFromScan,
|
||||
bool removeDateFromAutomaticName,
|
||||
bool archivesAsFiles,
|
||||
bool chdsAsFiles,
|
||||
SkipFileType skipFileType,
|
||||
bool addBlankFilesForEmptyFolder,
|
||||
bool addFileDates,
|
||||
/* Standard DFD info */
|
||||
Hash omitFromScan,
|
||||
bool removeDateFromAutomaticName,
|
||||
bool archivesAsFiles,
|
||||
bool chdsAsFiles,
|
||||
SkipFileType skipFileType,
|
||||
bool addBlankFilesForEmptyFolder,
|
||||
bool addFileDates,
|
||||
|
||||
/* Output DAT info */
|
||||
string tempDir,
|
||||
string outDir,
|
||||
bool copyFiles)
|
||||
{
|
||||
// Create a new DATFromDir object and process the inputs
|
||||
DatFile basedat = new DatFile(datHeader)
|
||||
{
|
||||
Date = DateTime.Now.ToString("yyyy-MM-dd"),
|
||||
};
|
||||
/* Output DAT info */
|
||||
string tempDir,
|
||||
string outDir,
|
||||
bool copyFiles,
|
||||
|
||||
// For each input directory, create a DAT
|
||||
foreach (string path in inputs)
|
||||
{
|
||||
if (Directory.Exists(path) || File.Exists(path))
|
||||
{
|
||||
// Clone the base Dat for information
|
||||
DatFile datdata = new DatFile(basedat);
|
||||
/* Filtering info */
|
||||
Filter filter)
|
||||
{
|
||||
// Create a new DATFromDir object and process the inputs
|
||||
DatFile basedat = new DatFile(datHeader)
|
||||
{
|
||||
Date = DateTime.Now.ToString("yyyy-MM-dd"),
|
||||
};
|
||||
|
||||
string basePath = Path.GetFullPath(path);
|
||||
bool success = datdata.PopulateFromDir(basePath, omitFromScan, removeDateFromAutomaticName, archivesAsFiles,
|
||||
skipFileType, addBlankFilesForEmptyFolder, addFileDates, tempDir, copyFiles, datHeader.Header, chdsAsFiles);
|
||||
// For each input directory, create a DAT
|
||||
foreach (string path in inputs)
|
||||
{
|
||||
if (Directory.Exists(path) || File.Exists(path))
|
||||
{
|
||||
// Clone the base Dat for information
|
||||
DatFile datdata = new DatFile(basedat);
|
||||
|
||||
// If it was a success, write the DAT out
|
||||
if (success)
|
||||
{
|
||||
datdata.Write(outDir);
|
||||
}
|
||||
string basePath = Path.GetFullPath(path);
|
||||
bool success = datdata.PopulateFromDir(basePath, omitFromScan, removeDateFromAutomaticName, archivesAsFiles,
|
||||
skipFileType, addBlankFilesForEmptyFolder, addFileDates, tempDir, copyFiles, datHeader.Header, chdsAsFiles, filter);
|
||||
|
||||
// Otherwise, show the help
|
||||
else
|
||||
{
|
||||
Console.WriteLine();
|
||||
_help.OutputIndividualFeature("DATFromDir");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// If it was a success, write the DAT out
|
||||
if (success)
|
||||
{
|
||||
datdata.Write(outDir);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Wrap extracting headers
|
||||
/// </summary>
|
||||
/// <param name="inputs">Input file or folder names</param>
|
||||
/// <param name="outDir">Output directory to write new files to, blank defaults to rom folder</param>
|
||||
/// <param name="nostore">True if headers should not be stored in the database, false otherwise</param>
|
||||
private static void InitExtractRemoveHeader(
|
||||
List<string> inputs,
|
||||
string outDir,
|
||||
bool nostore)
|
||||
{
|
||||
// Get only files from the inputs
|
||||
List<string> files = Utilities.GetOnlyFilesFromInputs(inputs);
|
||||
// Otherwise, show the help
|
||||
else
|
||||
{
|
||||
Console.WriteLine();
|
||||
_help.OutputIndividualFeature("DATFromDir");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
foreach (string file in files)
|
||||
{
|
||||
Utilities.DetectSkipperAndTransform(file, outDir, nostore);
|
||||
}
|
||||
}
|
||||
/// <summary>
|
||||
/// Wrap extracting headers
|
||||
/// </summary>
|
||||
/// <param name="inputs">Input file or folder names</param>
|
||||
/// <param name="outDir">Output directory to write new files to, blank defaults to rom folder</param>
|
||||
/// <param name="nostore">True if headers should not be stored in the database, false otherwise</param>
|
||||
private static void InitExtractRemoveHeader(
|
||||
List<string> inputs,
|
||||
string outDir,
|
||||
bool nostore)
|
||||
{
|
||||
// Get only files from the inputs
|
||||
List<string> files = Utilities.GetOnlyFilesFromInputs(inputs);
|
||||
|
||||
/// <summary>
|
||||
/// Wrap replacing headers
|
||||
/// </summary>
|
||||
/// <param name="inputs">Input file or folder names</param>
|
||||
/// <param name="outDir">Output directory to write new files to, blank defaults to rom folder</param>
|
||||
private static void InitReplaceHeader(
|
||||
List<string> inputs,
|
||||
string outDir)
|
||||
{
|
||||
// Get only files from the inputs
|
||||
List<string> files = Utilities.GetOnlyFilesFromInputs(inputs);
|
||||
foreach (string file in files)
|
||||
{
|
||||
Utilities.DetectSkipperAndTransform(file, outDir, nostore);
|
||||
}
|
||||
}
|
||||
|
||||
foreach (string file in files)
|
||||
{
|
||||
Utilities.RestoreHeader(file, outDir);
|
||||
}
|
||||
}
|
||||
/// <summary>
|
||||
/// Wrap replacing headers
|
||||
/// </summary>
|
||||
/// <param name="inputs">Input file or folder names</param>
|
||||
/// <param name="outDir">Output directory to write new files to, blank defaults to rom folder</param>
|
||||
private static void InitReplaceHeader(
|
||||
List<string> inputs,
|
||||
string outDir)
|
||||
{
|
||||
// Get only files from the inputs
|
||||
List<string> files = Utilities.GetOnlyFilesFromInputs(inputs);
|
||||
|
||||
/// <summary>
|
||||
/// Wrap sorting files using an input DAT
|
||||
/// </summary>
|
||||
/// <param name="datfiles">Names of the DATs to compare against</param>
|
||||
/// <param name="inputs">List of input files/folders to check</param>
|
||||
/// <param name="outDir">Output directory to use to build to</param>
|
||||
/// <param name="depot">True if the input direcories are treated as romba depots, false otherwise</param>
|
||||
/// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
|
||||
/// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
|
||||
/// <param name="delete">True if input files should be deleted, false otherwise</param>
|
||||
/// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
|
||||
/// <param name="outputFormat">Output format that files should be written to</param>
|
||||
/// <param name="romba">True if files should be output in Romba depot folders, false otherwise</param>
|
||||
/// <param name="sevenzip">Integer representing the archive handling level for 7z</param>
|
||||
/// <param name="gz">Integer representing the archive handling level for GZip</param>
|
||||
/// <param name="rar">Integer representing the archive handling level for RAR</param>
|
||||
/// <param name="zip">Integer representing the archive handling level for Zip</param>
|
||||
/// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
|
||||
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
|
||||
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
|
||||
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
|
||||
/// <param name="individual">True if DATs should be sorted individually, false if they should be done in bulk</param>
|
||||
private static void InitSort(
|
||||
List<string> datfiles,
|
||||
List<string> inputs,
|
||||
string outDir,
|
||||
bool depot,
|
||||
bool quickScan,
|
||||
bool date,
|
||||
bool delete,
|
||||
bool inverse,
|
||||
OutputFormat outputFormat,
|
||||
bool romba,
|
||||
int sevenzip,
|
||||
int gz,
|
||||
int rar,
|
||||
int zip,
|
||||
bool updateDat,
|
||||
string headerToCheckAgainst,
|
||||
SplitType splitType,
|
||||
bool chdsAsFiles,
|
||||
bool individual)
|
||||
{
|
||||
// Get the archive scanning level
|
||||
ArchiveScanLevel asl = Utilities.GetArchiveScanLevelFromNumbers(sevenzip, gz, rar, zip);
|
||||
foreach (string file in files)
|
||||
{
|
||||
Utilities.RestoreHeader(file, outDir);
|
||||
}
|
||||
}
|
||||
|
||||
// Get a list of files from the input datfiles
|
||||
datfiles = Utilities.GetOnlyFilesFromInputs(datfiles);
|
||||
/// <summary>
|
||||
/// Wrap sorting files using an input DAT
|
||||
/// </summary>
|
||||
/// <param name="datfiles">Names of the DATs to compare against</param>
|
||||
/// <param name="inputs">List of input files/folders to check</param>
|
||||
/// <param name="outDir">Output directory to use to build to</param>
|
||||
/// <param name="depot">True if the input direcories are treated as romba depots, false otherwise</param>
|
||||
/// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
|
||||
/// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
|
||||
/// <param name="delete">True if input files should be deleted, false otherwise</param>
|
||||
/// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
|
||||
/// <param name="outputFormat">Output format that files should be written to</param>
|
||||
/// <param name="romba">True if files should be output in Romba depot folders, false otherwise</param>
|
||||
/// <param name="sevenzip">Integer representing the archive handling level for 7z</param>
|
||||
/// <param name="gz">Integer representing the archive handling level for GZip</param>
|
||||
/// <param name="rar">Integer representing the archive handling level for RAR</param>
|
||||
/// <param name="zip">Integer representing the archive handling level for Zip</param>
|
||||
/// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
|
||||
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
|
||||
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
|
||||
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
|
||||
/// <param name="individual">True if DATs should be sorted individually, false if they should be done in bulk</param>
|
||||
private static void InitSort(
|
||||
List<string> datfiles,
|
||||
List<string> inputs,
|
||||
string outDir,
|
||||
bool depot,
|
||||
bool quickScan,
|
||||
bool date,
|
||||
bool delete,
|
||||
bool inverse,
|
||||
OutputFormat outputFormat,
|
||||
bool romba,
|
||||
int sevenzip,
|
||||
int gz,
|
||||
int rar,
|
||||
int zip,
|
||||
bool updateDat,
|
||||
string headerToCheckAgainst,
|
||||
SplitType splitType,
|
||||
bool chdsAsFiles,
|
||||
bool individual)
|
||||
{
|
||||
// Get the archive scanning level
|
||||
ArchiveScanLevel asl = Utilities.GetArchiveScanLevelFromNumbers(sevenzip, gz, rar, zip);
|
||||
|
||||
// If we are in individual mode, process each DAT on their own, appending the DAT name to the output dir
|
||||
if (individual)
|
||||
{
|
||||
foreach (string datfile in datfiles)
|
||||
{
|
||||
DatFile datdata = new DatFile();
|
||||
datdata.Parse(datfile, 99, 99, splitType, keep: true, useTags: true);
|
||||
// Get a list of files from the input datfiles
|
||||
datfiles = Utilities.GetOnlyFilesFromInputs(datfiles);
|
||||
|
||||
// If we have the depot flag, respect it
|
||||
if (depot)
|
||||
{
|
||||
datdata.RebuildDepot(inputs, Path.Combine(outDir, datdata.FileName), date, delete, inverse, outputFormat, romba,
|
||||
updateDat, headerToCheckAgainst);
|
||||
}
|
||||
else
|
||||
{
|
||||
datdata.RebuildGeneric(inputs, Path.Combine(outDir, datdata.FileName), quickScan, date, delete, inverse, outputFormat, romba, asl,
|
||||
updateDat, headerToCheckAgainst, chdsAsFiles);
|
||||
}
|
||||
}
|
||||
}
|
||||
// Otherwise, process all DATs into the same output
|
||||
else
|
||||
{
|
||||
InternalStopwatch watch = new InternalStopwatch("Populating internal DAT");
|
||||
// If we are in individual mode, process each DAT on their own, appending the DAT name to the output dir
|
||||
if (individual)
|
||||
{
|
||||
foreach (string datfile in datfiles)
|
||||
{
|
||||
DatFile datdata = new DatFile();
|
||||
datdata.Parse(datfile, 99, 99, splitType, keep: true, useTags: true);
|
||||
|
||||
// Add all of the input DATs into one huge internal DAT
|
||||
DatFile datdata = new DatFile();
|
||||
foreach (string datfile in datfiles)
|
||||
{
|
||||
datdata.Parse(datfile, 99, 99, splitType, keep: true, useTags: true);
|
||||
}
|
||||
// If we have the depot flag, respect it
|
||||
if (depot)
|
||||
{
|
||||
datdata.RebuildDepot(inputs, Path.Combine(outDir, datdata.FileName), date, delete, inverse, outputFormat, romba,
|
||||
updateDat, headerToCheckAgainst);
|
||||
}
|
||||
else
|
||||
{
|
||||
datdata.RebuildGeneric(inputs, Path.Combine(outDir, datdata.FileName), quickScan, date, delete, inverse, outputFormat, romba, asl,
|
||||
updateDat, headerToCheckAgainst, chdsAsFiles);
|
||||
}
|
||||
}
|
||||
}
|
||||
// Otherwise, process all DATs into the same output
|
||||
else
|
||||
{
|
||||
InternalStopwatch watch = new InternalStopwatch("Populating internal DAT");
|
||||
|
||||
watch.Stop();
|
||||
// Add all of the input DATs into one huge internal DAT
|
||||
DatFile datdata = new DatFile();
|
||||
foreach (string datfile in datfiles)
|
||||
{
|
||||
datdata.Parse(datfile, 99, 99, splitType, keep: true, useTags: true);
|
||||
}
|
||||
|
||||
// If we have the depot flag, respect it
|
||||
if (depot)
|
||||
{
|
||||
datdata.RebuildDepot(inputs, outDir, date, delete, inverse, outputFormat, romba,
|
||||
updateDat, headerToCheckAgainst);
|
||||
}
|
||||
else
|
||||
{
|
||||
datdata.RebuildGeneric(inputs, outDir, quickScan, date, delete, inverse, outputFormat, romba, asl,
|
||||
updateDat, headerToCheckAgainst, chdsAsFiles);
|
||||
}
|
||||
}
|
||||
}
|
||||
watch.Stop();
|
||||
|
||||
/// <summary>
|
||||
/// Wrap splitting a DAT by any known type
|
||||
/// </summary>
|
||||
/// <param name="inputs">List of inputs to be used</param>
|
||||
/// <param name="outDir">Output directory for the split files</param>
|
||||
/// <param name="inplace">True if files should be written to the source folders, false otherwise</param>
|
||||
/// <param name="datFormat">DatFormat to be used for outputting the DAT</param>
|
||||
/// <param name="splittingMode">Type of split to perform, if any</param>
|
||||
/// <param name="exta">First extension to split on (Extension Split only)</param>
|
||||
/// <param name="extb">Second extension to split on (Extension Split only)</param>
|
||||
/// <param name="shortname">True if short filenames should be used, false otherwise (Level Split only)</param>
|
||||
/// <param name="basedat">True if original filenames should be used as the base for output filename, false otherwise (Level Split only)</param>
|
||||
/// <param name="radix">Long value representing the split point (Size Split only)</param>
|
||||
private static void InitSplit(
|
||||
List<string> inputs,
|
||||
string outDir,
|
||||
bool inplace,
|
||||
DatFormat datFormat,
|
||||
SplittingMode splittingMode,
|
||||
List<string> exta,
|
||||
List<string> extb,
|
||||
bool shortname,
|
||||
bool basedat,
|
||||
long radix)
|
||||
{
|
||||
DatFile datfile = new DatFile();
|
||||
datfile.DatFormat = datFormat;
|
||||
datfile.DetermineSplitType(inputs, outDir, inplace, splittingMode, exta, extb, shortname, basedat, radix);
|
||||
}
|
||||
// If we have the depot flag, respect it
|
||||
if (depot)
|
||||
{
|
||||
datdata.RebuildDepot(inputs, outDir, date, delete, inverse, outputFormat, romba,
|
||||
updateDat, headerToCheckAgainst);
|
||||
}
|
||||
else
|
||||
{
|
||||
datdata.RebuildGeneric(inputs, outDir, quickScan, date, delete, inverse, outputFormat, romba, asl,
|
||||
updateDat, headerToCheckAgainst, chdsAsFiles);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Wrap getting statistics on a DAT or folder of DATs
|
||||
/// </summary>
|
||||
/// <param name="inputs">List of inputs to be used</param>
|
||||
/// <param name="filename">Name of the file to output to, blank for default</param>
|
||||
/// <param name="outDir">Output directory for the report files</param>
|
||||
/// <param name="single">True to show individual DAT statistics, false otherwise</param>
|
||||
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
|
||||
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
|
||||
/// <param name="statDatFormat">Set the statistics output format to use</param>
|
||||
private static void InitStats(
|
||||
List<string> inputs,
|
||||
string filename,
|
||||
string outDir,
|
||||
bool single,
|
||||
bool baddumpCol,
|
||||
bool nodumpCol,
|
||||
StatReportFormat statDatFormat)
|
||||
{
|
||||
DatFile.OutputStats(inputs, filename, outDir, single, baddumpCol, nodumpCol, statDatFormat);
|
||||
}
|
||||
/// <summary>
|
||||
/// Wrap splitting a DAT by any known type
|
||||
/// </summary>
|
||||
/// <param name="inputs">List of inputs to be used</param>
|
||||
/// <param name="outDir">Output directory for the split files</param>
|
||||
/// <param name="inplace">True if files should be written to the source folders, false otherwise</param>
|
||||
/// <param name="datFormat">DatFormat to be used for outputting the DAT</param>
|
||||
/// <param name="splittingMode">Type of split to perform, if any</param>
|
||||
/// <param name="exta">First extension to split on (Extension Split only)</param>
|
||||
/// <param name="extb">Second extension to split on (Extension Split only)</param>
|
||||
/// <param name="shortname">True if short filenames should be used, false otherwise (Level Split only)</param>
|
||||
/// <param name="basedat">True if original filenames should be used as the base for output filename, false otherwise (Level Split only)</param>
|
||||
/// <param name="radix">Long value representing the split point (Size Split only)</param>
|
||||
private static void InitSplit(
|
||||
List<string> inputs,
|
||||
string outDir,
|
||||
bool inplace,
|
||||
DatFormat datFormat,
|
||||
SplittingMode splittingMode,
|
||||
List<string> exta,
|
||||
List<string> extb,
|
||||
bool shortname,
|
||||
bool basedat,
|
||||
long radix)
|
||||
{
|
||||
DatFile datfile = new DatFile();
|
||||
datfile.DatFormat = datFormat;
|
||||
datfile.DetermineSplitType(inputs, outDir, inplace, splittingMode, exta, extb, shortname, basedat, radix);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Wrap converting and updating DAT file from any format to any format
|
||||
/// </summary>
|
||||
/// <param name="inputPaths">List of input filenames</param>
|
||||
/// <param name="basePaths">List of base filenames</param>
|
||||
/// /* Normal DAT header info */
|
||||
/// <param name="datHeader">All DatHeader info to be used</param>
|
||||
/// /* Merging and Diffing info */
|
||||
/// <param name="updateMode">Non-zero flag for diffing mode, zero otherwise</param>
|
||||
/// <param name="inplace">True if the cascade-diffed files should overwrite their inputs, false otherwise</param>
|
||||
/// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
|
||||
/// <param name="bare">True if the date should not be appended to the default name, false otherwise</param>
|
||||
/// /* Filtering info */
|
||||
/// <param name="filter">Pre-populated filter object for DAT filtering</param>
|
||||
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
|
||||
/// /* Output DAT info */
|
||||
/// <param name="outDir">Optional param for output directory</param>
|
||||
/// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
|
||||
/// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
|
||||
/// <param name="descAsName">True if descriptions should be used as names, false otherwise (default)</param>
|
||||
/// <param name="replaceMode">ReplaceMode representing what should be updated [only for base replacement]</param>
|
||||
/// <param name="onlySame">True if descriptions should only be replaced if the game name is the same, false otherwise [only for base replacement]</param>
|
||||
private static void InitUpdate(
|
||||
List<string> inputPaths,
|
||||
List<string> basePaths,
|
||||
/// <summary>
|
||||
/// Wrap getting statistics on a DAT or folder of DATs
|
||||
/// </summary>
|
||||
/// <param name="inputs">List of inputs to be used</param>
|
||||
/// <param name="filename">Name of the file to output to, blank for default</param>
|
||||
/// <param name="outDir">Output directory for the report files</param>
|
||||
/// <param name="single">True to show individual DAT statistics, false otherwise</param>
|
||||
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
|
||||
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
|
||||
/// <param name="statDatFormat">Set the statistics output format to use</param>
|
||||
private static void InitStats(
|
||||
List<string> inputs,
|
||||
string filename,
|
||||
string outDir,
|
||||
bool single,
|
||||
bool baddumpCol,
|
||||
bool nodumpCol,
|
||||
StatReportFormat statDatFormat)
|
||||
{
|
||||
DatFile.OutputStats(inputs, filename, outDir, single, baddumpCol, nodumpCol, statDatFormat);
|
||||
}
|
||||
|
||||
/* Normal DAT header info */
|
||||
DatHeader datHeader,
|
||||
/// <summary>
|
||||
/// Wrap converting and updating DAT file from any format to any format
|
||||
/// </summary>
|
||||
/// <param name="inputPaths">List of input filenames</param>
|
||||
/// <param name="basePaths">List of base filenames</param>
|
||||
/// /* Normal DAT header info */
|
||||
/// <param name="datHeader">All DatHeader info to be used</param>
|
||||
/// /* Merging and Diffing info */
|
||||
/// <param name="updateMode">Non-zero flag for diffing mode, zero otherwise</param>
|
||||
/// <param name="inplace">True if the cascade-diffed files should overwrite their inputs, false otherwise</param>
|
||||
/// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
|
||||
/// <param name="bare">True if the date should not be appended to the default name, false otherwise</param>
|
||||
/// /* Filtering info */
|
||||
/// <param name="filter">Pre-populated filter object for DAT filtering</param>
|
||||
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
|
||||
/// /* Output DAT info */
|
||||
/// <param name="outDir">Optional param for output directory</param>
|
||||
/// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
|
||||
/// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
|
||||
/// <param name="descAsName">True if descriptions should be used as names, false otherwise (default)</param>
|
||||
/// <param name="replaceMode">ReplaceMode representing what should be updated [only for base replacement]</param>
|
||||
/// <param name="onlySame">True if descriptions should only be replaced if the game name is the same, false otherwise [only for base replacement]</param>
|
||||
private static void InitUpdate(
|
||||
List<string> inputPaths,
|
||||
List<string> basePaths,
|
||||
|
||||
/* Merging and Diffing info */
|
||||
UpdateMode updateMode,
|
||||
bool inplace,
|
||||
bool skip,
|
||||
bool bare,
|
||||
/* Normal DAT header info */
|
||||
DatHeader datHeader,
|
||||
|
||||
/* Filtering info */
|
||||
Filter filter,
|
||||
SplitType splitType,
|
||||
/* Merging and Diffing info */
|
||||
UpdateMode updateMode,
|
||||
bool inplace,
|
||||
bool skip,
|
||||
bool bare,
|
||||
|
||||
/* Output DAT info */
|
||||
string outDir,
|
||||
bool clean,
|
||||
bool remUnicode,
|
||||
bool descAsName,
|
||||
ReplaceMode replaceMode,
|
||||
bool onlySame)
|
||||
{
|
||||
// Normalize the extensions
|
||||
datHeader.AddExtension = (String.IsNullOrWhiteSpace(datHeader.AddExtension) || datHeader.AddExtension.StartsWith(".")
|
||||
? datHeader.AddExtension
|
||||
: "." + datHeader.AddExtension);
|
||||
datHeader.ReplaceExtension = (String.IsNullOrWhiteSpace(datHeader.ReplaceExtension) || datHeader.ReplaceExtension.StartsWith(".")
|
||||
? datHeader.ReplaceExtension
|
||||
: "." + datHeader.ReplaceExtension);
|
||||
/* Filtering info */
|
||||
Filter filter,
|
||||
SplitType splitType,
|
||||
|
||||
// If we're in a special update mode and the names aren't set, set defaults
|
||||
if (updateMode != 0)
|
||||
{
|
||||
// Get the values that will be used
|
||||
if (String.IsNullOrWhiteSpace(datHeader.Date))
|
||||
{
|
||||
datHeader.Date = DateTime.Now.ToString("yyyy-MM-dd");
|
||||
}
|
||||
if (String.IsNullOrWhiteSpace(datHeader.Name))
|
||||
{
|
||||
datHeader.Name = (updateMode != 0 ? "DiffDAT" : "MergeDAT")
|
||||
+ (datHeader.Type == "SuperDAT" ? "-SuperDAT" : "")
|
||||
+ (datHeader.DedupeRoms != DedupeType.None ? "-deduped" : "");
|
||||
}
|
||||
if (String.IsNullOrWhiteSpace(datHeader.Description))
|
||||
{
|
||||
datHeader.Description = (updateMode != 0 ? "DiffDAT" : "MergeDAT")
|
||||
+ (datHeader.Type == "SuperDAT" ? "-SuperDAT" : "")
|
||||
+ (datHeader.DedupeRoms != DedupeType.None ? " - deduped" : "");
|
||||
if (!bare)
|
||||
{
|
||||
datHeader.Description += " (" + datHeader.Date + ")";
|
||||
}
|
||||
}
|
||||
if (String.IsNullOrWhiteSpace(datHeader.Category) && updateMode != 0)
|
||||
{
|
||||
datHeader.Category = "DiffDAT";
|
||||
}
|
||||
if (String.IsNullOrWhiteSpace(datHeader.Author))
|
||||
{
|
||||
datHeader.Author = "SabreTools";
|
||||
}
|
||||
}
|
||||
/* Output DAT info */
|
||||
string outDir,
|
||||
bool clean,
|
||||
bool remUnicode,
|
||||
bool descAsName,
|
||||
ReplaceMode replaceMode,
|
||||
bool onlySame)
|
||||
{
|
||||
// Normalize the extensions
|
||||
datHeader.AddExtension = (String.IsNullOrWhiteSpace(datHeader.AddExtension) || datHeader.AddExtension.StartsWith(".")
|
||||
? datHeader.AddExtension
|
||||
: "." + datHeader.AddExtension);
|
||||
datHeader.ReplaceExtension = (String.IsNullOrWhiteSpace(datHeader.ReplaceExtension) || datHeader.ReplaceExtension.StartsWith(".")
|
||||
? datHeader.ReplaceExtension
|
||||
: "." + datHeader.ReplaceExtension);
|
||||
|
||||
// If no replacement mode is set, default to Names
|
||||
if (replaceMode == ReplaceMode.None)
|
||||
{
|
||||
replaceMode = ReplaceMode.ItemName;
|
||||
}
|
||||
// If we're in a special update mode and the names aren't set, set defaults
|
||||
if (updateMode != 0)
|
||||
{
|
||||
// Get the values that will be used
|
||||
if (String.IsNullOrWhiteSpace(datHeader.Date))
|
||||
{
|
||||
datHeader.Date = DateTime.Now.ToString("yyyy-MM-dd");
|
||||
}
|
||||
if (String.IsNullOrWhiteSpace(datHeader.Name))
|
||||
{
|
||||
datHeader.Name = (updateMode != 0 ? "DiffDAT" : "MergeDAT")
|
||||
+ (datHeader.Type == "SuperDAT" ? "-SuperDAT" : "")
|
||||
+ (datHeader.DedupeRoms != DedupeType.None ? "-deduped" : "");
|
||||
}
|
||||
if (String.IsNullOrWhiteSpace(datHeader.Description))
|
||||
{
|
||||
datHeader.Description = (updateMode != 0 ? "DiffDAT" : "MergeDAT")
|
||||
+ (datHeader.Type == "SuperDAT" ? "-SuperDAT" : "")
|
||||
+ (datHeader.DedupeRoms != DedupeType.None ? " - deduped" : "");
|
||||
if (!bare)
|
||||
{
|
||||
datHeader.Description += " (" + datHeader.Date + ")";
|
||||
}
|
||||
}
|
||||
if (String.IsNullOrWhiteSpace(datHeader.Category) && updateMode != 0)
|
||||
{
|
||||
datHeader.Category = "DiffDAT";
|
||||
}
|
||||
if (String.IsNullOrWhiteSpace(datHeader.Author))
|
||||
{
|
||||
datHeader.Author = "SabreTools";
|
||||
}
|
||||
}
|
||||
|
||||
// Populate the DatData object
|
||||
DatFile userInputDat = new DatFile(datHeader);
|
||||
// If no replacement mode is set, default to Names
|
||||
if (replaceMode == ReplaceMode.None)
|
||||
{
|
||||
replaceMode = ReplaceMode.ItemName;
|
||||
}
|
||||
|
||||
userInputDat.DetermineUpdateType(inputPaths, basePaths, outDir, updateMode, inplace, skip, clean,
|
||||
remUnicode, descAsName, filter, splitType, replaceMode, onlySame);
|
||||
}
|
||||
// Populate the DatData object
|
||||
DatFile userInputDat = new DatFile(datHeader);
|
||||
|
||||
/// <summary>
|
||||
/// Wrap verifying files using an input DAT
|
||||
/// </summary>
|
||||
/// <param name="datfiles">Names of the DATs to compare against</param>
|
||||
/// <param name="inputs">Input directories to compare against</param>
|
||||
/// <param name="depot">True if the input direcories are treated as romba depots, false otherwise</param>
|
||||
/// <param name="hashOnly">True if only hashes should be checked, false for full file information</param>
|
||||
/// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
|
||||
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
|
||||
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
|
||||
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
|
||||
/// <param name="individual">True if DATs should be verified individually, false if they should be done in bulk</param>
|
||||
private static void InitVerify(
|
||||
List<string> datfiles,
|
||||
List<string> inputs,
|
||||
bool depot,
|
||||
bool hashOnly,
|
||||
bool quickScan,
|
||||
string headerToCheckAgainst,
|
||||
SplitType splitType,
|
||||
bool chdsAsFiles,
|
||||
bool individual)
|
||||
{
|
||||
// Get the archive scanning level
|
||||
ArchiveScanLevel asl = Utilities.GetArchiveScanLevelFromNumbers(1, 1, 1, 1);
|
||||
userInputDat.DetermineUpdateType(inputPaths, basePaths, outDir, updateMode, inplace, skip, clean,
|
||||
remUnicode, descAsName, filter, splitType, replaceMode, onlySame);
|
||||
}
|
||||
|
||||
// Get a list of files from the input datfiles
|
||||
datfiles = Utilities.GetOnlyFilesFromInputs(datfiles);
|
||||
/// <summary>
|
||||
/// Wrap verifying files using an input DAT
|
||||
/// </summary>
|
||||
/// <param name="datfiles">Names of the DATs to compare against</param>
|
||||
/// <param name="inputs">Input directories to compare against</param>
|
||||
/// <param name="depot">True if the input direcories are treated as romba depots, false otherwise</param>
|
||||
/// <param name="hashOnly">True if only hashes should be checked, false for full file information</param>
|
||||
/// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
|
||||
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
|
||||
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
|
||||
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
|
||||
/// <param name="individual">True if DATs should be verified individually, false if they should be done in bulk</param>
|
||||
/// <param name="filter">Filter object to be passed to the DatItem level</param>
|
||||
private static void InitVerify(
|
||||
List<string> datfiles,
|
||||
List<string> inputs,
|
||||
bool depot,
|
||||
bool hashOnly,
|
||||
bool quickScan,
|
||||
string headerToCheckAgainst,
|
||||
SplitType splitType,
|
||||
bool chdsAsFiles,
|
||||
bool individual,
|
||||
Filter filter)
|
||||
{
|
||||
// Get the archive scanning level
|
||||
ArchiveScanLevel asl = Utilities.GetArchiveScanLevelFromNumbers(1, 1, 1, 1);
|
||||
|
||||
// If we are in individual mode, process each DAT on their own
|
||||
if (individual)
|
||||
{
|
||||
foreach (string datfile in datfiles)
|
||||
{
|
||||
DatFile datdata = new DatFile();
|
||||
datdata.Parse(datfile, 99, 99, splitType, keep: true, useTags: true);
|
||||
// Get a list of files from the input datfiles
|
||||
datfiles = Utilities.GetOnlyFilesFromInputs(datfiles);
|
||||
|
||||
// If we have the depot flag, respect it
|
||||
if (depot)
|
||||
{
|
||||
datdata.VerifyDepot(inputs, headerToCheckAgainst);
|
||||
}
|
||||
else
|
||||
{
|
||||
datdata.VerifyGeneric(inputs, hashOnly, quickScan, headerToCheckAgainst, chdsAsFiles);
|
||||
}
|
||||
}
|
||||
}
|
||||
// Otherwise, process all DATs into the same output
|
||||
else
|
||||
{
|
||||
InternalStopwatch watch = new InternalStopwatch("Populating internal DAT");
|
||||
// If we are in individual mode, process each DAT on their own
|
||||
if (individual)
|
||||
{
|
||||
foreach (string datfile in datfiles)
|
||||
{
|
||||
DatFile datdata = new DatFile();
|
||||
datdata.Parse(datfile, 99, 99, splitType, keep: true, useTags: true);
|
||||
|
||||
// Add all of the input DATs into one huge internal DAT
|
||||
DatFile datdata = new DatFile();
|
||||
foreach (string datfile in datfiles)
|
||||
{
|
||||
datdata.Parse(datfile, 99, 99, splitType, keep: true, useTags: true);
|
||||
}
|
||||
// If we have the depot flag, respect it
|
||||
if (depot)
|
||||
{
|
||||
datdata.VerifyDepot(inputs, headerToCheckAgainst);
|
||||
}
|
||||
else
|
||||
{
|
||||
datdata.VerifyGeneric(inputs, hashOnly, quickScan, headerToCheckAgainst, chdsAsFiles, filter);
|
||||
}
|
||||
}
|
||||
}
|
||||
// Otherwise, process all DATs into the same output
|
||||
else
|
||||
{
|
||||
InternalStopwatch watch = new InternalStopwatch("Populating internal DAT");
|
||||
|
||||
watch.Stop();
|
||||
// Add all of the input DATs into one huge internal DAT
|
||||
DatFile datdata = new DatFile();
|
||||
foreach (string datfile in datfiles)
|
||||
{
|
||||
datdata.Parse(datfile, 99, 99, splitType, keep: true, useTags: true);
|
||||
}
|
||||
|
||||
// If we have the depot flag, respect it
|
||||
if (depot)
|
||||
{
|
||||
datdata.VerifyDepot(inputs, headerToCheckAgainst);
|
||||
}
|
||||
else
|
||||
{
|
||||
datdata.VerifyGeneric(inputs, hashOnly, quickScan, headerToCheckAgainst, chdsAsFiles);
|
||||
}
|
||||
}
|
||||
}
|
||||
watch.Stop();
|
||||
|
||||
#endregion
|
||||
}
|
||||
// If we have the depot flag, respect it
|
||||
if (depot)
|
||||
{
|
||||
datdata.VerifyDepot(inputs, headerToCheckAgainst);
|
||||
}
|
||||
else
|
||||
{
|
||||
datdata.VerifyGeneric(inputs, hashOnly, quickScan, headerToCheckAgainst, chdsAsFiles, filter);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
|
||||
@@ -862,7 +862,7 @@ namespace SabreTools
 case "DATFromDir":
     VerifyInputs(inputs, feature);
     InitDatFromDir(inputs, datHeader, omitFromScan, noAutomaticDate, archivesAsFiles, chdsAsFiles,
-        skipFileType, addBlankFiles, addFileDates, tempDir, outDir, copyFiles);
+        skipFileType, addBlankFiles, addFileDates, tempDir, outDir, copyFiles, filter);
     break;
 // If we're in header extract and remove mode
 case "Extract":
@@ -902,7 +902,7 @@ namespace SabreTools
 // If we're using the verifier
 case "Verify":
     VerifyInputs(inputs, feature);
-    InitVerify(datfiles, inputs, depot, hashOnly, quickScan, datHeader.Header, splitType, chdsAsFiles, individual);
+    InitVerify(datfiles, inputs, depot, hashOnly, quickScan, datHeader.Header, splitType, chdsAsFiles, individual, filter);
     break;
 // If nothing is set, show the help
 default:
