HeaderSkipper cleanup

Author: Matt Nadareski
Date: 2020-08-02 12:54:27 -07:00
Commit: b324adbdbb (parent ec524a63ca)
15 changed files with 51 additions and 70 deletions
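
In short: DatHeader.Header becomes DatHeader.HeaderSkipper, and the separate headerToCheckAgainst string is no longer threaded through PopulateFromDir, CheckFileForHashes, ProcessFile, RebuildDepot, RebuildGeneric, RebuildIndividualFile, and VerifyGeneric; those paths now read the skipper name from the DAT header itself. A rough caller-side sketch of the difference (the variable setup and the "nes" skipper name are illustrative, not taken from this diff):

// Before this commit: the skipper name rode along as an extra argument.
// datdata.RebuildGeneric(Inputs, OutputDir, quickScan, date, delete, inverse,
//     outputFormat, updateDat, headerToCheckAgainst, chdsAsFiles);

// After: set it once on the parsed DAT's header, then call without it.
datdata.Header.HeaderSkipper = "nes"; // hypothetical skipper name
datdata.RebuildGeneric(Inputs, OutputDir, quickScan, date, delete, inverse,
    outputFormat, updateDat, chdsAsFiles);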

View File

@@ -66,8 +66,8 @@ have a current entry in the DAT index.";
foreach (string dir in onlyDirs)
{
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
-df.PopulateFromDir(dir, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, null, true, null);
-df.PopulateFromDir(dir, Hash.DeepHashes, false, true, SkipFileType.None, false, false, _tmpdir, false, null, true, null);
+df.PopulateFromDir(dir, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, true, null);
+df.PopulateFromDir(dir, Hash.DeepHashes, false, true, SkipFileType.None, false, false, _tmpdir, false, true, null);
}
// Create an empty Dat for files that need to be rebuilt
@@ -192,7 +192,7 @@ have a current entry in the DAT index.";
// Create the sorting object to use and rebuild the needed files
need.RebuildGeneric(onlyDirs, _depots.Keys.ToList()[0], false /*quickScan*/, false /*date*/,
false /*delete*/, false /*inverse*/, OutputFormat.TorrentGzipRomba, false /*updateDat*/,
-null /*headerToCheckAgainst*/, true /* chdsAsFiles */);
+true /* chdsAsFiles */);
}
}
}

View File

@@ -62,7 +62,7 @@ structure according to the original DAT master directory tree structure.";
// Now scan all of those depots and rebuild
datFile.RebuildDepot(onlineDepots, outputFolder, false /*date*/,
false /*delete*/, false /*inverse*/, (copy ? OutputFormat.TorrentGzipRomba : OutputFormat.TorrentZip),
-false /*updateDat*/, null /*headerToCheckAgainst*/);
+false /*updateDat*/);
}
}
}

View File

@@ -55,7 +55,7 @@ namespace RombaSharp.Features
// Now run the D2D on the input and write out
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
datfile.PopulateFromDir(source, Hash.DeepHashes, true /* bare */, false /* archivesAsFiles */, SkipFileType.None, false /* addBlanks */,
-false /* addDate */, _tmpdir, false /* copyFiles */, null /* headerToCheckAgainst */, true /* chdsAsFiles */, null /* filter */);
+false /* addDate */, _tmpdir, false /* copyFiles */, true /* chdsAsFiles */, null /* filter */);
datfile.Write(outDir: outdat);
}
}

View File

@@ -64,7 +64,7 @@ contents of any changed dats.";
datroot.Header.Type = "SuperDAT";
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
-datroot.PopulateFromDir(_dats, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, null, true, null);
+datroot.PopulateFromDir(_dats, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, true, null);
datroot.Items.BucketBy(BucketedBy.SHA1, DedupeType.None);
// Create a List of dat hashes in the database (SHA-1)

View File

@@ -66,7 +66,7 @@ namespace RombaSharp.Features
DatFile depot = DatFile.Create();
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
-depot.PopulateFromDir(depotname, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, null, true, null);
+depot.PopulateFromDir(depotname, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, true, null);
depot.Items.BucketBy(BucketedBy.SHA1, DedupeType.None);
// Set the base queries to use

View File

@@ -155,7 +155,7 @@ namespace SabreTools.Library.DatFiles
Header.Comment = (string.IsNullOrWhiteSpace(Header.Comment) ? itemVal : Header.Comment);
break;
case "header":
-Header.Header = (string.IsNullOrWhiteSpace(Header.Header) ? itemVal : Header.Header);
+Header.HeaderSkipper = (string.IsNullOrWhiteSpace(Header.HeaderSkipper) ? itemVal : Header.HeaderSkipper);
break;
case "type":
Header.Type = (string.IsNullOrWhiteSpace(Header.Type) ? itemVal : Header.Type);

View File

@@ -36,8 +36,6 @@ namespace SabreTools.Library.DatFiles
#endregion
#region Instance Methods
#region Constructors
/// <summary>
@@ -1496,7 +1494,6 @@ namespace SabreTools.Library.DatFiles
/// <param name="tempDir">Name of the directory to create a temp folder in (blank is current directory)</param>
/// <param name="outDir">Output directory to </param>
/// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
-/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
/// <param name="filter">Filter object to be passed to the DatItem level</param>
/// <param name="useTags">True if DatFile tags override splitting, false otherwise</param>
@@ -1510,7 +1507,6 @@ namespace SabreTools.Library.DatFiles
bool addDate,
string tempDir,
bool copyFiles,
-string headerToCheckAgainst,
bool chdsAsFiles,
Filter filter,
bool useTags = false)
@@ -1548,7 +1544,7 @@ namespace SabreTools.Library.DatFiles
Parallel.ForEach(files, Globals.ParallelOptions, item =>
{
CheckFileForHashes(item, basePath, omitFromScan, archivesAsFiles, skipFileType,
-addBlanks, addDate, tempDir, copyFiles, headerToCheckAgainst, chdsAsFiles);
+addBlanks, addDate, tempDir, copyFiles, chdsAsFiles);
});
// Now find all folders that are empty, if we are supposed to
@@ -1590,7 +1586,7 @@ namespace SabreTools.Library.DatFiles
else if (File.Exists(basePath))
{
CheckFileForHashes(basePath, Path.GetDirectoryName(Path.GetDirectoryName(basePath)), omitFromScan, archivesAsFiles,
-skipFileType, addBlanks, addDate, tempDir, copyFiles, headerToCheckAgainst, chdsAsFiles);
+skipFileType, addBlanks, addDate, tempDir, copyFiles, chdsAsFiles);
}
// Now that we're done, delete the temp folder (if it's not the default)
@@ -1617,7 +1613,6 @@ namespace SabreTools.Library.DatFiles
/// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
/// <param name="tempDir">Name of the directory to create a temp folder in (blank is current directory)</param>
/// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
-/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
private void CheckFileForHashes(
string item,
@@ -1629,7 +1624,6 @@ namespace SabreTools.Library.DatFiles
bool addDate,
string tempDir,
bool copyFiles,
-string headerToCheckAgainst,
bool chdsAsFiles)
{
// Special case for if we are in Romba mode (all names are supposed to be SHA-1 hashes)
@@ -1684,7 +1678,7 @@ namespace SabreTools.Library.DatFiles
// If the extracted list is null, just scan the item itself
if (extracted == null)
{
-ProcessFile(newItem, string.Empty, newBasePath, omitFromScan, addDate, headerToCheckAgainst, chdsAsFiles);
+ProcessFile(newItem, string.Empty, newBasePath, omitFromScan, addDate, chdsAsFiles);
}
// Otherwise, add all of the found items
else
@@ -1733,7 +1727,6 @@ namespace SabreTools.Library.DatFiles
/// <param name="basePath">Path the represents the parent directory</param>
/// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
/// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
-/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
private void ProcessFile(
string item,
@@ -1741,11 +1734,10 @@ namespace SabreTools.Library.DatFiles
string basePath,
Hash omitFromScan,
bool addDate,
-string headerToCheckAgainst,
bool chdsAsFiles)
{
Globals.Logger.Verbose($"'{Path.GetFileName(item)}' treated like a file");
-BaseFile baseFile = FileExtensions.GetInfo(item, omitFromScan: omitFromScan, date: addDate, header: headerToCheckAgainst, chdsAsFiles: chdsAsFiles);
+BaseFile baseFile = FileExtensions.GetInfo(item, omitFromScan: omitFromScan, date: addDate, header: Header.HeaderSkipper, chdsAsFiles: chdsAsFiles);
ProcessFileHelper(item, DatItem.Create(baseFile), basePath, parent);
}
@@ -1869,7 +1861,6 @@ namespace SabreTools.Library.DatFiles
/// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
/// <param name="outputFormat">Output format that files should be written to</param>
/// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
-/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <returns>True if rebuilding was a success, false otherwise</returns>
public bool RebuildDepot(
List<string> inputs,
@@ -1878,8 +1869,7 @@ namespace SabreTools.Library.DatFiles
bool delete,
bool inverse,
OutputFormat outputFormat,
-bool updateDat,
-string headerToCheckAgainst)
+bool updateDat)
{
#region Perform setup
@@ -2009,9 +1999,9 @@ namespace SabreTools.Library.DatFiles
// Otherwise, we rebuild that file to all locations that we need to
bool usedInternally;
if (Items[hash][0].ItemType == ItemType.Disk)
-usedInternally = RebuildIndividualFile(new Disk(fileinfo), foundpath, outDir, date, inverse, outputFormat, updateDat, false /* isZip */, headerToCheckAgainst);
+usedInternally = RebuildIndividualFile(new Disk(fileinfo), foundpath, outDir, date, inverse, outputFormat, updateDat, false /* isZip */);
else
-usedInternally = RebuildIndividualFile(new Rom(fileinfo), foundpath, outDir, date, inverse, outputFormat, updateDat, false /* isZip */, headerToCheckAgainst);
+usedInternally = RebuildIndividualFile(new Rom(fileinfo), foundpath, outDir, date, inverse, outputFormat, updateDat, false /* isZip */);
// If we are supposed to delete the depot file, do so
if (delete && usedInternally)
@@ -2046,7 +2036,6 @@ namespace SabreTools.Library.DatFiles
/// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
/// <param name="outputFormat">Output format that files should be written to</param>
/// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
-/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
/// <returns>True if rebuilding was a success, false otherwise</returns>
public bool RebuildGeneric(
@@ -2058,7 +2047,6 @@ namespace SabreTools.Library.DatFiles
bool inverse,
OutputFormat outputFormat,
bool updateDat,
-string headerToCheckAgainst,
bool chdsAsFiles)
{
#region Perform setup
@@ -2140,7 +2128,7 @@ namespace SabreTools.Library.DatFiles
if (File.Exists(input))
{
Globals.Logger.User($"Checking file: {input}");
-RebuildGenericHelper(input, outDir, quickScan, date, delete, inverse, outputFormat, updateDat, headerToCheckAgainst, chdsAsFiles);
+RebuildGenericHelper(input, outDir, quickScan, date, delete, inverse, outputFormat, updateDat, chdsAsFiles);
}
// If the input is a directory
@@ -2150,7 +2138,7 @@ namespace SabreTools.Library.DatFiles
foreach (string file in Directory.EnumerateFiles(input, "*", SearchOption.AllDirectories))
{
Globals.Logger.User($"Checking file: {file}");
-RebuildGenericHelper(file, outDir, quickScan, date, delete, inverse, outputFormat, updateDat, headerToCheckAgainst, chdsAsFiles);
+RebuildGenericHelper(file, outDir, quickScan, date, delete, inverse, outputFormat, updateDat, chdsAsFiles);
}
}
}
@@ -2183,7 +2171,6 @@ namespace SabreTools.Library.DatFiles
/// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
/// <param name="outputFormat">Output format that files should be written to</param>
/// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
-/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
private void RebuildGenericHelper(
string file,
@@ -2194,7 +2181,6 @@ namespace SabreTools.Library.DatFiles
bool inverse,
OutputFormat outputFormat,
bool updateDat,
-string headerToCheckAgainst,
bool chdsAsFiles)
{
// If we somehow have a null filename, return
@@ -2208,7 +2194,7 @@ namespace SabreTools.Library.DatFiles
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
BaseFile externalFileInfo = FileExtensions.GetInfo(file, omitFromScan: (quickScan ? Hash.SecureHashes : Hash.DeepHashes),
-header: headerToCheckAgainst, chdsAsFiles: chdsAsFiles);
+header: Header.HeaderSkipper, chdsAsFiles: chdsAsFiles);
DatItem externalDatItem = null;
if (externalFileInfo.Type == FileType.CHD)
@@ -2216,8 +2202,7 @@ namespace SabreTools.Library.DatFiles
else if (externalFileInfo.Type == FileType.None)
externalDatItem = new Rom(externalFileInfo);
-usedExternally = RebuildIndividualFile(externalDatItem, file, outDir, date, inverse, outputFormat,
-updateDat, null /* isZip */, headerToCheckAgainst);
+usedExternally = RebuildIndividualFile(externalDatItem, file, outDir, date, inverse, outputFormat, updateDat, null /* isZip */);
// Scan the file internally
@@ -2250,7 +2235,7 @@ namespace SabreTools.Library.DatFiles
else if (internalFileInfo.Type == FileType.None)
internalDatItem = new Rom(internalFileInfo);
-usedExternally = RebuildIndividualFile(internalDatItem, file, outDir, date, inverse, outputFormat, updateDat, null /* isZip */, headerToCheckAgainst);
+usedExternally = RebuildIndividualFile(internalDatItem, file, outDir, date, inverse, outputFormat, updateDat, null /* isZip */);
}
// Otherwise, loop through the entries and try to match
else
@@ -2258,7 +2243,7 @@ namespace SabreTools.Library.DatFiles
foreach (BaseFile entry in entries)
{
DatItem internalDatItem = DatItem.Create(entry);
-usedInternally |= RebuildIndividualFile(internalDatItem, file, outDir, date, inverse, outputFormat, updateDat, !isTorrentGzip /* isZip */, headerToCheckAgainst);
+usedInternally |= RebuildIndividualFile(internalDatItem, file, outDir, date, inverse, outputFormat, updateDat, !isTorrentGzip /* isZip */);
}
}
@@ -2278,7 +2263,6 @@ namespace SabreTools.Library.DatFiles
/// <param name="outputFormat">Output format that files should be written to</param>
/// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
/// <param name="isZip">True if the input file is an archive, false if the file is TGZ, null otherwise</param>
-/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <returns>True if the file was able to be rebuilt, false otherwise</returns>
private bool RebuildIndividualFile(
DatItem datItem,
@@ -2288,8 +2272,7 @@ namespace SabreTools.Library.DatFiles
bool inverse,
OutputFormat outputFormat,
bool updateDat,
-bool? isZip,
-string headerToCheckAgainst)
+bool? isZip)
{
// Set the initial output value
bool rebuilt = false;
@@ -2435,7 +2418,7 @@ namespace SabreTools.Library.DatFiles
}
// Now we want to take care of headers, if applicable
-if (headerToCheckAgainst != null)
+if (Header.HeaderSkipper != null)
{
// Get a generic stream for the file
Stream fileStream = new MemoryStream();
@@ -2458,7 +2441,7 @@ namespace SabreTools.Library.DatFiles
return false;
// Check to see if we have a matching header first
-SkipperRule rule = Transform.GetMatchingRule(fileStream, Path.GetFileNameWithoutExtension(headerToCheckAgainst));
+SkipperRule rule = Transform.GetMatchingRule(fileStream, Path.GetFileNameWithoutExtension(Header.HeaderSkipper));
// If there's a match, create the new file to write
if (rule.Tests != null && rule.Tests.Count != 0)
@@ -2604,11 +2587,10 @@ namespace SabreTools.Library.DatFiles
/// <param name="outDir">Optional param for output directory</param>
/// <param name="hashOnly">True if only hashes should be checked, false for full file information</param>
/// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
-/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
/// <param name="filter">Filter object to be passed to the DatItem level</param>
/// <returns>True if verification was a success, false otherwise</returns>
-public bool VerifyGeneric(List<string> inputs, string outDir, bool hashOnly, bool quickScan, string headerToCheckAgainst, bool chdsAsFiles, Filter filter)
+public bool VerifyGeneric(List<string> inputs, string outDir, bool hashOnly, bool quickScan, bool chdsAsFiles, Filter filter)
{
// TODO: We want the cross section of what's the folder and what's in the DAT. Right now, it just has what's in the DAT that's not in the folder
bool success = true;
@@ -2619,7 +2601,7 @@ namespace SabreTools.Library.DatFiles
{
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
PopulateFromDir(input, (quickScan ? Hash.SecureHashes : Hash.DeepHashes) /* omitFromScan */, true /* bare */, false /* archivesAsFiles */,
-SkipFileType.None, false /* addBlanks */, false /* addDate */, string.Empty /* tempDir */, false /* copyFiles */, headerToCheckAgainst, chdsAsFiles, filter);
+SkipFileType.None, false /* addBlanks */, false /* addDate */, string.Empty /* tempDir */, false /* copyFiles */, chdsAsFiles, filter);
}
// Setup the fixdat
@@ -3363,7 +3345,5 @@ namespace SabreTools.Library.DatFiles
}
#endregion
#endregion // Instance Methods
}
}
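
The semantics that the removed headerToCheckAgainst docs described still apply, only the value now lives on the header: a populated name selects that skipper, a blank string means "use the first matching skipper", and null disables header handling entirely. Condensed from the RebuildIndividualFile hunk above (stream setup and the actual writes omitted):

if (Header.HeaderSkipper != null)
{
    // Look up a skipper rule by name; an empty name falls back to whichever rule's tests match.
    SkipperRule rule = Transform.GetMatchingRule(fileStream, Path.GetFileNameWithoutExtension(Header.HeaderSkipper));

    // Only produce the de-headered copy when a rule with tests actually matched.
    if (rule.Tests != null && rule.Tests.Count != 0)
    {
        // ... rebuild the headerless version alongside the normal output ...
    }
}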

View File

@@ -93,7 +93,7 @@ namespace SabreTools.Library.DatFiles
/// Header skipper to be used when loading the DAT
/// </summary>
[JsonProperty("header")]
-public string Header { get; set; }
+public string HeaderSkipper { get; set; }
/// <summary>
/// Classification of the DAT. Generally only used for SuperDAT
@@ -262,7 +262,7 @@ namespace SabreTools.Library.DatFiles
Homepage = this.Homepage,
Url = this.Url,
Comment = this.Comment,
-Header = this.Header,
+HeaderSkipper = this.HeaderSkipper,
Type = this.Type,
ForceMerging = this.ForceMerging,
ForceNodump = this.ForceNodump,
@@ -308,7 +308,7 @@ namespace SabreTools.Library.DatFiles
Homepage = this.Homepage,
Url = this.Url,
Comment = this.Comment,
-Header = this.Header,
+HeaderSkipper = this.HeaderSkipper,
Type = this.Type,
ForceMerging = this.ForceMerging,
ForceNodump = this.ForceNodump,
@@ -394,8 +394,8 @@ namespace SabreTools.Library.DatFiles
if (!string.IsNullOrWhiteSpace(datHeader.Comment))
this.Comment = datHeader.Comment;
-if (!string.IsNullOrWhiteSpace(datHeader.Header))
-this.Header = datHeader.Header;
+if (!string.IsNullOrWhiteSpace(datHeader.HeaderSkipper))
+this.HeaderSkipper = datHeader.HeaderSkipper;
if (!string.IsNullOrWhiteSpace(datHeader.Type))
this.Type = datHeader.Type;
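
Only the C# identifier changes here; the serialized key stays "header" (the [JsonProperty("header")] attribute above is untouched, and the JSON and ClrMamePro writers below keep emitting "header"), so existing DATs parse and round-trip as before. For reference, the property as it reads after this commit:

/// <summary>
/// Header skipper to be used when loading the DAT
/// </summary>
[JsonProperty("header")] // on-disk key unchanged; only the C# name moves
public string HeaderSkipper { get; set; }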

View File

@@ -206,7 +206,7 @@ namespace SabreTools.Library.DatFiles
case "header":
content = jtr.ReadAsString();
-Header.Header = (string.IsNullOrWhiteSpace(Header.Header) ? content : Header.Header);
+Header.HeaderSkipper = (string.IsNullOrWhiteSpace(Header.HeaderSkipper) ? content : Header.HeaderSkipper);
break;
default:
@@ -963,10 +963,10 @@ namespace SabreTools.Library.DatFiles
break;
}
}
-if (!string.IsNullOrWhiteSpace(Header.Header))
+if (!string.IsNullOrWhiteSpace(Header.HeaderSkipper))
{
jtw.WritePropertyName("header");
-jtw.WriteValue(Header.Header);
+jtw.WriteValue(Header.HeaderSkipper);
}
// End header

View File

@@ -218,8 +218,8 @@ namespace SabreTools.Library.DatFiles
break;
case "clrmamepro":
-if (string.IsNullOrWhiteSpace(Header.Header))
-Header.Header = reader.GetAttribute("header");
+if (string.IsNullOrWhiteSpace(Header.HeaderSkipper))
+Header.HeaderSkipper = reader.GetAttribute("header");
if (Header.ForceMerging == ForceMerging.None)
Header.ForceMerging = reader.GetAttribute("forcemerging").AsForceMerging();
@@ -782,7 +782,7 @@ namespace SabreTools.Library.DatFiles
if (Header.ForcePacking != ForcePacking.None
|| Header.ForceMerging != ForceMerging.None
|| Header.ForceNodump != ForceNodump.None
-|| !string.IsNullOrWhiteSpace(Header.Header))
+|| !string.IsNullOrWhiteSpace(Header.HeaderSkipper))
{
xtw.WriteStartElement("clrmamepro");
switch (Header.ForcePacking)
@@ -824,8 +824,8 @@ namespace SabreTools.Library.DatFiles
break;
}
-if (!string.IsNullOrWhiteSpace(Header.Header))
-xtw.WriteAttributeString("header", Header.Header);
+if (!string.IsNullOrWhiteSpace(Header.HeaderSkipper))
+xtw.WriteAttributeString("header", Header.HeaderSkipper);
// End clrmamepro
xtw.WriteEndElement();

View File

@@ -165,7 +165,7 @@ namespace SabreTools.Library.DatFiles
break;
case "DatFile.Header":
-Header.Header = (string.IsNullOrWhiteSpace(Header.Header) ? value : Header.Header);
+Header.HeaderSkipper = (string.IsNullOrWhiteSpace(Header.HeaderSkipper) ? value : Header.HeaderSkipper);
break;
case "DatFile.Type":

View File

@@ -2603,7 +2603,7 @@ Some special strings that can be used:
ForceNodump = GetString(features, ForceNodumpStringValue).AsForceNodump(),
ForcePacking = GetString(features, ForcePackingStringValue).AsForcePacking(),
GameName = GetBoolean(features, GamePrefixValue),
-Header = GetString(features, HeaderStringValue),
+HeaderSkipper = GetString(features, HeaderStringValue),
Homepage = GetString(features, HomepageStringValue),
KeepEmptyGames = GetBoolean(features, KeepEmptyGamesValue),
Name = GetString(features, NameStringValue),

View File

@@ -87,7 +87,6 @@ namespace SabreTools.Features
addFileDates,
tempDir,
copyFiles,
-Header.Header,
chdsAsFiles,
Filter);

View File

@@ -65,7 +65,6 @@ namespace SabreTools.Features
bool quickScan = GetBoolean(features, QuickValue);
bool romba = GetBoolean(features, RombaValue);
bool updateDat = GetBoolean(features, UpdateDatValue);
-string headerToCheckAgainst = GetString(features, HeaderStringValue);
var outputFormat = GetOutputFormat(features);
// If we have TorrentGzip output and the romba flag, update
@@ -87,12 +86,14 @@ namespace SabreTools.Features
{
DatFile datdata = DatFile.Create();
datdata.Parse(datfile, 99, keep: true);
+if (!string.IsNullOrEmpty(Header.HeaderSkipper))
+datdata.Header.HeaderSkipper = Header.HeaderSkipper;
// If we have the depot flag, respect it
if (depot)
-datdata.RebuildDepot(Inputs, Path.Combine(OutputDir, datdata.Header.FileName), date, delete, inverse, outputFormat, updateDat, headerToCheckAgainst);
+datdata.RebuildDepot(Inputs, Path.Combine(OutputDir, datdata.Header.FileName), date, delete, inverse, outputFormat, updateDat);
else
-datdata.RebuildGeneric(Inputs, Path.Combine(OutputDir, datdata.Header.FileName), quickScan, date, delete, inverse, outputFormat, updateDat, headerToCheckAgainst, chdsAsFiles);
+datdata.RebuildGeneric(Inputs, Path.Combine(OutputDir, datdata.Header.FileName), quickScan, date, delete, inverse, outputFormat, updateDat, chdsAsFiles);
}
}
@@ -112,9 +113,9 @@ namespace SabreTools.Features
// If we have the depot flag, respect it
if (depot)
-datdata.RebuildDepot(Inputs, OutputDir, date, delete, inverse, outputFormat, updateDat, headerToCheckAgainst);
+datdata.RebuildDepot(Inputs, OutputDir, date, delete, inverse, outputFormat, updateDat);
else
-datdata.RebuildGeneric(Inputs, OutputDir, quickScan, date, delete, inverse, outputFormat, updateDat, headerToCheckAgainst, chdsAsFiles);
+datdata.RebuildGeneric(Inputs, OutputDir, quickScan, date, delete, inverse, outputFormat, updateDat, chdsAsFiles);
}
}
}

View File

@@ -47,7 +47,6 @@ namespace SabreTools.Features
bool depot = GetBoolean(features, DepotValue);
bool hashOnly = GetBoolean(features, HashOnlyValue);
bool quickScan = GetBoolean(features, QuickValue);
-string headerToCheckAgainst = Header.Header;
// If we are in individual mode, process each DAT on their own
if (GetBoolean(features, IndividualValue))
@@ -57,12 +56,14 @@ namespace SabreTools.Features
DatFile datdata = DatFile.Create();
datdata.Parse(datfile, 99, keep: true);
Filter.FilterDatFile(datdata, true);
+if (!string.IsNullOrEmpty(Header.HeaderSkipper))
+datdata.Header.HeaderSkipper = Header.HeaderSkipper;
// If we have the depot flag, respect it
if (depot)
datdata.VerifyDepot(Inputs, OutputDir);
else
-datdata.VerifyGeneric(Inputs, OutputDir, hashOnly, quickScan, headerToCheckAgainst, chdsAsFiles, Filter);
+datdata.VerifyGeneric(Inputs, OutputDir, hashOnly, quickScan, chdsAsFiles, Filter);
}
}
// Otherwise, process all DATs into the same output
@@ -84,7 +85,7 @@ namespace SabreTools.Features
if (depot)
datdata.VerifyDepot(Inputs, OutputDir);
else
-datdata.VerifyGeneric(Inputs, OutputDir, hashOnly, quickScan, headerToCheckAgainst, chdsAsFiles, Filter);
+datdata.VerifyGeneric(Inputs, OutputDir, hashOnly, quickScan, chdsAsFiles, Filter);
}
}
}
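
One behavioral detail in the Rebuild and Verify features above: a skipper supplied on the command line now only overrides the DAT's own value when it is non-empty, so a skipper declared inside the DAT being processed wins by default. Roughly, condensed from the Verify hunk (filtering and the depot branch omitted):

DatFile datdata = DatFile.Create();
datdata.Parse(datfile, 99, keep: true);

// Parse() may already have filled datdata.Header.HeaderSkipper from the DAT itself;
// the command-line value (this feature's Header.HeaderSkipper) only replaces it when set.
if (!string.IsNullOrEmpty(Header.HeaderSkipper))
    datdata.Header.HeaderSkipper = Header.HeaderSkipper;

datdata.VerifyGeneric(Inputs, OutputDir, hashOnly, quickScan, chdsAsFiles, Filter);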