diff --git a/SabreTools.DatFiles/DatFile.Verifying.cs b/SabreTools.DatFiles/DatTool.Verifying.cs
similarity index 71%
rename from SabreTools.DatFiles/DatFile.Verifying.cs
rename to SabreTools.DatFiles/DatTool.Verifying.cs
index e4a34a15..1aaf79e3 100644
--- a/SabreTools.DatFiles/DatFile.Verifying.cs
+++ b/SabreTools.DatFiles/DatTool.Verifying.cs
@@ -11,14 +11,16 @@ using SabreTools.Logging;
// This file represents all methods related to verifying with a DatFile
namespace SabreTools.DatFiles
{
- public abstract partial class DatFile
+ // TODO: Re-evaluate if these should be made static instead of instanced
+ public partial class DatTool
{
/// <summary>
/// Verify a DatFile against a set of depots, leaving only missing files
/// </summary>
+ /// <param name="datFile">Current DatFile object to verify against</param>
/// <param name="inputs">List of input directories to compare against</param>
/// <returns>True if verification was a success, false otherwise</returns>
- public bool VerifyDepot(List<string> inputs)
+ public bool VerifyDepot(DatFile datFile, List<string> inputs)
{
bool success = true;
@@ -41,10 +43,10 @@ namespace SabreTools.DatFiles
return success;
// Now that we have a list of depots, we want to bucket the input DAT by SHA-1
- Items.BucketBy(Field.DatItem_SHA1, DedupeType.None);
+ datFile.Items.BucketBy(Field.DatItem_SHA1, DedupeType.None);
// Then we want to loop through each of the hashes and see if we can rebuild
- var keys = Items.SortedKeys.ToList();
+ var keys = datFile.Items.SortedKeys.ToList();
foreach (string hash in keys)
{
// Pre-empt any issues that could arise from string length
@@ -54,7 +56,7 @@ namespace SabreTools.DatFiles
logger.User($"Checking hash '{hash}'");
// Get the extension path for the hash
- string subpath = PathExtensions.GetDepotPath(hash, Header.InputDepot.Depth);
+ string subpath = PathExtensions.GetDepotPath(hash, datFile.Header.InputDepot.Depth);
// Find the first depot that includes the hash
string foundpath = null;
@@ -80,17 +82,17 @@ namespace SabreTools.DatFiles
continue;
// Now we want to remove all duplicates from the DAT
- Items.GetDuplicates(new Rom(fileinfo))
- .AddRange(Items.GetDuplicates(new Disk(fileinfo)));
+ datFile.Items.GetDuplicates(new Rom(fileinfo))
+ .AddRange(datFile.Items.GetDuplicates(new Disk(fileinfo)));
}
watch.Stop();
// Set fixdat headers in case of writing out
- Header.FileName = $"fixDAT_{Header.FileName}";
- Header.Name = $"fixDAT_{Header.Name}";
- Header.Description = $"fixDAT_{Header.Description}";
- Items.ClearMarked();
+ datFile.Header.FileName = $"fixDAT_{datFile.Header.FileName}";
+ datFile.Header.Name = $"fixDAT_{datFile.Header.Name}";
+ datFile.Header.Description = $"fixDAT_{datFile.Header.Description}";
+ datFile.Items.ClearMarked();
return success;
}
@@ -98,24 +100,25 @@ namespace SabreTools.DatFiles
/// <summary>
/// Verify a DatFile against a set of inputs, leaving only missing files
/// </summary>
+ /// <param name="datFile">Current DatFile object to verify against</param>
/// <param name="hashOnly">True if only hashes should be checked, false for full file information</param>
/// <returns>True if verification was a success, false otherwise</returns>
- public bool VerifyGeneric(bool hashOnly)
+ public bool VerifyGeneric(DatFile datFile, bool hashOnly)
{
bool success = true;
// Force bucketing according to the flags
- Items.SetBucketedBy(Field.NULL);
+ datFile.Items.SetBucketedBy(Field.NULL);
if (hashOnly)
- Items.BucketBy(Field.DatItem_CRC, DedupeType.Full);
+ datFile.Items.BucketBy(Field.DatItem_CRC, DedupeType.Full);
else
- Items.BucketBy(Field.Machine_Name, DedupeType.Full);
+ datFile.Items.BucketBy(Field.Machine_Name, DedupeType.Full);
// Then mark items for removal
- var keys = Items.SortedKeys.ToList();
+ var keys = datFile.Items.SortedKeys.ToList();
foreach (string key in keys)
{
- List<DatItem> items = Items[key];
+ List<DatItem> items = datFile.Items[key];
for (int i = 0; i < items.Count; i++)
{
// Unmatched items will have a source ID of int.MaxValue, remove all others
@@ -124,14 +127,14 @@ namespace SabreTools.DatFiles
}
// Set the list back, just in case
- Items[key] = items;
+ datFile.Items[key] = items;
}
// Set fixdat headers in case of writing out
- Header.FileName = $"fixDAT_{Header.FileName}";
- Header.Name = $"fixDAT_{Header.Name}";
- Header.Description = $"fixDAT_{Header.Description}";
- Items.ClearMarked();
+ datFile.Header.FileName = $"fixDAT_{datFile.Header.FileName}";
+ datFile.Header.Name = $"fixDAT_{datFile.Header.Name}";
+ datFile.Header.Description = $"fixDAT_{datFile.Header.Description}";
+ datFile.Items.ClearMarked();
return success;
}
diff --git a/SabreTools/Features/Verify.cs b/SabreTools/Features/Verify.cs
index 3009e883..68d712d3 100644
--- a/SabreTools/Features/Verify.cs
+++ b/SabreTools/Features/Verify.cs
@@ -79,7 +79,7 @@ namespace SabreTools.Features
// If we have the depot flag, respect it
if (Header.InputDepot?.IsActive ?? false)
{
- datdata.VerifyDepot(Inputs);
+ dt.VerifyDepot(datdata, Inputs);
}
else
{
@@ -90,7 +90,7 @@ namespace SabreTools.Features
dt.PopulateFromDir(datdata, input, asFiles: asFiles, hashes: quickScan ? Hash.CRC : Hash.Standard);
}
- datdata.VerifyGeneric(hashOnly);
+ dt.VerifyGeneric(datdata, hashOnly);
}
// Now write out if there are any items left
@@ -128,7 +128,7 @@ namespace SabreTools.Features
// If we have the depot flag, respect it
if (Header.InputDepot?.IsActive ?? false)
{
- datdata.VerifyDepot(Inputs);
+ dt.VerifyDepot(datdata, Inputs);
}
else
{
@@ -139,7 +139,7 @@ namespace SabreTools.Features
dt.PopulateFromDir(datdata, input, asFiles: asFiles, hashes: quickScan ? Hash.CRC : Hash.Standard);
}
- datdata.VerifyGeneric(hashOnly);
+ dt.VerifyGeneric(datdata, hashOnly);
}
// Now write out if there are any items left