diff --git a/SabreTools.DatTools/DatFromDir.cs b/SabreTools.DatTools/DatFromDir.cs
index 26da119c..2a11a9d1 100644
--- a/SabreTools.DatTools/DatFromDir.cs
+++ b/SabreTools.DatTools/DatFromDir.cs
@@ -34,6 +34,11 @@ namespace SabreTools.DatTools
         /// </summary>
         private readonly SkipFileType _skipFileType;
 
+        /// <summary>
+        /// TreatAsFile representing CHD and Archive scanning
+        /// </summary>
+        private readonly TreatAsFile _treatAsFile;
+
         /// <summary>
         /// Indicates if blank items should be created for empty folders
         /// </summary>
@@ -52,13 +57,14 @@ namespace SabreTools.DatTools
 
         #region Constructors
 
-        public DatFromDir(HashType[] hashes, SkipFileType skipFileType, bool addBlanks)
+        public DatFromDir(HashType[] hashes, SkipFileType skipFileType, TreatAsFile treatAsFile, bool addBlanks)
         {
             if (hashes.Length == 0)
                 throw new ArgumentException($"{nameof(hashes)} must contain at least one valid HashType");
 
             _hashes = hashes;
             _skipFileType = skipFileType;
+            _treatAsFile = treatAsFile;
             _addBlanks = addBlanks;
         }
 
@@ -69,8 +75,7 @@ namespace SabreTools.DatTools
         /// </summary>
         /// <param name="datFile">Current DatFile object to add to</param>
         /// <param name="basePath">Base folder to be used in creating the DAT</param>
-        /// <param name="asFile">TreatAsFile representing CHD and Archive scanning</param>
-        public bool PopulateFromDir(DatFile datFile, string basePath, TreatAsFile asFile = 0x00)
+        public bool PopulateFromDir(DatFile datFile, string basePath)
         {
             InternalStopwatch watch = new($"Populating DAT from {basePath}");
 
@@ -91,7 +96,7 @@
                 foreach (string item in files)
                 {
                     currentCount++;
-                    CheckFileForHashes(datFile, item, basePath, asFile);
+                    CheckFileForHashes(datFile, item, basePath);
                     _staticLogger.User(totalCount, currentCount, item);
                 }
 
@@ -110,7 +115,7 @@
 
                 // Process the input file
                 string? parentPath = Path.GetDirectoryName(Path.GetDirectoryName(basePath));
-                CheckFileForHashes(datFile, basePath, parentPath, asFile);
+                CheckFileForHashes(datFile, basePath, parentPath);
                 _staticLogger.User(totalCount, totalCount, basePath);
             }
 
@@ -130,7 +135,7 @@
         /// <param name="item">Filename of the item to be checked</param>
        /// <param name="basePath">Base folder to be used in creating the DAT</param>
         /// <param name="asFile">TreatAsFile representing CHD and Archive scanning</param>
-        private void CheckFileForHashes(DatFile datFile, string item, string? basePath, TreatAsFile asFile)
+        private void CheckFileForHashes(DatFile datFile, string item, string? basePath)
         {
             // If we're in depot mode, process it separately
             if (CheckDepotFile(datFile, item))
@@ -147,9 +152,9 @@
 
                 // Skip if we're treating archives as files and skipping files
 #if NET20 || NET35
-                if ((asFile & TreatAsFile.Archive) != 0 && _skipFileType == SkipFileType.File)
+                if ((_treatAsFile & TreatAsFile.Archive) != 0 && _skipFileType == SkipFileType.File)
 #else
-                if (asFile.HasFlag(TreatAsFile.Archive) && _skipFileType == SkipFileType.File)
+                if (_treatAsFile.HasFlag(TreatAsFile.Archive) && _skipFileType == SkipFileType.File)
 #endif
                 {
                     return;
@@ -163,9 +168,9 @@
 
                 // Process as archive if we're not treating archives as files
 #if NET20 || NET35
-                else if ((asFile & TreatAsFile.Archive) == 0)
+                else if ((_treatAsFile & TreatAsFile.Archive) == 0)
 #else
-                else if (!asFile.HasFlag(TreatAsFile.Archive))
+                else if (!_treatAsFile.HasFlag(TreatAsFile.Archive))
 #endif
                 {
                     var extracted = archive.GetChildren();
@@ -182,7 +187,7 @@
                 // Process as file if we're treating archives as files
                 else
                 {
-                    ProcessFile(datFile, item, basePath, asFile);
+                    ProcessFile(datFile, item, basePath);
                 }
             }
 
@@ -195,7 +200,7 @@
 
                 // Process as file
                 else
-                    ProcessFile(datFile, item, basePath, asFile);
+                    ProcessFile(datFile, item, basePath);
             }
         }
 
@@ -392,12 +397,12 @@
         /// <param name="item">File to be added</param>
         /// <param name="basePath">Path the represents the parent directory</param>
         /// <param name="asFile">TreatAsFile representing CHD and Archive scanning</param>
-        private void ProcessFile(DatFile datFile, string item, string? basePath, TreatAsFile asFile)
+        private void ProcessFile(DatFile datFile, string item, string? basePath)
         {
             _staticLogger.Verbose($"'{Path.GetFileName(item)}' treated like a file");
             var header = datFile.Header.GetStringFieldValue(Models.Metadata.Header.HeaderKey);
             BaseFile? baseFile = FileTypeTool.GetInfo(item, _hashes, header);
-            DatItem? datItem = DatItemTool.CreateDatItem(baseFile, asFile);
+            DatItem? datItem = DatItemTool.CreateDatItem(baseFile, _treatAsFile);
             if (datItem != null)
                 ProcessFileHelper(datFile, item, datItem, basePath, string.Empty);
         }
diff --git a/SabreTools/Features/Batch.cs b/SabreTools/Features/Batch.cs
index 62abe3a9..29dff1fb 100644
--- a/SabreTools/Features/Batch.cs
+++ b/SabreTools/Features/Batch.cs
@@ -5,6 +5,7 @@ using System.Text.RegularExpressions;
 using SabreTools.Core.Filter;
 using SabreTools.Core.Tools;
 using SabreTools.DatFiles;
+using SabreTools.DatItems;
 using SabreTools.DatTools;
 using SabreTools.Hashing;
 using SabreTools.Help;
@@ -271,7 +272,7 @@ Reset the internal state: reset();";
         public override void Process(BatchState batchState)
         {
             HashType[] hashes = [HashType.CRC32, HashType.MD5, HashType.SHA1];
-            var dfd = new DatTools.DatFromDir(hashes, SkipFileType.None, addBlanks: false);
+            var dfd = new DatTools.DatFromDir(hashes, SkipFileType.None, (TreatAsFile)0x00, addBlanks: false);
 
             // Assume there could be multiple
             foreach (string input in Arguments)
diff --git a/SabreTools/Features/DatFromDir.cs b/SabreTools/Features/DatFromDir.cs
index 8cf7aca5..642b1ef7 100644
--- a/SabreTools/Features/DatFromDir.cs
+++ b/SabreTools/Features/DatFromDir.cs
@@ -60,11 +60,11 @@
             // Get feature flags
             bool addBlankFiles = GetBoolean(features, AddBlankFilesValue);
             bool addFileDates = GetBoolean(features, AddDateValue);
-            TreatAsFile asFile = GetTreatAsFile(features);
+            TreatAsFile treatAsFile = GetTreatAsFile(features);
             bool noAutomaticDate = GetBoolean(features, NoAutomaticDateValue);
             var includeInScan = GetIncludeInScan(features);
             var skipFileType = GetSkipFileType(features);
-            var dfd = new DatTools.DatFromDir(includeInScan, skipFileType, addBlankFiles);
+            var dfd = new DatTools.DatFromDir(includeInScan, skipFileType, treatAsFile, addBlankFiles);
 
             // Apply the specialized field removals to the cleaner
             if (!addFileDates)
@@ -91,7 +91,7 @@
                 datdata.FillHeaderFromPath(basePath, noAutomaticDate);
 
                 // Now populate from the path
-                bool success = dfd.PopulateFromDir(datdata, basePath, asFile);
+                bool success = dfd.PopulateFromDir(datdata, basePath);
                 if (success)
                 {
                     // Perform additional processing steps
diff --git a/SabreTools/Features/Verify.cs b/SabreTools/Features/Verify.cs
index d8c20b33..dd0e5de1 100644
--- a/SabreTools/Features/Verify.cs
+++ b/SabreTools/Features/Verify.cs
@@ -50,11 +50,11 @@
             var datfilePaths = PathTool.GetFilesOnly(datfiles);
 
             // Get feature flags
-            TreatAsFile asFile = GetTreatAsFile(features);
+            TreatAsFile treatAsFile = GetTreatAsFile(features);
             bool hashOnly = GetBoolean(features, HashOnlyValue);
             bool quickScan = GetBoolean(features, QuickValue);
             HashType[] hashes = quickScan ? [HashType.CRC32] : [HashType.CRC32, HashType.MD5, HashType.SHA1];
-            var dfd = new DatTools.DatFromDir(hashes, SkipFileType.None, addBlanks: false);
+            var dfd = new DatTools.DatFromDir(hashes, SkipFileType.None, treatAsFile, addBlanks: false);
 
             // If we are in individual mode, process each DAT on their own
             if (GetBoolean(features, IndividualValue))
@@ -93,7 +93,7 @@
                 _logger.User("Processing files:\n");
                 foreach (string input in Inputs)
                 {
-                    dfd.PopulateFromDir(datdata, input, asFile);
+                    dfd.PopulateFromDir(datdata, input);
                 }
 
                 Verification.VerifyGeneric(datdata, hashOnly);
@@ -147,7 +147,7 @@
                 _logger.User("Processing files:\n");
                 foreach (string input in Inputs)
                 {
-                    dfd.PopulateFromDir(datdata, input, asFile);
+                    dfd.PopulateFromDir(datdata, input);
                 }
 
                 Verification.VerifyGeneric(datdata, hashOnly);
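
For reference, a minimal caller sketch against the revised API, mirroring the Batch.cs and Verify.cs call sites above. This is illustrative only: BuildDatFile() is a hypothetical stand-in for however the caller obtains its DatFile, and (TreatAsFile)0x00 simply copies the Batch.cs default.

    // Sketch, not repository code: TreatAsFile is now fixed when the scanner is constructed,
    // so PopulateFromDir only takes the DatFile and the base path.
    HashType[] hashes = [HashType.CRC32, HashType.MD5, HashType.SHA1];
    var dfd = new DatTools.DatFromDir(hashes, SkipFileType.None, (TreatAsFile)0x00, addBlanks: false);

    DatFile datdata = BuildDatFile(); // hypothetical helper, not part of this diff
    bool success = dfd.PopulateFromDir(datdata, "path/to/scan");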