diff --git a/RombaSharp/Features/Archive.cs b/RombaSharp/Features/Archive.cs
index ca6281c8..b8209008 100644
--- a/RombaSharp/Features/Archive.cs
+++ b/RombaSharp/Features/Archive.cs
@@ -66,8 +66,8 @@ have a current entry in the DAT index.";
foreach (string dir in onlyDirs)
{
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
- df.PopulateFromDir(dir, Hash.DeepHashes, false, false, SkipFileType.None, false, false, false, true, null);
- df.PopulateFromDir(dir, Hash.DeepHashes, false, true, SkipFileType.None, false, false, false, true, null);
+ df.PopulateFromDir(dir, Hash.DeepHashes, false, TreatAsFiles.CHDs, SkipFileType.None, false, false, false, null);
+ df.PopulateFromDir(dir, Hash.DeepHashes, false, TreatAsFiles.Archives | TreatAsFiles.CHDs, SkipFileType.None, false, false, false, null);
}
// Create an empty Dat for files that need to be rebuilt
@@ -192,7 +192,7 @@ have a current entry in the DAT index.";
// Create the sorting object to use and rebuild the needed files
need.RebuildGeneric(onlyDirs, _depots.Keys.ToList()[0], false /*quickScan*/, false /*date*/,
false /*delete*/, false /*inverse*/, OutputFormat.TorrentGzipRomba, false /*updateDat*/,
- true /* chdsAsFiles */);
+ TreatAsFiles.CHDs);
}
}
}
diff --git a/RombaSharp/Features/Dir2Dat.cs b/RombaSharp/Features/Dir2Dat.cs
index 247a7f32..e5f7e9bd 100644
--- a/RombaSharp/Features/Dir2Dat.cs
+++ b/RombaSharp/Features/Dir2Dat.cs
@@ -54,8 +54,8 @@ namespace RombaSharp.Features
// Now run the D2D on the input and write out
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
- datfile.PopulateFromDir(source, Hash.DeepHashes, true /* bare */, false /* archivesAsFiles */, SkipFileType.None, false /* addBlanks */,
- false /* addDate */, false /* copyFiles */, true /* chdsAsFiles */, null /* filter */);
+ datfile.PopulateFromDir(source, Hash.DeepHashes, true /* bare */, TreatAsFiles.CHDs, SkipFileType.None, false /* addBlanks */,
+ false /* addDate */, false /* copyFiles */, null /* filter */);
datfile.Write(outDir: outdat);
}
}
diff --git a/RombaSharp/Features/RefreshDats.cs b/RombaSharp/Features/RefreshDats.cs
index 80cb20f7..be3c7a8f 100644
--- a/RombaSharp/Features/RefreshDats.cs
+++ b/RombaSharp/Features/RefreshDats.cs
@@ -64,7 +64,7 @@ contents of any changed dats.";
datroot.Header.Type = "SuperDAT";
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
- datroot.PopulateFromDir(_dats, Hash.DeepHashes, false, false, SkipFileType.None, false, false, false, true, null);
+ datroot.PopulateFromDir(_dats, Hash.DeepHashes, false, TreatAsFiles.CHDs, SkipFileType.None, false, false, false, null);
datroot.Items.BucketBy(BucketedBy.SHA1, DedupeType.None);
// Create a List of dat hashes in the database (SHA-1)
diff --git a/RombaSharp/Features/RescanDepots.cs b/RombaSharp/Features/RescanDepots.cs
index 2e481cfa..3829c880 100644
--- a/RombaSharp/Features/RescanDepots.cs
+++ b/RombaSharp/Features/RescanDepots.cs
@@ -66,7 +66,7 @@ namespace RombaSharp.Features
DatFile depot = DatFile.Create();
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
- depot.PopulateFromDir(depotname, Hash.DeepHashes, false, false, SkipFileType.None, false, false, false, true, null);
+ depot.PopulateFromDir(depotname, Hash.DeepHashes, false, TreatAsFiles.CHDs, SkipFileType.None, false, false, false, null);
depot.Items.BucketBy(BucketedBy.SHA1, DedupeType.None);
// Set the base queries to use
diff --git a/SabreTools.Library/DatFiles/DatFile.cs b/SabreTools.Library/DatFiles/DatFile.cs
index 4c7d0027..62b641f9 100644
--- a/SabreTools.Library/DatFiles/DatFile.cs
+++ b/SabreTools.Library/DatFiles/DatFile.cs
@@ -1487,25 +1487,23 @@ namespace SabreTools.Library.DatFiles
/// Base folder to be used in creating the DAT
/// Hash flag saying what hashes should not be calculated
/// True if the date should be omitted from the DAT, false otherwise
- /// True if archives should be treated as files, false otherwise
+ /// TreatAsFiles representing CHD and Archive scanning
/// Type of files that should be skipped
/// True if blank items should be created for empty folders, false otherwise
/// True if dates should be archived for all files, false otherwise
/// Output directory to
/// True if files should be copied to the temp directory before hashing, false otherwise
- /// True if CHDs should be treated like regular files, false otherwise
/// Filter object to be passed to the DatItem level
/// True if DatFile tags override splitting, false otherwise
public bool PopulateFromDir(
string basePath,
Hash omitFromScan,
bool bare,
- bool archivesAsFiles,
+ TreatAsFiles asFiles,
SkipFileType skipFileType,
bool addBlanks,
bool addDate,
bool copyFiles,
- bool chdsAsFiles,
Filter filter,
bool useTags = false)
{
@@ -1541,8 +1539,7 @@ namespace SabreTools.Library.DatFiles
List files = Directory.EnumerateFiles(basePath, "*", SearchOption.AllDirectories).ToList();
Parallel.ForEach(files, Globals.ParallelOptions, item =>
{
- CheckFileForHashes(item, basePath, omitFromScan, archivesAsFiles, skipFileType,
- addBlanks, addDate, copyFiles, chdsAsFiles);
+ CheckFileForHashes(item, basePath, omitFromScan, asFiles, skipFileType, addBlanks, addDate, copyFiles);
});
// Now find all folders that are empty, if we are supposed to
@@ -1583,8 +1580,8 @@ namespace SabreTools.Library.DatFiles
}
else if (File.Exists(basePath))
{
- CheckFileForHashes(basePath, Path.GetDirectoryName(Path.GetDirectoryName(basePath)), omitFromScan, archivesAsFiles,
- skipFileType, addBlanks, addDate, copyFiles, chdsAsFiles);
+ CheckFileForHashes(basePath, Path.GetDirectoryName(Path.GetDirectoryName(basePath)), omitFromScan, asFiles,
+ skipFileType, addBlanks, addDate, copyFiles);
}
// Now that we're done, delete the temp folder (if it's not the default)
@@ -1605,22 +1602,20 @@ namespace SabreTools.Library.DatFiles
/// Filename of the item to be checked
/// Base folder to be used in creating the DAT
/// Hash flag saying what hashes should not be calculated
- /// True if archives should be treated as files, false otherwise
+ /// TreatAsFiles representing CHD and Archive scanning
/// Type of files that should be skipped
/// True if blank items should be created for empty folders, false otherwise
/// True if dates should be archived for all files, false otherwise
/// True if files should be copied to the temp directory before hashing, false otherwise
- /// True if CHDs should be treated like regular files, false otherwise
private void CheckFileForHashes(
string item,
string basePath,
Hash omitFromScan,
- bool archivesAsFiles,
+ TreatAsFiles asFiles,
SkipFileType skipFileType,
bool addBlanks,
bool addDate,
- bool copyFiles,
- bool chdsAsFiles)
+ bool copyFiles)
{
// Special case for if we are in Romba mode (all names are supposed to be SHA-1 hashes)
if (Header.Romba)
@@ -1661,7 +1656,7 @@ namespace SabreTools.Library.DatFiles
List extracted = null;
// If we have an archive and we're supposed to scan it
- if (archive != null && !archivesAsFiles)
+ if (archive != null && !asFiles.HasFlag(TreatAsFiles.Archives))
extracted = archive.GetChildren(omitFromScan: omitFromScan, date: addDate);
// If the file should be skipped based on type, do so now
@@ -1674,7 +1669,7 @@ namespace SabreTools.Library.DatFiles
// If the extracted list is null, just scan the item itself
if (extracted == null)
{
- ProcessFile(newItem, string.Empty, newBasePath, omitFromScan, addDate, chdsAsFiles);
+ ProcessFile(newItem, string.Empty, newBasePath, omitFromScan, addDate, asFiles);
}
// Otherwise, add all of the found items
else
@@ -1723,17 +1718,17 @@ namespace SabreTools.Library.DatFiles
/// Path the represents the parent directory
/// Hash flag saying what hashes should not be calculated
/// True if dates should be archived for all files, false otherwise
- /// True if CHDs should be treated like regular files, false otherwise
+ /// TreatAsFiles representing CHD and Archive scanning
private void ProcessFile(
string item,
string parent,
string basePath,
Hash omitFromScan,
bool addDate,
- bool chdsAsFiles)
+ TreatAsFiles asFiles)
{
Globals.Logger.Verbose($"'{Path.GetFileName(item)}' treated like a file");
- BaseFile baseFile = FileExtensions.GetInfo(item, omitFromScan: omitFromScan, date: addDate, header: Header.HeaderSkipper, chdsAsFiles: chdsAsFiles);
+ BaseFile baseFile = FileExtensions.GetInfo(item, omitFromScan: omitFromScan, date: addDate, header: Header.HeaderSkipper, chdsAsFiles: asFiles.HasFlag(TreatAsFiles.CHDs));
ProcessFileHelper(item, DatItem.Create(baseFile), basePath, parent);
}
@@ -2032,7 +2027,7 @@ namespace SabreTools.Library.DatFiles
/// True if the DAT should be used as a filter instead of a template, false otherwise
/// Output format that files should be written to
/// True if the updated DAT should be output, false otherwise
- /// True if CHDs should be treated like regular files, false otherwise
+ /// TreatAsFiles representing CHD and Archive scanning
/// True if rebuilding was a success, false otherwise
public bool RebuildGeneric(
List inputs,
@@ -2043,7 +2038,7 @@ namespace SabreTools.Library.DatFiles
bool inverse,
OutputFormat outputFormat,
bool updateDat,
- bool chdsAsFiles)
+ TreatAsFiles asFiles)
{
#region Perform setup
@@ -2124,7 +2119,7 @@ namespace SabreTools.Library.DatFiles
if (File.Exists(input))
{
Globals.Logger.User($"Checking file: {input}");
- RebuildGenericHelper(input, outDir, quickScan, date, delete, inverse, outputFormat, updateDat, chdsAsFiles);
+ RebuildGenericHelper(input, outDir, quickScan, date, delete, inverse, outputFormat, updateDat, asFiles);
}
// If the input is a directory
@@ -2134,7 +2129,7 @@ namespace SabreTools.Library.DatFiles
foreach (string file in Directory.EnumerateFiles(input, "*", SearchOption.AllDirectories))
{
Globals.Logger.User($"Checking file: {file}");
- RebuildGenericHelper(file, outDir, quickScan, date, delete, inverse, outputFormat, updateDat, chdsAsFiles);
+ RebuildGenericHelper(file, outDir, quickScan, date, delete, inverse, outputFormat, updateDat, asFiles);
}
}
}
@@ -2167,7 +2162,7 @@ namespace SabreTools.Library.DatFiles
/// True if the DAT should be used as a filter instead of a template, false otherwise
/// Output format that files should be written to
/// True if the updated DAT should be output, false otherwise
- /// True if CHDs should be treated like regular files, false otherwise
+ /// TreatAsFiles representing CHD and Archive scanning
private void RebuildGenericHelper(
string file,
string outDir,
@@ -2177,7 +2172,7 @@ namespace SabreTools.Library.DatFiles
bool inverse,
OutputFormat outputFormat,
bool updateDat,
- bool chdsAsFiles)
+ TreatAsFiles asFiles)
{
// If we somehow have a null filename, return
if (file == null)
@@ -2190,7 +2185,7 @@ namespace SabreTools.Library.DatFiles
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
BaseFile externalFileInfo = FileExtensions.GetInfo(file, omitFromScan: (quickScan ? Hash.SecureHashes : Hash.DeepHashes),
- header: Header.HeaderSkipper, chdsAsFiles: chdsAsFiles);
+ header: Header.HeaderSkipper, chdsAsFiles: asFiles.HasFlag(TreatAsFiles.CHDs));
DatItem externalDatItem = null;
if (externalFileInfo.Type == FileType.CHD)
@@ -2223,7 +2218,7 @@ namespace SabreTools.Library.DatFiles
if (entries == null && File.Exists(file))
{
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
- BaseFile internalFileInfo = FileExtensions.GetInfo(file, omitFromScan: (quickScan ? Hash.SecureHashes : Hash.DeepHashes), chdsAsFiles: chdsAsFiles);
+ BaseFile internalFileInfo = FileExtensions.GetInfo(file, omitFromScan: (quickScan ? Hash.SecureHashes : Hash.DeepHashes), chdsAsFiles: asFiles.HasFlag(TreatAsFiles.CHDs));
DatItem internalDatItem = null;
if (internalFileInfo.Type == FileType.CHD)
@@ -2583,10 +2578,10 @@ namespace SabreTools.Library.DatFiles
/// Optional param for output directory
/// True if only hashes should be checked, false for full file information
/// True to enable external scanning of archives, false otherwise
- /// True if CHDs should be treated like regular files, false otherwise
+ /// TreatAsFiles representing CHD and Archive scanning
/// Filter object to be passed to the DatItem level
/// True if verification was a success, false otherwise
- public bool VerifyGeneric(List inputs, string outDir, bool hashOnly, bool quickScan, bool chdsAsFiles, Filter filter)
+ public bool VerifyGeneric(List inputs, string outDir, bool hashOnly, bool quickScan, TreatAsFiles asFiles, Filter filter)
{
// TODO: We want the cross section of what's the folder and what's in the DAT. Right now, it just has what's in the DAT that's not in the folder
bool success = true;
@@ -2596,8 +2591,8 @@ namespace SabreTools.Library.DatFiles
foreach (string input in inputs)
{
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
- PopulateFromDir(input, (quickScan ? Hash.SecureHashes : Hash.DeepHashes) /* omitFromScan */, true /* bare */, false /* archivesAsFiles */,
- SkipFileType.None, false /* addBlanks */, false /* addDate */, false /* copyFiles */, chdsAsFiles, filter);
+ PopulateFromDir(input, (quickScan ? Hash.SecureHashes : Hash.DeepHashes) /* omitFromScan */, true /* bare */, asFiles,
+ SkipFileType.None, false /* addBlanks */, false /* addDate */, false /* copyFiles */, filter);
}
// Setup the fixdat
diff --git a/SabreTools.Library/DatFiles/Enums.cs b/SabreTools.Library/DatFiles/Enums.cs
index 1f8f85a2..aa98ca9f 100644
--- a/SabreTools.Library/DatFiles/Enums.cs
+++ b/SabreTools.Library/DatFiles/Enums.cs
@@ -277,4 +277,15 @@ namespace SabreTools.Library.DatFiles
         Split,
         DeviceNonMerged
     }
+
+    /// <summary>
+    /// Determines what sort of files get externally hashed
+    /// </summary>
+    [Flags]
+    public enum TreatAsFiles
+    {
+        // Zero value required for a [Flags] enum (CA1008); also what
+        // callers mean when they initialize with 0x00 / default.
+        None = 0,
+        CHDs = 1 << 0,
+        Archives = 1 << 1,
+    }
 }
diff --git a/SabreTools/Features/BaseFeature.cs b/SabreTools/Features/BaseFeature.cs
index 76a0aad2..275b6d91 100644
--- a/SabreTools/Features/BaseFeature.cs
+++ b/SabreTools/Features/BaseFeature.cs
@@ -2476,6 +2476,20 @@ Some special strings that can be used:
return statDatFormat;
}
+ ///
+ /// Get TreatAsFiles from feature list
+ ///
+ protected TreatAsFiles GetTreatAsFiles(Dictionary features)
+ {
+ TreatAsFiles asFiles = 0x00;
+ if (GetBoolean(features, ArchivesAsFilesValue))
+ asFiles |= TreatAsFiles.Archives;
+ if (GetBoolean(features, ChdsAsFilesValue))
+ asFiles |= TreatAsFiles.CHDs;
+
+ return asFiles;
+ }
+
///
/// Get update fields from feature list
///
diff --git a/SabreTools/Features/DatFromDir.cs b/SabreTools/Features/DatFromDir.cs
index b99d7bb3..30bd97ec 100644
--- a/SabreTools/Features/DatFromDir.cs
+++ b/SabreTools/Features/DatFromDir.cs
@@ -56,8 +56,7 @@ namespace SabreTools.Features
// Get feature flags
bool addBlankFiles = GetBoolean(features, AddBlankFilesValue);
bool addFileDates = GetBoolean(features, AddDateValue);
- bool archivesAsFiles = GetBoolean(features, ArchivesAsFilesValue);
- bool chdsAsFiles = GetBoolean(features, ChdsAsFilesValue);
+ TreatAsFiles asFiles = GetTreatAsFiles(features);
bool copyFiles = GetBoolean(features, CopyFilesValue);
bool noAutomaticDate = GetBoolean(features, NoAutomaticDateValue);
var omitFromScan = GetOmitFromScan(features);
@@ -80,12 +79,11 @@ namespace SabreTools.Features
basePath,
omitFromScan,
noAutomaticDate,
- archivesAsFiles,
+ asFiles,
skipFileType,
addBlankFiles,
addFileDates,
copyFiles,
- chdsAsFiles,
Filter);
if (success)
diff --git a/SabreTools/Features/Sort.cs b/SabreTools/Features/Sort.cs
index 27b9a09e..fe094916 100644
--- a/SabreTools/Features/Sort.cs
+++ b/SabreTools/Features/Sort.cs
@@ -57,7 +57,7 @@ namespace SabreTools.Features
base.ProcessFeatures(features);
// Get feature flags
- bool chdsAsFiles = GetBoolean(features, ChdsAsFilesValue);
+ TreatAsFiles asFiles = GetTreatAsFiles(features);
bool date = GetBoolean(features, AddDateValue);
bool delete = GetBoolean(features, DeleteValue);
bool depot = GetBoolean(features, DepotValue);
@@ -95,7 +95,7 @@ namespace SabreTools.Features
if (depot)
datdata.RebuildDepot(Inputs, Path.Combine(OutputDir, datdata.Header.FileName), date, delete, inverse, outputFormat, updateDat);
else
- datdata.RebuildGeneric(Inputs, Path.Combine(OutputDir, datdata.Header.FileName), quickScan, date, delete, inverse, outputFormat, updateDat, chdsAsFiles);
+ datdata.RebuildGeneric(Inputs, Path.Combine(OutputDir, datdata.Header.FileName), quickScan, date, delete, inverse, outputFormat, updateDat, asFiles);
}
}
@@ -121,7 +121,7 @@ namespace SabreTools.Features
if (depot)
datdata.RebuildDepot(Inputs, OutputDir, date, delete, inverse, outputFormat, updateDat);
else
- datdata.RebuildGeneric(Inputs, OutputDir, quickScan, date, delete, inverse, outputFormat, updateDat, chdsAsFiles);
+ datdata.RebuildGeneric(Inputs, OutputDir, quickScan, date, delete, inverse, outputFormat, updateDat, asFiles);
}
}
}
diff --git a/SabreTools/Features/Verify.cs b/SabreTools/Features/Verify.cs
index 2cdd6863..f675c535 100644
--- a/SabreTools/Features/Verify.cs
+++ b/SabreTools/Features/Verify.cs
@@ -43,7 +43,7 @@ namespace SabreTools.Features
var datfilePaths = DirectoryExtensions.GetFilesOnly(datfiles);
// Get feature flags
- bool chdsAsFiles = GetBoolean(features, ChdsAsFilesValue);
+ TreatAsFiles asFiles = GetTreatAsFiles(features);
bool depot = GetBoolean(features, DepotValue);
bool hashOnly = GetBoolean(features, HashOnlyValue);
bool quickScan = GetBoolean(features, QuickValue);
@@ -65,7 +65,7 @@ namespace SabreTools.Features
if (depot)
datdata.VerifyDepot(Inputs, OutputDir);
else
- datdata.VerifyGeneric(Inputs, OutputDir, hashOnly, quickScan, chdsAsFiles, Filter);
+ datdata.VerifyGeneric(Inputs, OutputDir, hashOnly, quickScan, asFiles, Filter);
}
}
// Otherwise, process all DATs into the same output
@@ -91,7 +91,7 @@ namespace SabreTools.Features
if (depot)
datdata.VerifyDepot(Inputs, OutputDir);
else
- datdata.VerifyGeneric(Inputs, OutputDir, hashOnly, quickScan, chdsAsFiles, Filter);
+ datdata.VerifyGeneric(Inputs, OutputDir, hashOnly, quickScan, asFiles, Filter);
}
}
}