diff --git a/RombaSharp/Features/Archive.cs b/RombaSharp/Features/Archive.cs
index 010215c8..b0103811 100644
--- a/RombaSharp/Features/Archive.cs
+++ b/RombaSharp/Features/Archive.cs
@@ -62,11 +62,12 @@ have a current entry in the DAT index.";
}
// Then process all of the input directories into an internal DAT
+ DatTool dt = new DatTool();
DatFile df = DatFile.Create();
foreach (string dir in onlyDirs)
{
- df.PopulateFromDir(dir, asFiles: TreatAsFile.NonArchive);
- df.PopulateFromDir(dir, asFiles: TreatAsFile.All);
+ dt.PopulateFromDir(df, dir, asFiles: TreatAsFile.NonArchive);
+ dt.PopulateFromDir(df, dir, asFiles: TreatAsFile.All);
}
// Create an empty Dat for files that need to be rebuilt
diff --git a/RombaSharp/Features/Dir2Dat.cs b/RombaSharp/Features/Dir2Dat.cs
index 013ec8f9..4381cd54 100644
--- a/RombaSharp/Features/Dir2Dat.cs
+++ b/RombaSharp/Features/Dir2Dat.cs
@@ -50,10 +50,11 @@ namespace RombaSharp.Features
}
// Create and write the encapsulating datfile
+ DatTool dt = new DatTool();
DatFile datfile = DatFile.Create();
datfile.Header.Name = string.IsNullOrWhiteSpace(name) ? "untitled" : name;
datfile.Header.Description = description;
- datfile.PopulateFromDir(source, asFiles: TreatAsFile.NonArchive);
+ dt.PopulateFromDir(datfile, source, asFiles: TreatAsFile.NonArchive);
datfile.ApplyCleaning(new Cleaner() { ExcludeFields = Hash.DeepHashes.AsFields() });
datfile.Write(outdat);
}
diff --git a/RombaSharp/Features/RefreshDats.cs b/RombaSharp/Features/RefreshDats.cs
index d633cb4b..f53bc180 100644
--- a/RombaSharp/Features/RefreshDats.cs
+++ b/RombaSharp/Features/RefreshDats.cs
@@ -60,9 +60,10 @@ contents of any changed dats.";
Directory.CreateDirectory(_dats);
// First get a list of SHA-1's from the input DATs
+ DatTool dt = new DatTool();
DatFile datroot = DatFile.Create();
datroot.Header.Type = "SuperDAT";
- datroot.PopulateFromDir(_dats, asFiles: TreatAsFile.NonArchive);
+ dt.PopulateFromDir(datroot, _dats, asFiles: TreatAsFile.NonArchive);
datroot.Items.BucketBy(Field.DatItem_SHA1, DedupeType.None);
// Create a List of dat hashes in the database (SHA-1)
diff --git a/RombaSharp/Features/RescanDepots.cs b/RombaSharp/Features/RescanDepots.cs
index 67312bb9..41f01c6f 100644
--- a/RombaSharp/Features/RescanDepots.cs
+++ b/RombaSharp/Features/RescanDepots.cs
@@ -63,8 +63,9 @@ namespace RombaSharp.Features
}
// Now rescan the depot itself
+ DatTool dt = new DatTool();
DatFile depot = DatFile.Create();
- depot.PopulateFromDir(depotname, asFiles: TreatAsFile.NonArchive);
+ dt.PopulateFromDir(depot, depotname, asFiles: TreatAsFile.NonArchive);
depot.Items.BucketBy(Field.DatItem_SHA1, DedupeType.None);
// Set the base queries to use
diff --git a/SabreTools.DatFiles/DatFile.DFD.cs b/SabreTools.DatFiles/DatTool.DFD.cs
similarity index 82%
rename from SabreTools.DatFiles/DatFile.DFD.cs
rename to SabreTools.DatFiles/DatTool.DFD.cs
index 734945a0..4b3c8b36 100644
--- a/SabreTools.DatFiles/DatFile.DFD.cs
+++ b/SabreTools.DatFiles/DatTool.DFD.cs
@@ -9,22 +9,25 @@ using SabreTools.DatItems;
using SabreTools.FileTypes;
using SabreTools.IO;
-// This file represents all methods related to creating a DatFile
+// This file represents all methods related to populating a DatFile
// from a set of files and directories
namespace SabreTools.DatFiles
{
// TODO: See if any of the methods can be broken up a bit more neatly
- public abstract partial class DatFile
+ // TODO: See if any of this can be more stateful given the inputted DatFile
+ public partial class DatTool
{
/// <summary>
/// Create a new Dat from a directory
/// </summary>
+ /// <param name="datFile">Current DatFile object to add to</param>
/// <param name="basePath">Base folder to be used in creating the DAT</param>
/// <param name="asFiles">TreatAsFiles representing CHD and Archive scanning</param>
/// <param name="skipFileType">Type of files that should be skipped</param>
/// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param>
/// <param name="hashes">Hashes to include in the information</param>
public bool PopulateFromDir(
+ DatFile datFile,
string basePath,
TreatAsFile asFiles = 0x00,
SkipFileType skipFileType = SkipFileType.None,
@@ -56,14 +59,14 @@ namespace SabreTools.DatFiles
logger.User(totalSize, currentSize);
foreach (string item in files)
{
- CheckFileForHashes(item, basePath, asFiles, skipFileType, addBlanks, hashes);
+ CheckFileForHashes(datFile, item, basePath, asFiles, skipFileType, addBlanks, hashes);
currentSize += new FileInfo(item).Length;
logger.User(totalSize, currentSize, item);
}
// Now find all folders that are empty, if we are supposed to
if (addBlanks)
- ProcessDirectoryBlanks(basePath);
+ ProcessDirectoryBlanks(datFile, basePath);
}
else if (File.Exists(basePath))
{
@@ -73,7 +76,7 @@ namespace SabreTools.DatFiles
logger.User(totalSize, currentSize);
string parentPath = Path.GetDirectoryName(Path.GetDirectoryName(basePath));
- CheckFileForHashes(basePath, parentPath, asFiles, skipFileType, addBlanks, hashes);
+ CheckFileForHashes(datFile, basePath, parentPath, asFiles, skipFileType, addBlanks, hashes);
logger.User(totalSize, totalSize, basePath);
}
@@ -91,16 +94,24 @@ namespace SabreTools.DatFiles
/// <summary>
/// Check a given file for hashes, based on current settings
/// </summary>
+ /// <param name="datFile">Current DatFile object to add to</param>
/// <param name="item">Filename of the item to be checked</param>
/// <param name="basePath">Base folder to be used in creating the DAT</param>
/// <param name="asFiles">TreatAsFiles representing CHD and Archive scanning</param>
/// <param name="skipFileType">Type of files that should be skipped</param>
/// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param>
/// <param name="hashes">Hashes to include in the information</param>
- private void CheckFileForHashes(string item, string basePath, TreatAsFile asFiles, SkipFileType skipFileType, bool addBlanks, Hash hashes)
+ private void CheckFileForHashes(
+ DatFile datFile,
+ string item,
+ string basePath,
+ TreatAsFile asFiles,
+ SkipFileType skipFileType,
+ bool addBlanks,
+ Hash hashes)
{
// If we're in depot mode, process it separately
- if (CheckDepotFile(item))
+ if (CheckDepotFile(datFile, item))
return;
// Initialize possible archive variables
@@ -131,17 +142,17 @@ namespace SabreTools.DatFiles
// If we have internal items to process, do so
if (extracted != null)
- ProcessArchive(item, basePath, extracted);
+ ProcessArchive(datFile, item, basePath, extracted);
// Now find all folders that are empty, if we are supposed to
if (addBlanks)
- ProcessArchiveBlanks(item, basePath, archive);
+ ProcessArchiveBlanks(datFile, item, basePath, archive);
}
// Process as file if we're treating archives as files
else
{
- ProcessFile(item, basePath, hashes, asFiles);
+ ProcessFile(datFile, item, basePath, hashes, asFiles);
}
}
@@ -154,19 +165,20 @@ namespace SabreTools.DatFiles
// Process as file
else
- ProcessFile(item, basePath, hashes, asFiles);
+ ProcessFile(datFile, item, basePath, hashes, asFiles);
}
}
/// <summary>
/// Check an item as if it's supposed to be in a depot
/// </summary>
+ /// <param name="datFile">Current DatFile object to add to</param>
/// <param name="item">Filename of the item to be checked</param>
/// <returns>True if we checked a depot file, false otherwise</returns>
- private bool CheckDepotFile(string item)
+ private bool CheckDepotFile(DatFile datFile, string item)
{
// If we're not in Depot mode, return false
- if (Header.OutputDepot?.IsActive != true)
+ if (datFile.Header.OutputDepot?.IsActive != true)
return false;
// Check the file as if it were in a depot
@@ -178,7 +190,7 @@ namespace SabreTools.DatFiles
{
// Add the list if it doesn't exist already
Rom rom = new Rom(baseFile);
- Items.Add(rom.GetKey(Field.DatItem_CRC), rom);
+ datFile.Items.Add(rom.GetKey(Field.DatItem_CRC), rom);
logger.Verbose($"File added: {Path.GetFileNameWithoutExtension(item)}");
}
else
@@ -193,10 +205,11 @@ namespace SabreTools.DatFiles
/// <summary>
/// Process a single file as an archive
/// </summary>
+ /// <param name="datFile">Current DatFile object to add to</param>
/// <param name="item">File to be added</param>
/// <param name="basePath">Path the represents the parent directory</param>
/// <param name="extracted">List of BaseFiles representing the internal files</param>
- private void ProcessArchive(string item, string basePath, List<BaseFile> extracted)
+ private void ProcessArchive(DatFile datFile, string item, string basePath, List<BaseFile> extracted)
{
// Get the parent path for all items
string parent = (Path.GetDirectoryName(Path.GetFullPath(item)) + Path.DirectorySeparatorChar).Remove(0, basePath.Length) + Path.GetFileNameWithoutExtension(item);
@@ -205,17 +218,18 @@ namespace SabreTools.DatFiles
Parallel.ForEach(extracted, Globals.ParallelOptions, baseFile =>
{
DatItem datItem = DatItem.Create(baseFile);
- ProcessFileHelper(item, datItem, basePath, parent);
+ ProcessFileHelper(datFile, item, datItem, basePath, parent);
});
}
/// <summary>
/// Process blank folders in an archive
/// </summary>
+ /// <param name="datFile">Current DatFile object to add to</param>
/// <param name="item">File containing the blanks</param>
/// <param name="basePath">Path the represents the parent directory</param>
/// <param name="archive">BaseArchive to get blanks from</param>
- private void ProcessArchiveBlanks(string item, string basePath, BaseArchive archive)
+ private void ProcessArchiveBlanks(DatFile datFile, string item, string basePath, BaseArchive archive)
{
List<string> empties = new List<string>();
@@ -230,18 +244,19 @@ namespace SabreTools.DatFiles
Parallel.ForEach(empties, Globals.ParallelOptions, empty =>
{
Rom emptyRom = new Rom(Path.Combine(empty, "_"), item);
- ProcessFileHelper(item, emptyRom, basePath, parent);
+ ProcessFileHelper(datFile, item, emptyRom, basePath, parent);
});
}
/// <summary>
/// Process blank folders in a directory
/// </summary>
+ /// <param name="datFile">Current DatFile object to add to</param>
/// <param name="basePath">Path the represents the parent directory</param>
- private void ProcessDirectoryBlanks(string basePath)
+ private void ProcessDirectoryBlanks(DatFile datFile, string basePath)
{
// If we're in depot mode, we don't process blanks
- if (Header.OutputDepot?.IsActive == true)
+ if (datFile.Header.OutputDepot?.IsActive == true)
return;
List<string> empties = DirectoryExtensions.ListEmpty(basePath);
@@ -255,7 +270,7 @@ namespace SabreTools.DatFiles
string romname = string.Empty;
// If we have a SuperDAT, we want anything that's not the base path as the game, and the file as the rom
- if (Header.Type == "SuperDAT")
+ if (datFile.Header.Type == "SuperDAT")
{
gamename = fulldir.Remove(0, basePath.Length + 1);
romname = "_";
@@ -273,33 +288,35 @@ namespace SabreTools.DatFiles
romname = romname.Trim(Path.DirectorySeparatorChar);
logger.Verbose($"Adding blank empty folder: {gamename}");
- Items["null"].Add(new Rom(romname, gamename));
+ datFile.Items["null"].Add(new Rom(romname, gamename));
});
}
/// <summary>
/// Process a single file as a file
/// </summary>
+ /// <param name="datFile">Current DatFile object to add to</param>
/// <param name="item">File to be added</param>
/// <param name="basePath">Path the represents the parent directory</param>
/// <param name="hashes">Hashes to include in the information</param>
/// <param name="asFiles">TreatAsFiles representing CHD and Archive scanning</param>
- private void ProcessFile(string item, string basePath, Hash hashes, TreatAsFile asFiles)
+ private void ProcessFile(DatFile datFile, string item, string basePath, Hash hashes, TreatAsFile asFiles)
{
logger.Verbose($"'{Path.GetFileName(item)}' treated like a file");
- BaseFile baseFile = BaseFile.GetInfo(item, header: Header.HeaderSkipper, hashes: hashes, asFiles: asFiles);
+ BaseFile baseFile = BaseFile.GetInfo(item, header: datFile.Header.HeaderSkipper, hashes: hashes, asFiles: asFiles);
DatItem datItem = DatItem.Create(baseFile);
- ProcessFileHelper(item, datItem, basePath, string.Empty);
+ ProcessFileHelper(datFile, item, datItem, basePath, string.Empty);
}
/// <summary>
/// Process a single file as a file (with found Rom data)
/// </summary>
+ /// <param name="datFile">Current DatFile object to add to</param>
/// <param name="item">File to be added</param>
/// <param name="datItem">Rom data to be used to write to file</param>
/// <param name="basepath">Path the represents the parent directory</param>
/// <param name="parent">Parent game to be used</param>
- private void ProcessFileHelper(string item, DatItem datItem, string basepath, string parent)
+ private void ProcessFileHelper(DatFile datFile, string item, DatItem datItem, string basepath, string parent)
{
// If we didn't get an accepted parsed type somehow, cancel out
List<ItemType> parsed = new List<ItemType> { ItemType.Disk, ItemType.Media, ItemType.Rom };
@@ -316,11 +333,11 @@ namespace SabreTools.DatFiles
item = Path.GetFullPath(item);
// Process the item to sanitize names based on input
- SetDatItemInfo(datItem, item, parent, basepath);
+ SetDatItemInfo(datFile, datItem, item, parent, basepath);
// Add the file information to the DAT
string key = datItem.GetKey(Field.DatItem_CRC);
- Items.Add(key, datItem);
+ datFile.Items.Add(key, datItem);
logger.Verbose($"File added: {datItem.GetName() ?? string.Empty}");
}
@@ -334,11 +351,12 @@ namespace SabreTools.DatFiles
/// <summary>
/// Set proper Game and Rom names from user inputs
/// </summary>
+ /// <param name="datFile">Current DatFile object to add to</param>
/// <param name="datItem">DatItem representing the input file</param>
/// <param name="item">Item name to use</param>
/// <param name="parent">Parent name to use</param>
/// <param name="basepath">Base path to use</param>
- private void SetDatItemInfo(DatItem datItem, string item, string parent, string basepath)
+ private void SetDatItemInfo(DatFile datFile, DatItem datItem, string item, string parent, string basepath)
{
// Get the data to be added as game and item names
string machineName, itemName;
@@ -347,7 +365,7 @@ namespace SabreTools.DatFiles
if (string.IsNullOrWhiteSpace(parent))
{
// If we have a SuperDAT, we want anything that's not the base path as the game, and the file as the rom
- if (Header.Type == "SuperDAT")
+ if (datFile.Header.Type == "SuperDAT")
{
machineName = Path.GetDirectoryName(item.Remove(0, basepath.Length));
itemName = Path.GetFileName(item);
@@ -365,7 +383,7 @@ namespace SabreTools.DatFiles
else
{
// If we have a SuperDAT, we want the archive name as the game, and the file as everything else (?)
- if (Header.Type == "SuperDAT")
+ if (datFile.Header.Type == "SuperDAT")
{
machineName = parent;
itemName = datItem.GetName();
diff --git a/SabreTools.DatFiles/DatTool.cs b/SabreTools.DatFiles/DatTool.cs
new file mode 100644
index 00000000..73a884f1
--- /dev/null
+++ b/SabreTools.DatFiles/DatTool.cs
@@ -0,0 +1,22 @@
+using SabreTools.Logging;
+
+// TODO: What sort of internal state should this have? Would a single DatFile be appropriate?
+// TODO: How much of the stuff currently in DatFile should be moved here?
+// TODO: Can things like CreateAndParse be moved here?
+namespace SabreTools.DatFiles
+{
+ /// <summary>
+ /// Represents a format-agnostic DAT
+ /// </summary>
+ public partial class DatTool
+ {
+ #region Logging
+
+ /// <summary>
+ /// Logging object
+ /// </summary>
+ protected Logger logger;
+
+ #endregion
+ }
+}
diff --git a/SabreTools/Features/Batch.cs b/SabreTools/Features/Batch.cs
index 2a65798b..ca9b4acc 100644
--- a/SabreTools/Features/Batch.cs
+++ b/SabreTools/Features/Batch.cs
@@ -67,6 +67,7 @@ Reset the internal state: reset();";
// Each batch file has its own state
int index = 0;
+ DatTool dt = new DatTool();
DatFile datFile = DatFile.Create();
string outputDirectory = null;
@@ -153,7 +154,7 @@ Reset the internal state: reset();";
// Assume there could be multiple
foreach (string input in command.Arguments)
{
- datFile.PopulateFromDir(input);
+ dt.PopulateFromDir(datFile, input);
}
// TODO: We might not want to remove higher order hashes in the future
diff --git a/SabreTools/Features/DatFromDir.cs b/SabreTools/Features/DatFromDir.cs
index 58cf903f..4856aacb 100644
--- a/SabreTools/Features/DatFromDir.cs
+++ b/SabreTools/Features/DatFromDir.cs
@@ -73,6 +73,7 @@ namespace SabreTools.Features
Cleaner.ExcludeFields.Add(Field.DatItem_Date);
// Create a new DATFromDir object and process the inputs
+ DatTool dt = new DatTool();
DatFile basedat = DatFile.Create(Header);
basedat.Header.Date = DateTime.Now.ToString("yyyy-MM-dd");
@@ -89,7 +90,8 @@ namespace SabreTools.Features
datdata.FillHeaderFromPath(basePath, noAutomaticDate);
// Now populate from the path
- bool success = datdata.PopulateFromDir(
+ bool success = dt.PopulateFromDir(
+ datdata,
basePath,
asFiles,
skipFileType,
diff --git a/SabreTools/Features/Verify.cs b/SabreTools/Features/Verify.cs
index 379eb7fe..dd0ed8e1 100644
--- a/SabreTools/Features/Verify.cs
+++ b/SabreTools/Features/Verify.cs
@@ -51,6 +51,9 @@ namespace SabreTools.Features
bool quickScan = GetBoolean(features, QuickValue);
var splitType = GetSplitType(features);
+ // Get the DatTool for required operations
+ DatTool dt = new DatTool();
+
// If we are in individual mode, process each DAT on their own
if (GetBoolean(features, IndividualValue))
{
@@ -84,7 +87,7 @@ namespace SabreTools.Features
logger.User("Processing files:\n");
foreach (string input in Inputs)
{
- datdata.PopulateFromDir(input, asFiles: asFiles, hashes: quickScan ? Hash.CRC : Hash.Standard);
+ dt.PopulateFromDir(datdata, input, asFiles: asFiles, hashes: quickScan ? Hash.CRC : Hash.Standard);
}
datdata.VerifyGeneric(hashOnly);
@@ -133,7 +136,7 @@ namespace SabreTools.Features
logger.User("Processing files:\n");
foreach (string input in Inputs)
{
- datdata.PopulateFromDir(input, asFiles: asFiles, hashes: quickScan ? Hash.CRC : Hash.Standard);
+ dt.PopulateFromDir(datdata, input, asFiles: asFiles, hashes: quickScan ? Hash.CRC : Hash.Standard);
}
datdata.VerifyGeneric(hashOnly);