diff --git a/RombaSharp/Features/Archive.cs b/RombaSharp/Features/Archive.cs
index 25cae136..c81dc061 100644
--- a/RombaSharp/Features/Archive.cs
+++ b/RombaSharp/Features/Archive.cs
@@ -62,12 +62,11 @@ have a current entry in the DAT index.";
             }

             // Then process all of the input directories into an internal DAT
-            DatTool dt = new DatTool();
             DatFile df = DatFile.Create();
             foreach (string dir in onlyDirs)
             {
-                dt.PopulateFromDir(df, dir, asFiles: TreatAsFile.NonArchive);
-                dt.PopulateFromDir(df, dir, asFiles: TreatAsFile.All);
+                DatTool.PopulateFromDir(df, dir, asFiles: TreatAsFile.NonArchive);
+                DatTool.PopulateFromDir(df, dir, asFiles: TreatAsFile.All);
             }

             // Create an empty Dat for files that need to be rebuilt
@@ -190,7 +189,7 @@ have a current entry in the DAT index.";
             }

             // Create the sorting object to use and rebuild the needed files
-            dt.RebuildGeneric(
+            DatTool.RebuildGeneric(
                 need,
                 onlyDirs,
                 outDir: _depots.Keys.ToList()[0],
diff --git a/RombaSharp/Features/BaseFeature.cs b/RombaSharp/Features/BaseFeature.cs
index 0cff733f..5c3e6c58 100644
--- a/RombaSharp/Features/BaseFeature.cs
+++ b/RombaSharp/Features/BaseFeature.cs
@@ -705,8 +705,7 @@ CREATE TABLE IF NOT EXISTS dat (

             // Parse the Dat if possible
             logger.User($"Adding from '{dat.Name}'");
-            DatTool dt = new DatTool();
-            DatFile tempdat = dt.CreateAndParse(fullpath);
+            DatFile tempdat = DatTool.CreateAndParse(fullpath);

             // If the Dat wasn't empty, add the information
             SqliteCommand slc = null;
diff --git a/RombaSharp/Features/Build.cs b/RombaSharp/Features/Build.cs
index 8b5c0eea..de040bc4 100644
--- a/RombaSharp/Features/Build.cs
+++ b/RombaSharp/Features/Build.cs
@@ -46,14 +46,11 @@ structure according to the original DAT master directory tree structure.";
             if (string.IsNullOrWhiteSpace(outdat))
                 outdat = "out";

-            // Get the DatTool for operations
-            DatTool dt = new DatTool();
-
             // Now that we have the dictionary, we can loop through and output to a new folder for each
             foreach (string key in foundDats.Keys)
             {
                 // Get the DAT file associated with the key
-                DatFile datFile = dt.CreateAndParse(Path.Combine(_dats, foundDats[key]));
+                DatFile datFile = DatTool.CreateAndParse(Path.Combine(_dats, foundDats[key]));

                 // Set the depot values
                 datFile.Header.InputDepot = new DepotInformation(true, 4);
@@ -67,11 +64,11 @@ structure according to the original DAT master directory tree structure.";
                 List<string> onlineDepots = _depots.Where(d => d.Value.Item2).Select(d => d.Key).ToList();

                 // Now scan all of those depots and rebuild
-                dt.RebuildDepot(
+                DatTool.RebuildDepot(
                     datFile,
                     onlineDepots,
                     outDir: outputFolder,
-                    outputFormat: (copy ? OutputFormat.TorrentGzipRomba : OutputFormat.TorrentZip));
+                    outputFormat: copy ? OutputFormat.TorrentGzipRomba : OutputFormat.TorrentZip);
             }
         }
     }
diff --git a/RombaSharp/Features/Diffdat.cs b/RombaSharp/Features/Diffdat.cs
index 00a36594..88c119a9 100644
--- a/RombaSharp/Features/Diffdat.cs
+++ b/RombaSharp/Features/Diffdat.cs
@@ -55,19 +55,16 @@ in -old DAT file.
 Ignores those entries in -old that are not in -new.";
                 return;
             }
-            // Get the DatTool for opeations
-            DatTool dt = new DatTool();
-
             // Create the encapsulating datfile
             DatFile datfile = DatFile.Create();
             datfile.Header.Name = name;
             datfile.Header.Description = description;
-            dt.ParseInto(datfile, olddat);
+            DatTool.ParseInto(datfile, olddat);

             // Diff against the new datfile
-            DatFile intDat = dt.CreateAndParse(newdat);
+            DatFile intDat = DatTool.CreateAndParse(newdat);
             datfile.DiffAgainst(intDat, false);
-            dt.Write(intDat, outdat);
+            DatTool.Write(intDat, outdat);
         }
     }
 }
diff --git a/RombaSharp/Features/Dir2Dat.cs b/RombaSharp/Features/Dir2Dat.cs
index 76f21b3c..ee9e0d03 100644
--- a/RombaSharp/Features/Dir2Dat.cs
+++ b/RombaSharp/Features/Dir2Dat.cs
@@ -50,13 +50,12 @@ namespace RombaSharp.Features
             }

             // Create and write the encapsulating datfile
-            DatTool dt = new DatTool();
             DatFile datfile = DatFile.Create();
             datfile.Header.Name = string.IsNullOrWhiteSpace(name) ? "untitled" : name;
             datfile.Header.Description = description;
-            dt.PopulateFromDir(datfile, source, asFiles: TreatAsFile.NonArchive);
+            DatTool.PopulateFromDir(datfile, source, asFiles: TreatAsFile.NonArchive);
             datfile.ApplyCleaning(new Cleaner() { ExcludeFields = Hash.DeepHashes.AsFields() });
-            dt.Write(datfile, outdat);
+            DatTool.Write(datfile, outdat);
         }
     }
 }
diff --git a/RombaSharp/Features/EDiffdat.cs b/RombaSharp/Features/EDiffdat.cs
index c4db2976..4e4e5e1e 100644
--- a/RombaSharp/Features/EDiffdat.cs
+++ b/RombaSharp/Features/EDiffdat.cs
@@ -50,16 +50,13 @@ namespace RombaSharp.Features
                 return;
             }

-            // Get the DatTool for parsing
-            DatTool dt = new DatTool();
-
             // Create the encapsulating datfile
-            DatFile datfile = dt.CreateAndParse(olddat);
+            DatFile datfile = DatTool.CreateAndParse(olddat);

             // Diff against the new datfile
-            DatFile intDat = dt.CreateAndParse(newdat);
+            DatFile intDat = DatTool.CreateAndParse(newdat);
             datfile.DiffAgainst(intDat, false);
-            dt.Write(intDat, outdat);
+            DatTool.Write(intDat, outdat);
         }
     }
 }
diff --git a/RombaSharp/Features/Miss.cs b/RombaSharp/Features/Miss.cs
index 31153c60..3591dc9d 100644
--- a/RombaSharp/Features/Miss.cs
+++ b/RombaSharp/Features/Miss.cs
@@ -33,14 +33,11 @@ namespace RombaSharp.Features
             // Create the new output directory if it doesn't exist
             DirectoryExtensions.Ensure(Path.Combine(Globals.ExeDir, "out"), create: true);

-            // Get the DatTool for parsing
-            DatTool dt = new DatTool();
-
             // Now that we have the dictionary, we can loop through and output to a new folder for each
             foreach (string key in foundDats.Keys)
             {
                 // Get the DAT file associated with the key
-                DatFile datFile = dt.CreateAndParse(Path.Combine(_dats, foundDats[key]));
+                DatFile datFile = DatTool.CreateAndParse(Path.Combine(_dats, foundDats[key]));

                 // Now loop through and see if all of the hash combinations exist in the database
                 /* ended here */
diff --git a/RombaSharp/Features/RefreshDats.cs b/RombaSharp/Features/RefreshDats.cs
index f53bc180..48cd454d 100644
--- a/RombaSharp/Features/RefreshDats.cs
+++ b/RombaSharp/Features/RefreshDats.cs
@@ -60,10 +60,9 @@ contents of any changed dats.";
             Directory.CreateDirectory(_dats);

             // First get a list of SHA-1's from the input DATs
-            DatTool dt = new DatTool();
             DatFile datroot = DatFile.Create();
             datroot.Header.Type = "SuperDAT";
-            dt.PopulateFromDir(datroot, _dats, asFiles: TreatAsFile.NonArchive);
+            DatTool.PopulateFromDir(datroot, _dats, asFiles: TreatAsFile.NonArchive);
             datroot.Items.BucketBy(Field.DatItem_SHA1, DedupeType.None);

             // Create a List of dat hashes in the database (SHA-1)
diff --git a/RombaSharp/Features/RescanDepots.cs b/RombaSharp/Features/RescanDepots.cs
index 41f01c6f..9064ff56 100644
--- a/RombaSharp/Features/RescanDepots.cs
+++ b/RombaSharp/Features/RescanDepots.cs
@@ -63,9 +63,8 @@ namespace RombaSharp.Features
             }

             // Now rescan the depot itself
-            DatTool dt = new DatTool();
             DatFile depot = DatFile.Create();
-            dt.PopulateFromDir(depot, depotname, asFiles: TreatAsFile.NonArchive);
+            DatTool.PopulateFromDir(depot, depotname, asFiles: TreatAsFile.NonArchive);
             depot.Items.BucketBy(Field.DatItem_SHA1, DedupeType.None);

             // Set the base queries to use
diff --git a/SabreTools.DatFiles/DatFile.Updating.cs b/SabreTools.DatFiles/DatFile.Updating.cs
index b8863c2c..491ad10c 100644
--- a/SabreTools.DatFiles/DatFile.Updating.cs
+++ b/SabreTools.DatFiles/DatFile.Updating.cs
@@ -429,7 +429,6 @@ namespace SabreTools.DatFiles
        /// List of DatHeader objects representing headers
        public List<DatHeader> PopulateUserData(List<ParentablePath> inputs)
        {
-            DatTool dt = new DatTool();
            DatFile[] datFiles = new DatFile[inputs.Count];

            InternalStopwatch watch = new InternalStopwatch("Processing individual DATs");
@@ -439,7 +438,7 @@
                var input = inputs[i];
                logger.User($"Adding DAT: {input.CurrentPath}");
                datFiles[i] = Create(Header.CloneFiltering());
-                dt.ParseInto(datFiles[i], input, i, keep: true);
+                DatTool.ParseInto(datFiles[i], input, i, keep: true);
            });

            watch.Stop();
diff --git a/SabreTools.DatFiles/DatTool.DFD.cs b/SabreTools.DatFiles/DatTool.DFD.cs
index 01c08e4a..d114f22a 100644
--- a/SabreTools.DatFiles/DatTool.DFD.cs
+++ b/SabreTools.DatFiles/DatTool.DFD.cs
@@ -15,7 +15,6 @@ namespace SabreTools.DatFiles
 {
     // TODO: See if any of the methods can be broken up a bit more neatly
     // TODO: See if any of this can be more stateful given the inputted DatFile
-    // TODO: Re-evaluate if these should be made static instead of instanced
     public partial class DatTool
     {
         ///
@@ -27,7 +26,7 @@
         /// Type of files that should be skipped
         /// True if blank items should be created for empty folders, false otherwise
         /// Hashes to include in the information
-        public bool PopulateFromDir(
+        public static bool PopulateFromDir(
             DatFile datFile,
             string basePath,
             TreatAsFile asFiles = 0x00,
@@ -102,7 +101,7 @@
         /// Type of files that should be skipped
         /// True if blank items should be created for empty folders, false otherwise
         /// Hashes to include in the information
-        private void CheckFileForHashes(
+        private static void CheckFileForHashes(
             DatFile datFile,
             string item,
             string basePath,
@@ -176,7 +175,7 @@
         /// Current DatFile object to add to
         /// Filename of the item to be checked
         /// True if we checked a depot file, false otherwise
-        private bool CheckDepotFile(DatFile datFile, string item)
+        private static bool CheckDepotFile(DatFile datFile, string item)
         {
             // If we're not in Depot mode, return false
             if (datFile.Header.OutputDepot?.IsActive != true)
@@ -210,7 +209,7 @@
         /// File to be added
         /// Path the represents the parent directory
         /// List of BaseFiles representing the internal files
-        private void ProcessArchive(DatFile datFile, string item, string basePath, List<BaseFile> extracted)
+        private static void ProcessArchive(DatFile datFile, string item, string basePath, List<BaseFile> extracted)
         {
             // Get the parent path for all items
             string parent = (Path.GetDirectoryName(Path.GetFullPath(item)) + Path.DirectorySeparatorChar).Remove(0, basePath.Length) + Path.GetFileNameWithoutExtension(item);
@@ -230,7 +229,7 @@
         /// File containing the blanks
         /// Path the represents the parent directory
         /// BaseArchive to get blanks from
-        private void ProcessArchiveBlanks(DatFile datFile, string item, string basePath, BaseArchive archive)
+        private static void ProcessArchiveBlanks(DatFile datFile, string item, string basePath, BaseArchive archive)
         {
             List<string> empties = new List<string>();

@@ -254,7 +253,7 @@
         ///
         /// Current DatFile object to add to
         /// Path the represents the parent directory
-        private void ProcessDirectoryBlanks(DatFile datFile, string basePath)
+        private static void ProcessDirectoryBlanks(DatFile datFile, string basePath)
        {
            // If we're in depot mode, we don't process blanks
            if (datFile.Header.OutputDepot?.IsActive == true)
@@ -301,7 +300,7 @@
         /// Path the represents the parent directory
         /// Hashes to include in the information
         /// TreatAsFiles representing CHD and Archive scanning
-        private void ProcessFile(DatFile datFile, string item, string basePath, Hash hashes, TreatAsFile asFiles)
+        private static void ProcessFile(DatFile datFile, string item, string basePath, Hash hashes, TreatAsFile asFiles)
         {
             logger.Verbose($"'{Path.GetFileName(item)}' treated like a file");
             BaseFile baseFile = BaseFile.GetInfo(item, header: datFile.Header.HeaderSkipper, hashes: hashes, asFiles: asFiles);
@@ -317,7 +316,7 @@
         /// Rom data to be used to write to file
         /// Path the represents the parent directory
         /// Parent game to be used
-        private void ProcessFileHelper(DatFile datFile, string item, DatItem datItem, string basepath, string parent)
+        private static void ProcessFileHelper(DatFile datFile, string item, DatItem datItem, string basepath, string parent)
         {
             // If we didn't get an accepted parsed type somehow, cancel out
             List<ItemType> parsed = new List<ItemType> { ItemType.Disk, ItemType.Media, ItemType.Rom };
@@ -357,7 +356,7 @@
         /// Item name to use
         /// Parent name to use
         /// Base path to use
-        private void SetDatItemInfo(DatFile datFile, DatItem datItem, string item, string parent, string basepath)
+        private static void SetDatItemInfo(DatFile datFile, DatItem datItem, string item, string parent, string basepath)
         {
             // Get the data to be added as game and item names
             string machineName, itemName;
diff --git a/SabreTools.DatFiles/DatTool.Parsing.cs b/SabreTools.DatFiles/DatTool.Parsing.cs
index 5eb9d509..dab148f5 100644
--- a/SabreTools.DatFiles/DatTool.Parsing.cs
+++ b/SabreTools.DatFiles/DatTool.Parsing.cs
@@ -8,7 +8,6 @@ using SabreTools.IO;
 // This file represents all methods related to parsing from a file
 namespace SabreTools.DatFiles
 {
-    // TODO: Re-evaluate if these should be made static instead of instanced
     public partial class DatTool
     {
         ///
@@ -16,7 +15,7 @@
         /// Name of the file to be parsed
         /// True if the error that is thrown should be thrown back to the caller, false otherwise
-        public DatFile CreateAndParse(string filename, bool throwOnError = false)
+        public static DatFile CreateAndParse(string filename, bool throwOnError = false)
         {
             DatFile datFile = DatFile.Create();
             ParseInto(datFile, new ParentablePath(filename), throwOnError: throwOnError);
             return datFile;
@@ -33,7 +32,7 @@
         /// True if original extension should be kept, false otherwise (default)
         /// True if quotes are assumed in supported types (default), false otherwise
         /// True if the error that is thrown should be thrown back to the caller, false otherwise
-        public void ParseInto(
+        public static void ParseInto(
             DatFile datFile,
             string filename,
             int indexId = 0,
@@ -56,7 +55,7 @@
         /// True if original extension should be kept, false otherwise (default)
         /// True if quotes are assumed in supported types (default), false otherwise
         /// True if the error that is thrown should be thrown back to the caller, false otherwise
-        public void ParseInto(
+        public static void ParseInto(
             DatFile datFile,
             ParentablePath input,
             int indexId = 0,
@@ -101,7 +100,7 @@
         ///
         /// Name of the file to be parsed
         /// The DatFormat corresponding to the DAT
-        private DatFormat GetDatFormat(string filename)
+        private static DatFormat GetDatFormat(string filename)
         {
             // Limit the output formats based on extension
             if (!PathExtensions.HasValidDatExtension(filename))
diff --git a/SabreTools.DatFiles/DatTool.Rebuilding.cs b/SabreTools.DatFiles/DatTool.Rebuilding.cs
index e54a801b..cef87bce 100644
--- a/SabreTools.DatFiles/DatTool.Rebuilding.cs
+++ b/SabreTools.DatFiles/DatTool.Rebuilding.cs
@@ -13,7 +13,6 @@ using SabreTools.Skippers;
 // This file represents all methods related to rebuilding from a DatFile
 namespace SabreTools.DatFiles
 {
-    // TODO: Re-evaluate if these should be made static instead of instanced
     public partial class DatTool
     {
         ///
@@ -27,7 +26,7 @@
         /// True if the DAT should be used as a filter instead of a template, false otherwise
         /// Output format that files should be written to
         /// True if rebuilding was a success, false otherwise
-        public bool RebuildDepot(
+        public static bool RebuildDepot(
             DatFile datFile,
             List<string> inputs,
             string outDir,
@@ -163,7 +162,7 @@
         /// Output format that files should be written to
         /// TreatAsFiles representing special format scanning
         /// True if rebuilding was a success, false otherwise
-        public bool RebuildGeneric(
+        public static bool RebuildGeneric(
             DatFile datFile,
             List<string> inputs,
             string outDir,
@@ -255,7 +254,7 @@
         /// Output format that files should be written to
         /// TreatAsFiles representing special format scanning
         /// True if the file was used to rebuild, false otherwise
-        private bool RebuildGenericHelper(
+        private static bool RebuildGenericHelper(
             DatFile datFile,
             string file,
             string outDir,
@@ -331,7 +330,7 @@
         /// Output format that files should be written to
         /// True if the input file is an archive, false if the file is TGZ/TXZ, null otherwise
         /// True if the file was able to be rebuilt, false otherwise
-        private bool RebuildIndividualFile(
+        private static bool RebuildIndividualFile(
             DatFile datFile,
             DatItem datItem,
             string file,
@@ -470,7 +469,7 @@
         /// True if the DAT should be used as a filter instead of a template, false otherwise
         /// Output list of duplicate items to rebuild to
         /// True if the item should be rebuilt, false otherwise
-        private bool ShouldRebuild(DatFile datFile, DatItem datItem, Stream stream, bool inverse, out List<DatItem> dupes)
+        private static bool ShouldRebuild(DatFile datFile, DatItem datItem, Stream stream, bool inverse, out List<DatItem> dupes)
         {
             // Find if the file has duplicates in the DAT
             dupes = datFile.Items.GetDuplicates(datItem);
@@ -526,7 +525,7 @@
         /// Output format that files should be written to
         /// True if the input file is an archive, false if the file is TGZ, null otherwise
         /// True if rebuilt properly, false otherwise
-        private bool RebuildTorrentGzip(DatFile datFile, DatItem datItem, string file, string outDir, OutputFormat outputFormat, bool? isZip)
+        private static bool RebuildTorrentGzip(DatFile datFile, DatItem datItem, string file, string outDir, OutputFormat outputFormat, bool? isZip)
         {
             // If we have a very specific TGZ->TGZ case, just copy it accordingly
             GZipArchive tgz = new GZipArchive(file);
@@ -570,7 +569,7 @@
         /// Output format that files should be written to
         /// True if the input file is an archive, false if the file is TXZ, null otherwise
         /// True if rebuilt properly, false otherwise
-        private bool RebuildTorrentXz(DatFile datFile, DatItem datItem, string file, string outDir, OutputFormat outputFormat, bool? isZip)
+        private static bool RebuildTorrentXz(DatFile datFile, DatItem datItem, string file, string outDir, OutputFormat outputFormat, bool? isZip)
         {
             // If we have a very specific TGZ->TGZ case, just copy it accordingly
             XZArchive txz = new XZArchive(file);
@@ -612,7 +611,7 @@
         /// Non-null if the input file is an archive
         /// Output stream representing the opened file
         /// True if the stream opening succeeded, false otherwise
-        private bool GetFileStream(DatItem datItem, string file, bool? isZip, out Stream stream)
+        private static bool GetFileStream(DatItem datItem, string file, bool? isZip, out Stream stream)
         {
             // Get a generic stream for the file
             stream = null;
@@ -644,7 +643,7 @@
         ///
         /// Get the default OutputFormat associated with each PackingFlag
         ///
-        private OutputFormat GetOutputFormat(PackingFlag packing)
+        private static OutputFormat GetOutputFormat(PackingFlag packing)
         {
 #if NET_FRAMEWORK
             switch (packing)
@@ -680,7 +679,7 @@
         /// True if the date from the DAT should be used if available, false otherwise
         /// Output format that files should be written to
         /// Folder configured with proper flags
-        private Folder GetPreconfiguredFolder(DatFile datFile, bool date, OutputFormat outputFormat)
+        private static Folder GetPreconfiguredFolder(DatFile datFile, bool date, OutputFormat outputFormat)
         {
             Folder outputArchive = Folder.Create(outputFormat);
             if (outputArchive is BaseArchive baseArchive && date)
@@ -700,7 +699,7 @@
         ///
         /// OutputFormat to get value from
         /// String value corresponding to the OutputFormat
-        private string FromOutputFormat(OutputFormat itemType)
+        private static string FromOutputFormat(OutputFormat itemType)
         {
 #if NET_FRAMEWORK
             switch (itemType)
diff --git a/SabreTools.DatFiles/DatTool.Splitting.cs b/SabreTools.DatFiles/DatTool.Splitting.cs
index d4b2d75d..138a7c24 100644
--- a/SabreTools.DatFiles/DatTool.Splitting.cs
+++ b/SabreTools.DatFiles/DatTool.Splitting.cs
@@ -13,7 +13,6 @@ using NaturalSort;
 // This file represents all methods related to splitting a DatFile into multiple
 namespace SabreTools.DatFiles
 {
-    // TODO: Re-evaluate if these should be made static instead of instanced
     // TODO: Implement Level split
     public partial class DatTool
     {
@@ -24,7 +23,7 @@
         /// List of extensions to split on (first DAT)
         /// List of extensions to split on (second DAT)
         /// Extension Set A and Extension Set B DatFiles
-        public (DatFile extADat, DatFile extBDat) SplitByExtension(DatFile datFile, List<string> extA, List<string> extB)
+        public static (DatFile extADat, DatFile extBDat) SplitByExtension(DatFile datFile, List<string> extA, List<string> extB)
         {
             // If roms is empty, return false
             if (datFile.Items.TotalCount == 0)
@@ -79,7 +78,7 @@ namespace SabreTools.DatFiles
         ///
         /// Current DatFile object to split
         /// Dictionary of Field to DatFile mappings
-        public Dictionary<Field, DatFile> SplitByHash(DatFile datFile)
+        public static Dictionary<Field, DatFile> SplitByHash(DatFile datFile)
         {
             // Create each of the respective output DATs
             logger.User("Creating and populating new DATs");
@@ -219,7 +218,7 @@
         /// True if short names should be used, false otherwise
         /// True if original filenames should be used as the base for output filename, false otherwise
         /// True if split succeeded, false otherwise
-        public bool SplitByLevel(DatFile datFile, string outDir, bool shortname, bool basedat)
+        public static bool SplitByLevel(DatFile datFile, string outDir, bool shortname, bool basedat)
         {
             // First, bucket by games so that we can do the right thing
             datFile.Items.BucketBy(Field.Machine_Name, DedupeType.None, lower: false, norename: true);
@@ -264,7 +263,7 @@
         /// First string to compare
         /// Second string to compare
         /// -1 for a coming before b, 0 for a == b, 1 for a coming after b
-        private int SplitByLevelSort(string a, string b)
+        private static int SplitByLevelSort(string a, string b)
         {
             NaturalComparer nc = new NaturalComparer();
             int adeep = a.Count(c => c == '/' || c == '\\');
@@ -284,7 +283,7 @@
         /// Directory to write out to
         /// True if short naming scheme should be used, false otherwise
         /// True if original filenames should be used as the base for output filename, false otherwise
-        private void SplitByLevelHelper(DatFile datFile, DatFile newDatFile, string outDir, bool shortname, bool restore)
+        private static void SplitByLevelHelper(DatFile datFile, DatFile newDatFile, string outDir, bool shortname, bool restore)
         {
             // Get the name from the DAT to use separately
             string name = newDatFile.Header.Name;
@@ -313,7 +312,7 @@
         /// Current DatFile object to split
         /// Long value representing the split point
         /// Less Than and Greater Than DatFiles
-        public (DatFile lessThan, DatFile greaterThan) SplitBySize(DatFile datFile, long radix)
+        public static (DatFile lessThan, DatFile greaterThan) SplitBySize(DatFile datFile, long radix)
         {
             // Create each of the respective output DATs
             logger.User("Creating and populating new DATs");
@@ -361,7 +360,7 @@
         ///
         /// Current DatFile object to split
         /// Dictionary of ItemType to DatFile mappings
-        public Dictionary<ItemType, DatFile> SplitByType(DatFile datFile)
+        public static Dictionary<ItemType, DatFile> SplitByType(DatFile datFile)
         {
             // Create each of the respective output DATs
             logger.User("Creating and populating new DATs");
@@ -403,7 +402,7 @@
         /// DatFile to add found items to
         /// ItemType to retrieve items for
         /// DatFile containing all items with the ItemType
-        private void FillWithItemType(DatFile datFile, DatFile indexDat, ItemType itemType)
+        private static void FillWithItemType(DatFile datFile, DatFile indexDat, ItemType itemType)
         {
             // Loop through and add the items for this index to the output
             Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
diff --git a/SabreTools.DatFiles/DatTool.Verifying.cs b/SabreTools.DatFiles/DatTool.Verifying.cs
index 1567136b..e74af521 100644
--- a/SabreTools.DatFiles/DatTool.Verifying.cs
+++ b/SabreTools.DatFiles/DatTool.Verifying.cs
@@ -3,7 +3,6 @@ using System.IO;
 using System.Linq;

 using SabreTools.Core;
-using SabreTools.DatFiles.Reports;
 using SabreTools.DatItems;
 using SabreTools.FileTypes;
 using SabreTools.IO;
@@ -12,7 +11,6 @@ using SabreTools.Logging;
 // This file represents all methods related to verifying with a DatFile
 namespace SabreTools.DatFiles
 {
-    // TODO: Re-evaluate if these should be made static instead of instanced
     public partial class DatTool
     {
         ///
@@ -21,7 +19,7 @@
         /// Current DatFile object to verify against
         /// List of input directories to compare against
         /// True if verification was a success, false otherwise
-        public bool VerifyDepot(DatFile datFile, List<string> inputs)
+        public static bool VerifyDepot(DatFile datFile, List<string> inputs)
         {
             bool success = true;

@@ -104,7 +102,7 @@
         /// Current DatFile object to verify against
         /// True if only hashes should be checked, false for full file information
         /// True if verification was a success, false otherwise
-        public bool VerifyGeneric(DatFile datFile, bool hashOnly)
+        public static bool VerifyGeneric(DatFile datFile, bool hashOnly)
         {
             bool success = true;

diff --git a/SabreTools.DatFiles/DatTool.Writing.cs b/SabreTools.DatFiles/DatTool.Writing.cs
index e6b500a7..36f25d00 100644
--- a/SabreTools.DatFiles/DatTool.Writing.cs
+++ b/SabreTools.DatFiles/DatTool.Writing.cs
@@ -8,7 +8,6 @@ using SabreTools.IO;
 // This file represents all methods related to writing to a file
 namespace SabreTools.DatFiles
 {
-    // TODO: Re-evaluate if these should be made static instead of instanced
     public partial class DatTool
     {
         ///
@@ -21,7 +20,7 @@
         /// True if quotes are assumed in supported types (default), false otherwise
         /// True if the error that is thrown should be thrown back to the caller, false otherwise
         /// True if the DAT was written correctly, false otherwise
-        public bool Write(
+        public static bool Write(
             DatFile datFile,
             string outDir,
             bool overwrite = true,
@@ -47,7 +46,7 @@
             }

             // Make sure that the three essential fields are filled in
-            EnsureHeaderFields();
+            EnsureHeaderFields(datFile);

             // Bucket roms by game name, if not already
             datFile.Items.BucketBy(Field.Machine_Name, DedupeType.None);
@@ -90,7 +89,7 @@
         /// Ensure that FileName, Name, and Description are filled with some value
         ///
         /// Current DatFile object to write from
-        private void EnsureHeaderFields(DatFile datFile)
+        private static void EnsureHeaderFields(DatFile datFile)
         {
             // Empty FileName
             if (string.IsNullOrWhiteSpace(datFile.Header.FileName))
@@ -127,7 +126,7 @@
         ///
         /// Current DatFile object to write from
         /// True if there are any writable items, false otherwise
-        private bool HasWritable(DatFile datFile)
+        private static bool HasWritable(DatFile datFile)
         {
             // Force a statistics recheck, just in case
             datFile.Items.RecalculateStats();
diff --git a/SabreTools.DatFiles/DatTool.cs b/SabreTools.DatFiles/DatTool.cs
index 73a884f1..fd6a67f8 100644
--- a/SabreTools.DatFiles/DatTool.cs
+++ b/SabreTools.DatFiles/DatTool.cs
@@ -2,7 +2,6 @@ using SabreTools.Logging;

 // TODO: What sort of internal state should this have? Would a single DatFile be appropriate?
 // TODO: How much of the stuff currently in DatFile should be moved here?
-// TODO: Can things like CreateAndParse be moved here?
 namespace SabreTools.DatFiles
 {
     ///
@@ -15,7 +14,7 @@
         ///
         /// Logging object
         ///
-        protected Logger logger;
+        private static readonly Logger logger = new Logger();

         #endregion
     }
diff --git a/SabreTools.DatFiles/ItemDictionary.cs b/SabreTools.DatFiles/ItemDictionary.cs
index a5e1b1b0..e880fbdf 100644
--- a/SabreTools.DatFiles/ItemDictionary.cs
+++ b/SabreTools.DatFiles/ItemDictionary.cs
@@ -1395,9 +1395,6 @@ namespace SabreTools.DatFiles
             string basepath = null;
             ItemDictionary dirStats = new ItemDictionary();

-            // Get the DatTool for parsing
-            DatTool dt = new DatTool();
-
             // Now process each of the input files
             foreach (ParentablePath file in files)
             {
@@ -1428,7 +1425,7 @@ namespace SabreTools.DatFiles
                 staticLogger.Verbose($"Beginning stat collection for '{file.CurrentPath}'");

                 List<string> games = new List<string>();
-                DatFile datdata = dt.CreateAndParse(file.CurrentPath);
+                DatFile datdata = DatTool.CreateAndParse(file.CurrentPath);
                 datdata.Items.BucketBy(Field.Machine_Name, DedupeType.None, norename: true);

                 // Output single DAT stats (if asked)
diff --git a/SabreTools/Features/Batch.cs b/SabreTools/Features/Batch.cs
index 5be28f12..ae033714 100644
--- a/SabreTools/Features/Batch.cs
+++ b/SabreTools/Features/Batch.cs
@@ -67,7 +67,6 @@ Reset the internal state: reset();";

                 // Each batch file has its own state
                 int index = 0;
-                DatTool dt = new DatTool();
                 DatFile datFile = DatFile.Create();
                 string outputDirectory = null;

@@ -134,7 +133,7 @@
                             // Assume there could be multiple
                             foreach (ParentablePath datFilePath in datFilePaths)
                             {
-                                dt.ParseInto(datFile, datFilePath, index++);
+                                DatTool.ParseInto(datFile, datFilePath, index++);
                             }

                             break;
@@ -154,7 +153,7 @@
                             // Assume there could be multiple
                             foreach (string input in command.Arguments)
                             {
-                                dt.PopulateFromDir(datFile, input);
+                                DatTool.PopulateFromDir(datFile, input);
                             }

                             // TODO: We might not want to remove higher order hashes in the future
@@ -406,7 +405,7 @@
                             }

                             // Write out the dat with the current state
-                            dt.Write(datFile, outputDirectory, overwrite: overwrite.Value);
+                            DatTool.Write(datFile, outputDirectory, overwrite: overwrite.Value);
                             break;

                         // Reset the internal state
diff --git a/SabreTools/Features/DatFromDir.cs b/SabreTools/Features/DatFromDir.cs
index 5b5ec253..3fa335b1 100644
--- a/SabreTools/Features/DatFromDir.cs
+++ b/SabreTools/Features/DatFromDir.cs
@@ -73,7 +73,6 @@ namespace SabreTools.Features
                 Cleaner.ExcludeFields.Add(Field.DatItem_Date);

             // Create a new DATFromDir object and process the inputs
-            DatTool dt = new DatTool();
             DatFile basedat = DatFile.Create(Header);
             basedat.Header.Date = DateTime.Now.ToString("yyyy-MM-dd");

@@ -90,7 +89,7 @@
                     datdata.FillHeaderFromPath(basePath, noAutomaticDate);

                     // Now populate from the path
-                    bool success = dt.PopulateFromDir(
+                    bool success = DatTool.PopulateFromDir(
                         datdata,
                         basePath,
                         asFiles,
@@ -107,7 +106,7 @@
                     datdata.ApplyCleaning(Cleaner);

                     // Write out the file
-                    dt.Write(datdata, OutputDir);
+                    DatTool.Write(datdata, OutputDir);
                 }
                 else
                 {
diff --git a/SabreTools/Features/Sort.cs b/SabreTools/Features/Sort.cs
index 06a8dd9c..fd8e59b9 100644
--- a/SabreTools/Features/Sort.cs
+++ b/SabreTools/Features/Sort.cs
@@ -84,16 +84,13 @@ namespace SabreTools.Features
             var datfiles = GetList(features, DatListValue);
             var datfilePaths = DirectoryExtensions.GetFilesOnly(datfiles);

-            // Get the DatTool for operations
-            DatTool dt = new DatTool();
-
             // If we are in individual mode, process each DAT on their own, appending the DAT name to the output dir
             if (GetBoolean(features, IndividualValue))
             {
                 foreach (ParentablePath datfile in datfilePaths)
                 {
                     DatFile datdata = DatFile.Create();
-                    dt.ParseInto(datdata, datfile, int.MaxValue, keep: true);
+                    DatTool.ParseInto(datdata, datfile, int.MaxValue, keep: true);

                     // Set depot information
                     datdata.Header.InputDepot = Header.InputDepot.Clone() as DepotInformation;
@@ -106,9 +103,9 @@
                     // If we have the depot flag, respect it
                     bool success;
                     if (Header.InputDepot?.IsActive ?? false)
-                        success = dt.RebuildDepot(datdata, Inputs, Path.Combine(OutputDir, datdata.Header.FileName), date, delete, inverse, outputFormat);
+                        success = DatTool.RebuildDepot(datdata, Inputs, Path.Combine(OutputDir, datdata.Header.FileName), date, delete, inverse, outputFormat);
                     else
-                        success = dt.RebuildGeneric(datdata, Inputs, Path.Combine(OutputDir, datdata.Header.FileName), quickScan, date, delete, inverse, outputFormat, asFiles);
+                        success = DatTool.RebuildGeneric(datdata, Inputs, Path.Combine(OutputDir, datdata.Header.FileName), quickScan, date, delete, inverse, outputFormat, asFiles);

                     // If we have a success and we're updating the DAT, write it out
                     if (success && updateDat)
@@ -117,7 +114,7 @@
                         datdata.Header.Name = $"fixDAT_{Header.Name}";
                         datdata.Header.Description = $"fixDAT_{Header.Description}";
                         datdata.Items.ClearMarked();
-                        dt.Write(datdata, OutputDir);
+                        DatTool.Write(datdata, OutputDir);
                     }
                 }
             }
@@ -131,7 +128,7 @@
                 DatFile datdata = DatFile.Create();
                 foreach (ParentablePath datfile in datfilePaths)
                 {
-                    dt.ParseInto(datdata, datfile, int.MaxValue, keep: true);
+                    DatTool.ParseInto(datdata, datfile, int.MaxValue, keep: true);
                 }

                 // Set depot information
@@ -147,9 +144,9 @@
                 // If we have the depot flag, respect it
                 bool success;
                 if (Header.InputDepot?.IsActive ?? false)
-                    success = dt.RebuildDepot(datdata, Inputs, OutputDir, date, delete, inverse, outputFormat);
+                    success = DatTool.RebuildDepot(datdata, Inputs, OutputDir, date, delete, inverse, outputFormat);
                 else
-                    success = dt.RebuildGeneric(datdata, Inputs, OutputDir, quickScan, date, delete, inverse, outputFormat, asFiles);
+                    success = DatTool.RebuildGeneric(datdata, Inputs, OutputDir, quickScan, date, delete, inverse, outputFormat, asFiles);

                 // If we have a success and we're updating the DAT, write it out
                 if (success && updateDat)
@@ -158,7 +155,7 @@
                     datdata.Header.Name = $"fixDAT_{Header.Name}";
                     datdata.Header.Description = $"fixDAT_{Header.Description}";
                     datdata.Items.ClearMarked();
-                    dt.Write(datdata, OutputDir);
+                    DatTool.Write(datdata, OutputDir);
                 }
             }
         }
diff --git a/SabreTools/Features/Split.cs b/SabreTools/Features/Split.cs
index a0fc8055..a917850a 100644
--- a/SabreTools/Features/Split.cs
+++ b/SabreTools/Features/Split.cs
@@ -50,15 +50,12 @@ namespace SabreTools.Features
             // Get only files from the inputs
             List<ParentablePath> files = DirectoryExtensions.GetFilesOnly(Inputs, appendparent: true);

-            // Get the DatTool for operations
-            DatTool dt = new DatTool();
-
             // Loop over the input files
             foreach (ParentablePath file in files)
             {
                 // Create and fill the new DAT
                 DatFile internalDat = DatFile.Create(Header);
-                dt.ParseInto(internalDat, file);
+                DatTool.ParseInto(internalDat, file);

                 // Get the output directory
                 OutputDir = file.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));
@@ -66,13 +63,13 @@
                 // Extension splitting
                 if (splittingMode.HasFlag(SplittingMode.Extension))
                 {
-                    (DatFile extADat, DatFile extBDat) = dt.SplitByExtension(internalDat, GetList(features, ExtAListValue), GetList(features, ExtBListValue));
+                    (DatFile extADat, DatFile extBDat) = DatTool.SplitByExtension(internalDat, GetList(features, ExtAListValue), GetList(features, ExtBListValue));

                     InternalStopwatch watch = new InternalStopwatch("Outputting extension-split DATs");

                     // Output both possible DatFiles
-                    dt.Write(extADat, OutputDir);
-                    dt.Write(extBDat, OutputDir);
+                    DatTool.Write(extADat, OutputDir);
+                    DatTool.Write(extBDat, OutputDir);

                     watch.Stop();
                 }
@@ -80,14 +77,14 @@
                 // Hash splitting
                 if (splittingMode.HasFlag(SplittingMode.Hash))
                 {
-                    Dictionary<Field, DatFile> typeDats = dt.SplitByHash(internalDat);
+                    Dictionary<Field, DatFile> typeDats = DatTool.SplitByHash(internalDat);

                     InternalStopwatch watch = new InternalStopwatch("Outputting hash-split DATs");

                     // Loop through each type DatFile
                     Parallel.ForEach(typeDats.Keys, Globals.ParallelOptions, itemType =>
                     {
-                        dt.Write(typeDats[itemType], OutputDir);
+                        DatTool.Write(typeDats[itemType], OutputDir);
                     });

                     watch.Stop();
@@ -97,7 +94,7 @@
                 if (splittingMode.HasFlag(SplittingMode.Level))
                 {
                     logger.Warning("This feature is not implemented: level-split");
-                    dt.SplitByLevel(
+                    DatTool.SplitByLevel(
                         internalDat,
                         OutputDir,
                         GetBoolean(features, ShortValue),
@@ -107,13 +104,13 @@
                 // Size splitting
                 if (splittingMode.HasFlag(SplittingMode.Size))
                 {
-                    (DatFile lessThan, DatFile greaterThan) = dt.SplitBySize(internalDat, GetInt64(features, RadixInt64Value));
+                    (DatFile lessThan, DatFile greaterThan) = DatTool.SplitBySize(internalDat, GetInt64(features, RadixInt64Value));

                     InternalStopwatch watch = new InternalStopwatch("Outputting size-split DATs");

                     // Output both possible DatFiles
-                    dt.Write(lessThan, OutputDir);
-                    dt.Write(greaterThan, OutputDir);
+                    DatTool.Write(lessThan, OutputDir);
+                    DatTool.Write(greaterThan, OutputDir);

                     watch.Stop();
                 }
@@ -121,14 +118,14 @@
                 // Type splitting
                 if (splittingMode.HasFlag(SplittingMode.Type))
                 {
-                    Dictionary<ItemType, DatFile> typeDats = dt.SplitByType(internalDat);
+                    Dictionary<ItemType, DatFile> typeDats = DatTool.SplitByType(internalDat);

                     InternalStopwatch watch = new InternalStopwatch("Outputting ItemType DATs");

                     // Loop through each type DatFile
                     Parallel.ForEach(typeDats.Keys, Globals.ParallelOptions, itemType =>
                     {
-                        dt.Write(typeDats[itemType], OutputDir);
+                        DatTool.Write(typeDats[itemType], OutputDir);
                     });

                     watch.Stop();
diff --git a/SabreTools/Features/Update.cs b/SabreTools/Features/Update.cs
index a19fc349..8348909c 100644
--- a/SabreTools/Features/Update.cs
+++ b/SabreTools/Features/Update.cs
@@ -152,9 +152,6 @@ namespace SabreTools.Features
             List<ParentablePath> inputPaths = DirectoryExtensions.GetFilesOnly(Inputs, appendparent: true);
             List<ParentablePath> basePaths = DirectoryExtensions.GetFilesOnly(GetList(features, BaseDatListValue));

-            // Get the DatTool for parsing
-            DatTool dt = new DatTool();
-
             // If we're in standard update mode, run through all of the inputs
             if (updateMode == UpdateMode.None)
             {
@@ -164,7 +161,7 @@
                     // Create a new base DatFile
                     DatFile datFile = DatFile.Create(Header);
                     logger.User($"Processing '{Path.GetFileName(inputPath.CurrentPath)}'");
-                    dt.ParseInto(datFile, inputPath, keep: true,
+                    DatTool.ParseInto(datFile, inputPath, keep: true,
                         keepext: datFile.Header.DatFormat.HasFlag(DatFormat.TSV)
                             || datFile.Header.DatFormat.HasFlag(DatFormat.CSV)
                             || datFile.Header.DatFormat.HasFlag(DatFormat.SSV));
@@ -179,7 +176,7 @@
                     string realOutDir = inputPath.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));

                     // Try to output the file, overwriting only if it's not in the current directory
-                    dt.Write(datFile, realOutDir, overwrite: GetBoolean(features, InplaceValue));
+                    DatTool.Write(datFile, realOutDir, overwrite: GetBoolean(features, InplaceValue));
                 });

                 return;
@@ -219,7 +216,7 @@
                 DatFile dupeData = userInputDat.DiffDuplicates(inputPaths);

                 InternalStopwatch watch = new InternalStopwatch("Outputting duplicate DAT");
-                dt.Write(dupeData, OutputDir, overwrite: false);
+                DatTool.Write(dupeData, OutputDir, overwrite: false);
                 watch.Stop();
             }

@@ -229,7 +226,7 @@
                 DatFile outerDiffData = userInputDat.DiffNoDuplicates(inputPaths);

                 InternalStopwatch watch = new InternalStopwatch("Outputting no duplicate DAT");
-                dt.Write(outerDiffData, OutputDir, overwrite: false);
+                DatTool.Write(outerDiffData, OutputDir, overwrite: false);
                 watch.Stop();
             }

@@ -247,7 +244,7 @@
                     string path = inputPaths[j].GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));

                     // Try to output the file
-                    dt.Write(datFiles[j], path, overwrite: GetBoolean(features, InplaceValue));
+                    DatTool.Write(datFiles[j], path, overwrite: GetBoolean(features, InplaceValue));
                 });

                 watch.Stop();
@@ -283,7 +280,7 @@
                     string path = inputPaths[j].GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));

                     // Try to output the file
-                    dt.Write(datFiles[j], path, overwrite: GetBoolean(features, InplaceValue));
+                    DatTool.Write(datFiles[j], path, overwrite: GetBoolean(features, InplaceValue));
                 });

                 watch.Stop();
@@ -297,7 +294,7 @@
                 {
                     // Parse the path to a new DatFile
                     DatFile repDat = DatFile.Create(userInputDat.Header.CloneFiltering());
-                    dt.ParseInto(repDat, inputPath, indexId: 1, keep: true);
+                    DatTool.ParseInto(repDat, inputPath, indexId: 1, keep: true);

                     // Perform additional processing steps
                     repDat.ApplyExtras(Extras);
@@ -310,7 +307,7 @@

                     // Finally output the diffed DatFile
                     string interOutDir = inputPath.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));
-                    dt.Write(repDat, interOutDir, overwrite: GetBoolean(features, InplaceValue));
+                    DatTool.Write(repDat, interOutDir, overwrite: GetBoolean(features, InplaceValue));
                 });
             }

@@ -322,7 +319,7 @@
                 {
                     // Parse the path to a new DatFile
                     DatFile repDat = DatFile.Create(userInputDat.Header.CloneFiltering());
-                    dt.ParseInto(repDat, inputPath, indexId: 1, keep: true);
+                    DatTool.ParseInto(repDat, inputPath, indexId: 1, keep: true);

                     // Perform additional processing steps
                     repDat.ApplyExtras(Extras);
@@ -335,7 +332,7 @@

                     // Finally output the replaced DatFile
                     string interOutDir = inputPath.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));
-                    dt.Write(repDat, interOutDir, overwrite: GetBoolean(features, InplaceValue));
+                    DatTool.Write(repDat, interOutDir, overwrite: GetBoolean(features, InplaceValue));
                 });
             }

@@ -347,7 +344,7 @@
                 if (string.Equals(userInputDat.Header.Type, "SuperDAT", StringComparison.OrdinalIgnoreCase))
                     userInputDat.ApplySuperDAT(inputPaths);

-                dt.Write(userInputDat, OutputDir);
+                DatTool.Write(userInputDat, OutputDir);
             }
         }
     }
diff --git a/SabreTools/Features/Verify.cs b/SabreTools/Features/Verify.cs
index 3b5987d1..0db683cb 100644
--- a/SabreTools/Features/Verify.cs
+++ b/SabreTools/Features/Verify.cs
@@ -51,9 +51,6 @@ namespace SabreTools.Features
             bool quickScan = GetBoolean(features, QuickValue);
             var splitType = GetSplitType(features);

-            // Get the DatTool for required operations
-            DatTool dt = new DatTool();
-
             // If we are in individual mode, process each DAT on their own
             if (GetBoolean(features, IndividualValue))
             {
@@ -61,7 +58,7 @@
                 {
                     // Parse in from the file
                     DatFile datdata = DatFile.Create();
-                    dt.ParseInto(datdata, datfile, int.MaxValue, keep: true);
+                    DatTool.ParseInto(datdata, datfile, int.MaxValue, keep: true);

                     // Perform additional processing steps
                     datdata.ApplyExtras(Extras);
@@ -79,7 +76,7 @@
                     // If we have the depot flag, respect it
                     if (Header.InputDepot?.IsActive ?? false)
                     {
-                        dt.VerifyDepot(datdata, Inputs);
+                        DatTool.VerifyDepot(datdata, Inputs);
                     }
                     else
                     {
@@ -87,15 +84,15 @@
                         logger.User("Processing files:\n");
                         foreach (string input in Inputs)
                         {
-                            dt.PopulateFromDir(datdata, input, asFiles: asFiles, hashes: quickScan ? Hash.CRC : Hash.Standard);
+                            DatTool.PopulateFromDir(datdata, input, asFiles: asFiles, hashes: quickScan ? Hash.CRC : Hash.Standard);
                         }

-                        dt.VerifyGeneric(datdata, hashOnly);
+                        DatTool.VerifyGeneric(datdata, hashOnly);
                     }

                     // Now write out if there are any items left
                     datdata.WriteStatsToConsole();
-                    dt.Write(datdata, OutputDir);
+                    DatTool.Write(datdata, OutputDir);
                 }
             }
             // Otherwise, process all DATs into the same output
@@ -107,7 +104,7 @@
                 DatFile datdata = DatFile.Create();
                 foreach (ParentablePath datfile in datfilePaths)
                 {
-                    dt.ParseInto(datdata, datfile, int.MaxValue, keep: true);
+                    DatTool.ParseInto(datdata, datfile, int.MaxValue, keep: true);
                 }

                 // Perform additional processing steps
@@ -128,7 +125,7 @@
                 // If we have the depot flag, respect it
                 if (Header.InputDepot?.IsActive ?? false)
                 {
-                    dt.VerifyDepot(datdata, Inputs);
+                    DatTool.VerifyDepot(datdata, Inputs);
                 }
                 else
                 {
@@ -136,15 +133,15 @@
                     logger.User("Processing files:\n");
                     foreach (string input in Inputs)
                    {
-                        dt.PopulateFromDir(datdata, input, asFiles: asFiles, hashes: quickScan ? Hash.CRC : Hash.Standard);
+                        DatTool.PopulateFromDir(datdata, input, asFiles: asFiles, hashes: quickScan ? Hash.CRC : Hash.Standard);
                     }

-                    dt.VerifyGeneric(datdata, hashOnly);
+                    DatTool.VerifyGeneric(datdata, hashOnly);
                 }

                 // Now write out if there are any items left
                 datdata.WriteStatsToConsole();
-                dt.Write(datdata, OutputDir);
+                DatTool.Write(datdata, OutputDir);
             }
         }
     }
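The change is the same at every call site in this patch: `DatTool`'s operations lose their throwaway instance receiver and are invoked on the type itself, with the `logger` field becoming `static readonly` so the methods need no construction. A minimal before/after sketch of the pattern, using only members that appear in the diff (the file paths are placeholders, not values from the patch):

```csharp
// Before this patch: a stateless instance was created just to call the methods
//     DatTool dt = new DatTool();
//     DatFile datdata = dt.CreateAndParse("example.dat");
//     dt.Write(datdata, "out");

// After this patch: the same operations are called statically
DatFile datdata = DatTool.CreateAndParse("example.dat"); // "example.dat" is a placeholder input path
DatTool.Write(datdata, "out");                           // "out" is a placeholder output directory
```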