Make DatTool methods static

Matt Nadareski
2020-12-10 11:58:46 -08:00
parent c41f2cbed2
commit 0cfd4adc45
24 changed files with 105 additions and 148 deletions
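In short, the DatTool entry points for directory scanning, parsing, rebuilding, splitting, verifying, and writing all move from instance methods to static methods, so callers no longer construct a DatTool before using it. A minimal before/after sketch of the calling convention, based on the CreateAndParse call updated in the last file of this diff (file is a ParentablePath, as in that code):

// Before: a DatTool instance had to be created first
DatTool dt = new DatTool();
DatFile datdata = dt.CreateAndParse(file.CurrentPath);

// After: every DatTool method is called on the type itself
DatFile datdata = DatTool.CreateAndParse(file.CurrentPath);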

View File

@@ -429,7 +429,6 @@ namespace SabreTools.DatFiles
/// <returns>List of DatHeader objects representing headers</returns>
public List<DatHeader> PopulateUserData(List<ParentablePath> inputs)
{
DatTool dt = new DatTool();
DatFile[] datFiles = new DatFile[inputs.Count];
InternalStopwatch watch = new InternalStopwatch("Processing individual DATs");
@@ -439,7 +438,7 @@ namespace SabreTools.DatFiles
var input = inputs[i];
logger.User($"Adding DAT: {input.CurrentPath}");
datFiles[i] = Create(Header.CloneFiltering());
dt.ParseInto(datFiles[i], input, i, keep: true);
DatTool.ParseInto(datFiles[i], input, i, keep: true);
});
watch.Stop();

View File

@@ -15,7 +15,6 @@ namespace SabreTools.DatFiles
{
// TODO: See if any of the methods can be broken up a bit more neatly
// TODO: See if any of this can be more stateful given the inputted DatFile
// TODO: Re-evaluate if these should be made static instead of instanced
public partial class DatTool
{
/// <summary>
@@ -27,7 +26,7 @@ namespace SabreTools.DatFiles
/// <param name="skipFileType">Type of files that should be skipped</param>
/// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param>
/// <param name="hashes">Hashes to include in the information</param>
public bool PopulateFromDir(
public static bool PopulateFromDir(
DatFile datFile,
string basePath,
TreatAsFile asFiles = 0x00,
@@ -102,7 +101,7 @@ namespace SabreTools.DatFiles
/// <param name="skipFileType">Type of files that should be skipped</param>
/// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param>
/// <param name="hashes">Hashes to include in the information</param>
private void CheckFileForHashes(
private static void CheckFileForHashes(
DatFile datFile,
string item,
string basePath,
@@ -176,7 +175,7 @@ namespace SabreTools.DatFiles
/// <param name="datFile">Current DatFile object to add to</param>
/// <param name="item">Filename of the item to be checked</param>
/// <returns>True if we checked a depot file, false otherwise</returns>
private bool CheckDepotFile(DatFile datFile, string item)
private static bool CheckDepotFile(DatFile datFile, string item)
{
// If we're not in Depot mode, return false
if (datFile.Header.OutputDepot?.IsActive != true)
@@ -210,7 +209,7 @@ namespace SabreTools.DatFiles
/// <param name="item">File to be added</param>
/// <param name="basePath">Path the represents the parent directory</param>
/// <param name="extracted">List of BaseFiles representing the internal files</param>
private void ProcessArchive(DatFile datFile, string item, string basePath, List<BaseFile> extracted)
private static void ProcessArchive(DatFile datFile, string item, string basePath, List<BaseFile> extracted)
{
// Get the parent path for all items
string parent = (Path.GetDirectoryName(Path.GetFullPath(item)) + Path.DirectorySeparatorChar).Remove(0, basePath.Length) + Path.GetFileNameWithoutExtension(item);
@@ -230,7 +229,7 @@ namespace SabreTools.DatFiles
/// <param name="item">File containing the blanks</param>
/// <param name="basePath">Path the represents the parent directory</param>
/// <param name="archive">BaseArchive to get blanks from</param>
private void ProcessArchiveBlanks(DatFile datFile, string item, string basePath, BaseArchive archive)
private static void ProcessArchiveBlanks(DatFile datFile, string item, string basePath, BaseArchive archive)
{
List<string> empties = new List<string>();
@@ -254,7 +253,7 @@ namespace SabreTools.DatFiles
/// </summary>
/// <param name="datFile">Current DatFile object to add to</param>
/// <param name="basePath">Path the represents the parent directory</param>
private void ProcessDirectoryBlanks(DatFile datFile, string basePath)
private static void ProcessDirectoryBlanks(DatFile datFile, string basePath)
{
// If we're in depot mode, we don't process blanks
if (datFile.Header.OutputDepot?.IsActive == true)
@@ -301,7 +300,7 @@ namespace SabreTools.DatFiles
/// <param name="basePath">Path the represents the parent directory</param>
/// <param name="hashes">Hashes to include in the information</param>
/// <param name="asFiles">TreatAsFiles representing CHD and Archive scanning</param>
private void ProcessFile(DatFile datFile, string item, string basePath, Hash hashes, TreatAsFile asFiles)
private static void ProcessFile(DatFile datFile, string item, string basePath, Hash hashes, TreatAsFile asFiles)
{
logger.Verbose($"'{Path.GetFileName(item)}' treated like a file");
BaseFile baseFile = BaseFile.GetInfo(item, header: datFile.Header.HeaderSkipper, hashes: hashes, asFiles: asFiles);
@@ -317,7 +316,7 @@ namespace SabreTools.DatFiles
/// <param name="item">Rom data to be used to write to file</param>
/// <param name="basepath">Path the represents the parent directory</param>
/// <param name="parent">Parent game to be used</param>
private void ProcessFileHelper(DatFile datFile, string item, DatItem datItem, string basepath, string parent)
private static void ProcessFileHelper(DatFile datFile, string item, DatItem datItem, string basepath, string parent)
{
// If we didn't get an accepted parsed type somehow, cancel out
List<ItemType> parsed = new List<ItemType> { ItemType.Disk, ItemType.Media, ItemType.Rom };
@@ -357,7 +356,7 @@ namespace SabreTools.DatFiles
/// <param name="item">Item name to use</param>
/// <param name="parent">Parent name to use</param>
/// <param name="basepath">Base path to use</param>
private void SetDatItemInfo(DatFile datFile, DatItem datItem, string item, string parent, string basepath)
private static void SetDatItemInfo(DatFile datFile, DatItem datItem, string item, string parent, string basepath)
{
// Get the data to be added as game and item names
string machineName, itemName;

View File

@@ -8,7 +8,6 @@ using SabreTools.IO;
// This file represents all methods related to parsing from a file
namespace SabreTools.DatFiles
{
// TODO: Re-evaluate if these should be made static instead of instanced
public partial class DatTool
{
/// <summary>
@@ -16,7 +15,7 @@ namespace SabreTools.DatFiles
/// </summary>
/// <param name="filename">Name of the file to be parsed</param>
/// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
public DatFile CreateAndParse(string filename, bool throwOnError = false)
public static DatFile CreateAndParse(string filename, bool throwOnError = false)
{
DatFile datFile = DatFile.Create();
ParseInto(datFile, new ParentablePath(filename), throwOnError: throwOnError);
@@ -33,7 +32,7 @@ namespace SabreTools.DatFiles
/// <param name="keepext">True if original extension should be kept, false otherwise (default)</param>
/// <param name="quotes">True if quotes are assumed in supported types (default), false otherwise</param>
/// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
public void ParseInto(
public static void ParseInto(
DatFile datFile,
string filename,
int indexId = 0,
@@ -56,7 +55,7 @@ namespace SabreTools.DatFiles
/// <param name="keepext">True if original extension should be kept, false otherwise (default)</param>
/// <param name="quotes">True if quotes are assumed in supported types (default), false otherwise</param>
/// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
public void ParseInto(
public static void ParseInto(
DatFile datFile,
ParentablePath input,
int indexId = 0,
@@ -101,7 +100,7 @@ namespace SabreTools.DatFiles
/// </summary>
/// <param name="filename">Name of the file to be parsed</param>
/// <returns>The DatFormat corresponding to the DAT</returns>
private DatFormat GetDatFormat(string filename)
private static DatFormat GetDatFormat(string filename)
{
// Limit the output formats based on extension
if (!PathExtensions.HasValidDatExtension(filename))

View File

@@ -13,7 +13,6 @@ using SabreTools.Skippers;
// This file represents all methods related to rebuilding from a DatFile
namespace SabreTools.DatFiles
{
// TODO: Re-evaluate if these should be made static instead of instanced
public partial class DatTool
{
/// <summary>
@@ -27,7 +26,7 @@ namespace SabreTools.DatFiles
/// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
/// <param name="outputFormat">Output format that files should be written to</param>
/// <returns>True if rebuilding was a success, false otherwise</returns>
public bool RebuildDepot(
public static bool RebuildDepot(
DatFile datFile,
List<string> inputs,
string outDir,
@@ -163,7 +162,7 @@ namespace SabreTools.DatFiles
/// <param name="outputFormat">Output format that files should be written to</param>
/// <param name="asFiles">TreatAsFiles representing special format scanning</param>
/// <returns>True if rebuilding was a success, false otherwise</returns>
public bool RebuildGeneric(
public static bool RebuildGeneric(
DatFile datFile,
List<string> inputs,
string outDir,
@@ -255,7 +254,7 @@ namespace SabreTools.DatFiles
/// <param name="outputFormat">Output format that files should be written to</param>
/// <param name="asFiles">TreatAsFiles representing special format scanning</param>
/// <returns>True if the file was used to rebuild, false otherwise</returns>
private bool RebuildGenericHelper(
private static bool RebuildGenericHelper(
DatFile datFile,
string file,
string outDir,
@@ -331,7 +330,7 @@ namespace SabreTools.DatFiles
/// <param name="outputFormat">Output format that files should be written to</param>
/// <param name="isZip">True if the input file is an archive, false if the file is TGZ/TXZ, null otherwise</param>
/// <returns>True if the file was able to be rebuilt, false otherwise</returns>
private bool RebuildIndividualFile(
private static bool RebuildIndividualFile(
DatFile datFile,
DatItem datItem,
string file,
@@ -470,7 +469,7 @@ namespace SabreTools.DatFiles
/// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
/// <param name="dupes">Output list of duplicate items to rebuild to</param>
/// <returns>True if the item should be rebuilt, false otherwise</returns>
private bool ShouldRebuild(DatFile datFile, DatItem datItem, Stream stream, bool inverse, out List<DatItem> dupes)
private static bool ShouldRebuild(DatFile datFile, DatItem datItem, Stream stream, bool inverse, out List<DatItem> dupes)
{
// Find if the file has duplicates in the DAT
dupes = datFile.Items.GetDuplicates(datItem);
@@ -526,7 +525,7 @@ namespace SabreTools.DatFiles
/// <param name="outputFormat">Output format that files should be written to</param>
/// <param name="isZip">True if the input file is an archive, false if the file is TGZ, null otherwise</param>
/// <returns>True if rebuilt properly, false otherwise</returns>
private bool RebuildTorrentGzip(DatFile datFile, DatItem datItem, string file, string outDir, OutputFormat outputFormat, bool? isZip)
private static bool RebuildTorrentGzip(DatFile datFile, DatItem datItem, string file, string outDir, OutputFormat outputFormat, bool? isZip)
{
// If we have a very specific TGZ->TGZ case, just copy it accordingly
GZipArchive tgz = new GZipArchive(file);
@@ -570,7 +569,7 @@ namespace SabreTools.DatFiles
/// <param name="outputFormat">Output format that files should be written to</param>
/// <param name="isZip">True if the input file is an archive, false if the file is TXZ, null otherwise</param>
/// <returns>True if rebuilt properly, false otherwise</returns>
private bool RebuildTorrentXz(DatFile datFile, DatItem datItem, string file, string outDir, OutputFormat outputFormat, bool? isZip)
private static bool RebuildTorrentXz(DatFile datFile, DatItem datItem, string file, string outDir, OutputFormat outputFormat, bool? isZip)
{
// If we have a very specific TXZ->TXZ case, just copy it accordingly
XZArchive txz = new XZArchive(file);
@@ -612,7 +611,7 @@ namespace SabreTools.DatFiles
/// <param name="isZip">Non-null if the input file is an archive</param>
/// <param name="stream">Output stream representing the opened file</param>
/// <returns>True if the stream opening succeeded, false otherwise</returns>
private bool GetFileStream(DatItem datItem, string file, bool? isZip, out Stream stream)
private static bool GetFileStream(DatItem datItem, string file, bool? isZip, out Stream stream)
{
// Get a generic stream for the file
stream = null;
@@ -644,7 +643,7 @@ namespace SabreTools.DatFiles
/// <summary>
/// Get the default OutputFormat associated with each PackingFlag
/// </summary>
private OutputFormat GetOutputFormat(PackingFlag packing)
private static OutputFormat GetOutputFormat(PackingFlag packing)
{
#if NET_FRAMEWORK
switch (packing)
@@ -680,7 +679,7 @@ namespace SabreTools.DatFiles
/// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
/// <param name="outputFormat">Output format that files should be written to</param>
/// <returns>Folder configured with proper flags</returns>
private Folder GetPreconfiguredFolder(DatFile datFile, bool date, OutputFormat outputFormat)
private static Folder GetPreconfiguredFolder(DatFile datFile, bool date, OutputFormat outputFormat)
{
Folder outputArchive = Folder.Create(outputFormat);
if (outputArchive is BaseArchive baseArchive && date)
@@ -700,7 +699,7 @@ namespace SabreTools.DatFiles
/// </summary>
/// <param name="itemType">OutputFormat to get value from</param>
/// <returns>String value corresponding to the OutputFormat</returns>
private string FromOutputFormat(OutputFormat itemType)
private static string FromOutputFormat(OutputFormat itemType)
{
#if NET_FRAMEWORK
switch (itemType)

View File

@@ -13,7 +13,6 @@ using NaturalSort;
// This file represents all methods related to splitting a DatFile into multiple
namespace SabreTools.DatFiles
{
// TODO: Re-evaluate if these should be made static instead of instanced
// TODO: Implement Level split
public partial class DatTool
{
@@ -24,7 +23,7 @@ namespace SabreTools.DatFiles
/// <param name="extA">List of extensions to split on (first DAT)</param>
/// <param name="extB">List of extensions to split on (second DAT)</param>
/// <returns>Extension Set A and Extension Set B DatFiles</returns>
public (DatFile extADat, DatFile extBDat) SplitByExtension(DatFile datFile, List<string> extA, List<string> extB)
public static (DatFile extADat, DatFile extBDat) SplitByExtension(DatFile datFile, List<string> extA, List<string> extB)
{
// If there are no items, return early
if (datFile.Items.TotalCount == 0)
@@ -79,7 +78,7 @@ namespace SabreTools.DatFiles
/// </summary>
/// <param name="datFile">Current DatFile object to split</param>
/// <returns>Dictionary of Field to DatFile mappings</returns>
public Dictionary<Field, DatFile> SplitByHash(DatFile datFile)
public static Dictionary<Field, DatFile> SplitByHash(DatFile datFile)
{
// Create each of the respective output DATs
logger.User("Creating and populating new DATs");
@@ -219,7 +218,7 @@ namespace SabreTools.DatFiles
/// <param name="shortname">True if short names should be used, false otherwise</param>
/// <param name="basedat">True if original filenames should be used as the base for output filename, false otherwise</param>
/// <returns>True if split succeeded, false otherwise</returns>
public bool SplitByLevel(DatFile datFile, string outDir, bool shortname, bool basedat)
public static bool SplitByLevel(DatFile datFile, string outDir, bool shortname, bool basedat)
{
// First, bucket by games so that we can do the right thing
datFile.Items.BucketBy(Field.Machine_Name, DedupeType.None, lower: false, norename: true);
@@ -264,7 +263,7 @@ namespace SabreTools.DatFiles
/// <param name="a">First string to compare</param>
/// <param name="b">Second string to compare</param>
/// <returns>-1 for a coming before b, 0 for a == b, 1 for a coming after b</returns>
private int SplitByLevelSort(string a, string b)
private static int SplitByLevelSort(string a, string b)
{
NaturalComparer nc = new NaturalComparer();
int adeep = a.Count(c => c == '/' || c == '\\');
@@ -284,7 +283,7 @@ namespace SabreTools.DatFiles
/// <param name="outDir">Directory to write out to</param>
/// <param name="shortname">True if short naming scheme should be used, false otherwise</param>
/// <param name="restore">True if original filenames should be used as the base for output filename, false otherwise</param>
private void SplitByLevelHelper(DatFile datFile, DatFile newDatFile, string outDir, bool shortname, bool restore)
private static void SplitByLevelHelper(DatFile datFile, DatFile newDatFile, string outDir, bool shortname, bool restore)
{
// Get the name from the DAT to use separately
string name = newDatFile.Header.Name;
@@ -313,7 +312,7 @@ namespace SabreTools.DatFiles
/// <param name="datFile">Current DatFile object to split</param>
/// <param name="radix">Long value representing the split point</param>
/// <returns>Less Than and Greater Than DatFiles</returns>
public (DatFile lessThan, DatFile greaterThan) SplitBySize(DatFile datFile, long radix)
public static (DatFile lessThan, DatFile greaterThan) SplitBySize(DatFile datFile, long radix)
{
// Create each of the respective output DATs
logger.User("Creating and populating new DATs");
@@ -361,7 +360,7 @@ namespace SabreTools.DatFiles
/// </summary>
/// <param name="datFile">Current DatFile object to split</param>
/// <returns>Dictionary of ItemType to DatFile mappings</returns>
public Dictionary<ItemType, DatFile> SplitByType(DatFile datFile)
public static Dictionary<ItemType, DatFile> SplitByType(DatFile datFile)
{
// Create each of the respective output DATs
logger.User("Creating and populating new DATs");
@@ -403,7 +402,7 @@ namespace SabreTools.DatFiles
/// <param name="indexDat">DatFile to add found items to</param>
/// <param name="itemType">ItemType to retrieve items for</param>
/// <returns>DatFile containing all items with the ItemType</returns>
private void FillWithItemType(DatFile datFile, DatFile indexDat, ItemType itemType)
private static void FillWithItemType(DatFile datFile, DatFile indexDat, ItemType itemType)
{
// Loop through and add the items for this index to the output
Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>

View File

@@ -3,7 +3,6 @@ using System.IO;
using System.Linq;
using SabreTools.Core;
using SabreTools.DatFiles.Reports;
using SabreTools.DatItems;
using SabreTools.FileTypes;
using SabreTools.IO;
@@ -12,7 +11,6 @@ using SabreTools.Logging;
// This file represents all methods related to verifying with a DatFile
namespace SabreTools.DatFiles
{
// TODO: Re-evaluate if these should be made static instead of instanced
public partial class DatTool
{
/// <summary>
@@ -21,7 +19,7 @@ namespace SabreTools.DatFiles
/// <param name="datFile">Current DatFile object to verify against</param>
/// <param name="inputs">List of input directories to compare against</param>
/// <returns>True if verification was a success, false otherwise</returns>
public bool VerifyDepot(DatFile datFile, List<string> inputs)
public static bool VerifyDepot(DatFile datFile, List<string> inputs)
{
bool success = true;
@@ -104,7 +102,7 @@ namespace SabreTools.DatFiles
/// <param name="datFile">Current DatFile object to verify against</param>
/// <param name="hashOnly">True if only hashes should be checked, false for full file information</param>
/// <returns>True if verification was a success, false otherwise</returns>
public bool VerifyGeneric(DatFile datFile, bool hashOnly)
public static bool VerifyGeneric(DatFile datFile, bool hashOnly)
{
bool success = true;

View File

@@ -8,7 +8,6 @@ using SabreTools.IO;
// This file represents all methods related to writing to a file
namespace SabreTools.DatFiles
{
// TODO: Re-evaluate if these should be made static instead of instanced
public partial class DatTool
{
/// <summary>
@@ -21,7 +20,7 @@ namespace SabreTools.DatFiles
/// <param name="quotes">True if quotes are assumed in supported types (default), false otherwise</param>
/// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
/// <returns>True if the DAT was written correctly, false otherwise</returns>
public bool Write(
public static bool Write(
DatFile datFile,
string outDir,
bool overwrite = true,
@@ -47,7 +46,7 @@ namespace SabreTools.DatFiles
}
// Make sure that the three essential fields are filled in
EnsureHeaderFields();
EnsureHeaderFields(datFile);
// Bucket roms by game name, if not already
datFile.Items.BucketBy(Field.Machine_Name, DedupeType.None);
@@ -90,7 +89,7 @@ namespace SabreTools.DatFiles
/// Ensure that FileName, Name, and Description are filled with some value
/// </summary>
/// <param name="datFile">Current DatFile object to write from</param>
private void EnsureHeaderFields(DatFile datFile)
private static void EnsureHeaderFields(DatFile datFile)
{
// Empty FileName
if (string.IsNullOrWhiteSpace(datFile.Header.FileName))
@@ -127,7 +126,7 @@ namespace SabreTools.DatFiles
/// </summary>
/// <param name="datFile">Current DatFile object to write from</param>
/// <returns>True if there are any writable items, false otherwise</returns>
private bool HasWritable(DatFile datFile)
private static bool HasWritable(DatFile datFile)
{
// Force a statistics recheck, just in case
datFile.Items.RecalculateStats();

View File

@@ -2,7 +2,6 @@ using SabreTools.Logging;
// TODO: What sort of internal state should this have? Would a single DatFile be appropriate?
// TODO: How much of the stuff currently in DatFile should be moved here?
// TODO: Can things like CreateAndParse be moved here?
namespace SabreTools.DatFiles
{
/// <summary>
@@ -15,7 +14,7 @@ namespace SabreTools.DatFiles
/// <summary>
/// Logging object
/// </summary>
protected Logger logger;
private static readonly Logger logger = new Logger();
#endregion
}
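Since the methods are now static, the protected instance logger above also becomes a private static readonly field, so every partial DatTool file in this commit logs through one shared Logger. A rough sketch of how the pieces shown here fit together (the method body is abbreviated, not copied verbatim from the commit):

public partial class DatTool
{
    // Single Logger shared by all static DatTool methods
    private static readonly Logger logger = new Logger();

    private static void ProcessFile(DatFile datFile, string item, string basePath, Hash hashes, TreatAsFile asFiles)
    {
        logger.Verbose($"'{Path.GetFileName(item)}' treated like a file");
        // ... hashing and DatItem creation as in the PopulateFromDir changes above
    }
}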

View File

@@ -1395,9 +1395,6 @@ namespace SabreTools.DatFiles
string basepath = null;
ItemDictionary dirStats = new ItemDictionary();
// Get the DatTool for parsing
DatTool dt = new DatTool();
// Now process each of the input files
foreach (ParentablePath file in files)
{
@@ -1428,7 +1425,7 @@ namespace SabreTools.DatFiles
staticLogger.Verbose($"Beginning stat collection for '{file.CurrentPath}'");
List<string> games = new List<string>();
DatFile datdata = dt.CreateAndParse(file.CurrentPath);
DatFile datdata = DatTool.CreateAndParse(file.CurrentPath);
datdata.Items.BucketBy(Field.Machine_Name, DedupeType.None, norename: true);
// Output single DAT stats (if asked)