Make DatTool methods static

This commit is contained in:
Matt Nadareski
2020-12-10 11:58:46 -08:00
parent c41f2cbed2
commit 0cfd4adc45
24 changed files with 105 additions and 148 deletions

View File

@@ -62,12 +62,11 @@ have a current entry in the DAT index.";
}
// Then process all of the input directories into an internal DAT
DatTool dt = new DatTool();
DatFile df = DatFile.Create();
foreach (string dir in onlyDirs)
{
dt.PopulateFromDir(df, dir, asFiles: TreatAsFile.NonArchive);
dt.PopulateFromDir(df, dir, asFiles: TreatAsFile.All);
DatTool.PopulateFromDir(df, dir, asFiles: TreatAsFile.NonArchive);
DatTool.PopulateFromDir(df, dir, asFiles: TreatAsFile.All);
}
// Create an empty Dat for files that need to be rebuilt
@@ -190,7 +189,7 @@ have a current entry in the DAT index.";
}
// Create the sorting object to use and rebuild the needed files
dt.RebuildGeneric(
DatTool.RebuildGeneric(
need,
onlyDirs,
outDir: _depots.Keys.ToList()[0],

View File

@@ -705,8 +705,7 @@ CREATE TABLE IF NOT EXISTS dat (
// Parse the Dat if possible
logger.User($"Adding from '{dat.Name}'");
DatTool dt = new DatTool();
DatFile tempdat = dt.CreateAndParse(fullpath);
DatFile tempdat = DatTool.CreateAndParse(fullpath);
// If the Dat wasn't empty, add the information
SqliteCommand slc = null;

View File

@@ -46,14 +46,11 @@ structure according to the original DAT master directory tree structure.";
if (string.IsNullOrWhiteSpace(outdat))
outdat = "out";
// Get the DatTool for operations
DatTool dt = new DatTool();
// Now that we have the dictionary, we can loop through and output to a new folder for each
foreach (string key in foundDats.Keys)
{
// Get the DAT file associated with the key
DatFile datFile = dt.CreateAndParse(Path.Combine(_dats, foundDats[key]));
DatFile datFile = DatTool.CreateAndParse(Path.Combine(_dats, foundDats[key]));
// Set the depot values
datFile.Header.InputDepot = new DepotInformation(true, 4);
@@ -67,11 +64,11 @@ structure according to the original DAT master directory tree structure.";
List<string> onlineDepots = _depots.Where(d => d.Value.Item2).Select(d => d.Key).ToList();
// Now scan all of those depots and rebuild
dt.RebuildDepot(
DatTool.RebuildDepot(
datFile,
onlineDepots,
outDir: outputFolder,
outputFormat: (copy ? OutputFormat.TorrentGzipRomba : OutputFormat.TorrentZip));
outputFormat: copy ? OutputFormat.TorrentGzipRomba : OutputFormat.TorrentZip);
}
}
}

View File

@@ -55,19 +55,16 @@ in -old DAT file. Ignores those entries in -old that are not in -new.";
return;
}
// Get the DatTool for operations
DatTool dt = new DatTool();
// Create the encapsulating datfile
DatFile datfile = DatFile.Create();
datfile.Header.Name = name;
datfile.Header.Description = description;
dt.ParseInto(datfile, olddat);
DatTool.ParseInto(datfile, olddat);
// Diff against the new datfile
DatFile intDat = dt.CreateAndParse(newdat);
DatFile intDat = DatTool.CreateAndParse(newdat);
datfile.DiffAgainst(intDat, false);
dt.Write(intDat, outdat);
DatTool.Write(intDat, outdat);
}
}
}

View File

@@ -50,13 +50,12 @@ namespace RombaSharp.Features
}
// Create and write the encapsulating datfile
DatTool dt = new DatTool();
DatFile datfile = DatFile.Create();
datfile.Header.Name = string.IsNullOrWhiteSpace(name) ? "untitled" : name;
datfile.Header.Description = description;
dt.PopulateFromDir(datfile, source, asFiles: TreatAsFile.NonArchive);
DatTool.PopulateFromDir(datfile, source, asFiles: TreatAsFile.NonArchive);
datfile.ApplyCleaning(new Cleaner() { ExcludeFields = Hash.DeepHashes.AsFields() });
dt.Write(datfile, outdat);
DatTool.Write(datfile, outdat);
}
}
}

View File

@@ -50,16 +50,13 @@ namespace RombaSharp.Features
return;
}
// Get the DatTool for parsing
DatTool dt = new DatTool();
// Create the encapsulating datfile
DatFile datfile = dt.CreateAndParse(olddat);
DatFile datfile = DatTool.CreateAndParse(olddat);
// Diff against the new datfile
DatFile intDat = dt.CreateAndParse(newdat);
DatFile intDat = DatTool.CreateAndParse(newdat);
datfile.DiffAgainst(intDat, false);
dt.Write(intDat, outdat);
DatTool.Write(intDat, outdat);
}
}
}

View File

@@ -33,14 +33,11 @@ namespace RombaSharp.Features
// Create the new output directory if it doesn't exist
DirectoryExtensions.Ensure(Path.Combine(Globals.ExeDir, "out"), create: true);
// Get the DatTool for parsing
DatTool dt = new DatTool();
// Now that we have the dictionary, we can loop through and output to a new folder for each
foreach (string key in foundDats.Keys)
{
// Get the DAT file associated with the key
DatFile datFile = dt.CreateAndParse(Path.Combine(_dats, foundDats[key]));
DatFile datFile = DatTool.CreateAndParse(Path.Combine(_dats, foundDats[key]));
// Now loop through and see if all of the hash combinations exist in the database
/* ended here */

View File

@@ -60,10 +60,9 @@ contents of any changed dats.";
Directory.CreateDirectory(_dats);
// First get a list of SHA-1's from the input DATs
DatTool dt = new DatTool();
DatFile datroot = DatFile.Create();
datroot.Header.Type = "SuperDAT";
dt.PopulateFromDir(datroot, _dats, asFiles: TreatAsFile.NonArchive);
DatTool.PopulateFromDir(datroot, _dats, asFiles: TreatAsFile.NonArchive);
datroot.Items.BucketBy(Field.DatItem_SHA1, DedupeType.None);
// Create a List of dat hashes in the database (SHA-1)

View File

@@ -63,9 +63,8 @@ namespace RombaSharp.Features
}
// Now rescan the depot itself
DatTool dt = new DatTool();
DatFile depot = DatFile.Create();
dt.PopulateFromDir(depot, depotname, asFiles: TreatAsFile.NonArchive);
DatTool.PopulateFromDir(depot, depotname, asFiles: TreatAsFile.NonArchive);
depot.Items.BucketBy(Field.DatItem_SHA1, DedupeType.None);
// Set the base queries to use

View File

@@ -429,7 +429,6 @@ namespace SabreTools.DatFiles
/// <returns>List of DatHeader objects representing headers</returns>
public List<DatHeader> PopulateUserData(List<ParentablePath> inputs)
{
DatTool dt = new DatTool();
DatFile[] datFiles = new DatFile[inputs.Count];
InternalStopwatch watch = new InternalStopwatch("Processing individual DATs");
@@ -439,7 +438,7 @@ namespace SabreTools.DatFiles
var input = inputs[i];
logger.User($"Adding DAT: {input.CurrentPath}");
datFiles[i] = Create(Header.CloneFiltering());
dt.ParseInto(datFiles[i], input, i, keep: true);
DatTool.ParseInto(datFiles[i], input, i, keep: true);
});
watch.Stop();

View File

@@ -15,7 +15,6 @@ namespace SabreTools.DatFiles
{
// TODO: See if any of the methods can be broken up a bit more neatly
// TODO: See if any of this can be more stateful given the inputted DatFile
// TODO: Re-evaluate if these should be made static instead of instanced
public partial class DatTool
{
/// <summary>
@@ -27,7 +26,7 @@ namespace SabreTools.DatFiles
/// <param name="skipFileType">Type of files that should be skipped</param>
/// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param>
/// <param name="hashes">Hashes to include in the information</param>
public bool PopulateFromDir(
public static bool PopulateFromDir(
DatFile datFile,
string basePath,
TreatAsFile asFiles = 0x00,
@@ -102,7 +101,7 @@ namespace SabreTools.DatFiles
/// <param name="skipFileType">Type of files that should be skipped</param>
/// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param>
/// <param name="hashes">Hashes to include in the information</param>
private void CheckFileForHashes(
private static void CheckFileForHashes(
DatFile datFile,
string item,
string basePath,
@@ -176,7 +175,7 @@ namespace SabreTools.DatFiles
/// <param name="datFile">Current DatFile object to add to</param>
/// <param name="item">Filename of the item to be checked</param>
/// <returns>True if we checked a depot file, false otherwise</returns>
private bool CheckDepotFile(DatFile datFile, string item)
private static bool CheckDepotFile(DatFile datFile, string item)
{
// If we're not in Depot mode, return false
if (datFile.Header.OutputDepot?.IsActive != true)
@@ -210,7 +209,7 @@ namespace SabreTools.DatFiles
/// <param name="item">File to be added</param>
/// <param name="basePath">Path the represents the parent directory</param>
/// <param name="extracted">List of BaseFiles representing the internal files</param>
private void ProcessArchive(DatFile datFile, string item, string basePath, List<BaseFile> extracted)
private static void ProcessArchive(DatFile datFile, string item, string basePath, List<BaseFile> extracted)
{
// Get the parent path for all items
string parent = (Path.GetDirectoryName(Path.GetFullPath(item)) + Path.DirectorySeparatorChar).Remove(0, basePath.Length) + Path.GetFileNameWithoutExtension(item);
@@ -230,7 +229,7 @@ namespace SabreTools.DatFiles
/// <param name="item">File containing the blanks</param>
/// <param name="basePath">Path the represents the parent directory</param>
/// <param name="archive">BaseArchive to get blanks from</param>
private void ProcessArchiveBlanks(DatFile datFile, string item, string basePath, BaseArchive archive)
private static void ProcessArchiveBlanks(DatFile datFile, string item, string basePath, BaseArchive archive)
{
List<string> empties = new List<string>();
@@ -254,7 +253,7 @@ namespace SabreTools.DatFiles
/// </summary>
/// <param name="datFile">Current DatFile object to add to</param>
/// <param name="basePath">Path the represents the parent directory</param>
private void ProcessDirectoryBlanks(DatFile datFile, string basePath)
private static void ProcessDirectoryBlanks(DatFile datFile, string basePath)
{
// If we're in depot mode, we don't process blanks
if (datFile.Header.OutputDepot?.IsActive == true)
@@ -301,7 +300,7 @@ namespace SabreTools.DatFiles
/// <param name="basePath">Path the represents the parent directory</param>
/// <param name="hashes">Hashes to include in the information</param>
/// <param name="asFiles">TreatAsFiles representing CHD and Archive scanning</param>
private void ProcessFile(DatFile datFile, string item, string basePath, Hash hashes, TreatAsFile asFiles)
private static void ProcessFile(DatFile datFile, string item, string basePath, Hash hashes, TreatAsFile asFiles)
{
logger.Verbose($"'{Path.GetFileName(item)}' treated like a file");
BaseFile baseFile = BaseFile.GetInfo(item, header: datFile.Header.HeaderSkipper, hashes: hashes, asFiles: asFiles);
@@ -317,7 +316,7 @@ namespace SabreTools.DatFiles
/// <param name="item">Rom data to be used to write to file</param>
/// <param name="basepath">Path the represents the parent directory</param>
/// <param name="parent">Parent game to be used</param>
private void ProcessFileHelper(DatFile datFile, string item, DatItem datItem, string basepath, string parent)
private static void ProcessFileHelper(DatFile datFile, string item, DatItem datItem, string basepath, string parent)
{
// If we didn't get an accepted parsed type somehow, cancel out
List<ItemType> parsed = new List<ItemType> { ItemType.Disk, ItemType.Media, ItemType.Rom };
@@ -357,7 +356,7 @@ namespace SabreTools.DatFiles
/// <param name="item">Item name to use</param>
/// <param name="parent">Parent name to use</param>
/// <param name="basepath">Base path to use</param>
private void SetDatItemInfo(DatFile datFile, DatItem datItem, string item, string parent, string basepath)
private static void SetDatItemInfo(DatFile datFile, DatItem datItem, string item, string parent, string basepath)
{
// Get the data to be added as game and item names
string machineName, itemName;

View File

@@ -8,7 +8,6 @@ using SabreTools.IO;
// This file represents all methods related to parsing from a file
namespace SabreTools.DatFiles
{
// TODO: Re-evaluate if these should be made static instead of instanced
public partial class DatTool
{
/// <summary>
@@ -16,7 +15,7 @@ namespace SabreTools.DatFiles
/// </summary>
/// <param name="filename">Name of the file to be parsed</param>
/// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
public DatFile CreateAndParse(string filename, bool throwOnError = false)
public static DatFile CreateAndParse(string filename, bool throwOnError = false)
{
DatFile datFile = DatFile.Create();
ParseInto(datFile, new ParentablePath(filename), throwOnError: throwOnError);
@@ -33,7 +32,7 @@ namespace SabreTools.DatFiles
/// <param name="keepext">True if original extension should be kept, false otherwise (default)</param>
/// <param name="quotes">True if quotes are assumed in supported types (default), false otherwise</param>
/// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
public void ParseInto(
public static void ParseInto(
DatFile datFile,
string filename,
int indexId = 0,
@@ -56,7 +55,7 @@ namespace SabreTools.DatFiles
/// <param name="keepext">True if original extension should be kept, false otherwise (default)</param>
/// <param name="quotes">True if quotes are assumed in supported types (default), false otherwise</param>
/// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
public void ParseInto(
public static void ParseInto(
DatFile datFile,
ParentablePath input,
int indexId = 0,
@@ -101,7 +100,7 @@ namespace SabreTools.DatFiles
/// </summary>
/// <param name="filename">Name of the file to be parsed</param>
/// <returns>The DatFormat corresponding to the DAT</returns>
private DatFormat GetDatFormat(string filename)
private static DatFormat GetDatFormat(string filename)
{
// Limit the output formats based on extension
if (!PathExtensions.HasValidDatExtension(filename))

View File

@@ -13,7 +13,6 @@ using SabreTools.Skippers;
// This file represents all methods related to rebuilding from a DatFile
namespace SabreTools.DatFiles
{
// TODO: Re-evaluate if these should be made static instead of instanced
public partial class DatTool
{
/// <summary>
@@ -27,7 +26,7 @@ namespace SabreTools.DatFiles
/// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
/// <param name="outputFormat">Output format that files should be written to</param>
/// <returns>True if rebuilding was a success, false otherwise</returns>
public bool RebuildDepot(
public static bool RebuildDepot(
DatFile datFile,
List<string> inputs,
string outDir,
@@ -163,7 +162,7 @@ namespace SabreTools.DatFiles
/// <param name="outputFormat">Output format that files should be written to</param>
/// <param name="asFiles">TreatAsFiles representing special format scanning</param>
/// <returns>True if rebuilding was a success, false otherwise</returns>
public bool RebuildGeneric(
public static bool RebuildGeneric(
DatFile datFile,
List<string> inputs,
string outDir,
@@ -255,7 +254,7 @@ namespace SabreTools.DatFiles
/// <param name="outputFormat">Output format that files should be written to</param>
/// <param name="asFiles">TreatAsFiles representing special format scanning</param>
/// <returns>True if the file was used to rebuild, false otherwise</returns>
private bool RebuildGenericHelper(
private static bool RebuildGenericHelper(
DatFile datFile,
string file,
string outDir,
@@ -331,7 +330,7 @@ namespace SabreTools.DatFiles
/// <param name="outputFormat">Output format that files should be written to</param>
/// <param name="isZip">True if the input file is an archive, false if the file is TGZ/TXZ, null otherwise</param>
/// <returns>True if the file was able to be rebuilt, false otherwise</returns>
private bool RebuildIndividualFile(
private static bool RebuildIndividualFile(
DatFile datFile,
DatItem datItem,
string file,
@@ -470,7 +469,7 @@ namespace SabreTools.DatFiles
/// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
/// <param name="dupes">Output list of duplicate items to rebuild to</param>
/// <returns>True if the item should be rebuilt, false otherwise</returns>
private bool ShouldRebuild(DatFile datFile, DatItem datItem, Stream stream, bool inverse, out List<DatItem> dupes)
private static bool ShouldRebuild(DatFile datFile, DatItem datItem, Stream stream, bool inverse, out List<DatItem> dupes)
{
// Find if the file has duplicates in the DAT
dupes = datFile.Items.GetDuplicates(datItem);
@@ -526,7 +525,7 @@ namespace SabreTools.DatFiles
/// <param name="outputFormat">Output format that files should be written to</param>
/// <param name="isZip">True if the input file is an archive, false if the file is TGZ, null otherwise</param>
/// <returns>True if rebuilt properly, false otherwise</returns>
private bool RebuildTorrentGzip(DatFile datFile, DatItem datItem, string file, string outDir, OutputFormat outputFormat, bool? isZip)
private static bool RebuildTorrentGzip(DatFile datFile, DatItem datItem, string file, string outDir, OutputFormat outputFormat, bool? isZip)
{
// If we have a very specific TGZ->TGZ case, just copy it accordingly
GZipArchive tgz = new GZipArchive(file);
@@ -570,7 +569,7 @@ namespace SabreTools.DatFiles
/// <param name="outputFormat">Output format that files should be written to</param>
/// <param name="isZip">True if the input file is an archive, false if the file is TXZ, null otherwise</param>
/// <returns>True if rebuilt properly, false otherwise</returns>
private bool RebuildTorrentXz(DatFile datFile, DatItem datItem, string file, string outDir, OutputFormat outputFormat, bool? isZip)
private static bool RebuildTorrentXz(DatFile datFile, DatItem datItem, string file, string outDir, OutputFormat outputFormat, bool? isZip)
{
// If we have a very specific TXZ->TXZ case, just copy it accordingly
XZArchive txz = new XZArchive(file);
@@ -612,7 +611,7 @@ namespace SabreTools.DatFiles
/// <param name="isZip">Non-null if the input file is an archive</param>
/// <param name="stream">Output stream representing the opened file</param>
/// <returns>True if the stream opening succeeded, false otherwise</returns>
private bool GetFileStream(DatItem datItem, string file, bool? isZip, out Stream stream)
private static bool GetFileStream(DatItem datItem, string file, bool? isZip, out Stream stream)
{
// Get a generic stream for the file
stream = null;
@@ -644,7 +643,7 @@ namespace SabreTools.DatFiles
/// <summary>
/// Get the default OutputFormat associated with each PackingFlag
/// </summary>
private OutputFormat GetOutputFormat(PackingFlag packing)
private static OutputFormat GetOutputFormat(PackingFlag packing)
{
#if NET_FRAMEWORK
switch (packing)
@@ -680,7 +679,7 @@ namespace SabreTools.DatFiles
/// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
/// <param name="outputFormat">Output format that files should be written to</param>
/// <returns>Folder configured with proper flags</returns>
private Folder GetPreconfiguredFolder(DatFile datFile, bool date, OutputFormat outputFormat)
private static Folder GetPreconfiguredFolder(DatFile datFile, bool date, OutputFormat outputFormat)
{
Folder outputArchive = Folder.Create(outputFormat);
if (outputArchive is BaseArchive baseArchive && date)
@@ -700,7 +699,7 @@ namespace SabreTools.DatFiles
/// </summary>
/// <param name="itemType">OutputFormat to get value from</param>
/// <returns>String value corresponding to the OutputFormat</returns>
private string FromOutputFormat(OutputFormat itemType)
private static string FromOutputFormat(OutputFormat itemType)
{
#if NET_FRAMEWORK
switch (itemType)

View File

@@ -13,7 +13,6 @@ using NaturalSort;
// This file represents all methods related to splitting a DatFile into multiple
namespace SabreTools.DatFiles
{
// TODO: Re-evaluate if these should be made static instead of instanced
// TODO: Implement Level split
public partial class DatTool
{
@@ -24,7 +23,7 @@ namespace SabreTools.DatFiles
/// <param name="extA">List of extensions to split on (first DAT)</param>
/// <param name="extB">List of extensions to split on (second DAT)</param>
/// <returns>Extension Set A and Extension Set B DatFiles</returns>
public (DatFile extADat, DatFile extBDat) SplitByExtension(DatFile datFile, List<string> extA, List<string> extB)
public static (DatFile extADat, DatFile extBDat) SplitByExtension(DatFile datFile, List<string> extA, List<string> extB)
{
// If roms is empty, return false
if (datFile.Items.TotalCount == 0)
@@ -79,7 +78,7 @@ namespace SabreTools.DatFiles
/// </summary>
/// <param name="datFile">Current DatFile object to split</param>
/// <returns>Dictionary of Field to DatFile mappings</returns>
public Dictionary<Field, DatFile> SplitByHash(DatFile datFile)
public static Dictionary<Field, DatFile> SplitByHash(DatFile datFile)
{
// Create each of the respective output DATs
logger.User("Creating and populating new DATs");
@@ -219,7 +218,7 @@ namespace SabreTools.DatFiles
/// <param name="shortname">True if short names should be used, false otherwise</param>
/// <param name="basedat">True if original filenames should be used as the base for output filename, false otherwise</param>
/// <returns>True if split succeeded, false otherwise</returns>
public bool SplitByLevel(DatFile datFile, string outDir, bool shortname, bool basedat)
public static bool SplitByLevel(DatFile datFile, string outDir, bool shortname, bool basedat)
{
// First, bucket by games so that we can do the right thing
datFile.Items.BucketBy(Field.Machine_Name, DedupeType.None, lower: false, norename: true);
@@ -264,7 +263,7 @@ namespace SabreTools.DatFiles
/// <param name="a">First string to compare</param>
/// <param name="b">Second string to compare</param>
/// <returns>-1 for a coming before b, 0 for a == b, 1 for a coming after b</returns>
private int SplitByLevelSort(string a, string b)
private static int SplitByLevelSort(string a, string b)
{
NaturalComparer nc = new NaturalComparer();
int adeep = a.Count(c => c == '/' || c == '\\');
@@ -284,7 +283,7 @@ namespace SabreTools.DatFiles
/// <param name="outDir">Directory to write out to</param>
/// <param name="shortname">True if short naming scheme should be used, false otherwise</param>
/// <param name="restore">True if original filenames should be used as the base for output filename, false otherwise</param>
private void SplitByLevelHelper(DatFile datFile, DatFile newDatFile, string outDir, bool shortname, bool restore)
private static void SplitByLevelHelper(DatFile datFile, DatFile newDatFile, string outDir, bool shortname, bool restore)
{
// Get the name from the DAT to use separately
string name = newDatFile.Header.Name;
@@ -313,7 +312,7 @@ namespace SabreTools.DatFiles
/// <param name="datFile">Current DatFile object to split</param>
/// <param name="radix">Long value representing the split point</param>
/// <returns>Less Than and Greater Than DatFiles</returns>
public (DatFile lessThan, DatFile greaterThan) SplitBySize(DatFile datFile, long radix)
public static (DatFile lessThan, DatFile greaterThan) SplitBySize(DatFile datFile, long radix)
{
// Create each of the respective output DATs
logger.User("Creating and populating new DATs");
@@ -361,7 +360,7 @@ namespace SabreTools.DatFiles
/// </summary>
/// <param name="datFile">Current DatFile object to split</param>
/// <returns>Dictionary of ItemType to DatFile mappings</returns>
public Dictionary<ItemType, DatFile> SplitByType(DatFile datFile)
public static Dictionary<ItemType, DatFile> SplitByType(DatFile datFile)
{
// Create each of the respective output DATs
logger.User("Creating and populating new DATs");
@@ -403,7 +402,7 @@ namespace SabreTools.DatFiles
/// <param name="indexDat">DatFile to add found items to</param>
/// <param name="itemType">ItemType to retrieve items for</param>
/// <returns>DatFile containing all items with the ItemType</returns>
private void FillWithItemType(DatFile datFile, DatFile indexDat, ItemType itemType)
private static void FillWithItemType(DatFile datFile, DatFile indexDat, ItemType itemType)
{
// Loop through and add the items for this index to the output
Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>

View File

@@ -3,7 +3,6 @@ using System.IO;
using System.Linq;
using SabreTools.Core;
using SabreTools.DatFiles.Reports;
using SabreTools.DatItems;
using SabreTools.FileTypes;
using SabreTools.IO;
@@ -12,7 +11,6 @@ using SabreTools.Logging;
// This file represents all methods related to verifying with a DatFile
namespace SabreTools.DatFiles
{
// TODO: Re-evaluate if these should be made static instead of instanced
public partial class DatTool
{
/// <summary>
@@ -21,7 +19,7 @@ namespace SabreTools.DatFiles
/// <param name="datFile">Current DatFile object to verify against</param>
/// <param name="inputs">List of input directories to compare against</param>
/// <returns>True if verification was a success, false otherwise</returns>
public bool VerifyDepot(DatFile datFile, List<string> inputs)
public static bool VerifyDepot(DatFile datFile, List<string> inputs)
{
bool success = true;
@@ -104,7 +102,7 @@ namespace SabreTools.DatFiles
/// <param name="datFile">Current DatFile object to verify against</param>
/// <param name="hashOnly">True if only hashes should be checked, false for full file information</param>
/// <returns>True if verification was a success, false otherwise</returns>
public bool VerifyGeneric(DatFile datFile, bool hashOnly)
public static bool VerifyGeneric(DatFile datFile, bool hashOnly)
{
bool success = true;

View File

@@ -8,7 +8,6 @@ using SabreTools.IO;
// This file represents all methods related to writing to a file
namespace SabreTools.DatFiles
{
// TODO: Re-evaluate if these should be made static instead of instanced
public partial class DatTool
{
/// <summary>
@@ -21,7 +20,7 @@ namespace SabreTools.DatFiles
/// <param name="quotes">True if quotes are assumed in supported types (default), false otherwise</param>
/// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
/// <returns>True if the DAT was written correctly, false otherwise</returns>
public bool Write(
public static bool Write(
DatFile datFile,
string outDir,
bool overwrite = true,
@@ -47,7 +46,7 @@ namespace SabreTools.DatFiles
}
// Make sure that the three essential fields are filled in
EnsureHeaderFields();
EnsureHeaderFields(datFile);
// Bucket roms by game name, if not already
datFile.Items.BucketBy(Field.Machine_Name, DedupeType.None);
@@ -90,7 +89,7 @@ namespace SabreTools.DatFiles
/// Ensure that FileName, Name, and Description are filled with some value
/// </summary>
/// <param name="datFile">Current DatFile object to write from</param>
private void EnsureHeaderFields(DatFile datFile)
private static void EnsureHeaderFields(DatFile datFile)
{
// Empty FileName
if (string.IsNullOrWhiteSpace(datFile.Header.FileName))
@@ -127,7 +126,7 @@ namespace SabreTools.DatFiles
/// </summary>
/// <param name="datFile">Current DatFile object to write from</param>
/// <returns>True if there are any writable items, false otherwise</returns>
private bool HasWritable(DatFile datFile)
private static bool HasWritable(DatFile datFile)
{
// Force a statistics recheck, just in case
datFile.Items.RecalculateStats();

View File

@@ -2,7 +2,6 @@ using SabreTools.Logging;
// TODO: What sort of internal state should this have? Would a single DatFile be appropriate?
// TODO: How much of the stuff currently in DatFile should be moved here?
// TODO: Can things like CreateAndParse be moved here?
namespace SabreTools.DatFiles
{
/// <summary>
@@ -15,7 +14,7 @@ namespace SabreTools.DatFiles
/// <summary>
/// Logging object
/// </summary>
protected Logger logger;
private static readonly Logger logger = new Logger();
#endregion
}

View File

@@ -1395,9 +1395,6 @@ namespace SabreTools.DatFiles
string basepath = null;
ItemDictionary dirStats = new ItemDictionary();
// Get the DatTool for parsing
DatTool dt = new DatTool();
// Now process each of the input files
foreach (ParentablePath file in files)
{
@@ -1428,7 +1425,7 @@ namespace SabreTools.DatFiles
staticLogger.Verbose($"Beginning stat collection for '{file.CurrentPath}'");
List<string> games = new List<string>();
DatFile datdata = dt.CreateAndParse(file.CurrentPath);
DatFile datdata = DatTool.CreateAndParse(file.CurrentPath);
datdata.Items.BucketBy(Field.Machine_Name, DedupeType.None, norename: true);
// Output single DAT stats (if asked)

View File

@@ -67,7 +67,6 @@ Reset the internal state: reset();";
// Each batch file has its own state
int index = 0;
DatTool dt = new DatTool();
DatFile datFile = DatFile.Create();
string outputDirectory = null;
@@ -134,7 +133,7 @@ Reset the internal state: reset();";
// Assume there could be multiple
foreach (ParentablePath datFilePath in datFilePaths)
{
dt.ParseInto(datFile, datFilePath, index++);
DatTool.ParseInto(datFile, datFilePath, index++);
}
break;
@@ -154,7 +153,7 @@ Reset the internal state: reset();";
// Assume there could be multiple
foreach (string input in command.Arguments)
{
dt.PopulateFromDir(datFile, input);
DatTool.PopulateFromDir(datFile, input);
}
// TODO: We might not want to remove higher order hashes in the future
@@ -406,7 +405,7 @@ Reset the internal state: reset();";
}
// Write out the dat with the current state
dt.Write(datFile, outputDirectory, overwrite: overwrite.Value);
DatTool.Write(datFile, outputDirectory, overwrite: overwrite.Value);
break;
// Reset the internal state

View File

@@ -73,7 +73,6 @@ namespace SabreTools.Features
Cleaner.ExcludeFields.Add(Field.DatItem_Date);
// Create a new DATFromDir object and process the inputs
DatTool dt = new DatTool();
DatFile basedat = DatFile.Create(Header);
basedat.Header.Date = DateTime.Now.ToString("yyyy-MM-dd");
@@ -90,7 +89,7 @@ namespace SabreTools.Features
datdata.FillHeaderFromPath(basePath, noAutomaticDate);
// Now populate from the path
bool success = dt.PopulateFromDir(
bool success = DatTool.PopulateFromDir(
datdata,
basePath,
asFiles,
@@ -107,7 +106,7 @@ namespace SabreTools.Features
datdata.ApplyCleaning(Cleaner);
// Write out the file
dt.Write(datdata, OutputDir);
DatTool.Write(datdata, OutputDir);
}
else
{

View File

@@ -84,16 +84,13 @@ namespace SabreTools.Features
var datfiles = GetList(features, DatListValue);
var datfilePaths = DirectoryExtensions.GetFilesOnly(datfiles);
// Get the DatTool for operations
DatTool dt = new DatTool();
// If we are in individual mode, process each DAT on their own, appending the DAT name to the output dir
if (GetBoolean(features, IndividualValue))
{
foreach (ParentablePath datfile in datfilePaths)
{
DatFile datdata = DatFile.Create();
dt.ParseInto(datdata, datfile, int.MaxValue, keep: true);
DatTool.ParseInto(datdata, datfile, int.MaxValue, keep: true);
// Set depot information
datdata.Header.InputDepot = Header.InputDepot.Clone() as DepotInformation;
@@ -106,9 +103,9 @@ namespace SabreTools.Features
// If we have the depot flag, respect it
bool success;
if (Header.InputDepot?.IsActive ?? false)
success = dt.RebuildDepot(datdata, Inputs, Path.Combine(OutputDir, datdata.Header.FileName), date, delete, inverse, outputFormat);
success = DatTool.RebuildDepot(datdata, Inputs, Path.Combine(OutputDir, datdata.Header.FileName), date, delete, inverse, outputFormat);
else
success = dt.RebuildGeneric(datdata, Inputs, Path.Combine(OutputDir, datdata.Header.FileName), quickScan, date, delete, inverse, outputFormat, asFiles);
success = DatTool.RebuildGeneric(datdata, Inputs, Path.Combine(OutputDir, datdata.Header.FileName), quickScan, date, delete, inverse, outputFormat, asFiles);
// If we have a success and we're updating the DAT, write it out
if (success && updateDat)
@@ -117,7 +114,7 @@ namespace SabreTools.Features
datdata.Header.Name = $"fixDAT_{Header.Name}";
datdata.Header.Description = $"fixDAT_{Header.Description}";
datdata.Items.ClearMarked();
dt.Write(datdata, OutputDir);
DatTool.Write(datdata, OutputDir);
}
}
}
@@ -131,7 +128,7 @@ namespace SabreTools.Features
DatFile datdata = DatFile.Create();
foreach (ParentablePath datfile in datfilePaths)
{
dt.ParseInto(datdata, datfile, int.MaxValue, keep: true);
DatTool.ParseInto(datdata, datfile, int.MaxValue, keep: true);
}
// Set depot information
@@ -147,9 +144,9 @@ namespace SabreTools.Features
// If we have the depot flag, respect it
bool success;
if (Header.InputDepot?.IsActive ?? false)
success = dt.RebuildDepot(datdata, Inputs, OutputDir, date, delete, inverse, outputFormat);
success = DatTool.RebuildDepot(datdata, Inputs, OutputDir, date, delete, inverse, outputFormat);
else
success = dt.RebuildGeneric(datdata, Inputs, OutputDir, quickScan, date, delete, inverse, outputFormat, asFiles);
success = DatTool.RebuildGeneric(datdata, Inputs, OutputDir, quickScan, date, delete, inverse, outputFormat, asFiles);
// If we have a success and we're updating the DAT, write it out
if (success && updateDat)
@@ -158,7 +155,7 @@ namespace SabreTools.Features
datdata.Header.Name = $"fixDAT_{Header.Name}";
datdata.Header.Description = $"fixDAT_{Header.Description}";
datdata.Items.ClearMarked();
dt.Write(datdata, OutputDir);
DatTool.Write(datdata, OutputDir);
}
}
}

View File

@@ -50,15 +50,12 @@ namespace SabreTools.Features
// Get only files from the inputs
List<ParentablePath> files = DirectoryExtensions.GetFilesOnly(Inputs, appendparent: true);
// Get the DatTool for operations
DatTool dt = new DatTool();
// Loop over the input files
foreach (ParentablePath file in files)
{
// Create and fill the new DAT
DatFile internalDat = DatFile.Create(Header);
dt.ParseInto(internalDat, file);
DatTool.ParseInto(internalDat, file);
// Get the output directory
OutputDir = file.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));
@@ -66,13 +63,13 @@ namespace SabreTools.Features
// Extension splitting
if (splittingMode.HasFlag(SplittingMode.Extension))
{
(DatFile extADat, DatFile extBDat) = dt.SplitByExtension(internalDat, GetList(features, ExtAListValue), GetList(features, ExtBListValue));
(DatFile extADat, DatFile extBDat) = DatTool.SplitByExtension(internalDat, GetList(features, ExtAListValue), GetList(features, ExtBListValue));
InternalStopwatch watch = new InternalStopwatch("Outputting extension-split DATs");
// Output both possible DatFiles
dt.Write(extADat, OutputDir);
dt.Write(extBDat, OutputDir);
DatTool.Write(extADat, OutputDir);
DatTool.Write(extBDat, OutputDir);
watch.Stop();
}
@@ -80,14 +77,14 @@ namespace SabreTools.Features
// Hash splitting
if (splittingMode.HasFlag(SplittingMode.Hash))
{
Dictionary<Field, DatFile> typeDats = dt.SplitByHash(internalDat);
Dictionary<Field, DatFile> typeDats = DatTool.SplitByHash(internalDat);
InternalStopwatch watch = new InternalStopwatch("Outputting hash-split DATs");
// Loop through each type DatFile
Parallel.ForEach(typeDats.Keys, Globals.ParallelOptions, itemType =>
{
dt.Write(typeDats[itemType], OutputDir);
DatTool.Write(typeDats[itemType], OutputDir);
});
watch.Stop();
@@ -97,7 +94,7 @@ namespace SabreTools.Features
if (splittingMode.HasFlag(SplittingMode.Level))
{
logger.Warning("This feature is not implemented: level-split");
dt.SplitByLevel(
DatTool.SplitByLevel(
internalDat,
OutputDir,
GetBoolean(features, ShortValue),
@@ -107,13 +104,13 @@ namespace SabreTools.Features
// Size splitting
if (splittingMode.HasFlag(SplittingMode.Size))
{
(DatFile lessThan, DatFile greaterThan) = dt.SplitBySize(internalDat, GetInt64(features, RadixInt64Value));
(DatFile lessThan, DatFile greaterThan) = DatTool.SplitBySize(internalDat, GetInt64(features, RadixInt64Value));
InternalStopwatch watch = new InternalStopwatch("Outputting size-split DATs");
// Output both possible DatFiles
dt.Write(lessThan, OutputDir);
dt.Write(greaterThan, OutputDir);
DatTool.Write(lessThan, OutputDir);
DatTool.Write(greaterThan, OutputDir);
watch.Stop();
}
@@ -121,14 +118,14 @@ namespace SabreTools.Features
// Type splitting
if (splittingMode.HasFlag(SplittingMode.Type))
{
Dictionary<ItemType, DatFile> typeDats = dt.SplitByType(internalDat);
Dictionary<ItemType, DatFile> typeDats = DatTool.SplitByType(internalDat);
InternalStopwatch watch = new InternalStopwatch("Outputting ItemType DATs");
// Loop through each type DatFile
Parallel.ForEach(typeDats.Keys, Globals.ParallelOptions, itemType =>
{
dt.Write(typeDats[itemType], OutputDir);
DatTool.Write(typeDats[itemType], OutputDir);
});
watch.Stop();

View File

@@ -152,9 +152,6 @@ namespace SabreTools.Features
List<ParentablePath> inputPaths = DirectoryExtensions.GetFilesOnly(Inputs, appendparent: true);
List<ParentablePath> basePaths = DirectoryExtensions.GetFilesOnly(GetList(features, BaseDatListValue));
// Get the DatTool for parsing
DatTool dt = new DatTool();
// If we're in standard update mode, run through all of the inputs
if (updateMode == UpdateMode.None)
{
@@ -164,7 +161,7 @@ namespace SabreTools.Features
// Create a new base DatFile
DatFile datFile = DatFile.Create(Header);
logger.User($"Processing '{Path.GetFileName(inputPath.CurrentPath)}'");
dt.ParseInto(datFile, inputPath, keep: true,
DatTool.ParseInto(datFile, inputPath, keep: true,
keepext: datFile.Header.DatFormat.HasFlag(DatFormat.TSV)
|| datFile.Header.DatFormat.HasFlag(DatFormat.CSV)
|| datFile.Header.DatFormat.HasFlag(DatFormat.SSV));
@@ -179,7 +176,7 @@ namespace SabreTools.Features
string realOutDir = inputPath.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));
// Try to output the file, overwriting only if it's not in the current directory
dt.Write(datFile, realOutDir, overwrite: GetBoolean(features, InplaceValue));
DatTool.Write(datFile, realOutDir, overwrite: GetBoolean(features, InplaceValue));
});
return;
@@ -219,7 +216,7 @@ namespace SabreTools.Features
DatFile dupeData = userInputDat.DiffDuplicates(inputPaths);
InternalStopwatch watch = new InternalStopwatch("Outputting duplicate DAT");
dt.Write(dupeData, OutputDir, overwrite: false);
DatTool.Write(dupeData, OutputDir, overwrite: false);
watch.Stop();
}
@@ -229,7 +226,7 @@ namespace SabreTools.Features
DatFile outerDiffData = userInputDat.DiffNoDuplicates(inputPaths);
InternalStopwatch watch = new InternalStopwatch("Outputting no duplicate DAT");
dt.Write(outerDiffData, OutputDir, overwrite: false);
DatTool.Write(outerDiffData, OutputDir, overwrite: false);
watch.Stop();
}
@@ -247,7 +244,7 @@ namespace SabreTools.Features
string path = inputPaths[j].GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));
// Try to output the file
dt.Write(datFiles[j], path, overwrite: GetBoolean(features, InplaceValue));
DatTool.Write(datFiles[j], path, overwrite: GetBoolean(features, InplaceValue));
});
watch.Stop();
@@ -283,7 +280,7 @@ namespace SabreTools.Features
string path = inputPaths[j].GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));
// Try to output the file
dt.Write(datFiles[j], path, overwrite: GetBoolean(features, InplaceValue));
DatTool.Write(datFiles[j], path, overwrite: GetBoolean(features, InplaceValue));
});
watch.Stop();
@@ -297,7 +294,7 @@ namespace SabreTools.Features
{
// Parse the path to a new DatFile
DatFile repDat = DatFile.Create(userInputDat.Header.CloneFiltering());
dt.ParseInto(repDat, inputPath, indexId: 1, keep: true);
DatTool.ParseInto(repDat, inputPath, indexId: 1, keep: true);
// Perform additional processing steps
repDat.ApplyExtras(Extras);
@@ -310,7 +307,7 @@ namespace SabreTools.Features
// Finally output the diffed DatFile
string interOutDir = inputPath.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));
dt.Write(repDat, interOutDir, overwrite: GetBoolean(features, InplaceValue));
DatTool.Write(repDat, interOutDir, overwrite: GetBoolean(features, InplaceValue));
});
}
@@ -322,7 +319,7 @@ namespace SabreTools.Features
{
// Parse the path to a new DatFile
DatFile repDat = DatFile.Create(userInputDat.Header.CloneFiltering());
dt.ParseInto(repDat, inputPath, indexId: 1, keep: true);
DatTool.ParseInto(repDat, inputPath, indexId: 1, keep: true);
// Perform additional processing steps
repDat.ApplyExtras(Extras);
@@ -335,7 +332,7 @@ namespace SabreTools.Features
// Finally output the replaced DatFile
string interOutDir = inputPath.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));
dt.Write(repDat, interOutDir, overwrite: GetBoolean(features, InplaceValue));
DatTool.Write(repDat, interOutDir, overwrite: GetBoolean(features, InplaceValue));
});
}
@@ -347,7 +344,7 @@ namespace SabreTools.Features
if (string.Equals(userInputDat.Header.Type, "SuperDAT", StringComparison.OrdinalIgnoreCase))
userInputDat.ApplySuperDAT(inputPaths);
dt.Write(userInputDat, OutputDir);
DatTool.Write(userInputDat, OutputDir);
}
}
}

View File

@@ -51,9 +51,6 @@ namespace SabreTools.Features
bool quickScan = GetBoolean(features, QuickValue);
var splitType = GetSplitType(features);
// Get the DatTool for required operations
DatTool dt = new DatTool();
// If we are in individual mode, process each DAT on their own
if (GetBoolean(features, IndividualValue))
{
@@ -61,7 +58,7 @@ namespace SabreTools.Features
{
// Parse in from the file
DatFile datdata = DatFile.Create();
dt.ParseInto(datdata, datfile, int.MaxValue, keep: true);
DatTool.ParseInto(datdata, datfile, int.MaxValue, keep: true);
// Perform additional processing steps
datdata.ApplyExtras(Extras);
@@ -79,7 +76,7 @@ namespace SabreTools.Features
// If we have the depot flag, respect it
if (Header.InputDepot?.IsActive ?? false)
{
dt.VerifyDepot(datdata, Inputs);
DatTool.VerifyDepot(datdata, Inputs);
}
else
{
@@ -87,15 +84,15 @@ namespace SabreTools.Features
logger.User("Processing files:\n");
foreach (string input in Inputs)
{
dt.PopulateFromDir(datdata, input, asFiles: asFiles, hashes: quickScan ? Hash.CRC : Hash.Standard);
DatTool.PopulateFromDir(datdata, input, asFiles: asFiles, hashes: quickScan ? Hash.CRC : Hash.Standard);
}
dt.VerifyGeneric(datdata, hashOnly);
DatTool.VerifyGeneric(datdata, hashOnly);
}
// Now write out if there are any items left
datdata.WriteStatsToConsole();
dt.Write(datdata, OutputDir);
DatTool.Write(datdata, OutputDir);
}
}
// Otherwise, process all DATs into the same output
@@ -107,7 +104,7 @@ namespace SabreTools.Features
DatFile datdata = DatFile.Create();
foreach (ParentablePath datfile in datfilePaths)
{
dt.ParseInto(datdata, datfile, int.MaxValue, keep: true);
DatTool.ParseInto(datdata, datfile, int.MaxValue, keep: true);
}
// Perform additional processing steps
@@ -128,7 +125,7 @@ namespace SabreTools.Features
// If we have the depot flag, respect it
if (Header.InputDepot?.IsActive ?? false)
{
dt.VerifyDepot(datdata, Inputs);
DatTool.VerifyDepot(datdata, Inputs);
}
else
{
@@ -136,15 +133,15 @@ namespace SabreTools.Features
logger.User("Processing files:\n");
foreach (string input in Inputs)
{
dt.PopulateFromDir(datdata, input, asFiles: asFiles, hashes: quickScan ? Hash.CRC : Hash.Standard);
DatTool.PopulateFromDir(datdata, input, asFiles: asFiles, hashes: quickScan ? Hash.CRC : Hash.Standard);
}
dt.VerifyGeneric(datdata, hashOnly);
DatTool.VerifyGeneric(datdata, hashOnly);
}
// Now write out if there are any items left
datdata.WriteStatsToConsole();
dt.Write(datdata, OutputDir);
DatTool.Write(datdata, OutputDir);
}
}
}