diff --git a/SabreTools.DatFiles/DatFile.cs b/SabreTools.DatFiles/DatFile.cs
index 031d120e..afb7dfb5 100644
--- a/SabreTools.DatFiles/DatFile.cs
+++ b/SabreTools.DatFiles/DatFile.cs
@@ -5,7 +5,6 @@ using System.Xml.Serialization;
using Newtonsoft.Json;
using SabreTools.Core.Filter;
using SabreTools.Core.Tools;
-using SabreTools.DatFiles.Formats;
using SabreTools.DatItems;
using SabreTools.DatItems.Formats;
using SabreTools.Hashing;
@@ -68,61 +67,6 @@ namespace SabreTools.DatFiles
}
}
- /// <summary>
- /// Create a specific type of DatFile to be used based on a format and a base DAT
- /// </summary>
- /// <param name="datFormat">Format of the DAT to be created</param>
- /// <param name="baseDat">DatFile containing the information to use in specific operations</param>
- /// <param name="quotes">For relevant types, assume the usage of quotes</param>
- /// <returns>DatFile of the specific internal type that corresponds to the inputs</returns>
- public static DatFile Create(DatFormat? datFormat = null, DatFile? baseDat = null, bool quotes = true)
- {
- return datFormat switch
- {
- DatFormat.ArchiveDotOrg => new ArchiveDotOrg(baseDat),
- DatFormat.AttractMode => new AttractMode(baseDat),
- DatFormat.ClrMamePro => new ClrMamePro(baseDat, quotes),
- DatFormat.CSV => new CommaSeparatedValue(baseDat),
- DatFormat.DOSCenter => new DosCenter(baseDat),
- DatFormat.EverdriveSMDB => new EverdriveSMDB(baseDat),
- DatFormat.Listrom => new Listrom(baseDat),
- DatFormat.Listxml => new Listxml(baseDat),
- DatFormat.Logiqx => new Logiqx(baseDat, false),
- DatFormat.LogiqxDeprecated => new Logiqx(baseDat, true),
- DatFormat.MissFile => new Missfile(baseDat),
- DatFormat.OfflineList => new OfflineList(baseDat),
- DatFormat.OpenMSX => new OpenMSX(baseDat),
- DatFormat.RedumpMD5 => new Md5File(baseDat),
- DatFormat.RedumpSFV => new SfvFile(baseDat),
- DatFormat.RedumpSHA1 => new Sha1File(baseDat),
- DatFormat.RedumpSHA256 => new Sha256File(baseDat),
- DatFormat.RedumpSHA384 => new Sha384File(baseDat),
- DatFormat.RedumpSHA512 => new Sha512File(baseDat),
- DatFormat.RedumpSpamSum => new SpamSumFile(baseDat),
- DatFormat.RomCenter => new RomCenter(baseDat),
- DatFormat.SabreJSON => new SabreJSON(baseDat),
- DatFormat.SabreXML => new SabreXML(baseDat),
- DatFormat.SoftwareList => new Formats.SoftwareList(baseDat),
- DatFormat.SSV => new SemicolonSeparatedValue(baseDat),
- DatFormat.TSV => new TabSeparatedValue(baseDat),
-
- // We use new-style Logiqx as a backup for generic DatFile
- _ => new Logiqx(baseDat, false),
- };
- }
-
- /// <summary>
- /// Create a new DatFile from an existing DatHeader
- /// </summary>
- /// <param name="datHeader">DatHeader to get the values from</param>
- public static DatFile Create(DatHeader datHeader)
- {
- DatFormat format = datHeader.GetFieldValue<DatFormat>(DatHeader.DatFormatKey);
- DatFile datFile = Create(format);
- datFile.Header = (DatHeader)datHeader.Clone();
- return datFile;
- }
-
/// <summary>
/// Fill the header values based on existing Header and path
/// </summary>
@@ -174,6 +118,15 @@ namespace SabreTools.DatFiles
ItemsDB = new ItemDictionaryDB();
}
+ /// <summary>
+ /// Set the internal header
+ /// </summary>
+ /// <param name="datHeader">Replacement header to be used</param>
+ public void SetHeader(DatHeader datHeader)
+ {
+ Header = (DatHeader)datHeader.Clone();
+ }
+
#endregion
#region Filtering
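
The new SetHeader replaces the header-cloning step that previously lived inside DatFile.Create(DatHeader). A minimal usage sketch, using only types and members that appear in this diff (the format choices are illustrative, not taken from the patch):

    // Sketch: copy one DAT's header onto another without sharing state.
    // DatFormat.Logiqx / DatFormat.ClrMamePro are illustrative choices.
    DatFile source = DatFileTool.CreateDatFile(DatFormat.Logiqx);
    DatFile target = DatFileTool.CreateDatFile(DatFormat.ClrMamePro);

    // SetHeader clones the supplied DatHeader, so later edits to
    // source.Header do not leak into target.Header (or vice versa).
    target.SetHeader(source.Header);
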
diff --git a/SabreTools.DatFiles/DatFileTool.cs b/SabreTools.DatFiles/DatFileTool.cs
index 8f717d22..0dab5208 100644
--- a/SabreTools.DatFiles/DatFileTool.cs
+++ b/SabreTools.DatFiles/DatFileTool.cs
@@ -6,6 +6,7 @@ using System.Linq;
using System.Threading.Tasks;
#endif
using SabreTools.Core.Tools;
+using SabreTools.DatFiles.Formats;
using SabreTools.DatItems;
using SabreTools.DatItems.Formats;
using SabreTools.IO;
@@ -28,6 +29,65 @@ namespace SabreTools.DatFiles
#endregion
+ #region Creation
+
+ /// <summary>
+ /// Create a specific type of DatFile to be used based on a format and a base DAT
+ /// </summary>
+ /// <param name="datFormat">Format of the DAT to be created</param>
+ /// <param name="baseDat">DatFile containing the information to use in specific operations</param>
+ /// <param name="quotes">For relevant types, assume the usage of quotes</param>
+ /// <returns>DatFile of the specific internal type that corresponds to the inputs</returns>
+ public static DatFile CreateDatFile(DatFormat? datFormat = null, DatFile? baseDat = null, bool quotes = true)
+ {
+ return datFormat switch
+ {
+ DatFormat.ArchiveDotOrg => new ArchiveDotOrg(baseDat),
+ DatFormat.AttractMode => new AttractMode(baseDat),
+ DatFormat.ClrMamePro => new ClrMamePro(baseDat, quotes),
+ DatFormat.CSV => new CommaSeparatedValue(baseDat),
+ DatFormat.DOSCenter => new DosCenter(baseDat),
+ DatFormat.EverdriveSMDB => new EverdriveSMDB(baseDat),
+ DatFormat.Listrom => new Listrom(baseDat),
+ DatFormat.Listxml => new Listxml(baseDat),
+ DatFormat.Logiqx => new Logiqx(baseDat, false),
+ DatFormat.LogiqxDeprecated => new Logiqx(baseDat, true),
+ DatFormat.MissFile => new Missfile(baseDat),
+ DatFormat.OfflineList => new OfflineList(baseDat),
+ DatFormat.OpenMSX => new OpenMSX(baseDat),
+ DatFormat.RedumpMD5 => new Md5File(baseDat),
+ DatFormat.RedumpSFV => new SfvFile(baseDat),
+ DatFormat.RedumpSHA1 => new Sha1File(baseDat),
+ DatFormat.RedumpSHA256 => new Sha256File(baseDat),
+ DatFormat.RedumpSHA384 => new Sha384File(baseDat),
+ DatFormat.RedumpSHA512 => new Sha512File(baseDat),
+ DatFormat.RedumpSpamSum => new SpamSumFile(baseDat),
+ DatFormat.RomCenter => new RomCenter(baseDat),
+ DatFormat.SabreJSON => new SabreJSON(baseDat),
+ DatFormat.SabreXML => new SabreXML(baseDat),
+ DatFormat.SoftwareList => new Formats.SoftwareList(baseDat),
+ DatFormat.SSV => new SemicolonSeparatedValue(baseDat),
+ DatFormat.TSV => new TabSeparatedValue(baseDat),
+
+ // We use new-style Logiqx as a backup for generic DatFile
+ _ => new Logiqx(baseDat, false),
+ };
+ }
+
+ /// <summary>
+ /// Create a new DatFile from an existing DatHeader
+ /// </summary>
+ /// <param name="datHeader">DatHeader to get the values from</param>
+ public static DatFile CreateDatFile(DatHeader datHeader)
+ {
+ DatFormat format = datHeader.GetFieldValue<DatFormat>(DatHeader.DatFormatKey);
+ DatFile datFile = CreateDatFile(format);
+ datFile.SetHeader(datHeader);
+ return datFile;
+ }
+
+ #endregion
+
#region Sorting and Merging
/// <summary>
@@ -734,7 +794,7 @@ namespace SabreTools.DatFiles
for (int j = 0; j < datHeaders.Count; j++)
#endif
{
- DatFile diffData = DatFile.Create(datHeaders[j]);
+ DatFile diffData = DatFileTool.CreateDatFile(datHeaders[j]);
diffData.ResetDictionary();
FillWithSourceIndex(datFile, diffData, j);
//FillWithSourceIndexDB(datFile, diffData, j);
@@ -783,7 +843,7 @@ namespace SabreTools.DatFiles
datFile.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, "datFile.All DATs");
string post = " (Duplicates)";
- DatFile dupeData = DatFile.Create(datFile.Header);
+ DatFile dupeData = DatFileTool.CreateDatFile(datFile.Header);
dupeData.Header.SetFieldValue(DatHeader.FileNameKey, dupeData.Header.GetStringFieldValue(DatHeader.FileNameKey) + post);
dupeData.Header.SetFieldValue(Models.Metadata.Header.NameKey, dupeData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + post);
dupeData.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, dupeData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + post);
@@ -861,7 +921,7 @@ namespace SabreTools.DatFiles
datFile.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, "datFile.All DATs");
string post = " (Duplicates)";
- DatFile dupeData = DatFile.Create(datFile.Header);
+ DatFile dupeData = DatFileTool.CreateDatFile(datFile.Header);
dupeData.Header.SetFieldValue(DatHeader.FileNameKey, dupeData.Header.GetStringFieldValue(DatHeader.FileNameKey) + post);
dupeData.Header.SetFieldValue(Models.Metadata.Header.NameKey, dupeData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + post);
dupeData.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, dupeData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + post);
@@ -999,7 +1059,7 @@ namespace SabreTools.DatFiles
#endif
{
string innerpost = $" ({j} - {inputs[j].GetNormalizedFileName(true)} Only)";
- DatFile diffData = DatFile.Create(datFile.Header);
+ DatFile diffData = DatFileTool.CreateDatFile(datFile.Header);
diffData.Header.SetFieldValue(DatHeader.FileNameKey, diffData.Header.GetStringFieldValue(DatHeader.FileNameKey) + innerpost);
diffData.Header.SetFieldValue(Models.Metadata.Header.NameKey, diffData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + innerpost);
diffData.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, diffData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + innerpost);
@@ -1092,7 +1152,7 @@ namespace SabreTools.DatFiles
#endif
{
string innerpost = $" ({j} - {inputs[j].GetNormalizedFileName(true)} Only)";
- DatFile diffData = DatFile.Create(datFile.Header);
+ DatFile diffData = DatFileTool.CreateDatFile(datFile.Header);
diffData.Header.SetFieldValue(DatHeader.FileNameKey, diffData.Header.GetStringFieldValue(DatHeader.FileNameKey) + innerpost);
diffData.Header.SetFieldValue(Models.Metadata.Header.NameKey, diffData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + innerpost);
diffData.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, diffData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + innerpost);
@@ -1218,7 +1278,7 @@ namespace SabreTools.DatFiles
datFile.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, "All DATs");
string post = " (No Duplicates)";
- DatFile outerDiffData = DatFile.Create(datFile.Header);
+ DatFile outerDiffData = DatFileTool.CreateDatFile(datFile.Header);
outerDiffData.Header.SetFieldValue(DatHeader.FileNameKey, outerDiffData.Header.GetStringFieldValue(DatHeader.FileNameKey) + post);
outerDiffData.Header.SetFieldValue(Models.Metadata.Header.NameKey, outerDiffData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + post);
outerDiffData.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, outerDiffData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + post);
@@ -1294,7 +1354,7 @@ namespace SabreTools.DatFiles
datFile.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, "All DATs");
string post = " (No Duplicates)";
- DatFile outerDiffData = DatFile.Create(datFile.Header);
+ DatFile outerDiffData = DatFileTool.CreateDatFile(datFile.Header);
outerDiffData.Header.SetFieldValue(DatHeader.FileNameKey, outerDiffData.Header.GetStringFieldValue(DatHeader.FileNameKey) + post);
outerDiffData.Header.SetFieldValue(Models.Metadata.Header.NameKey, outerDiffData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + post);
outerDiffData.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, outerDiffData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + post);
@@ -1427,7 +1487,7 @@ namespace SabreTools.DatFiles
{
var input = inputs[i];
_staticLogger.User($"Adding DAT: {input.CurrentPath}");
- datFiles[i] = DatFile.Create(datFile.Header.CloneFiltering());
+ datFiles[i] = DatFileTool.CreateDatFile(datFile.Header.CloneFiltering());
Parser.ParseInto(datFiles[i], input, i, keep: true);
#if NET40_OR_GREATER || NETCOREAPP
});
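
For code outside this diff, the migration is mechanical: the factory overloads keep their signatures but move from DatFile to DatFileTool under the CreateDatFile name. A hedged before/after sketch; existingHeader is a hypothetical DatHeader variable, not something defined in the patch:

    // Before: static factories on DatFile itself.
    // DatFile dat = DatFile.Create(DatFormat.Logiqx);
    // DatFile fromHeader = DatFile.Create(existingHeader);

    // After: the same overloads exposed from DatFileTool.
    DatFile dat = DatFileTool.CreateDatFile(DatFormat.Logiqx);
    DatFile fromHeader = DatFileTool.CreateDatFile(existingHeader); // existingHeader: hypothetical DatHeader

    // The DatHeader overload now assigns the header via the public
    // SetHeader, which clones it before use.
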
diff --git a/SabreTools.DatFiles/Parser.cs b/SabreTools.DatFiles/Parser.cs
index aadae024..f9c33fb2 100644
--- a/SabreTools.DatFiles/Parser.cs
+++ b/SabreTools.DatFiles/Parser.cs
@@ -33,9 +33,9 @@ namespace SabreTools.DatFiles
{
// Null filenames are invalid
if (filename == null)
- return DatFile.Create();
+ return DatFileTool.CreateDatFile();
- DatFile datFile = DatFile.Create();
+ DatFile datFile = DatFileTool.CreateDatFile();
ParseInto(datFile, new ParentablePath(filename), statsOnly: statsOnly, throwOnError: throwOnError);
return datFile;
}
@@ -112,7 +112,7 @@ namespace SabreTools.DatFiles
// Now parse the correct type of DAT
try
{
- var parsingDatFile = DatFile.Create(currentPathFormat, datFile, quotes);
+ var parsingDatFile = DatFileTool.CreateDatFile(currentPathFormat, datFile, quotes);
parsingDatFile?.ParseFile(currentPath, indexId, keep, statsOnly: statsOnly, throwOnError: throwOnError);
}
catch (Exception ex) when (!throwOnError)
diff --git a/SabreTools.DatTools/Splitter.cs b/SabreTools.DatTools/Splitter.cs
index 3b687b92..f1d9ceb7 100644
--- a/SabreTools.DatTools/Splitter.cs
+++ b/SabreTools.DatTools/Splitter.cs
@@ -54,12 +54,12 @@ namespace SabreTools.DatTools
string newExtBString = string.Join(",", newExtB);
// Set all of the appropriate outputs for each of the subsets
- DatFile extADat = DatFile.Create(datFile.Header.CloneStandard());
+ DatFile extADat = DatFileTool.CreateDatFile(datFile.Header.CloneStandard());
extADat.Header.SetFieldValue(DatHeader.FileNameKey, extADat.Header.GetStringFieldValue(DatHeader.FileNameKey) + $" ({newExtAString})");
extADat.Header.SetFieldValue(Models.Metadata.Header.NameKey, extADat.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + $" ({newExtAString})");
extADat.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, extADat.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + $" ({newExtAString})");
- DatFile extBDat = DatFile.Create(datFile.Header.CloneStandard());
+ DatFile extBDat = DatFileTool.CreateDatFile(datFile.Header.CloneStandard());
extBDat.Header.SetFieldValue(DatHeader.FileNameKey, extBDat.Header.GetStringFieldValue(DatHeader.FileNameKey) + $" ({newExtBString})");
extBDat.Header.SetFieldValue(Models.Metadata.Header.NameKey, extBDat.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + $" ({newExtBString})");
extBDat.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, extBDat.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + $" ({newExtBString})");
@@ -131,12 +131,12 @@ namespace SabreTools.DatTools
string newExtBString = string.Join(",", newExtB);
// Set all of the appropriate outputs for each of the subsets
- DatFile extADat = DatFile.Create(datFile.Header.CloneStandard());
+ DatFile extADat = DatFileTool.CreateDatFile(datFile.Header.CloneStandard());
extADat.Header.SetFieldValue(DatHeader.FileNameKey, extADat.Header.GetStringFieldValue(DatHeader.FileNameKey) + $" ({newExtAString})");
extADat.Header.SetFieldValue(Models.Metadata.Header.NameKey, extADat.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + $" ({newExtAString})");
extADat.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, extADat.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + $" ({newExtAString})");
- DatFile extBDat = DatFile.Create(datFile.Header.CloneStandard());
+ DatFile extBDat = DatFileTool.CreateDatFile(datFile.Header.CloneStandard());
extBDat.Header.SetFieldValue(DatHeader.FileNameKey, extBDat.Header.GetStringFieldValue(DatHeader.FileNameKey) + $" ({newExtBString})");
extBDat.Header.SetFieldValue(Models.Metadata.Header.NameKey, extBDat.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + $" ({newExtBString})");
extBDat.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, extBDat.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + $" ({newExtBString})");
@@ -232,7 +232,7 @@ namespace SabreTools.DatTools
Dictionary fieldDats = [];
foreach (var kvp in mappings)
{
- fieldDats[kvp.Key] = DatFile.Create(datFile.Header.CloneStandard());
+ fieldDats[kvp.Key] = DatFileTool.CreateDatFile(datFile.Header.CloneStandard());
fieldDats[kvp.Key].Header.SetFieldValue(DatHeader.FileNameKey, fieldDats[kvp.Key].Header.GetStringFieldValue(DatHeader.FileNameKey) + kvp.Value);
fieldDats[kvp.Key].Header.SetFieldValue(Models.Metadata.Header.NameKey, fieldDats[kvp.Key].Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + kvp.Value);
fieldDats[kvp.Key].Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, fieldDats[kvp.Key].Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + kvp.Value);
@@ -343,7 +343,7 @@ namespace SabreTools.DatTools
Dictionary fieldDats = [];
foreach (var kvp in mappings)
{
- fieldDats[kvp.Key] = DatFile.Create(datFile.Header.CloneStandard());
+ fieldDats[kvp.Key] = DatFileTool.CreateDatFile(datFile.Header.CloneStandard());
fieldDats[kvp.Key].Header.SetFieldValue(DatHeader.FileNameKey, fieldDats[kvp.Key].Header.GetStringFieldValue(DatHeader.FileNameKey) + kvp.Value);
fieldDats[kvp.Key].Header.SetFieldValue(Models.Metadata.Header.NameKey, fieldDats[kvp.Key].Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + kvp.Value);
fieldDats[kvp.Key].Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, fieldDats[kvp.Key].Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + kvp.Value);
@@ -480,7 +480,7 @@ namespace SabreTools.DatTools
datFile.Items.BucketBy(ItemKey.Machine, DedupeType.None, lower: false, norename: true);
// Create a temporary DAT to add things to
- DatFile tempDat = DatFile.Create(datFile.Header);
+ DatFile tempDat = DatFileTool.CreateDatFile(datFile.Header);
tempDat.Header.SetFieldValue(Models.Metadata.Header.NameKey, null);
// Sort the input keys
@@ -500,7 +500,7 @@ namespace SabreTools.DatTools
if (tempDat.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) != null && tempDat.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) != Path.GetDirectoryName(key))
{
// Reset the DAT for the next items
- tempDat = DatFile.Create(datFile.Header);
+ tempDat = DatFileTool.CreateDatFile(datFile.Header);
tempDat.Header.SetFieldValue(Models.Metadata.Header.NameKey, null);
}
@@ -603,12 +603,12 @@ namespace SabreTools.DatTools
// Create each of the respective output DATs
InternalStopwatch watch = new($"Splitting DAT by size");
- DatFile lessThan = DatFile.Create(datFile.Header.CloneStandard());
+ DatFile lessThan = DatFileTool.CreateDatFile(datFile.Header.CloneStandard());
lessThan.Header.SetFieldValue(DatHeader.FileNameKey, lessThan.Header.GetStringFieldValue(DatHeader.FileNameKey) + $" (less than {radix})");
lessThan.Header.SetFieldValue(Models.Metadata.Header.NameKey, lessThan.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + $" (less than {radix})");
lessThan.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, lessThan.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + $" (less than {radix})");
- DatFile greaterThan = DatFile.Create(datFile.Header.CloneStandard());
+ DatFile greaterThan = DatFileTool.CreateDatFile(datFile.Header.CloneStandard());
greaterThan.Header.SetFieldValue(DatHeader.FileNameKey, greaterThan.Header.GetStringFieldValue(DatHeader.FileNameKey) + $" (equal-greater than {radix})");
greaterThan.Header.SetFieldValue(Models.Metadata.Header.NameKey, greaterThan.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + $" (equal-greater than {radix})");
greaterThan.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, greaterThan.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + $" (equal-greater than {radix})");
@@ -669,12 +669,12 @@ namespace SabreTools.DatTools
// Create each of the respective output DATs
var watch = new InternalStopwatch($"Splitting DAT by size");
- DatFile lessThan = DatFile.Create(datFile.Header.CloneStandard());
+ DatFile lessThan = DatFileTool.CreateDatFile(datFile.Header.CloneStandard());
lessThan.Header.SetFieldValue(DatHeader.FileNameKey, lessThan.Header.GetStringFieldValue(DatHeader.FileNameKey) + $" (less than {radix})");
lessThan.Header.SetFieldValue(Models.Metadata.Header.NameKey, lessThan.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + $" (less than {radix})");
lessThan.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, lessThan.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + $" (less than {radix})");
- DatFile greaterThan = DatFile.Create(datFile.Header.CloneStandard());
+ DatFile greaterThan = DatFileTool.CreateDatFile(datFile.Header.CloneStandard());
greaterThan.Header.SetFieldValue(DatHeader.FileNameKey, greaterThan.Header.GetStringFieldValue(DatHeader.FileNameKey) + $" (equal-greater than {radix})");
greaterThan.Header.SetFieldValue(Models.Metadata.Header.NameKey, greaterThan.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + $" (equal-greater than {radix})");
greaterThan.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, greaterThan.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + $" (equal-greater than {radix})");
@@ -772,7 +772,7 @@ namespace SabreTools.DatTools
// Initialize everything
long currentSize = 0;
long currentIndex = 0;
- DatFile currentDat = DatFile.Create(datFile.Header.CloneStandard());
+ DatFile currentDat = DatFileTool.CreateDatFile(datFile.Header.CloneStandard());
currentDat.Header.SetFieldValue(DatHeader.FileNameKey, currentDat.Header.GetStringFieldValue(DatHeader.FileNameKey) + $"_{currentIndex}");
currentDat.Header.SetFieldValue(Models.Metadata.Header.NameKey, currentDat.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + $"_{currentIndex}");
currentDat.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, currentDat.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + $"_{currentIndex}");
@@ -815,7 +815,7 @@ namespace SabreTools.DatTools
datFiles.Add(currentDat);
currentSize = 0;
currentIndex++;
- currentDat = DatFile.Create(datFile.Header.CloneStandard());
+ currentDat = DatFileTool.CreateDatFile(datFile.Header.CloneStandard());
currentDat.Header.SetFieldValue(DatHeader.FileNameKey, currentDat.Header.GetStringFieldValue(DatHeader.FileNameKey) + $"_{currentIndex}");
currentDat.Header.SetFieldValue(Models.Metadata.Header.NameKey, currentDat.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + $"_{currentIndex}");
currentDat.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, currentDat.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + $"_{currentIndex}");
@@ -859,7 +859,7 @@ namespace SabreTools.DatTools
// Setup all of the DatFiles
foreach (ItemType itemType in outputTypes)
{
- typeDats[itemType] = DatFile.Create(datFile.Header.CloneStandard());
+ typeDats[itemType] = DatFileTool.CreateDatFile(datFile.Header.CloneStandard());
typeDats[itemType].Header.SetFieldValue(DatHeader.FileNameKey, typeDats[itemType].Header.GetStringFieldValue(DatHeader.FileNameKey) + $" ({itemType})");
typeDats[itemType].Header.SetFieldValue(Models.Metadata.Header.NameKey, typeDats[itemType].Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + $" ({itemType})");
typeDats[itemType].Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, typeDats[itemType].Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + $" ({itemType})");
diff --git a/SabreTools.DatTools/Writer.cs b/SabreTools.DatTools/Writer.cs
index ee8c072f..d2dfb986 100644
--- a/SabreTools.DatTools/Writer.cs
+++ b/SabreTools.DatTools/Writer.cs
@@ -90,7 +90,7 @@ namespace SabreTools.DatTools
string outfile = outfiles[datFormat];
try
{
- DatFile.Create(datFormat, datFile, quotes)?.WriteToFile(outfile, ignoreblanks, throwOnError);
+ DatFileTool.CreateDatFile(datFormat, datFile, quotes)?.WriteToFile(outfile, ignoreblanks, throwOnError);
}
catch (Exception ex) when (!throwOnError)
{
diff --git a/SabreTools/Features/Batch.cs b/SabreTools/Features/Batch.cs
index 185d1ad2..bf9e6128 100644
--- a/SabreTools/Features/Batch.cs
+++ b/SabreTools/Features/Batch.cs
@@ -901,7 +901,7 @@ Reset the internal state: reset();";
///
private class BatchState
{
- public DatFile DatFile { get; set; } = DatFile.Create();
+ public DatFile DatFile { get; set; } = DatFileTool.CreateDatFile();
public int Index { get; set; } = 0;
public string? OutputDirectory { get; set; } = null;
@@ -911,7 +911,7 @@ Reset the internal state: reset();";
public void Reset()
{
Index = 0;
- DatFile = DatFile.Create();
+ DatFile = DatFileTool.CreateDatFile();
OutputDirectory = null;
}
}
diff --git a/SabreTools/Features/DatFromDir.cs b/SabreTools/Features/DatFromDir.cs
index 0759bfac..aecdc236 100644
--- a/SabreTools/Features/DatFromDir.cs
+++ b/SabreTools/Features/DatFromDir.cs
@@ -71,7 +71,7 @@ namespace SabreTools.Features
Remover!.PopulateExclusionsFromList(["DatItem.Date"]);
// Create a new DATFromDir object and process the inputs
- DatFile basedat = DatFile.Create(Header!);
+ DatFile basedat = DatFileTool.CreateDatFile(Header!);
basedat.Header.SetFieldValue(Models.Metadata.Header.DateKey, DateTime.Now.ToString("yyyy-MM-dd"));
// Update the cleaner based on certain flags
@@ -84,7 +84,7 @@ namespace SabreTools.Features
if (Directory.Exists(path) || File.Exists(path))
{
// Clone the base Dat for information
- DatFile datdata = DatFile.Create(basedat.Header);
+ DatFile datdata = DatFileTool.CreateDatFile(basedat.Header);
// Get the base path and fill the header, if needed
string basePath = Path.GetFullPath(path);
diff --git a/SabreTools/Features/Sort.cs b/SabreTools/Features/Sort.cs
index 0b02afe0..3be1ec93 100644
--- a/SabreTools/Features/Sort.cs
+++ b/SabreTools/Features/Sort.cs
@@ -93,7 +93,7 @@ namespace SabreTools.Features
{
foreach (ParentablePath datfile in datfilePaths)
{
- DatFile datdata = DatFile.Create();
+ DatFile datdata = DatFileTool.CreateDatFile();
Parser.ParseInto(datdata, datfile, int.MaxValue, keep: true);
// Skip if nothing was parsed
@@ -134,7 +134,7 @@ namespace SabreTools.Features
var watch = new InternalStopwatch("Populating internal DAT");
// Add all of the input DATs into one huge internal DAT
- DatFile datdata = DatFile.Create();
+ DatFile datdata = DatFileTool.CreateDatFile();
foreach (ParentablePath datfile in datfilePaths)
{
Parser.ParseInto(datdata, datfile, int.MaxValue, keep: true);
diff --git a/SabreTools/Features/Split.cs b/SabreTools/Features/Split.cs
index de7822dd..57117a07 100644
--- a/SabreTools/Features/Split.cs
+++ b/SabreTools/Features/Split.cs
@@ -64,7 +64,7 @@ namespace SabreTools.Features
foreach (ParentablePath file in files)
{
// Create and fill the new DAT
- DatFile internalDat = DatFile.Create(Header!);
+ DatFile internalDat = DatFileTool.CreateDatFile(Header!);
Parser.ParseInto(internalDat, file);
// Get the output directory
diff --git a/SabreTools/Features/Update.cs b/SabreTools/Features/Update.cs
index 293df009..dc644d2b 100644
--- a/SabreTools/Features/Update.cs
+++ b/SabreTools/Features/Update.cs
@@ -168,7 +168,7 @@ namespace SabreTools.Features
#endif
{
// Create a new base DatFile
- DatFile datFile = DatFile.Create(Header);
+ DatFile datFile = DatFileTool.CreateDatFile(Header);
_logger.User($"Processing '{Path.GetFileName(inputPath.CurrentPath)}'");
// Check the current format
@@ -228,7 +228,7 @@ namespace SabreTools.Features
}
// Create a DAT to capture inputs
- DatFile userInputDat = DatFile.Create(Header);
+ DatFile userInputDat = DatFileTool.CreateDatFile(Header);
// Populate using the correct set
List<DatHeader> datHeaders;
@@ -391,7 +391,7 @@ namespace SabreTools.Features
#endif
{
// Parse the path to a new DatFile
- DatFile repDat = DatFile.Create(Header);
+ DatFile repDat = DatFileTool.CreateDatFile(Header);
Parser.ParseInto(repDat, inputPath, indexId: 1, keep: true);
// Perform additional processing steps
@@ -432,7 +432,7 @@ namespace SabreTools.Features
#endif
{
// Parse the path to a new DatFile
- DatFile repDat = DatFile.Create(Header);
+ DatFile repDat = DatFileTool.CreateDatFile(Header);
Parser.ParseInto(repDat, inputPath, indexId: 1, keep: true);
// Perform additional processing steps
diff --git a/SabreTools/Features/Verify.cs b/SabreTools/Features/Verify.cs
index e37da9f9..5aab9b04 100644
--- a/SabreTools/Features/Verify.cs
+++ b/SabreTools/Features/Verify.cs
@@ -62,7 +62,7 @@ namespace SabreTools.Features
foreach (ParentablePath datfile in datfilePaths)
{
// Parse in from the file
- DatFile datdata = DatFile.Create();
+ DatFile datdata = DatFileTool.CreateDatFile();
Parser.ParseInto(datdata, datfile, int.MaxValue, keep: true);
// Perform additional processing steps
@@ -111,7 +111,7 @@ namespace SabreTools.Features
var watch = new InternalStopwatch("Populating internal DAT");
// Add all of the input DATs into one huge internal DAT
- DatFile datdata = DatFile.Create();
+ DatFile datdata = DatFileTool.CreateDatFile();
foreach (ParentablePath datfile in datfilePaths)
{
Parser.ParseInto(datdata, datfile, int.MaxValue, keep: true);