Mirror of https://github.com/claunia/SabreTools.git (synced 2025-12-16 19:14:27 +00:00)
Move static DatFile creation
@@ -5,7 +5,6 @@ using System.Xml.Serialization;
 using Newtonsoft.Json;
 using SabreTools.Core.Filter;
 using SabreTools.Core.Tools;
-using SabreTools.DatFiles.Formats;
 using SabreTools.DatItems;
 using SabreTools.DatItems.Formats;
 using SabreTools.Hashing;
@@ -68,61 +67,6 @@ namespace SabreTools.DatFiles
 }
 }

-/// <summary>
-/// Create a specific type of DatFile to be used based on a format and a base DAT
-/// </summary>
-/// <param name="datFormat">Format of the DAT to be created</param>
-/// <param name="baseDat">DatFile containing the information to use in specific operations</param>
-/// <param name="quotes">For relevant types, assume the usage of quotes</param>
-/// <returns>DatFile of the specific internal type that corresponds to the inputs</returns>
-public static DatFile Create(DatFormat? datFormat = null, DatFile? baseDat = null, bool quotes = true)
-{
-    return datFormat switch
-    {
-        DatFormat.ArchiveDotOrg => new ArchiveDotOrg(baseDat),
-        DatFormat.AttractMode => new AttractMode(baseDat),
-        DatFormat.ClrMamePro => new ClrMamePro(baseDat, quotes),
-        DatFormat.CSV => new CommaSeparatedValue(baseDat),
-        DatFormat.DOSCenter => new DosCenter(baseDat),
-        DatFormat.EverdriveSMDB => new EverdriveSMDB(baseDat),
-        DatFormat.Listrom => new Listrom(baseDat),
-        DatFormat.Listxml => new Listxml(baseDat),
-        DatFormat.Logiqx => new Logiqx(baseDat, false),
-        DatFormat.LogiqxDeprecated => new Logiqx(baseDat, true),
-        DatFormat.MissFile => new Missfile(baseDat),
-        DatFormat.OfflineList => new OfflineList(baseDat),
-        DatFormat.OpenMSX => new OpenMSX(baseDat),
-        DatFormat.RedumpMD5 => new Md5File(baseDat),
-        DatFormat.RedumpSFV => new SfvFile(baseDat),
-        DatFormat.RedumpSHA1 => new Sha1File(baseDat),
-        DatFormat.RedumpSHA256 => new Sha256File(baseDat),
-        DatFormat.RedumpSHA384 => new Sha384File(baseDat),
-        DatFormat.RedumpSHA512 => new Sha512File(baseDat),
-        DatFormat.RedumpSpamSum => new SpamSumFile(baseDat),
-        DatFormat.RomCenter => new RomCenter(baseDat),
-        DatFormat.SabreJSON => new SabreJSON(baseDat),
-        DatFormat.SabreXML => new SabreXML(baseDat),
-        DatFormat.SoftwareList => new Formats.SoftwareList(baseDat),
-        DatFormat.SSV => new SemicolonSeparatedValue(baseDat),
-        DatFormat.TSV => new TabSeparatedValue(baseDat),
-
-        // We use new-style Logiqx as a backup for generic DatFile
-        _ => new Logiqx(baseDat, false),
-    };
-}
-
-/// <summary>
-/// Create a new DatFile from an existing DatHeader
-/// </summary>
-/// <param name="datHeader">DatHeader to get the values from</param>
-public static DatFile Create(DatHeader datHeader)
-{
-    DatFormat format = datHeader.GetFieldValue<DatFormat>(DatHeader.DatFormatKey);
-    DatFile datFile = Create(format);
-    datFile.Header = (DatHeader)datHeader.Clone();
-    return datFile;
-}
-
 /// <summary>
 /// Fill the header values based on existing Header and path
 /// </summary>
@@ -174,6 +118,15 @@ namespace SabreTools.DatFiles
 ItemsDB = new ItemDictionaryDB();
 }

+/// <summary>
+/// Set the internal header
+/// </summary>
+/// <param name="datHeader">Replacement header to be used</param>
+public void SetHeader(DatHeader datHeader)
+{
+    Header = (DatHeader)datHeader.Clone();
+}
+
 #endregion

 #region Filtering

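For illustration only: the SetHeader method added above stores a clone of the supplied DatHeader, so later edits to the original header object should not flow into the DatFile (assuming DatHeader.Clone yields an independent copy). A minimal C# sketch, using the DatFileTool.CreateDatFile factory that this commit introduces further down; the statements are assumed to run inside a method with SabreTools.DatFiles in scope:

    // Hypothetical usage; the names come from elsewhere in this diff.
    DatFile source = DatFileTool.CreateDatFile(DatFormat.Logiqx);
    DatFile target = DatFileTool.CreateDatFile(DatFormat.ClrMamePro);

    // SetHeader clones, so target keeps its own copy of the header values.
    target.SetHeader(source.Header);

    // Renaming source afterwards is not expected to rename target.
    source.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, "Renamed Source");
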
@@ -6,6 +6,7 @@ using System.Linq;
 using System.Threading.Tasks;
 #endif
 using SabreTools.Core.Tools;
+using SabreTools.DatFiles.Formats;
 using SabreTools.DatItems;
 using SabreTools.DatItems.Formats;
 using SabreTools.IO;
@@ -28,6 +29,65 @@ namespace SabreTools.DatFiles

 #endregion

+#region Creation
+
+/// <summary>
+/// Create a specific type of DatFile to be used based on a format and a base DAT
+/// </summary>
+/// <param name="datFormat">Format of the DAT to be created</param>
+/// <param name="baseDat">DatFile containing the information to use in specific operations</param>
+/// <param name="quotes">For relevant types, assume the usage of quotes</param>
+/// <returns>DatFile of the specific internal type that corresponds to the inputs</returns>
+public static DatFile CreateDatFile(DatFormat? datFormat = null, DatFile? baseDat = null, bool quotes = true)
+{
+    return datFormat switch
+    {
+        DatFormat.ArchiveDotOrg => new ArchiveDotOrg(baseDat),
+        DatFormat.AttractMode => new AttractMode(baseDat),
+        DatFormat.ClrMamePro => new ClrMamePro(baseDat, quotes),
+        DatFormat.CSV => new CommaSeparatedValue(baseDat),
+        DatFormat.DOSCenter => new DosCenter(baseDat),
+        DatFormat.EverdriveSMDB => new EverdriveSMDB(baseDat),
+        DatFormat.Listrom => new Listrom(baseDat),
+        DatFormat.Listxml => new Listxml(baseDat),
+        DatFormat.Logiqx => new Logiqx(baseDat, false),
+        DatFormat.LogiqxDeprecated => new Logiqx(baseDat, true),
+        DatFormat.MissFile => new Missfile(baseDat),
+        DatFormat.OfflineList => new OfflineList(baseDat),
+        DatFormat.OpenMSX => new OpenMSX(baseDat),
+        DatFormat.RedumpMD5 => new Md5File(baseDat),
+        DatFormat.RedumpSFV => new SfvFile(baseDat),
+        DatFormat.RedumpSHA1 => new Sha1File(baseDat),
+        DatFormat.RedumpSHA256 => new Sha256File(baseDat),
+        DatFormat.RedumpSHA384 => new Sha384File(baseDat),
+        DatFormat.RedumpSHA512 => new Sha512File(baseDat),
+        DatFormat.RedumpSpamSum => new SpamSumFile(baseDat),
+        DatFormat.RomCenter => new RomCenter(baseDat),
+        DatFormat.SabreJSON => new SabreJSON(baseDat),
+        DatFormat.SabreXML => new SabreXML(baseDat),
+        DatFormat.SoftwareList => new Formats.SoftwareList(baseDat),
+        DatFormat.SSV => new SemicolonSeparatedValue(baseDat),
+        DatFormat.TSV => new TabSeparatedValue(baseDat),
+
+        // We use new-style Logiqx as a backup for generic DatFile
+        _ => new Logiqx(baseDat, false),
+    };
+}
+
+/// <summary>
+/// Create a new DatFile from an existing DatHeader
+/// </summary>
+/// <param name="datHeader">DatHeader to get the values from</param>
+public static DatFile CreateDatFile(DatHeader datHeader)
+{
+    DatFormat format = datHeader.GetFieldValue<DatFormat>(DatHeader.DatFormatKey);
+    DatFile datFile = CreateDatFile(format);
+    datFile.SetHeader(datHeader);
+    return datFile;
+}
+
+#endregion
+
 #region Sorting and Merging

 /// <summary>
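For orientation, a small usage sketch of the relocated factory (illustrative only; it assumes the statements run where SabreTools.DatFiles is imported and that DatFormat and DatHeader behave as shown in this diff):

    // Pick the concrete DatFile subtype from a format value.
    DatFile clrMamePro = DatFileTool.CreateDatFile(DatFormat.ClrMamePro, quotes: true);

    // Omitting the format falls back to new-style Logiqx, per the switch's default arm.
    DatFile fallback = DatFileTool.CreateDatFile();

    // From an existing header: the format is read from DatHeader.DatFormatKey
    // and the header is copied onto the new DatFile via SetHeader.
    DatFile fromHeader = DatFileTool.CreateDatFile(clrMamePro.Header);
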
@@ -734,7 +794,7 @@ namespace SabreTools.DatFiles
 for (int j = 0; j < datHeaders.Count; j++)
 #endif
 {
-DatFile diffData = DatFile.Create(datHeaders[j]);
+DatFile diffData = DatFileTool.CreateDatFile(datHeaders[j]);
 diffData.ResetDictionary();
 FillWithSourceIndex(datFile, diffData, j);
 //FillWithSourceIndexDB(datFile, diffData, j);
@@ -783,7 +843,7 @@ namespace SabreTools.DatFiles
 datFile.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, "datFile.All DATs");

 string post = " (Duplicates)";
-DatFile dupeData = DatFile.Create(datFile.Header);
+DatFile dupeData = DatFileTool.CreateDatFile(datFile.Header);
 dupeData.Header.SetFieldValue<string?>(DatHeader.FileNameKey, dupeData.Header.GetStringFieldValue(DatHeader.FileNameKey) + post);
 dupeData.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, dupeData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + post);
 dupeData.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, dupeData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + post);
@@ -861,7 +921,7 @@ namespace SabreTools.DatFiles
 datFile.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, "datFile.All DATs");

 string post = " (Duplicates)";
-DatFile dupeData = DatFile.Create(datFile.Header);
+DatFile dupeData = DatFileTool.CreateDatFile(datFile.Header);
 dupeData.Header.SetFieldValue<string?>(DatHeader.FileNameKey, dupeData.Header.GetStringFieldValue(DatHeader.FileNameKey) + post);
 dupeData.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, dupeData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + post);
 dupeData.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, dupeData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + post);
@@ -999,7 +1059,7 @@ namespace SabreTools.DatFiles
 #endif
 {
 string innerpost = $" ({j} - {inputs[j].GetNormalizedFileName(true)} Only)";
-DatFile diffData = DatFile.Create(datFile.Header);
+DatFile diffData = DatFileTool.CreateDatFile(datFile.Header);
 diffData.Header.SetFieldValue<string?>(DatHeader.FileNameKey, diffData.Header.GetStringFieldValue(DatHeader.FileNameKey) + innerpost);
 diffData.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, diffData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + innerpost);
 diffData.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, diffData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + innerpost);
@@ -1092,7 +1152,7 @@ namespace SabreTools.DatFiles
 #endif
 {
 string innerpost = $" ({j} - {inputs[j].GetNormalizedFileName(true)} Only)";
-DatFile diffData = DatFile.Create(datFile.Header);
+DatFile diffData = DatFileTool.CreateDatFile(datFile.Header);
 diffData.Header.SetFieldValue<string?>(DatHeader.FileNameKey, diffData.Header.GetStringFieldValue(DatHeader.FileNameKey) + innerpost);
 diffData.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, diffData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + innerpost);
 diffData.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, diffData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + innerpost);
@@ -1218,7 +1278,7 @@ namespace SabreTools.DatFiles
 datFile.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, "All DATs");

 string post = " (No Duplicates)";
-DatFile outerDiffData = DatFile.Create(datFile.Header);
+DatFile outerDiffData = DatFileTool.CreateDatFile(datFile.Header);
 outerDiffData.Header.SetFieldValue<string?>(DatHeader.FileNameKey, outerDiffData.Header.GetStringFieldValue(DatHeader.FileNameKey) + post);
 outerDiffData.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, outerDiffData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + post);
 outerDiffData.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, outerDiffData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + post);
@@ -1294,7 +1354,7 @@ namespace SabreTools.DatFiles
 datFile.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, "All DATs");

 string post = " (No Duplicates)";
-DatFile outerDiffData = DatFile.Create(datFile.Header);
+DatFile outerDiffData = DatFileTool.CreateDatFile(datFile.Header);
 outerDiffData.Header.SetFieldValue<string?>(DatHeader.FileNameKey, outerDiffData.Header.GetStringFieldValue(DatHeader.FileNameKey) + post);
 outerDiffData.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, outerDiffData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + post);
 outerDiffData.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, outerDiffData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + post);
@@ -1427,7 +1487,7 @@ namespace SabreTools.DatFiles
 {
 var input = inputs[i];
 _staticLogger.User($"Adding DAT: {input.CurrentPath}");
-datFiles[i] = DatFile.Create(datFile.Header.CloneFiltering());
+datFiles[i] = DatFileTool.CreateDatFile(datFile.Header.CloneFiltering());
 Parser.ParseInto(datFiles[i], input, i, keep: true);
 #if NET40_OR_GREATER || NETCOREAPP
 });

@@ -33,9 +33,9 @@ namespace SabreTools.DatFiles
 {
 // Null filenames are invalid
 if (filename == null)
-return DatFile.Create();
+return DatFileTool.CreateDatFile();

-DatFile datFile = DatFile.Create();
+DatFile datFile = DatFileTool.CreateDatFile();
 ParseInto(datFile, new ParentablePath(filename), statsOnly: statsOnly, throwOnError: throwOnError);
 return datFile;
 }
@@ -112,7 +112,7 @@ namespace SabreTools.DatFiles
 // Now parse the correct type of DAT
 try
 {
-var parsingDatFile = DatFile.Create(currentPathFormat, datFile, quotes);
+var parsingDatFile = DatFileTool.CreateDatFile(currentPathFormat, datFile, quotes);
 parsingDatFile?.ParseFile(currentPath, indexId, keep, statsOnly: statsOnly, throwOnError: throwOnError);
 }
 catch (Exception ex) when (!throwOnError)

@@ -54,12 +54,12 @@ namespace SabreTools.DatTools
 string newExtBString = string.Join(",", newExtB);

 // Set all of the appropriate outputs for each of the subsets
-DatFile extADat = DatFile.Create(datFile.Header.CloneStandard());
+DatFile extADat = DatFileTool.CreateDatFile(datFile.Header.CloneStandard());
 extADat.Header.SetFieldValue<string?>(DatHeader.FileNameKey, extADat.Header.GetStringFieldValue(DatHeader.FileNameKey) + $" ({newExtAString})");
 extADat.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, extADat.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + $" ({newExtAString})");
 extADat.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, extADat.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + $" ({newExtAString})");

-DatFile extBDat = DatFile.Create(datFile.Header.CloneStandard());
+DatFile extBDat = DatFileTool.CreateDatFile(datFile.Header.CloneStandard());
 extBDat.Header.SetFieldValue<string?>(DatHeader.FileNameKey, extBDat.Header.GetStringFieldValue(DatHeader.FileNameKey) + $" ({newExtBString})");
 extBDat.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, extBDat.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + $" ({newExtBString})");
 extBDat.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, extBDat.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + $" ({newExtBString})");
@@ -131,12 +131,12 @@ namespace SabreTools.DatTools
 string newExtBString = string.Join(",", newExtB);

 // Set all of the appropriate outputs for each of the subsets
-DatFile extADat = DatFile.Create(datFile.Header.CloneStandard());
+DatFile extADat = DatFileTool.CreateDatFile(datFile.Header.CloneStandard());
 extADat.Header.SetFieldValue<string?>(DatHeader.FileNameKey, extADat.Header.GetStringFieldValue(DatHeader.FileNameKey) + $" ({newExtAString})");
 extADat.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, extADat.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + $" ({newExtAString})");
 extADat.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, extADat.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + $" ({newExtAString})");

-DatFile extBDat = DatFile.Create(datFile.Header.CloneStandard());
+DatFile extBDat = DatFileTool.CreateDatFile(datFile.Header.CloneStandard());
 extBDat.Header.SetFieldValue<string?>(DatHeader.FileNameKey, extBDat.Header.GetStringFieldValue(DatHeader.FileNameKey) + $" ({newExtBString})");
 extBDat.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, extBDat.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + $" ({newExtBString})");
 extBDat.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, extBDat.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + $" ({newExtBString})");
@@ -232,7 +232,7 @@ namespace SabreTools.DatTools
 Dictionary<string, DatFile> fieldDats = [];
 foreach (var kvp in mappings)
 {
-fieldDats[kvp.Key] = DatFile.Create(datFile.Header.CloneStandard());
+fieldDats[kvp.Key] = DatFileTool.CreateDatFile(datFile.Header.CloneStandard());
 fieldDats[kvp.Key].Header.SetFieldValue<string?>(DatHeader.FileNameKey, fieldDats[kvp.Key].Header.GetStringFieldValue(DatHeader.FileNameKey) + kvp.Value);
 fieldDats[kvp.Key].Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, fieldDats[kvp.Key].Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + kvp.Value);
 fieldDats[kvp.Key].Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, fieldDats[kvp.Key].Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + kvp.Value);
@@ -343,7 +343,7 @@ namespace SabreTools.DatTools
 Dictionary<string, DatFile> fieldDats = [];
 foreach (var kvp in mappings)
 {
-fieldDats[kvp.Key] = DatFile.Create(datFile.Header.CloneStandard());
+fieldDats[kvp.Key] = DatFileTool.CreateDatFile(datFile.Header.CloneStandard());
 fieldDats[kvp.Key].Header.SetFieldValue<string?>(DatHeader.FileNameKey, fieldDats[kvp.Key].Header.GetStringFieldValue(DatHeader.FileNameKey) + kvp.Value);
 fieldDats[kvp.Key].Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, fieldDats[kvp.Key].Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + kvp.Value);
 fieldDats[kvp.Key].Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, fieldDats[kvp.Key].Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + kvp.Value);
@@ -480,7 +480,7 @@ namespace SabreTools.DatTools
 datFile.Items.BucketBy(ItemKey.Machine, DedupeType.None, lower: false, norename: true);

 // Create a temporary DAT to add things to
-DatFile tempDat = DatFile.Create(datFile.Header);
+DatFile tempDat = DatFileTool.CreateDatFile(datFile.Header);
 tempDat.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, null);

 // Sort the input keys
@@ -500,7 +500,7 @@ namespace SabreTools.DatTools
 if (tempDat.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) != null && tempDat.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) != Path.GetDirectoryName(key))
 {
 // Reset the DAT for the next items
-tempDat = DatFile.Create(datFile.Header);
+tempDat = DatFileTool.CreateDatFile(datFile.Header);
 tempDat.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, null);
 }

@@ -603,12 +603,12 @@ namespace SabreTools.DatTools
 // Create each of the respective output DATs
 InternalStopwatch watch = new($"Splitting DAT by size");

-DatFile lessThan = DatFile.Create(datFile.Header.CloneStandard());
+DatFile lessThan = DatFileTool.CreateDatFile(datFile.Header.CloneStandard());
 lessThan.Header.SetFieldValue<string?>(DatHeader.FileNameKey, lessThan.Header.GetStringFieldValue(DatHeader.FileNameKey) + $" (less than {radix})");
 lessThan.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, lessThan.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + $" (less than {radix})");
 lessThan.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, lessThan.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + $" (less than {radix})");

-DatFile greaterThan = DatFile.Create(datFile.Header.CloneStandard());
+DatFile greaterThan = DatFileTool.CreateDatFile(datFile.Header.CloneStandard());
 greaterThan.Header.SetFieldValue<string?>(DatHeader.FileNameKey, greaterThan.Header.GetStringFieldValue(DatHeader.FileNameKey) + $" (equal-greater than {radix})");
 greaterThan.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, greaterThan.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + $" (equal-greater than {radix})");
 greaterThan.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, greaterThan.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + $" (equal-greater than {radix})");
@@ -669,12 +669,12 @@ namespace SabreTools.DatTools
 // Create each of the respective output DATs
 var watch = new InternalStopwatch($"Splitting DAT by size");

-DatFile lessThan = DatFile.Create(datFile.Header.CloneStandard());
+DatFile lessThan = DatFileTool.CreateDatFile(datFile.Header.CloneStandard());
 lessThan.Header.SetFieldValue<string?>(DatHeader.FileNameKey, lessThan.Header.GetStringFieldValue(DatHeader.FileNameKey) + $" (less than {radix})");
 lessThan.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, lessThan.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + $" (less than {radix})");
 lessThan.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, lessThan.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + $" (less than {radix})");

-DatFile greaterThan = DatFile.Create(datFile.Header.CloneStandard());
+DatFile greaterThan = DatFileTool.CreateDatFile(datFile.Header.CloneStandard());
 greaterThan.Header.SetFieldValue<string?>(DatHeader.FileNameKey, greaterThan.Header.GetStringFieldValue(DatHeader.FileNameKey) + $" (equal-greater than {radix})");
 greaterThan.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, greaterThan.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + $" (equal-greater than {radix})");
 greaterThan.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, greaterThan.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + $" (equal-greater than {radix})");
@@ -772,7 +772,7 @@ namespace SabreTools.DatTools
 // Initialize everything
 long currentSize = 0;
 long currentIndex = 0;
-DatFile currentDat = DatFile.Create(datFile.Header.CloneStandard());
+DatFile currentDat = DatFileTool.CreateDatFile(datFile.Header.CloneStandard());
 currentDat.Header.SetFieldValue<string?>(DatHeader.FileNameKey, currentDat.Header.GetStringFieldValue(DatHeader.FileNameKey) + $"_{currentIndex}");
 currentDat.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, currentDat.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + $"_{currentIndex}");
 currentDat.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, currentDat.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + $"_{currentIndex}");
@@ -815,7 +815,7 @@ namespace SabreTools.DatTools
 datFiles.Add(currentDat);
 currentSize = 0;
 currentIndex++;
-currentDat = DatFile.Create(datFile.Header.CloneStandard());
+currentDat = DatFileTool.CreateDatFile(datFile.Header.CloneStandard());
 currentDat.Header.SetFieldValue<string?>(DatHeader.FileNameKey, currentDat.Header.GetStringFieldValue(DatHeader.FileNameKey) + $"_{currentIndex}");
 currentDat.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, currentDat.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + $"_{currentIndex}");
 currentDat.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, currentDat.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + $"_{currentIndex}");
@@ -859,7 +859,7 @@ namespace SabreTools.DatTools
 // Setup all of the DatFiles
 foreach (ItemType itemType in outputTypes)
 {
-typeDats[itemType] = DatFile.Create(datFile.Header.CloneStandard());
+typeDats[itemType] = DatFileTool.CreateDatFile(datFile.Header.CloneStandard());
 typeDats[itemType].Header.SetFieldValue<string?>(DatHeader.FileNameKey, typeDats[itemType].Header.GetStringFieldValue(DatHeader.FileNameKey) + $" ({itemType})");
 typeDats[itemType].Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, typeDats[itemType].Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + $" ({itemType})");
 typeDats[itemType].Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, typeDats[itemType].Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + $" ({itemType})");

@@ -90,7 +90,7 @@ namespace SabreTools.DatTools
 string outfile = outfiles[datFormat];
 try
 {
-DatFile.Create(datFormat, datFile, quotes)?.WriteToFile(outfile, ignoreblanks, throwOnError);
+DatFileTool.CreateDatFile(datFormat, datFile, quotes)?.WriteToFile(outfile, ignoreblanks, throwOnError);
 }
 catch (Exception ex) when (!throwOnError)
 {

@@ -901,7 +901,7 @@ Reset the internal state: reset();";
 /// </summary>
 private class BatchState
 {
-public DatFile DatFile { get; set; } = DatFile.Create();
+public DatFile DatFile { get; set; } = DatFileTool.CreateDatFile();
 public int Index { get; set; } = 0;
 public string? OutputDirectory { get; set; } = null;

@@ -911,7 +911,7 @@ Reset the internal state: reset();";
 public void Reset()
 {
 Index = 0;
-DatFile = DatFile.Create();
+DatFile = DatFileTool.CreateDatFile();
 OutputDirectory = null;
 }
 }

@@ -71,7 +71,7 @@ namespace SabreTools.Features
 Remover!.PopulateExclusionsFromList(["DatItem.Date"]);

 // Create a new DATFromDir object and process the inputs
-DatFile basedat = DatFile.Create(Header!);
+DatFile basedat = DatFileTool.CreateDatFile(Header!);
 basedat.Header.SetFieldValue<string?>(Models.Metadata.Header.DateKey, DateTime.Now.ToString("yyyy-MM-dd"));

 // Update the cleaner based on certain flags
@@ -84,7 +84,7 @@ namespace SabreTools.Features
 if (Directory.Exists(path) || File.Exists(path))
 {
 // Clone the base Dat for information
-DatFile datdata = DatFile.Create(basedat.Header);
+DatFile datdata = DatFileTool.CreateDatFile(basedat.Header);

 // Get the base path and fill the header, if needed
 string basePath = Path.GetFullPath(path);

@@ -93,7 +93,7 @@ namespace SabreTools.Features
 {
 foreach (ParentablePath datfile in datfilePaths)
 {
-DatFile datdata = DatFile.Create();
+DatFile datdata = DatFileTool.CreateDatFile();
 Parser.ParseInto(datdata, datfile, int.MaxValue, keep: true);

 // Skip if nothing was parsed
@@ -134,7 +134,7 @@ namespace SabreTools.Features
 var watch = new InternalStopwatch("Populating internal DAT");

 // Add all of the input DATs into one huge internal DAT
-DatFile datdata = DatFile.Create();
+DatFile datdata = DatFileTool.CreateDatFile();
 foreach (ParentablePath datfile in datfilePaths)
 {
 Parser.ParseInto(datdata, datfile, int.MaxValue, keep: true);

@@ -64,7 +64,7 @@ namespace SabreTools.Features
 foreach (ParentablePath file in files)
 {
 // Create and fill the new DAT
-DatFile internalDat = DatFile.Create(Header!);
+DatFile internalDat = DatFileTool.CreateDatFile(Header!);
 Parser.ParseInto(internalDat, file);

 // Get the output directory

@@ -168,7 +168,7 @@ namespace SabreTools.Features
 #endif
 {
 // Create a new base DatFile
-DatFile datFile = DatFile.Create(Header);
+DatFile datFile = DatFileTool.CreateDatFile(Header);
 _logger.User($"Processing '{Path.GetFileName(inputPath.CurrentPath)}'");

 // Check the current format
@@ -228,7 +228,7 @@ namespace SabreTools.Features
 }

 // Create a DAT to capture inputs
-DatFile userInputDat = DatFile.Create(Header);
+DatFile userInputDat = DatFileTool.CreateDatFile(Header);

 // Populate using the correct set
 List<DatHeader> datHeaders;
@@ -391,7 +391,7 @@ namespace SabreTools.Features
 #endif
 {
 // Parse the path to a new DatFile
-DatFile repDat = DatFile.Create(Header);
+DatFile repDat = DatFileTool.CreateDatFile(Header);
 Parser.ParseInto(repDat, inputPath, indexId: 1, keep: true);

 // Perform additional processing steps
@@ -432,7 +432,7 @@ namespace SabreTools.Features
 #endif
 {
 // Parse the path to a new DatFile
-DatFile repDat = DatFile.Create(Header);
+DatFile repDat = DatFileTool.CreateDatFile(Header);
 Parser.ParseInto(repDat, inputPath, indexId: 1, keep: true);

 // Perform additional processing steps

@@ -62,7 +62,7 @@ namespace SabreTools.Features
 foreach (ParentablePath datfile in datfilePaths)
 {
 // Parse in from the file
-DatFile datdata = DatFile.Create();
+DatFile datdata = DatFileTool.CreateDatFile();
 Parser.ParseInto(datdata, datfile, int.MaxValue, keep: true);

 // Perform additional processing steps
@@ -111,7 +111,7 @@ namespace SabreTools.Features
 var watch = new InternalStopwatch("Populating internal DAT");

 // Add all of the input DATs into one huge internal DAT
-DatFile datdata = DatFile.Create();
+DatFile datdata = DatFileTool.CreateDatFile();
 foreach (ParentablePath datfile in datfilePaths)
 {
 Parser.ParseInto(datdata, datfile, int.MaxValue, keep: true);
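Taken together, the call-site hunks above are mechanical: each DatFile.Create(...) call becomes DatFileTool.CreateDatFile(...) with the same arguments. A representative before/after shape, copied from the recurring pattern in the diff (datHeaders and j are assumed to come from the surrounding loop in the source):

    // Before this commit (factory hosted on DatFile itself):
    //     DatFile diffData = DatFile.Create(datHeaders[j]);
    // After this commit (factory moved to the static DatFileTool helper):
    DatFile diffData = DatFileTool.CreateDatFile(datHeaders[j]);
    diffData.ResetDictionary();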