diff --git a/SabreTools.DatTools/Diffing.cs b/SabreTools.DatTools/Diffing.cs
index 7be8bd17..dce8b29f 100644
--- a/SabreTools.DatTools/Diffing.cs
+++ b/SabreTools.DatTools/Diffing.cs
@@ -15,6 +15,8 @@ namespace SabreTools.DatTools
/// </summary>
public class Diffing
{
+ #region Against
+
/// <summary>
/// Output diffs against a base set represented by the current DAT
/// </summary>
@@ -122,6 +124,10 @@ namespace SabreTools.DatTools
watch.Stop();
}
+ #endregion
+
+ #region Cascade
+
/// <summary>
/// Output cascading diffs
/// </summary>
@@ -166,6 +172,10 @@ namespace SabreTools.DatTools
return outDats;
}
+ #endregion
+
+ #region Duplicates
+
/// <summary>
/// Output duplicate item diff
/// </summary>
@@ -175,7 +185,6 @@ namespace SabreTools.DatTools
{
List<ParentablePath> paths = inputs.ConvertAll(i => new ParentablePath(i));
return Duplicates(datFile, paths);
- //return DuplicatesDB(datFile, paths);
}
/// <summary>
@@ -185,7 +194,25 @@ namespace SabreTools.DatTools
/// <param name="inputs">List of inputs to write out from</param>
public static DatFile Duplicates(DatFile datFile, List<ParentablePath> inputs)
{
- InternalStopwatch watch = new("Initializing duplicate DAT");
+ // Initialize duplicate data
+ DatFile dupeData = DuplicatesInit(datFile);
+
+ // Now, loop through the dictionary and populate the correct DATs
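+ // DuplicatesImpl fills dupeData from the Items dictionary; DuplicatesDBImpl fills it from the ItemsDB dictionary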
+ var watch = new InternalStopwatch("Populating duplicate DAT");
+ DuplicatesImpl(datFile, inputs, dupeData);
+ DuplicatesDBImpl(datFile, inputs, dupeData);
+ watch.Stop();
+
+ return dupeData;
+ }
+
+ /// <summary>
+ /// Create a duplicate data DatFile based on an input DatFile
+ /// </summary>
+ /// <param name="datFile">DatFile containing header and modifier information</param>
+ private static DatFile DuplicatesInit(DatFile datFile)
+ {
+ var watch = new InternalStopwatch("Initializing duplicate DAT");
// Fill in any information not in the base DAT
if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
@@ -205,10 +232,17 @@ namespace SabreTools.DatTools
dupeData.ResetDictionary();
watch.Stop();
+ return dupeData;
+ }
- // Now, loop through the dictionary and populate the correct DATs
- watch.Start("Populating duplicate DAT");
-
+ /// <summary>
+ /// Populate duplicates data from inputs
+ /// </summary>
+ /// <param name="datFile">Source DatFile to process</param>
+ /// <param name="inputs">Set of input paths for naming</param>
+ /// <param name="dupeData">Duplicate data DatFile</param>
+ private static void DuplicatesImpl(DatFile datFile, List<ParentablePath> inputs, DatFile dupeData)
+ {
#if NET452_OR_GREATER || NETCOREAPP
Parallel.ForEach(datFile.Items.SortedKeys, Core.Globals.ParallelOptions, key =>
#elif NET40_OR_GREATER
@@ -250,43 +284,16 @@ namespace SabreTools.DatTools
#else
}
#endif
-
- watch.Stop();
-
- return dupeData;
}
/// <summary>
- /// Output duplicate item diff
+ /// Populate duplicates data from inputs
/// </summary>
- /// <param name="datFile">Current DatFile object to use for updating</param>
- /// <param name="inputs">List of inputs to write out from</param>
- public static DatFile DuplicatesDB(DatFile datFile, List<ParentablePath> inputs)
+ /// <param name="datFile">Source DatFile to process</param>
+ /// <param name="inputs">Set of input paths for naming</param>
+ /// <param name="dupeData">Duplicate data DatFile</param>
+ private static void DuplicatesDBImpl(DatFile datFile, List<ParentablePath> inputs, DatFile dupeData)
{
- var watch = new InternalStopwatch("Initializing duplicate DAT");
-
- // Fill in any information not in the base DAT
- if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
- datFile.Header.SetFieldValue(DatHeader.FileNameKey, "All DATs");
-
- if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
- datFile.Header.SetFieldValue(Models.Metadata.Header.NameKey, "datFile.All DATs");
-
- if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
- datFile.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, "datFile.All DATs");
-
- string post = " (Duplicates)";
- DatFile dupeData = Parser.CreateDatFile(datFile.Header, datFile.Modifiers);
- dupeData.Header.SetFieldValue(DatHeader.FileNameKey, dupeData.Header.GetStringFieldValue(DatHeader.FileNameKey) + post);
- dupeData.Header.SetFieldValue(Models.Metadata.Header.NameKey, dupeData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + post);
- dupeData.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, dupeData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + post);
- dupeData.ResetDictionary();
-
- watch.Stop();
-
- // Now, loop through the dictionary and populate the correct DATs
- watch.Start("Populating duplicate DAT");
-
// Get all current items, machines, and mappings
var datItems = datFile.ItemsDB.GetItems();
var machines = datFile.GetMachinesDB();
@@ -363,12 +370,12 @@ namespace SabreTools.DatTools
#else
}
#endif
-
- watch.Stop();
-
- return dupeData;
}
+ #endregion
+
+ #region Individuals
+
/// <summary>
/// Output non-cascading diffs
/// </summary>
@@ -378,7 +385,6 @@ namespace SabreTools.DatTools
{
List<ParentablePath> paths = inputs.ConvertAll(i => new ParentablePath(i));
return Individuals(datFile, paths);
- //return IndividualsDB(datFile, paths);
}
/// <summary>
@@ -388,7 +394,26 @@ namespace SabreTools.DatTools
/// <param name="inputs">List of inputs to write out from</param>
public static List<DatFile> Individuals(DatFile datFile, List<ParentablePath> inputs)
{
- InternalStopwatch watch = new("Initializing all individual DATs");
+ // Create a set of DatData objects representing individual output files
+ DatFile[] outDats = IndividualsInit(datFile, inputs);
+
+ // Now, loop through the dictionary and populate the correct DATs
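+ // IndividualsImpl fills the output DATs from the Items dictionary; IndividualsDBImpl fills them from the ItemsDB dictionary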
+ var watch = new InternalStopwatch("Populating all individual DATs");
+ IndividualsImpl(datFile, outDats);
+ IndividualsDBImpl(datFile, outDats);
+ watch.Stop();
+
+ return [.. outDats];
+ }
+
+ /// <summary>
+ /// Create individual data DatFiles based on an input DatFile
+ /// </summary>
+ /// <param name="datFile">DatFile containing header and modifier information</param>
+ /// <param name="inputs">List of inputs to write out from</param>
+ private static DatFile[] IndividualsInit(DatFile datFile, List<ParentablePath> inputs)
+ {
+ var watch = new InternalStopwatch("Initializing all individual DATs");
// Fill in any information not in the base DAT
if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
@@ -423,15 +448,17 @@ namespace SabreTools.DatTools
#else
}
#endif
-
- // Create a list of DatData objects representing individual output files
- List<DatFile> outDats = [.. outDatsArray];
-
watch.Stop();
+ return outDatsArray;
+ }
- // Now, loop through the dictionary and populate the correct DATs
- watch.Start("Populating all individual DATs");
-
+ /// <summary>
+ /// Populate individuals data from inputs
+ /// </summary>
+ /// <param name="datFile">Source DatFile to process</param>
+ /// <param name="outDats">Individual data DatFiles</param>
+ private static void IndividualsImpl(DatFile datFile, DatFile[] outDats)
+ {
#if NET452_OR_GREATER || NETCOREAPP
Parallel.ForEach(datFile.Items.SortedKeys, Core.Globals.ParallelOptions, key =>
#elif NET40_OR_GREATER
@@ -468,63 +495,15 @@ namespace SabreTools.DatTools
#else
}
#endif
-
- watch.Stop();
-
- return [.. outDats];
}
/// <summary>
- /// Output non-cascading diffs
+ /// Populate individuals data from inputs
/// </summary>
- /// <param name="datFile">Current DatFile object to use for updating</param>
- /// <param name="inputs">List of inputs to write out from</param>
- public static List<DatFile> IndividualsDB(DatFile datFile, List<ParentablePath> inputs)
+ /// <param name="datFile">Source DatFile to process</param>
+ /// <param name="outDats">Individual data DatFiles</param>
+ private static void IndividualsDBImpl(DatFile datFile, DatFile[] outDats)
{
- InternalStopwatch watch = new("Initializing all individual DATs");
-
- // Fill in any information not in the base DAT
- if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
- datFile.Header.SetFieldValue(DatHeader.FileNameKey, "All DATs");
-
- if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
- datFile.Header.SetFieldValue(Models.Metadata.Header.NameKey, "All DATs");
-
- if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
- datFile.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, "All DATs");
-
- // Loop through each of the inputs and get or create a new DatData object
- DatFile[] outDatsArray = new DatFile[inputs.Count];
-
-#if NET452_OR_GREATER || NETCOREAPP
- Parallel.For(0, inputs.Count, Core.Globals.ParallelOptions, j =>
-#elif NET40_OR_GREATER
- Parallel.For(0, inputs.Count, j =>
-#else
- for (int j = 0; j < inputs.Count; j++)
-#endif
- {
- string innerpost = $" ({j} - {inputs[j].GetNormalizedFileName(true)} Only)";
- DatFile diffData = Parser.CreateDatFile(datFile.Header, datFile.Modifiers);
- diffData.Header.SetFieldValue(DatHeader.FileNameKey, diffData.Header.GetStringFieldValue(DatHeader.FileNameKey) + innerpost);
- diffData.Header.SetFieldValue(Models.Metadata.Header.NameKey, diffData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + innerpost);
- diffData.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, diffData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + innerpost);
- diffData.ResetDictionary();
- outDatsArray[j] = diffData;
-#if NET40_OR_GREATER || NETCOREAPP
- });
-#else
- }
-#endif
-
- // Create a list of DatData objects representing individual output files
- List<DatFile> outDats = [.. outDatsArray];
-
- watch.Stop();
-
- // Now, loop through the dictionary and populate the correct DATs
- watch.Start("Populating all individual DATs");
-
// Get all current items, machines, and mappings
var datItems = datFile.ItemsDB.GetItems();
var machines = datFile.GetMachinesDB();
@@ -540,7 +519,7 @@ namespace SabreTools.DatTools
long newSourceIndex = outDats[0].AddSourceDB(source.Value);
sourceRemapping[source.Key] = newSourceIndex;
- for (int i = 1; i < outDats.Count; i++)
+ for (int i = 1; i < outDats.Length; i++)
{
_ = outDats[i].AddSourceDB(source.Value);
}
@@ -552,7 +531,7 @@ namespace SabreTools.DatTools
long newMachineIndex = outDats[0].AddMachineDB(machine.Value);
machineRemapping[machine.Key] = newMachineIndex;
- for (int i = 1; i < outDats.Count; i++)
+ for (int i = 1; i < outDats.Length; i++)
{
_ = outDats[i].AddMachineDB(machine.Value);
}
@@ -591,12 +570,12 @@ namespace SabreTools.DatTools
#else
}
#endif
-
- watch.Stop();
-
- return [.. outDats];
}
+ #endregion
+
+ #region NoDuplicates
+
/// <summary>
/// Output non-duplicate item diff
/// </summary>
@@ -606,7 +585,6 @@ namespace SabreTools.DatTools
{
List<ParentablePath> paths = inputs.ConvertAll(i => new ParentablePath(i));
return NoDuplicates(datFile, paths);
- //return NoDuplicatesDB(datFile, paths);
}
/// <summary>
@@ -616,7 +594,25 @@ namespace SabreTools.DatTools
/// <param name="inputs">List of inputs to write out from</param>
public static DatFile NoDuplicates(DatFile datFile, List<ParentablePath> inputs)
{
- InternalStopwatch watch = new("Initializing no duplicate DAT");
+ // Initialize no duplicate data
+ DatFile outerDiffData = NoDuplicatesInit(datFile);
+
+ // Now, loop through the dictionary and populate the correct DATs
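+ // NoDuplicatesImpl fills outerDiffData from the Items dictionary; NoDuplicatesDBImpl fills it from the ItemsDB dictionary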
+ var watch = new InternalStopwatch("Populating no duplicate DAT");
+ NoDuplicatesImpl(datFile, inputs, outerDiffData);
+ NoDuplicatesDBImpl(datFile, inputs, outerDiffData);
+ watch.Stop();
+
+ return outerDiffData;
+ }
+
+ /// <summary>
+ /// Create a no duplicate data DatFile based on an input DatFile
+ /// </summary>
+ /// <param name="datFile">DatFile containing header and modifier information</param>
+ private static DatFile NoDuplicatesInit(DatFile datFile)
+ {
+ var watch = new InternalStopwatch("Initializing no duplicate DAT");
// Fill in any information not in the base DAT
if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
@@ -636,10 +632,17 @@ namespace SabreTools.DatTools
outerDiffData.ResetDictionary();
watch.Stop();
+ return outerDiffData;
+ }
- // Now, loop through the dictionary and populate the correct DATs
- watch.Start("Populating no duplicate DAT");
-
+ /// <summary>
+ /// Populate no duplicates data from inputs
+ /// </summary>
+ /// <param name="datFile">Source DatFile to process</param>
+ /// <param name="inputs">Set of input paths for naming</param>
+ /// <param name="outerDiffData">No duplicate data DatFile</param>
+ private static void NoDuplicatesImpl(DatFile datFile, List<ParentablePath> inputs, DatFile outerDiffData)
+ {
#if NET452_OR_GREATER || NETCOREAPP
Parallel.ForEach(datFile.Items.SortedKeys, Core.Globals.ParallelOptions, key =>
#elif NET40_OR_GREATER
@@ -679,43 +682,16 @@ namespace SabreTools.DatTools
#else
}
#endif
-
- watch.Stop();
-
- return outerDiffData;
}
/// <summary>
- /// Output non-duplicate item diff
+ /// Populate no duplicates data from inputs
/// </summary>
- /// <param name="datFile">Current DatFile object to use for updating</param>
- /// <param name="inputs">List of inputs to write out from</param>
- public static DatFile NoDuplicatesDB(DatFile datFile, List<ParentablePath> inputs)
+ /// <param name="datFile">Source DatFile to process</param>
+ /// <param name="inputs">Set of input paths for naming</param>
+ /// <param name="outerDiffData">No duplicate data DatFile</param>
+ private static void NoDuplicatesDBImpl(DatFile datFile, List<ParentablePath> inputs, DatFile outerDiffData)
{
- var watch = new InternalStopwatch("Initializing no duplicate DAT");
-
- // Fill in any information not in the base DAT
- if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
- datFile.Header.SetFieldValue(DatHeader.FileNameKey, "All DATs");
-
- if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
- datFile.Header.SetFieldValue(Models.Metadata.Header.NameKey, "All DATs");
-
- if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
- datFile.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, "All DATs");
-
- string post = " (No Duplicates)";
- DatFile outerDiffData = Parser.CreateDatFile(datFile.Header, datFile.Modifiers);
- outerDiffData.Header.SetFieldValue(DatHeader.FileNameKey, outerDiffData.Header.GetStringFieldValue(DatHeader.FileNameKey) + post);
- outerDiffData.Header.SetFieldValue(Models.Metadata.Header.NameKey, outerDiffData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + post);
- outerDiffData.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, outerDiffData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + post);
- outerDiffData.ResetDictionary();
-
- watch.Stop();
-
- // Now, loop through the dictionary and populate the correct DATs
- watch.Start("Populating no duplicate DAT");
-
// Get all current items, machines, and mappings
var datItems = datFile.ItemsDB.GetItems();
var machines = datFile.GetMachinesDB();
@@ -792,12 +768,12 @@ namespace SabreTools.DatTools
#else
}
#endif
-
- watch.Stop();
-
- return outerDiffData;
}
+ #endregion
+
+ #region Helpers
+
/// <summary>
/// Fill a DatFile with all items with a particular source index ID
/// </summary>
@@ -895,5 +871,7 @@ namespace SabreTools.DatTools
}
#endif
}
+
+ #endregion
}
}
\ No newline at end of file