diff --git a/SabreTools.DatFiles/DatFileTool.cs b/SabreTools.DatFiles/DatFileTool.cs
index 27368d09..f5299c31 100644
--- a/SabreTools.DatFiles/DatFileTool.cs
+++ b/SabreTools.DatFiles/DatFileTool.cs
@@ -662,793 +662,6 @@ namespace SabreTools.DatFiles
#endregion
- #region Diffing
-
- ///
- /// Output diffs against a base set represented by the current DAT
- ///
- /// Current DatFile object to use for updating
- /// DatFile to replace the values in
- /// True to diff using games, false to use hashes
- public static void DiffAgainst(DatFile datFile, DatFile intDat, bool useGames)
- {
- InternalStopwatch watch = new($"Comparing '{intDat.Header.GetStringFieldValue(DatHeader.FileNameKey)}' to base DAT");
-
- // For comparison's sake, we want to a the base bucketing
- if (useGames)
- {
- intDat.BucketBy(ItemKey.Machine);
- }
- else
- {
- intDat.BucketBy(ItemKey.CRC);
- intDat.Deduplicate();
- }
-
- // Then we compare against the base DAT
-#if NET452_OR_GREATER || NETCOREAPP
- Parallel.ForEach(intDat.Items.SortedKeys, Core.Globals.ParallelOptions, key =>
-#elif NET40_OR_GREATER
- Parallel.ForEach(intDat.Items.SortedKeys, key =>
-#else
- foreach (var key in intDat.Items.SortedKeys)
-#endif
- {
- // Game Against uses game names
- if (useGames)
- {
- // If the key is null, keep it
- var intList = intDat.GetItemsForBucket(key);
- if (intList.Count == 0)
-#if NET40_OR_GREATER || NETCOREAPP
- return;
-#else
- continue;
-#endif
-
- // If the base DAT doesn't contain the key, keep it
- var list = datFile.GetItemsForBucket(key);
- if (list.Count == 0)
-#if NET40_OR_GREATER || NETCOREAPP
- return;
-#else
- continue;
-#endif
-
- // If the number of items is different, then keep it
- if (list.Count != intList.Count)
-#if NET40_OR_GREATER || NETCOREAPP
- return;
-#else
- continue;
-#endif
-
- // Otherwise, compare by name and hash the remaining files
- bool exactMatch = true;
- foreach (DatItem item in intList)
- {
- // TODO: Make this granular to name as well
- if (!list.Contains(item))
- {
- exactMatch = false;
- break;
- }
- }
-
- // If we have an exact match, remove the game
- if (exactMatch)
- intDat.RemoveBucket(key);
- }
-
- // Standard Against uses hashes
- else
- {
- List? datItems = intDat.GetItemsForBucket(key);
- if (datItems == null)
-#if NET40_OR_GREATER || NETCOREAPP
- return;
-#else
- continue;
-#endif
-
- List keepDatItems = [];
- foreach (DatItem datItem in datItems)
- {
- if (!datFile.HasDuplicates(datItem, true))
- keepDatItems.Add(datItem);
- }
-
- // Now add the new list to the key
- intDat.RemoveBucket(key);
- keepDatItems.ForEach(item => intDat.AddItem(item, statsOnly: false));
- }
-#if NET40_OR_GREATER || NETCOREAPP
- });
-#else
- }
-#endif
-
- watch.Stop();
- }
-
- ///
- /// Output cascading diffs
- ///
- /// Current DatFile object to use for updating
- /// Dat headers used optionally
- /// List of DatFiles representing the individually indexed items
- public static List DiffCascade(DatFile datFile, List datHeaders)
- {
- // Create a list of DatData objects representing output files
- List outDats = [];
-
- // Ensure the current DatFile is sorted optimally
- datFile.BucketBy(ItemKey.CRC);
-
- // Loop through each of the inputs and get or create a new DatData object
- InternalStopwatch watch = new("Initializing and filling all output DATs");
-
- // Create the DatFiles from the set of headers
- DatFile[] outDatsArray = new DatFile[datHeaders.Count];
-#if NET452_OR_GREATER || NETCOREAPP
- Parallel.For(0, datHeaders.Count, Core.Globals.ParallelOptions, j =>
-#elif NET40_OR_GREATER
- Parallel.For(0, datHeaders.Count, j =>
-#else
- for (int j = 0; j < datHeaders.Count; j++)
-#endif
- {
- DatFile diffData = CreateDatFile(datHeaders[j], new DatModifiers());
- diffData.ResetDictionary();
- FillWithSourceIndex(datFile, diffData, j);
- FillWithSourceIndexDB(datFile, diffData, j);
- outDatsArray[j] = diffData;
-#if NET40_OR_GREATER || NETCOREAPP
- });
-#else
- }
-#endif
-
- outDats = [.. outDatsArray];
- watch.Stop();
-
- return outDats;
- }
-
- ///
- /// Output duplicate item diff
- ///
- /// Current DatFile object to use for updating
- /// List of inputs to write out from
- public static DatFile DiffDuplicates(DatFile datFile, List inputs)
- {
- List paths = inputs.ConvertAll(i => new ParentablePath(i));
- return DiffDuplicates(datFile, paths);
- //return DiffDuplicatesDB(datFile, paths);
- }
-
- ///
- /// Output duplicate item diff
- ///
- /// Current DatFile object to use for updating
- /// List of inputs to write out from
- public static DatFile DiffDuplicates(DatFile datFile, List inputs)
- {
- InternalStopwatch watch = new("Initializing duplicate DAT");
-
- // Fill in any information not in the base DAT
- if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
- datFile.Header.SetFieldValue(DatHeader.FileNameKey, "All DATs");
-
- if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
- datFile.Header.SetFieldValue(Models.Metadata.Header.NameKey, "datFile.All DATs");
-
- if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
- datFile.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, "datFile.All DATs");
-
- string post = " (Duplicates)";
- DatFile dupeData = CreateDatFile(datFile.Header, datFile.Modifiers);
- dupeData.Header.SetFieldValue(DatHeader.FileNameKey, dupeData.Header.GetStringFieldValue(DatHeader.FileNameKey) + post);
- dupeData.Header.SetFieldValue(Models.Metadata.Header.NameKey, dupeData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + post);
- dupeData.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, dupeData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + post);
- dupeData.ResetDictionary();
-
- watch.Stop();
-
- // Now, loop through the dictionary and populate the correct DATs
- watch.Start("Populating duplicate DAT");
-
-#if NET452_OR_GREATER || NETCOREAPP
- Parallel.ForEach(datFile.Items.SortedKeys, Core.Globals.ParallelOptions, key =>
-#elif NET40_OR_GREATER
- Parallel.ForEach(datFile.Items.SortedKeys, key =>
-#else
- foreach (var key in datFile.Items.SortedKeys)
-#endif
- {
- List items = Merge(datFile.GetItemsForBucket(key));
-
- // If the rom list is empty or null, just skip it
- if (items == null || items.Count == 0)
-#if NET40_OR_GREATER || NETCOREAPP
- return;
-#else
- continue;
-#endif
-
- // Loop through and add the items correctly
- foreach (DatItem item in items)
- {
-#if NET20 || NET35
- if ((item.GetFieldValue(DatItem.DupeTypeKey) & DupeType.External) != 0)
-#else
- if (item.GetFieldValue(DatItem.DupeTypeKey).HasFlag(DupeType.External))
-#endif
- {
- if (item.Clone() is not DatItem newrom)
- continue;
-
- if (item.GetFieldValue(DatItem.SourceKey) != null)
- newrom.GetFieldValue(DatItem.MachineKey)!.SetFieldValue(Models.Metadata.Machine.NameKey, newrom.GetFieldValue(DatItem.MachineKey)!.GetStringFieldValue(Models.Metadata.Machine.NameKey) + $" ({Path.GetFileNameWithoutExtension(inputs[item.GetFieldValue(DatItem.SourceKey)!.Index].CurrentPath)})");
-
- dupeData.AddItem(newrom, statsOnly: false);
- }
- }
-#if NET40_OR_GREATER || NETCOREAPP
- });
-#else
- }
-#endif
-
- watch.Stop();
-
- return dupeData;
- }
-
- ///
- /// Output duplicate item diff
- ///
- /// Current DatFile object to use for updating
- /// List of inputs to write out from
- public static DatFile DiffDuplicatesDB(DatFile datFile, List inputs)
- {
- var watch = new InternalStopwatch("Initializing duplicate DAT");
-
- // Fill in any information not in the base DAT
- if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
- datFile.Header.SetFieldValue(DatHeader.FileNameKey, "All DATs");
-
- if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
- datFile.Header.SetFieldValue(Models.Metadata.Header.NameKey, "datFile.All DATs");
-
- if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
- datFile.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, "datFile.All DATs");
-
- string post = " (Duplicates)";
- DatFile dupeData = CreateDatFile(datFile.Header, datFile.Modifiers);
- dupeData.Header.SetFieldValue(DatHeader.FileNameKey, dupeData.Header.GetStringFieldValue(DatHeader.FileNameKey) + post);
- dupeData.Header.SetFieldValue(Models.Metadata.Header.NameKey, dupeData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + post);
- dupeData.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, dupeData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + post);
- dupeData.ResetDictionary();
-
- watch.Stop();
-
- // Now, loop through the dictionary and populate the correct DATs
- watch.Start("Populating duplicate DAT");
-
- // Get all current items, machines, and mappings
- var datItems = datFile.ItemsDB.GetItems();
- var machines = datFile.GetMachinesDB();
- var sources = datFile.ItemsDB.GetSources();
-
- // Create mappings from old index to new index
- var machineRemapping = new Dictionary();
- var sourceRemapping = new Dictionary();
-
- // Loop through and add all sources
- foreach (var source in sources)
- {
- long newSourceIndex = dupeData.AddSourceDB(source.Value);
- sourceRemapping[source.Key] = newSourceIndex;
- }
-
- // Loop through and add all machines
- foreach (var machine in machines)
- {
- long newMachineIndex = dupeData.AddMachineDB(machine.Value);
- machineRemapping[machine.Key] = newMachineIndex;
- }
-
- // Loop through and add the items
-#if NET452_OR_GREATER || NETCOREAPP
- Parallel.ForEach(datItems, Core.Globals.ParallelOptions, item =>
-#elif NET40_OR_GREATER
- Parallel.ForEach(datItems, item =>
-#else
- foreach (var item in datItems)
-#endif
- {
- // Get the machine and source index for this item
- long machineIndex = datFile.ItemsDB.GetMachineForItem(item.Key).Key;
- long sourceIndex = datFile.ItemsDB.GetSourceForItem(item.Key).Key;
-
- // If the current item isn't an external duplicate
-#if NET20 || NET35
- if ((item.Value.GetFieldValue(DatItem.DupeTypeKey) & DupeType.External) == 0)
-#else
- if (!item.Value.GetFieldValue(DatItem.DupeTypeKey).HasFlag(DupeType.External))
-#endif
-#if NET40_OR_GREATER || NETCOREAPP
- return;
-#else
- continue;
-#endif
-
- // Get the current source and machine
- var currentSource = sources[sourceIndex];
- string? currentMachineName = machines[machineIndex].GetStringFieldValue(Models.Metadata.Machine.NameKey);
- var currentMachine = datFile.ItemsDB.GetMachine(currentMachineName);
- if (currentMachine.Value == null)
-#if NET40_OR_GREATER || NETCOREAPP
- return;
-#else
- continue;
-#endif
-
- // Get the source-specific machine
- string? renamedMachineName = $"{currentMachineName} ({Path.GetFileNameWithoutExtension(inputs[currentSource!.Index].CurrentPath)})";
- var renamedMachine = datFile.ItemsDB.GetMachine(renamedMachineName);
- if (renamedMachine.Value == null)
- {
- var newMachine = currentMachine.Value.Clone() as Machine;
- newMachine!.SetFieldValue(Models.Metadata.Machine.NameKey, renamedMachineName);
- long newMachineIndex = dupeData.AddMachineDB(newMachine!);
- renamedMachine = new KeyValuePair(newMachineIndex, newMachine);
- }
-
- dupeData.AddItemDB(item.Value, renamedMachine.Key, sourceRemapping[sourceIndex], statsOnly: false);
-#if NET40_OR_GREATER || NETCOREAPP
- });
-#else
- }
-#endif
-
- watch.Stop();
-
- return dupeData;
- }
-
- ///
- /// Output non-cascading diffs
- ///
- /// Current DatFile object to use for updating
- /// List of inputs to write out from
- public static List DiffIndividuals(DatFile datFile, List inputs)
- {
- List paths = inputs.ConvertAll(i => new ParentablePath(i));
- return DiffIndividuals(datFile, paths);
- //return DiffIndividualsDB(datFile, paths);
- }
-
- ///
- /// Output non-cascading diffs
- ///
- /// Current DatFile object to use for updating
- /// List of inputs to write out from
- public static List DiffIndividuals(DatFile datFile, List inputs)
- {
- InternalStopwatch watch = new("Initializing all individual DATs");
-
- // Fill in any information not in the base DAT
- if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
- datFile.Header.SetFieldValue(DatHeader.FileNameKey, "All DATs");
-
- if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
- datFile.Header.SetFieldValue(Models.Metadata.Header.NameKey, "All DATs");
-
- if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
- datFile.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, "All DATs");
-
- // Loop through each of the inputs and get or create a new DatData object
- DatFile[] outDatsArray = new DatFile[inputs.Count];
-
-#if NET452_OR_GREATER || NETCOREAPP
- Parallel.For(0, inputs.Count, Core.Globals.ParallelOptions, j =>
-#elif NET40_OR_GREATER
- Parallel.For(0, inputs.Count, j =>
-#else
- for (int j = 0; j < inputs.Count; j++)
-#endif
- {
- string innerpost = $" ({j} - {inputs[j].GetNormalizedFileName(true)} Only)";
- DatFile diffData = CreateDatFile(datFile.Header, datFile.Modifiers);
- diffData.Header.SetFieldValue(DatHeader.FileNameKey, diffData.Header.GetStringFieldValue(DatHeader.FileNameKey) + innerpost);
- diffData.Header.SetFieldValue(Models.Metadata.Header.NameKey, diffData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + innerpost);
- diffData.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, diffData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + innerpost);
- diffData.ResetDictionary();
- outDatsArray[j] = diffData;
-#if NET40_OR_GREATER || NETCOREAPP
- });
-#else
- }
-#endif
-
- // Create a list of DatData objects representing individual output files
- List outDats = [.. outDatsArray];
-
- watch.Stop();
-
- // Now, loop through the dictionary and populate the correct DATs
- watch.Start("Populating all individual DATs");
-
-#if NET452_OR_GREATER || NETCOREAPP
- Parallel.ForEach(datFile.Items.SortedKeys, Core.Globals.ParallelOptions, key =>
-#elif NET40_OR_GREATER
- Parallel.ForEach(datFile.Items.SortedKeys, key =>
-#else
- foreach (var key in datFile.Items.SortedKeys)
-#endif
- {
- List items = Merge(datFile.GetItemsForBucket(key));
-
- // If the rom list is empty or null, just skip it
- if (items == null || items.Count == 0)
-#if NET40_OR_GREATER || NETCOREAPP
- return;
-#else
- continue;
-#endif
-
- // Loop through and add the items correctly
- foreach (DatItem item in items)
- {
- if (item.GetFieldValue(DatItem.SourceKey) == null)
- continue;
-
-#if NET20 || NET35
- if ((item.GetFieldValue(DatItem.DupeTypeKey) & DupeType.Internal) != 0 || item.GetFieldValue(DatItem.DupeTypeKey) == 0x00)
-#else
- if (item.GetFieldValue(DatItem.DupeTypeKey).HasFlag(DupeType.Internal) || item.GetFieldValue(DatItem.DupeTypeKey) == 0x00)
-#endif
- outDats[item.GetFieldValue(DatItem.SourceKey)!.Index].AddItem(item, statsOnly: false);
- }
-#if NET40_OR_GREATER || NETCOREAPP
- });
-#else
- }
-#endif
-
- watch.Stop();
-
- return [.. outDats];
- }
-
- ///
- /// Output non-cascading diffs
- ///
- /// Current DatFile object to use for updating
- /// List of inputs to write out from
- public static List DiffIndividualsDB(DatFile datFile, List inputs)
- {
- InternalStopwatch watch = new("Initializing all individual DATs");
-
- // Fill in any information not in the base DAT
- if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
- datFile.Header.SetFieldValue(DatHeader.FileNameKey, "All DATs");
-
- if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
- datFile.Header.SetFieldValue(Models.Metadata.Header.NameKey, "All DATs");
-
- if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
- datFile.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, "All DATs");
-
- // Loop through each of the inputs and get or create a new DatData object
- DatFile[] outDatsArray = new DatFile[inputs.Count];
-
-#if NET452_OR_GREATER || NETCOREAPP
- Parallel.For(0, inputs.Count, Core.Globals.ParallelOptions, j =>
-#elif NET40_OR_GREATER
- Parallel.For(0, inputs.Count, j =>
-#else
- for (int j = 0; j < inputs.Count; j++)
-#endif
- {
- string innerpost = $" ({j} - {inputs[j].GetNormalizedFileName(true)} Only)";
- DatFile diffData = CreateDatFile(datFile.Header, datFile.Modifiers);
- diffData.Header.SetFieldValue(DatHeader.FileNameKey, diffData.Header.GetStringFieldValue(DatHeader.FileNameKey) + innerpost);
- diffData.Header.SetFieldValue(Models.Metadata.Header.NameKey, diffData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + innerpost);
- diffData.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, diffData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + innerpost);
- diffData.ResetDictionary();
- outDatsArray[j] = diffData;
-#if NET40_OR_GREATER || NETCOREAPP
- });
-#else
- }
-#endif
-
- // Create a list of DatData objects representing individual output files
- List outDats = [.. outDatsArray];
-
- watch.Stop();
-
- // Now, loop through the dictionary and populate the correct DATs
- watch.Start("Populating all individual DATs");
-
- // Get all current items, machines, and mappings
- var datItems = datFile.ItemsDB.GetItems();
- var machines = datFile.GetMachinesDB();
- var sources = datFile.ItemsDB.GetSources();
-
- // Create mappings from old index to new index
- var machineRemapping = new Dictionary();
- var sourceRemapping = new Dictionary();
-
- // Loop through and add all sources
- foreach (var source in sources)
- {
- long newSourceIndex = outDats[0].AddSourceDB(source.Value);
- sourceRemapping[source.Key] = newSourceIndex;
-
- for (int i = 1; i < outDats.Count; i++)
- {
- _ = outDats[i].AddSourceDB(source.Value);
- }
- }
-
- // Loop through and add all machines
- foreach (var machine in machines)
- {
- long newMachineIndex = outDats[0].AddMachineDB(machine.Value);
- machineRemapping[machine.Key] = newMachineIndex;
-
- for (int i = 1; i < outDats.Count; i++)
- {
- _ = outDats[i].AddMachineDB(machine.Value);
- }
- }
-
- // Loop through and add the items
-#if NET452_OR_GREATER || NETCOREAPP
- Parallel.ForEach(datItems, Core.Globals.ParallelOptions, item =>
-#elif NET40_OR_GREATER
- Parallel.ForEach(datItems, item =>
-#else
- foreach (var item in datItems)
-#endif
- {
- // Get the machine and source index for this item
- long machineIndex = datFile.ItemsDB.GetMachineForItem(item.Key).Key;
- long sourceIndex = datFile.ItemsDB.GetSourceForItem(item.Key).Key;
-
- // Get the source associated with the item
- var source = datFile.ItemsDB.GetSource(sourceIndex);
- if (source == null)
-#if NET40_OR_GREATER || NETCOREAPP
- return;
-#else
- continue;
-#endif
-
-#if NET20 || NET35
- if ((item.Value.GetFieldValue(DatItem.DupeTypeKey) & DupeType.Internal) != 0 || item.Value.GetFieldValue(DatItem.DupeTypeKey) == 0x00)
-#else
- if (item.Value.GetFieldValue(DatItem.DupeTypeKey).HasFlag(DupeType.Internal) || item.Value.GetFieldValue(DatItem.DupeTypeKey) == 0x00)
-#endif
- outDats[source.Index].AddItemDB(item.Value, machineRemapping[machineIndex], sourceRemapping[sourceIndex], statsOnly: false);
-#if NET40_OR_GREATER || NETCOREAPP
- });
-#else
- }
-#endif
-
- watch.Stop();
-
- return [.. outDats];
- }
-
- ///
- /// Output non-duplicate item diff
- ///
- /// Current DatFile object to use for updating
- /// List of inputs to write out from
- public static DatFile DiffNoDuplicates(DatFile datFile, List inputs)
- {
- List paths = inputs.ConvertAll(i => new ParentablePath(i));
- return DiffNoDuplicates(datFile, paths);
- //return DiffNoDuplicatesDB(datFile, paths);
- }
-
- ///
- /// Output non-duplicate item diff
- ///
- /// Current DatFile object to use for updating
- /// List of inputs to write out from
- public static DatFile DiffNoDuplicates(DatFile datFile, List inputs)
- {
- InternalStopwatch watch = new("Initializing no duplicate DAT");
-
- // Fill in any information not in the base DAT
- if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
- datFile.Header.SetFieldValue(DatHeader.FileNameKey, "All DATs");
-
- if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
- datFile.Header.SetFieldValue(Models.Metadata.Header.NameKey, "All DATs");
-
- if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
- datFile.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, "All DATs");
-
- string post = " (No Duplicates)";
- DatFile outerDiffData = CreateDatFile(datFile.Header, datFile.Modifiers);
- outerDiffData.Header.SetFieldValue(DatHeader.FileNameKey, outerDiffData.Header.GetStringFieldValue(DatHeader.FileNameKey) + post);
- outerDiffData.Header.SetFieldValue(Models.Metadata.Header.NameKey, outerDiffData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + post);
- outerDiffData.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, outerDiffData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + post);
- outerDiffData.ResetDictionary();
-
- watch.Stop();
-
- // Now, loop through the dictionary and populate the correct DATs
- watch.Start("Populating no duplicate DAT");
-
-#if NET452_OR_GREATER || NETCOREAPP
- Parallel.ForEach(datFile.Items.SortedKeys, Core.Globals.ParallelOptions, key =>
-#elif NET40_OR_GREATER
- Parallel.ForEach(datFile.Items.SortedKeys, key =>
-#else
- foreach (var key in datFile.Items.SortedKeys)
-#endif
- {
- List items = Merge(datFile.GetItemsForBucket(key));
-
- // If the rom list is empty or null, just skip it
- if (items == null || items.Count == 0)
-#if NET40_OR_GREATER || NETCOREAPP
- return;
-#else
- continue;
-#endif
-
- // Loop through and add the items correctly
- foreach (DatItem item in items)
- {
-#if NET20 || NET35
- if ((item.GetFieldValue(DatItem.DupeTypeKey) & DupeType.Internal) != 0 || item.GetFieldValue(DatItem.DupeTypeKey) == 0x00)
-#else
- if (item.GetFieldValue(DatItem.DupeTypeKey).HasFlag(DupeType.Internal) || item.GetFieldValue(DatItem.DupeTypeKey) == 0x00)
-#endif
- {
- if (item.Clone() is not DatItem newrom || newrom.GetFieldValue(DatItem.SourceKey) == null)
- continue;
-
- newrom.GetFieldValue(DatItem.MachineKey)!.SetFieldValue(Models.Metadata.Machine.NameKey, newrom.GetFieldValue(DatItem.MachineKey)!.GetStringFieldValue(Models.Metadata.Machine.NameKey) + $" ({Path.GetFileNameWithoutExtension(inputs[newrom.GetFieldValue(DatItem.SourceKey)!.Index].CurrentPath)})");
- outerDiffData.AddItem(newrom, statsOnly: false);
- }
- }
-#if NET40_OR_GREATER || NETCOREAPP
- });
-#else
- }
-#endif
-
- watch.Stop();
-
- return outerDiffData;
- }
-
- ///
- /// Output non-duplicate item diff
- ///
- /// Current DatFile object to use for updating
- /// List of inputs to write out from
- public static DatFile DiffNoDuplicatesDB(DatFile datFile, List inputs)
- {
- var watch = new InternalStopwatch("Initializing no duplicate DAT");
-
- // Fill in any information not in the base DAT
- if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
- datFile.Header.SetFieldValue(DatHeader.FileNameKey, "All DATs");
-
- if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
- datFile.Header.SetFieldValue(Models.Metadata.Header.NameKey, "All DATs");
-
- if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
- datFile.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, "All DATs");
-
- string post = " (No Duplicates)";
- DatFile outerDiffData = CreateDatFile(datFile.Header, datFile.Modifiers);
- outerDiffData.Header.SetFieldValue(DatHeader.FileNameKey, outerDiffData.Header.GetStringFieldValue(DatHeader.FileNameKey) + post);
- outerDiffData.Header.SetFieldValue(Models.Metadata.Header.NameKey, outerDiffData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + post);
- outerDiffData.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, outerDiffData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + post);
- outerDiffData.ResetDictionary();
-
- watch.Stop();
-
- // Now, loop through the dictionary and populate the correct DATs
- watch.Start("Populating no duplicate DAT");
-
- // Get all current items, machines, and mappings
- var datItems = datFile.ItemsDB.GetItems();
- var machines = datFile.GetMachinesDB();
- var sources = datFile.ItemsDB.GetSources();
-
- // Create mappings from old index to new index
- var machineRemapping = new Dictionary();
- var sourceRemapping = new Dictionary();
-
- // Loop through and add all sources
- foreach (var source in sources)
- {
- long newSourceIndex = outerDiffData.AddSourceDB(source.Value);
- sourceRemapping[source.Key] = newSourceIndex;
- }
-
- // Loop through and add all machines
- foreach (var machine in machines)
- {
- long newMachineIndex = outerDiffData.AddMachineDB(machine.Value);
- machineRemapping[machine.Key] = newMachineIndex;
- }
-
- // Loop through and add the items
-#if NET452_OR_GREATER || NETCOREAPP
- Parallel.ForEach(datItems, Core.Globals.ParallelOptions, item =>
-#elif NET40_OR_GREATER
- Parallel.ForEach(datItems, item =>
-#else
- foreach (var item in datItems)
-#endif
- {
- // Get the machine and source index for this item
- long machineIndex = datFile.ItemsDB.GetMachineForItem(item.Key).Key;
- long sourceIndex = datFile.ItemsDB.GetSourceForItem(item.Key).Key;
-
- // If the current item isn't a duplicate
-#if NET20 || NET35
- if ((item.Value.GetFieldValue(DatItem.DupeTypeKey) & DupeType.Internal) == 0 && item.Value.GetFieldValue(DatItem.DupeTypeKey) != 0x00)
-#else
- if (!item.Value.GetFieldValue(DatItem.DupeTypeKey).HasFlag(DupeType.Internal) && item.Value.GetFieldValue(DatItem.DupeTypeKey) != 0x00)
-#endif
-#if NET40_OR_GREATER || NETCOREAPP
- return;
-#else
- continue;
-#endif
-
- // Get the current source and machine
- var currentSource = sources[sourceIndex];
- string? currentMachineName = machines[machineIndex].GetStringFieldValue(Models.Metadata.Machine.NameKey);
- var currentMachine = datFile.ItemsDB.GetMachine(currentMachineName);
- if (currentMachine.Value == null)
-#if NET40_OR_GREATER || NETCOREAPP
- return;
-#else
- continue;
-#endif
-
- // Get the source-specific machine
- string? renamedMachineName = $"{currentMachineName} ({Path.GetFileNameWithoutExtension(inputs[currentSource!.Index].CurrentPath)})";
- var renamedMachine = datFile.ItemsDB.GetMachine(renamedMachineName);
- if (renamedMachine.Value == null)
- {
- var newMachine = currentMachine.Value.Clone() as Machine;
- newMachine!.SetFieldValue(Models.Metadata.Machine.NameKey, renamedMachineName);
- long newMachineIndex = outerDiffData.AddMachineDB(newMachine);
- renamedMachine = new KeyValuePair(newMachineIndex, newMachine);
- }
-
- outerDiffData.AddItemDB(item.Value, renamedMachine.Key, sourceRemapping[sourceIndex], statsOnly: false);
-#if NET40_OR_GREATER || NETCOREAPP
- });
-#else
- }
-#endif
-
- watch.Stop();
-
- return outerDiffData;
- }
-
- #endregion
-
#region Population
///
@@ -1592,104 +805,6 @@ namespace SabreTools.DatFiles
addFrom.ResetDictionary();
}
- ///
- /// Fill a DatFile with all items with a particular source index ID
- ///
- /// Current DatFile object to use for updating
- /// DatFile to add found items to
- /// Source index ID to retrieve items for
- /// DatFile containing all items with the source index ID/returns>
- private static void FillWithSourceIndex(DatFile datFile, DatFile indexDat, int index)
- {
- // Loop through and add the items for this index to the output
-#if NET452_OR_GREATER || NETCOREAPP
- Parallel.ForEach(datFile.Items.SortedKeys, Core.Globals.ParallelOptions, key =>
-#elif NET40_OR_GREATER
- Parallel.ForEach(datFile.Items.SortedKeys, key =>
-#else
- foreach (var key in datFile.Items.SortedKeys)
-#endif
- {
- List items = Merge(datFile.GetItemsForBucket(key));
-
- // If the rom list is empty or null, just skip it
- if (items == null || items.Count == 0)
-#if NET40_OR_GREATER || NETCOREAPP
- return;
-#else
- continue;
-#endif
-
- foreach (DatItem item in items)
- {
- var source = item.GetFieldValue(DatItem.SourceKey);
- if (source != null && source.Index == index)
- indexDat.AddItem(item, statsOnly: false);
- }
-#if NET40_OR_GREATER || NETCOREAPP
- });
-#else
- }
-#endif
- }
-
- ///
- /// Fill a DatFile with all items with a particular source index ID
- ///
- /// Current DatFile object to use for updating
- /// DatFile to add found items to
- /// Source index ID to retrieve items for
- /// DatFile containing all items with the source index ID/returns>
- private static void FillWithSourceIndexDB(DatFile datFile, DatFile indexDat, int index)
- {
- // Get all current items, machines, and mappings
- var datItems = datFile.ItemsDB.GetItems();
- var machines = datFile.GetMachinesDB();
- var sources = datFile.ItemsDB.GetSources();
-
- // Create mappings from old index to new index
- var machineRemapping = new Dictionary();
- var sourceRemapping = new Dictionary();
-
- // Loop through and add all sources
- foreach (var source in sources)
- {
- long newSourceIndex = indexDat.AddSourceDB(source.Value);
- sourceRemapping[source.Key] = newSourceIndex;
- }
-
- // Loop through and add all machines
- foreach (var machine in machines)
- {
- long newMachineIndex = indexDat.AddMachineDB(machine.Value);
- machineRemapping[machine.Key] = newMachineIndex;
- }
-
- // Loop through and add the items
-#if NET452_OR_GREATER || NETCOREAPP
- Parallel.ForEach(datItems, Core.Globals.ParallelOptions, item =>
-#elif NET40_OR_GREATER
- Parallel.ForEach(datItems, item =>
-#else
- foreach (var item in datItems)
-#endif
- {
- // Get the machine and source index for this item
- long machineIndex = datFile.ItemsDB.GetMachineForItem(item.Key).Key;
- long sourceIndex = datFile.ItemsDB.GetSourceForItem(item.Key).Key;
-
- // Get the source associated with the item
- var source = datFile.ItemsDB.GetSource(sourceIndex);
-
- if (source != null && source.Index == index)
- indexDat.AddItemDB(item.Value, machineRemapping[machineIndex], sourceRemapping[sourceIndex], statsOnly: false);
-#if NET40_OR_GREATER || NETCOREAPP
- });
-#else
- }
-#endif
- }
-
#endregion
}
}
diff --git a/SabreTools.DatTools/Diffing.cs b/SabreTools.DatTools/Diffing.cs
new file mode 100644
index 00000000..ef3c9841
--- /dev/null
+++ b/SabreTools.DatTools/Diffing.cs
@@ -0,0 +1,899 @@
+using System.Collections.Generic;
+using System.IO;
+#if NET40_OR_GREATER || NETCOREAPP
+using System.Threading.Tasks;
+#endif
+using SabreTools.DatFiles;
+using SabreTools.DatItems;
+using SabreTools.IO;
+using SabreTools.IO.Logging;
+
+namespace SabreTools.DatTools
+{
+ ///
+ /// This file represents all methods for diffing DatFiles
+ ///
+ public class Diffing
+ {
+ ///
+ /// Output diffs against a base set represented by the current DAT
+ ///
+ /// Current DatFile object to use for updating
+ /// DatFile to replace the values in
+ /// True to diff using games, false to use hashes
+ public static void Against(DatFile datFile, DatFile intDat, bool useGames)
+ {
+ InternalStopwatch watch = new($"Comparing '{intDat.Header.GetStringFieldValue(DatHeader.FileNameKey)}' to base DAT");
+
+ // For comparison's sake, we want to use the base bucketing
+ if (useGames)
+ {
+ intDat.BucketBy(ItemKey.Machine);
+ }
+ else
+ {
+ intDat.BucketBy(ItemKey.CRC);
+ intDat.Deduplicate();
+ }
+
+ // Then we compare against the base DAT
+#if NET452_OR_GREATER || NETCOREAPP
+ Parallel.ForEach(intDat.Items.SortedKeys, Core.Globals.ParallelOptions, key =>
+#elif NET40_OR_GREATER
+ Parallel.ForEach(intDat.Items.SortedKeys, key =>
+#else
+ foreach (var key in intDat.Items.SortedKeys)
+#endif
+ {
+ // Game Against uses game names
+ if (useGames)
+ {
+ // If the key is null, keep it
+ var intList = intDat.GetItemsForBucket(key);
+ if (intList.Count == 0)
+#if NET40_OR_GREATER || NETCOREAPP
+ return;
+#else
+ continue;
+#endif
+
+ // If the base DAT doesn't contain the key, keep it
+ var list = datFile.GetItemsForBucket(key);
+ if (list.Count == 0)
+#if NET40_OR_GREATER || NETCOREAPP
+ return;
+#else
+ continue;
+#endif
+
+ // If the number of items is different, then keep it
+ if (list.Count != intList.Count)
+#if NET40_OR_GREATER || NETCOREAPP
+ return;
+#else
+ continue;
+#endif
+
+ // Otherwise, compare by name and hash the remaining files
+ bool exactMatch = true;
+ foreach (DatItem item in intList)
+ {
+ // TODO: Make this granular to name as well
+ if (!list.Contains(item))
+ {
+ exactMatch = false;
+ break;
+ }
+ }
+
+ // If we have an exact match, remove the game
+ if (exactMatch)
+ intDat.RemoveBucket(key);
+ }
+
+ // Standard Against uses hashes
+ else
+ {
+ List<DatItem>? datItems = intDat.GetItemsForBucket(key);
+ if (datItems == null)
+#if NET40_OR_GREATER || NETCOREAPP
+ return;
+#else
+ continue;
+#endif
+
+ List<DatItem> keepDatItems = [];
+ foreach (DatItem datItem in datItems)
+ {
+ if (!datFile.HasDuplicates(datItem, true))
+ keepDatItems.Add(datItem);
+ }
+
+ // Now add the new list to the key
+ intDat.RemoveBucket(key);
+ keepDatItems.ForEach(item => intDat.AddItem(item, statsOnly: false));
+ }
+#if NET40_OR_GREATER || NETCOREAPP
+ });
+#else
+ }
+#endif
+
+ watch.Stop();
+ }
+
+ ///
+ /// Output cascading diffs
+ ///
+ /// Current DatFile object to use for updating
+ /// Dat headers used optionally
+ /// List of DatFiles representing the individually indexed items
+ public static List<DatFile> Cascade(DatFile datFile, List<DatHeader> datHeaders)
+ {
+ // Create a list of DatData objects representing output files
+ List<DatFile> outDats = [];
+
+ // Ensure the current DatFile is sorted optimally
+ datFile.BucketBy(ItemKey.CRC);
+
+ // Loop through each of the inputs and get or create a new DatData object
+ InternalStopwatch watch = new("Initializing and filling all output DATs");
+
+ // Create the DatFiles from the set of headers
+ DatFile[] outDatsArray = new DatFile[datHeaders.Count];
+#if NET452_OR_GREATER || NETCOREAPP
+ Parallel.For(0, datHeaders.Count, Core.Globals.ParallelOptions, j =>
+#elif NET40_OR_GREATER
+ Parallel.For(0, datHeaders.Count, j =>
+#else
+ for (int j = 0; j < datHeaders.Count; j++)
+#endif
+ {
+ DatFile diffData = DatFileTool.CreateDatFile(datHeaders[j], new DatModifiers());
+ diffData.ResetDictionary();
+ FillWithSourceIndex(datFile, diffData, j);
+ FillWithSourceIndexDB(datFile, diffData, j);
+ outDatsArray[j] = diffData;
+#if NET40_OR_GREATER || NETCOREAPP
+ });
+#else
+ }
+#endif
+
+ outDats = [.. outDatsArray];
+ watch.Stop();
+
+ return outDats;
+ }
+
+ ///
+ /// Output duplicate item diff
+ ///
+ /// Current DatFile object to use for updating
+ /// List of inputs to write out from
+ public static DatFile Duplicates(DatFile datFile, List<string> inputs)
+ {
+ List<ParentablePath> paths = inputs.ConvertAll(i => new ParentablePath(i));
+ return Duplicates(datFile, paths);
+ //return DuplicatesDB(datFile, paths);
+ }
+
+ ///
+ /// Output duplicate item diff
+ ///
+ /// Current DatFile object to use for updating
+ /// List of inputs to write out from
+ public static DatFile Duplicates(DatFile datFile, List<ParentablePath> inputs)
+ {
+ InternalStopwatch watch = new("Initializing duplicate DAT");
+
+ // Fill in any information not in the base DAT
+ if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
+ datFile.Header.SetFieldValue(DatHeader.FileNameKey, "All DATs");
+
+ if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
+ datFile.Header.SetFieldValue(Models.Metadata.Header.NameKey, "datFile.All DATs");
+
+ if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
+ datFile.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, "datFile.All DATs");
+
+ string post = " (Duplicates)";
+ DatFile dupeData = DatFileTool.CreateDatFile(datFile.Header, datFile.Modifiers);
+ dupeData.Header.SetFieldValue(DatHeader.FileNameKey, dupeData.Header.GetStringFieldValue(DatHeader.FileNameKey) + post);
+ dupeData.Header.SetFieldValue(Models.Metadata.Header.NameKey, dupeData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + post);
+ dupeData.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, dupeData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + post);
+ dupeData.ResetDictionary();
+
+ watch.Stop();
+
+ // Now, loop through the dictionary and populate the correct DATs
+ watch.Start("Populating duplicate DAT");
+
+#if NET452_OR_GREATER || NETCOREAPP
+ Parallel.ForEach(datFile.Items.SortedKeys, Core.Globals.ParallelOptions, key =>
+#elif NET40_OR_GREATER
+ Parallel.ForEach(datFile.Items.SortedKeys, key =>
+#else
+ foreach (var key in datFile.Items.SortedKeys)
+#endif
+ {
+ List<DatItem> items = DatFileTool.Merge(datFile.GetItemsForBucket(key));
+
+ // If the rom list is empty or null, just skip it
+ if (items == null || items.Count == 0)
+#if NET40_OR_GREATER || NETCOREAPP
+ return;
+#else
+ continue;
+#endif
+
+ // Loop through and add the items correctly
+ foreach (DatItem item in items)
+ {
+#if NET20 || NET35
+ if ((item.GetFieldValue(DatItem.DupeTypeKey) & DupeType.External) != 0)
+#else
+ if (item.GetFieldValue(DatItem.DupeTypeKey).HasFlag(DupeType.External))
+#endif
+ {
+ if (item.Clone() is not DatItem newrom)
+ continue;
+
+ if (item.GetFieldValue(DatItem.SourceKey) != null)
+ newrom.GetFieldValue(DatItem.MachineKey)!.SetFieldValue(Models.Metadata.Machine.NameKey, newrom.GetFieldValue(DatItem.MachineKey)!.GetStringFieldValue(Models.Metadata.Machine.NameKey) + $" ({Path.GetFileNameWithoutExtension(inputs[item.GetFieldValue(DatItem.SourceKey)!.Index].CurrentPath)})");
+
+ dupeData.AddItem(newrom, statsOnly: false);
+ }
+ }
+#if NET40_OR_GREATER || NETCOREAPP
+ });
+#else
+ }
+#endif
+
+ watch.Stop();
+
+ return dupeData;
+ }
+
+ ///
+ /// Output duplicate item diff
+ ///
+ /// Current DatFile object to use for updating
+ /// List of inputs to write out from
+ public static DatFile DuplicatesDB(DatFile datFile, List<ParentablePath> inputs)
+ {
+ var watch = new InternalStopwatch("Initializing duplicate DAT");
+
+ // Fill in any information not in the base DAT
+ if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
+ datFile.Header.SetFieldValue(DatHeader.FileNameKey, "All DATs");
+
+ if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
+ datFile.Header.SetFieldValue(Models.Metadata.Header.NameKey, "datFile.All DATs");
+
+ if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
+ datFile.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, "datFile.All DATs");
+
+ string post = " (Duplicates)";
+ DatFile dupeData = DatFileTool.CreateDatFile(datFile.Header, datFile.Modifiers);
+ dupeData.Header.SetFieldValue(DatHeader.FileNameKey, dupeData.Header.GetStringFieldValue(DatHeader.FileNameKey) + post);
+ dupeData.Header.SetFieldValue(Models.Metadata.Header.NameKey, dupeData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + post);
+ dupeData.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, dupeData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + post);
+ dupeData.ResetDictionary();
+
+ watch.Stop();
+
+ // Now, loop through the dictionary and populate the correct DATs
+ watch.Start("Populating duplicate DAT");
+
+ // Get all current items, machines, and mappings
+ var datItems = datFile.ItemsDB.GetItems();
+ var machines = datFile.GetMachinesDB();
+ var sources = datFile.ItemsDB.GetSources();
+
+ // Create mappings from old index to new index
+ var machineRemapping = new Dictionary<long, long>();
+ var sourceRemapping = new Dictionary<long, long>();
+
+ // Loop through and add all sources
+ foreach (var source in sources)
+ {
+ long newSourceIndex = dupeData.AddSourceDB(source.Value);
+ sourceRemapping[source.Key] = newSourceIndex;
+ }
+
+ // Loop through and add all machines
+ foreach (var machine in machines)
+ {
+ long newMachineIndex = dupeData.AddMachineDB(machine.Value);
+ machineRemapping[machine.Key] = newMachineIndex;
+ }
+
+ // Loop through and add the items
+#if NET452_OR_GREATER || NETCOREAPP
+ Parallel.ForEach(datItems, Core.Globals.ParallelOptions, item =>
+#elif NET40_OR_GREATER
+ Parallel.ForEach(datItems, item =>
+#else
+ foreach (var item in datItems)
+#endif
+ {
+ // Get the machine and source index for this item
+ long machineIndex = datFile.ItemsDB.GetMachineForItem(item.Key).Key;
+ long sourceIndex = datFile.ItemsDB.GetSourceForItem(item.Key).Key;
+
+ // If the current item isn't an external duplicate
+#if NET20 || NET35
+ if ((item.Value.GetFieldValue(DatItem.DupeTypeKey) & DupeType.External) == 0)
+#else
+ if (!item.Value.GetFieldValue(DatItem.DupeTypeKey).HasFlag(DupeType.External))
+#endif
+#if NET40_OR_GREATER || NETCOREAPP
+ return;
+#else
+ continue;
+#endif
+
+ // Get the current source and machine
+ var currentSource = sources[sourceIndex];
+ string? currentMachineName = machines[machineIndex].GetStringFieldValue(Models.Metadata.Machine.NameKey);
+ var currentMachine = datFile.ItemsDB.GetMachine(currentMachineName);
+ if (currentMachine.Value == null)
+#if NET40_OR_GREATER || NETCOREAPP
+ return;
+#else
+ continue;
+#endif
+
+ // Get the source-specific machine
+ string? renamedMachineName = $"{currentMachineName} ({Path.GetFileNameWithoutExtension(inputs[currentSource!.Index].CurrentPath)})";
+ var renamedMachine = datFile.ItemsDB.GetMachine(renamedMachineName);
+ if (renamedMachine.Value == null)
+ {
+ var newMachine = currentMachine.Value.Clone() as Machine;
+ newMachine!.SetFieldValue(Models.Metadata.Machine.NameKey, renamedMachineName);
+ long newMachineIndex = dupeData.AddMachineDB(newMachine!);
+ renamedMachine = new KeyValuePair<long, Machine?>(newMachineIndex, newMachine);
+ }
+
+ dupeData.AddItemDB(item.Value, renamedMachine.Key, sourceRemapping[sourceIndex], statsOnly: false);
+#if NET40_OR_GREATER || NETCOREAPP
+ });
+#else
+ }
+#endif
+
+ watch.Stop();
+
+ return dupeData;
+ }
+
+ ///
+ /// Output non-cascading diffs
+ ///
+ /// Current DatFile object to use for updating
+ /// List of inputs to write out from
+ public static List<DatFile> Individuals(DatFile datFile, List<string> inputs)
+ {
+ List<ParentablePath> paths = inputs.ConvertAll(i => new ParentablePath(i));
+ return Individuals(datFile, paths);
+ //return IndividualsDB(datFile, paths);
+ }
+
+ ///
+ /// Output non-cascading diffs
+ ///
+ /// Current DatFile object to use for updating
+ /// List of inputs to write out from
+ public static List<DatFile> Individuals(DatFile datFile, List<ParentablePath> inputs)
+ {
+ InternalStopwatch watch = new("Initializing all individual DATs");
+
+ // Fill in any information not in the base DAT
+ if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
+ datFile.Header.SetFieldValue(DatHeader.FileNameKey, "All DATs");
+
+ if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
+ datFile.Header.SetFieldValue(Models.Metadata.Header.NameKey, "All DATs");
+
+ if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
+ datFile.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, "All DATs");
+
+ // Loop through each of the inputs and get or create a new DatData object
+ DatFile[] outDatsArray = new DatFile[inputs.Count];
+
+#if NET452_OR_GREATER || NETCOREAPP
+ Parallel.For(0, inputs.Count, Core.Globals.ParallelOptions, j =>
+#elif NET40_OR_GREATER
+ Parallel.For(0, inputs.Count, j =>
+#else
+ for (int j = 0; j < inputs.Count; j++)
+#endif
+ {
+ string innerpost = $" ({j} - {inputs[j].GetNormalizedFileName(true)} Only)";
+ DatFile diffData = DatFileTool.CreateDatFile(datFile.Header, datFile.Modifiers);
+ diffData.Header.SetFieldValue(DatHeader.FileNameKey, diffData.Header.GetStringFieldValue(DatHeader.FileNameKey) + innerpost);
+ diffData.Header.SetFieldValue(Models.Metadata.Header.NameKey, diffData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + innerpost);
+ diffData.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, diffData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + innerpost);
+ diffData.ResetDictionary();
+ outDatsArray[j] = diffData;
+#if NET40_OR_GREATER || NETCOREAPP
+ });
+#else
+ }
+#endif
+
+ // Create a list of DatData objects representing individual output files
+ List<DatFile> outDats = [.. outDatsArray];
+
+ watch.Stop();
+
+ // Now, loop through the dictionary and populate the correct DATs
+ watch.Start("Populating all individual DATs");
+
+#if NET452_OR_GREATER || NETCOREAPP
+ Parallel.ForEach(datFile.Items.SortedKeys, Core.Globals.ParallelOptions, key =>
+#elif NET40_OR_GREATER
+ Parallel.ForEach(datFile.Items.SortedKeys, key =>
+#else
+ foreach (var key in datFile.Items.SortedKeys)
+#endif
+ {
+ List<DatItem> items = DatFileTool.Merge(datFile.GetItemsForBucket(key));
+
+ // If the rom list is empty or null, just skip it
+ if (items == null || items.Count == 0)
+#if NET40_OR_GREATER || NETCOREAPP
+ return;
+#else
+ continue;
+#endif
+
+ // Loop through and add the items correctly
+ foreach (DatItem item in items)
+ {
+ if (item.GetFieldValue(DatItem.SourceKey) == null)
+ continue;
+
+#if NET20 || NET35
+ if ((item.GetFieldValue(DatItem.DupeTypeKey) & DupeType.Internal) != 0 || item.GetFieldValue(DatItem.DupeTypeKey) == 0x00)
+#else
+ if (item.GetFieldValue(DatItem.DupeTypeKey).HasFlag(DupeType.Internal) || item.GetFieldValue(DatItem.DupeTypeKey) == 0x00)
+#endif
+ outDats[item.GetFieldValue(DatItem.SourceKey)!.Index].AddItem(item, statsOnly: false);
+ }
+#if NET40_OR_GREATER || NETCOREAPP
+ });
+#else
+ }
+#endif
+
+ watch.Stop();
+
+ return [.. outDats];
+ }
+
+ ///
+ /// Output non-cascading diffs
+ ///
+ /// Current DatFile object to use for updating
+ /// List of inputs to write out from
+ public static List<DatFile> IndividualsDB(DatFile datFile, List<ParentablePath> inputs)
+ {
+ InternalStopwatch watch = new("Initializing all individual DATs");
+
+ // Fill in any information not in the base DAT
+ if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
+ datFile.Header.SetFieldValue(DatHeader.FileNameKey, "All DATs");
+
+ if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
+ datFile.Header.SetFieldValue(Models.Metadata.Header.NameKey, "All DATs");
+
+ if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
+ datFile.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, "All DATs");
+
+ // Loop through each of the inputs and get or create a new DatData object
+ DatFile[] outDatsArray = new DatFile[inputs.Count];
+
+#if NET452_OR_GREATER || NETCOREAPP
+ Parallel.For(0, inputs.Count, Core.Globals.ParallelOptions, j =>
+#elif NET40_OR_GREATER
+ Parallel.For(0, inputs.Count, j =>
+#else
+ for (int j = 0; j < inputs.Count; j++)
+#endif
+ {
+ string innerpost = $" ({j} - {inputs[j].GetNormalizedFileName(true)} Only)";
+ DatFile diffData = DatFileTool.CreateDatFile(datFile.Header, datFile.Modifiers);
+ diffData.Header.SetFieldValue(DatHeader.FileNameKey, diffData.Header.GetStringFieldValue(DatHeader.FileNameKey) + innerpost);
+ diffData.Header.SetFieldValue(Models.Metadata.Header.NameKey, diffData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + innerpost);
+ diffData.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, diffData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + innerpost);
+ diffData.ResetDictionary();
+ outDatsArray[j] = diffData;
+#if NET40_OR_GREATER || NETCOREAPP
+ });
+#else
+ }
+#endif
+
+ // Create a list of DatData objects representing individual output files
+ List<DatFile> outDats = [.. outDatsArray];
+
+ watch.Stop();
+
+ // Now, loop through the dictionary and populate the correct DATs
+ watch.Start("Populating all individual DATs");
+
+ // Get all current items, machines, and mappings
+ var datItems = datFile.ItemsDB.GetItems();
+ var machines = datFile.GetMachinesDB();
+ var sources = datFile.ItemsDB.GetSources();
+
+ // Create mappings from old index to new index
+ var machineRemapping = new Dictionary<long, long>();
+ var sourceRemapping = new Dictionary<long, long>();
+
+ // Loop through and add all sources
+ foreach (var source in sources)
+ {
+ long newSourceIndex = outDats[0].AddSourceDB(source.Value);
+ sourceRemapping[source.Key] = newSourceIndex;
+
+ for (int i = 1; i < outDats.Count; i++)
+ {
+ _ = outDats[i].AddSourceDB(source.Value);
+ }
+ }
+
+ // Loop through and add all machines
+ foreach (var machine in machines)
+ {
+ long newMachineIndex = outDats[0].AddMachineDB(machine.Value);
+ machineRemapping[machine.Key] = newMachineIndex;
+
+ for (int i = 1; i < outDats.Count; i++)
+ {
+ _ = outDats[i].AddMachineDB(machine.Value);
+ }
+ }
+
+ // Loop through and add the items
+#if NET452_OR_GREATER || NETCOREAPP
+ Parallel.ForEach(datItems, Core.Globals.ParallelOptions, item =>
+#elif NET40_OR_GREATER
+ Parallel.ForEach(datItems, item =>
+#else
+ foreach (var item in datItems)
+#endif
+ {
+ // Get the machine and source index for this item
+ long machineIndex = datFile.ItemsDB.GetMachineForItem(item.Key).Key;
+ long sourceIndex = datFile.ItemsDB.GetSourceForItem(item.Key).Key;
+
+ // Get the source associated with the item
+ var source = datFile.ItemsDB.GetSource(sourceIndex);
+ if (source == null)
+#if NET40_OR_GREATER || NETCOREAPP
+ return;
+#else
+ continue;
+#endif
+
+#if NET20 || NET35
+ if ((item.Value.GetFieldValue(DatItem.DupeTypeKey) & DupeType.Internal) != 0 || item.Value.GetFieldValue(DatItem.DupeTypeKey) == 0x00)
+#else
+ if (item.Value.GetFieldValue(DatItem.DupeTypeKey).HasFlag(DupeType.Internal) || item.Value.GetFieldValue(DatItem.DupeTypeKey) == 0x00)
+#endif
+ outDats[source.Index].AddItemDB(item.Value, machineRemapping[machineIndex], sourceRemapping[sourceIndex], statsOnly: false);
+#if NET40_OR_GREATER || NETCOREAPP
+ });
+#else
+ }
+#endif
+
+ watch.Stop();
+
+ return [.. outDats];
+ }
+
+ ///
+ /// Output non-duplicate item diff
+ ///
+ /// Current DatFile object to use for updating
+ /// List of inputs to write out from
+ public static DatFile NoDuplicates(DatFile datFile, List<string> inputs)
+ {
+ List<ParentablePath> paths = inputs.ConvertAll(i => new ParentablePath(i));
+ return NoDuplicates(datFile, paths);
+ //return NoDuplicatesDB(datFile, paths);
+ }
+
+ ///
+ /// Output non-duplicate item diff
+ ///
+ /// Current DatFile object to use for updating
+ /// List of inputs to write out from
+ public static DatFile NoDuplicates(DatFile datFile, List<ParentablePath> inputs)
+ {
+ InternalStopwatch watch = new("Initializing no duplicate DAT");
+
+ // Fill in any information not in the base DAT
+ if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
+ datFile.Header.SetFieldValue(DatHeader.FileNameKey, "All DATs");
+
+ if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
+ datFile.Header.SetFieldValue(Models.Metadata.Header.NameKey, "All DATs");
+
+ if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
+ datFile.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, "All DATs");
+
+ string post = " (No Duplicates)";
+ DatFile outerDiffData = DatFileTool.CreateDatFile(datFile.Header, datFile.Modifiers);
+ outerDiffData.Header.SetFieldValue(DatHeader.FileNameKey, outerDiffData.Header.GetStringFieldValue(DatHeader.FileNameKey) + post);
+ outerDiffData.Header.SetFieldValue(Models.Metadata.Header.NameKey, outerDiffData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + post);
+ outerDiffData.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, outerDiffData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + post);
+ outerDiffData.ResetDictionary();
+
+ watch.Stop();
+
+ // Now, loop through the dictionary and populate the correct DATs
+ watch.Start("Populating no duplicate DAT");
+
+#if NET452_OR_GREATER || NETCOREAPP
+ Parallel.ForEach(datFile.Items.SortedKeys, Core.Globals.ParallelOptions, key =>
+#elif NET40_OR_GREATER
+ Parallel.ForEach(datFile.Items.SortedKeys, key =>
+#else
+ foreach (var key in datFile.Items.SortedKeys)
+#endif
+ {
+ List<DatItem> items = DatFileTool.Merge(datFile.GetItemsForBucket(key));
+
+ // If the rom list is empty or null, just skip it
+ if (items == null || items.Count == 0)
+#if NET40_OR_GREATER || NETCOREAPP
+ return;
+#else
+ continue;
+#endif
+
+ // Loop through and add the items correctly
+ foreach (DatItem item in items)
+ {
+#if NET20 || NET35
+ if ((item.GetFieldValue(DatItem.DupeTypeKey) & DupeType.Internal) != 0 || item.GetFieldValue(DatItem.DupeTypeKey) == 0x00)
+#else
+ if (item.GetFieldValue(DatItem.DupeTypeKey).HasFlag(DupeType.Internal) || item.GetFieldValue(DatItem.DupeTypeKey) == 0x00)
+#endif
+ {
+ if (item.Clone() is not DatItem newrom || newrom.GetFieldValue(DatItem.SourceKey) == null)
+ continue;
+
+ newrom.GetFieldValue(DatItem.MachineKey)!.SetFieldValue(Models.Metadata.Machine.NameKey, newrom.GetFieldValue(DatItem.MachineKey)!.GetStringFieldValue(Models.Metadata.Machine.NameKey) + $" ({Path.GetFileNameWithoutExtension(inputs[newrom.GetFieldValue(DatItem.SourceKey)!.Index].CurrentPath)})");
+ outerDiffData.AddItem(newrom, statsOnly: false);
+ }
+ }
+#if NET40_OR_GREATER || NETCOREAPP
+ });
+#else
+ }
+#endif
+
+ watch.Stop();
+
+ return outerDiffData;
+ }
+
+ ///
+ /// Output non-duplicate item diff
+ ///
+ /// Current DatFile object to use for updating
+ /// List of inputs to write out from
+ public static DatFile NoDuplicatesDB(DatFile datFile, List<ParentablePath> inputs)
+ {
+ var watch = new InternalStopwatch("Initializing no duplicate DAT");
+
+ // Fill in any information not in the base DAT
+ if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
+ datFile.Header.SetFieldValue(DatHeader.FileNameKey, "All DATs");
+
+ if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
+ datFile.Header.SetFieldValue(Models.Metadata.Header.NameKey, "All DATs");
+
+ if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
+ datFile.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, "All DATs");
+
+ string post = " (No Duplicates)";
+ DatFile outerDiffData = DatFileTool.CreateDatFile(datFile.Header, datFile.Modifiers);
+ outerDiffData.Header.SetFieldValue(DatHeader.FileNameKey, outerDiffData.Header.GetStringFieldValue(DatHeader.FileNameKey) + post);
+ outerDiffData.Header.SetFieldValue(Models.Metadata.Header.NameKey, outerDiffData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + post);
+ outerDiffData.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, outerDiffData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + post);
+ outerDiffData.ResetDictionary();
+
+ watch.Stop();
+
+ // Now, loop through the dictionary and populate the correct DATs
+ watch.Start("Populating no duplicate DAT");
+
+ // Get all current items, machines, and mappings
+ var datItems = datFile.ItemsDB.GetItems();
+ var machines = datFile.GetMachinesDB();
+ var sources = datFile.ItemsDB.GetSources();
+
+ // Create mappings from old index to new index
+ var machineRemapping = new Dictionary<long, long>();
+ var sourceRemapping = new Dictionary<long, long>();
+
+ // Loop through and add all sources
+ foreach (var source in sources)
+ {
+ long newSourceIndex = outerDiffData.AddSourceDB(source.Value);
+ sourceRemapping[source.Key] = newSourceIndex;
+ }
+
+ // Loop through and add all machines
+ foreach (var machine in machines)
+ {
+ long newMachineIndex = outerDiffData.AddMachineDB(machine.Value);
+ machineRemapping[machine.Key] = newMachineIndex;
+ }
+
+ // Loop through and add the items
+#if NET452_OR_GREATER || NETCOREAPP
+ Parallel.ForEach(datItems, Core.Globals.ParallelOptions, item =>
+#elif NET40_OR_GREATER
+ Parallel.ForEach(datItems, item =>
+#else
+ foreach (var item in datItems)
+#endif
+ {
+ // Get the machine and source index for this item
+ long machineIndex = datFile.ItemsDB.GetMachineForItem(item.Key).Key;
+ long sourceIndex = datFile.ItemsDB.GetSourceForItem(item.Key).Key;
+
+ // If the current item isn't a duplicate
+#if NET20 || NET35
+ if ((item.Value.GetFieldValue(DatItem.DupeTypeKey) & DupeType.Internal) == 0 && item.Value.GetFieldValue(DatItem.DupeTypeKey) != 0x00)
+#else
+ if (!item.Value.GetFieldValue(DatItem.DupeTypeKey).HasFlag(DupeType.Internal) && item.Value.GetFieldValue(DatItem.DupeTypeKey) != 0x00)
+#endif
+#if NET40_OR_GREATER || NETCOREAPP
+ return;
+#else
+ continue;
+#endif
+
+ // Get the current source and machine
+ var currentSource = sources[sourceIndex];
+ string? currentMachineName = machines[machineIndex].GetStringFieldValue(Models.Metadata.Machine.NameKey);
+ var currentMachine = datFile.ItemsDB.GetMachine(currentMachineName);
+ if (currentMachine.Value == null)
+#if NET40_OR_GREATER || NETCOREAPP
+ return;
+#else
+ continue;
+#endif
+
+ // Get the source-specific machine
+ string? renamedMachineName = $"{currentMachineName} ({Path.GetFileNameWithoutExtension(inputs[currentSource!.Index].CurrentPath)})";
+ var renamedMachine = datFile.ItemsDB.GetMachine(renamedMachineName);
+ if (renamedMachine.Value == null)
+ {
+ var newMachine = currentMachine.Value.Clone() as Machine;
+ newMachine!.SetFieldValue(Models.Metadata.Machine.NameKey, renamedMachineName);
+ long newMachineIndex = outerDiffData.AddMachineDB(newMachine);
+ renamedMachine = new KeyValuePair<long, Machine?>(newMachineIndex, newMachine);
+ }
+
+ outerDiffData.AddItemDB(item.Value, renamedMachine.Key, sourceRemapping[sourceIndex], statsOnly: false);
+#if NET40_OR_GREATER || NETCOREAPP
+ });
+#else
+ }
+#endif
+
+ watch.Stop();
+
+ return outerDiffData;
+ }
+
+ ///
+ /// Fill a DatFile with all items with a particular source index ID
+ ///
+ /// Current DatFile object to use for updating
+ /// DatFile to add found items to
+ /// Source index ID to retrieve items for
+ /// <returns>DatFile containing all items with the source index ID</returns>
+ private static void FillWithSourceIndex(DatFile datFile, DatFile indexDat, int index)
+ {
+ // Loop through and add the items for this index to the output
+#if NET452_OR_GREATER || NETCOREAPP
+ Parallel.ForEach(datFile.Items.SortedKeys, Core.Globals.ParallelOptions, key =>
+#elif NET40_OR_GREATER
+ Parallel.ForEach(datFile.Items.SortedKeys, key =>
+#else
+ foreach (var key in datFile.Items.SortedKeys)
+#endif
+ {
+ List<DatItem> items = DatFileTool.Merge(datFile.GetItemsForBucket(key));
+
+ // If the rom list is empty or null, just skip it
+ if (items == null || items.Count == 0)
+#if NET40_OR_GREATER || NETCOREAPP
+ return;
+#else
+ continue;
+#endif
+
+ foreach (DatItem item in items)
+ {
+ var source = item.GetFieldValue(DatItem.SourceKey);
+ if (source != null && source.Index == index)
+ indexDat.AddItem(item, statsOnly: false);
+ }
+#if NET40_OR_GREATER || NETCOREAPP
+ });
+#else
+ }
+#endif
+ }
+
+ ///
+ /// Fill a DatFile with all items with a particular source index ID
+ ///
+ /// Current DatFile object to use for updating
+ /// DatFile to add found items to
+ /// Source index ID to retrieve items for
+ /// <returns>DatFile containing all items with the source index ID</returns>
+ private static void FillWithSourceIndexDB(DatFile datFile, DatFile indexDat, int index)
+ {
+ // Get all current items, machines, and mappings
+ var datItems = datFile.ItemsDB.GetItems();
+ var machines = datFile.GetMachinesDB();
+ var sources = datFile.ItemsDB.GetSources();
+
+ // Create mappings from old index to new index
+ var machineRemapping = new Dictionary();
+ var sourceRemapping = new Dictionary();
+
+ // Loop through and add all sources
+ foreach (var source in sources)
+ {
+ long newSourceIndex = indexDat.AddSourceDB(source.Value);
+ sourceRemapping[source.Key] = newSourceIndex;
+ }
+
+ // Loop through and add all machines
+ foreach (var machine in machines)
+ {
+ long newMachineIndex = indexDat.AddMachineDB(machine.Value);
+ machineRemapping[machine.Key] = newMachineIndex;
+ }
+
+ // Loop through and add the items
+#if NET452_OR_GREATER || NETCOREAPP
+ Parallel.ForEach(datItems, Core.Globals.ParallelOptions, item =>
+#elif NET40_OR_GREATER
+ Parallel.ForEach(datItems, item =>
+#else
+ foreach (var item in datItems)
+#endif
+ {
+ // Get the machine and source index for this item
+ long machineIndex = datFile.ItemsDB.GetMachineForItem(item.Key).Key;
+ long sourceIndex = datFile.ItemsDB.GetSourceForItem(item.Key).Key;
+
+ // Get the source associated with the item
+ var source = datFile.ItemsDB.GetSource(sourceIndex);
+
+ if (source != null && source.Index == index)
+ indexDat.AddItemDB(item.Value, machineRemapping[machineIndex], sourceRemapping[sourceIndex], statsOnly: false);
+#if NET40_OR_GREATER || NETCOREAPP
+ });
+#else
+ }
+#endif
+ }
+ }
+}
\ No newline at end of file
diff --git a/SabreTools/Features/Update.cs b/SabreTools/Features/Update.cs
index 4cb118db..a1d15639 100644
--- a/SabreTools/Features/Update.cs
+++ b/SabreTools/Features/Update.cs
@@ -256,8 +256,8 @@ namespace SabreTools.Features
if (updateMode.HasFlag(UpdateMode.DiffDupesOnly))
#endif
{
- DatFile dupeData = DatFileTool.DiffDuplicates(userInputDat, inputPaths);
- //DatFile dupeData = DatFileTool.DiffDuplicatesDB(userInputDat, inputPaths);
+ DatFile dupeData = Diffing.Duplicates(userInputDat, inputPaths);
+ //DatFile dupeData = Diffing.DuplicatesDB(userInputDat, inputPaths);
InternalStopwatch watch = new("Outputting duplicate DAT");
Writer.Write(dupeData, OutputDir, overwrite: false);
@@ -271,8 +271,8 @@ namespace SabreTools.Features
if (updateMode.HasFlag(UpdateMode.DiffNoDupesOnly))
#endif
{
- DatFile outerDiffData = DatFileTool.DiffNoDuplicates(userInputDat, inputPaths);
- //DatFile outerDiffData = DatFileTool.DiffNoDuplicatesDB(userInputDat, inputPaths);
+ DatFile outerDiffData = Diffing.NoDuplicates(userInputDat, inputPaths);
+ //DatFile outerDiffData = Diffing.NoDuplicatesDB(userInputDat, inputPaths);
InternalStopwatch watch = new("Outputting no duplicate DAT");
Writer.Write(outerDiffData, OutputDir, overwrite: false);
@@ -287,8 +287,8 @@ namespace SabreTools.Features
#endif
{
// Get all of the output DatFiles
- List datFiles = DatFileTool.DiffIndividuals(userInputDat, inputPaths);
- //List datFiles = DatFileTool.DiffIndividualsDB(userInputDat, inputPaths);
+ List<DatFile> datFiles = Diffing.Individuals(userInputDat, inputPaths);
+ //List<DatFile> datFiles = Diffing.IndividualsDB(userInputDat, inputPaths);
// Loop through and output the new DatFiles
InternalStopwatch watch = new("Outputting all individual DATs");
@@ -347,7 +347,7 @@ namespace SabreTools.Features
#endif
// Get all of the output DatFiles
- List datFiles = DatFileTool.DiffCascade(userInputDat, datHeaders);
+ List<DatFile> datFiles = Diffing.Cascade(userInputDat, datHeaders);
// Loop through and output the new DatFiles
InternalStopwatch watch = new("Outputting all created DATs");
@@ -403,7 +403,7 @@ namespace SabreTools.Features
Remover.ApplyRemovals(repDat);
// Now replace the fields from the base DatFile
- DatFileTool.DiffAgainst(userInputDat, repDat, GetBoolean(Features, ByGameValue));
+ Diffing.Against(userInputDat, repDat, GetBoolean(Features, ByGameValue));
// Finally output the diffed DatFile
string interOutDir = inputPath.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue))!;