diff --git a/SabreTools.DatFiles/DatTool.Updating.cs b/SabreTools.DatFiles/DatTool.Updating.cs
deleted file mode 100644
index ddab8090..00000000
--- a/SabreTools.DatFiles/DatTool.Updating.cs
+++ /dev/null
@@ -1,494 +0,0 @@
-using System.Collections.Generic;
-using System.IO;
-using System.Linq;
-using System.Threading.Tasks;
-
-using SabreTools.Core;
-using SabreTools.DatItems;
-using SabreTools.IO;
-using SabreTools.Logging;
-
-// This file represents all methods related to converting and updating DatFiles
-namespace SabreTools.DatFiles
-{
- public partial class DatTool
- {
- /// <summary>
- /// Replace item values from the base set represented by the current DAT
- /// </summary>
- /// <param name="datFile">Current DatFile object to use for updating</param>
- /// <param name="intDat">DatFile to replace the values in</param>
- /// <param name="updateFields">List of Fields representing what should be updated</param>
- /// <param name="onlySame">True if descriptions should only be replaced if the game name is the same, false otherwise</param>
- public static void BaseReplace(DatFile datFile, DatFile intDat, List<Field> updateFields, bool onlySame)
- {
- logger.User($"Replacing items in '{intDat.Header.FileName}' from the base DAT");
-
- // If we are matching based on DatItem fields of any sort
- if (updateFields.Intersect(DatItem.DatItemFields).Any())
- {
- // For comparison's sake, we want to use CRC as the base bucketing
- datFile.Items.BucketBy(Field.DatItem_CRC, DedupeType.Full);
- intDat.Items.BucketBy(Field.DatItem_CRC, DedupeType.None);
-
- // Then we do a hashwise comparison against the base DAT
- Parallel.ForEach(intDat.Items.Keys, Globals.ParallelOptions, key =>
- {
- List<DatItem> datItems = intDat.Items[key];
- List<DatItem> newDatItems = new List<DatItem>();
- foreach (DatItem datItem in datItems)
- {
- List<DatItem> dupes = datFile.Items.GetDuplicates(datItem, sorted: true);
- DatItem newDatItem = datItem.Clone() as DatItem;
-
- // Replace fields from the first duplicate, if we have one
- if (dupes.Count > 0)
- newDatItem.ReplaceFields(dupes.First(), updateFields);
-
- newDatItems.Add(newDatItem);
- }
-
- // Now add the new list to the key
- intDat.Items.Remove(key);
- intDat.Items.AddRange(key, newDatItems);
- });
- }
-
- // If we are matching based on Machine fields of any sort
- if (updateFields.Intersect(DatItem.MachineFields).Any())
- {
- // For comparison's sake, we want to use Machine Name as the base bucketing
- datFile.Items.BucketBy(Field.Machine_Name, DedupeType.Full);
- intDat.Items.BucketBy(Field.Machine_Name, DedupeType.None);
-
- // Then we do a namewise comparison against the base DAT
- Parallel.ForEach(intDat.Items.Keys, Globals.ParallelOptions, key =>
- {
- List<DatItem> datItems = intDat.Items[key];
- List<DatItem> newDatItems = new List<DatItem>();
- foreach (DatItem datItem in datItems)
- {
- DatItem newDatItem = datItem.Clone() as DatItem;
- if (datFile.Items.ContainsKey(key) && datFile.Items[key].Count() > 0)
- newDatItem.Machine.ReplaceFields(datFile.Items[key][0].Machine, updateFields, onlySame);
-
- newDatItems.Add(newDatItem);
- }
-
- // Now add the new list to the key
- intDat.Items.Remove(key);
- intDat.Items.AddRange(key, newDatItems);
- });
- }
- }
-
- /// <summary>
- /// Output diffs against a base set represented by the current DAT
- /// </summary>
- /// <param name="datFile">Current DatFile object to use for updating</param>
- /// <param name="intDat">DatFile to diff against the base DAT</param>
- /// <param name="useGames">True to diff using games, false to use hashes</param>
- public static void DiffAgainst(DatFile datFile, DatFile intDat, bool useGames)
- {
- // For comparison's sake, we want to use a base ordering
- if (useGames)
- datFile.Items.BucketBy(Field.Machine_Name, DedupeType.None);
- else
- datFile.Items.BucketBy(Field.DatItem_CRC, DedupeType.None);
-
- logger.User($"Comparing '{intDat.Header.FileName}' to base DAT");
-
- // For comparison's sake, we want to use the base bucketing
- if (useGames)
- intDat.Items.BucketBy(Field.Machine_Name, DedupeType.None);
- else
- intDat.Items.BucketBy(Field.DatItem_CRC, DedupeType.Full);
-
- // Then we compare against the base DAT
- List<string> keys = intDat.Items.Keys.ToList();
- Parallel.ForEach(keys, Globals.ParallelOptions, key =>
- {
- // Game Against uses game names
- if (useGames)
- {
- // If the base DAT doesn't contain the key, keep it
- if (!datFile.Items.ContainsKey(key))
- return;
-
- // If the number of items is different, then keep it
- if (datFile.Items[key].Count != intDat.Items[key].Count)
- return;
-
- // Otherwise, compare by name and hash the remaining files
- bool exactMatch = true;
- foreach (DatItem item in intDat.Items[key])
- {
- // TODO: Make this granular to name as well
- if (!datFile.Items[key].Contains(item))
- {
- exactMatch = false;
- break;
- }
- }
-
- // If we have an exact match, remove the game
- if (exactMatch)
- intDat.Items.Remove(key);
- }
-
- // Standard Against uses hashes
- else
- {
- List<DatItem> datItems = intDat.Items[key];
- List<DatItem> keepDatItems = new List<DatItem>();
- foreach (DatItem datItem in datItems)
- {
- if (!datFile.Items.HasDuplicates(datItem, true))
- keepDatItems.Add(datItem);
- }
-
- // Now add the new list to the key
- intDat.Items.Remove(key);
- intDat.Items.AddRange(key, keepDatItems);
- }
- });
- }
-
- /// <summary>
- /// Output cascading diffs
- /// </summary>
- /// <param name="datFile">Current DatFile object to use for updating</param>
- /// <param name="datHeaders">Dat headers used optionally</param>
- /// <returns>List of DatFiles representing the individually indexed items</returns>
- public static List<DatFile> DiffCascade(DatFile datFile, List<DatHeader> datHeaders)
- {
- // Create a list of DatData objects representing output files
- List<DatFile> outDats = new List<DatFile>();
-
- // Ensure the current DatFile is sorted optimally
- datFile.Items.BucketBy(Field.DatItem_CRC, DedupeType.None);
-
- // Loop through each of the inputs and get or create a new DatData object
- InternalStopwatch watch = new InternalStopwatch("Initializing and filling all output DATs");
-
- // Create the DatFiles from the set of headers
- DatFile[] outDatsArray = new DatFile[datHeaders.Count];
- Parallel.For(0, datHeaders.Count, Globals.ParallelOptions, j =>
- {
- DatFile diffData = DatFile.Create(datHeaders[j]);
- diffData.Items = new ItemDictionary();
- FillWithSourceIndex(datFile, diffData, j);
- outDatsArray[j] = diffData;
- });
-
- outDats = outDatsArray.ToList();
- watch.Stop();
-
- return outDats;
- }
-
- /// <summary>
- /// Output duplicate item diff
- /// </summary>
- /// <param name="datFile">Current DatFile object to use for updating</param>
- /// <param name="inputs">List of inputs to write out from</param>
- public static DatFile DiffDuplicates(DatFile datFile, List<string> inputs)
- {
- List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
- return DiffDuplicates(datFile, paths);
- }
-
- /// <summary>
- /// Output duplicate item diff
- /// </summary>
- /// <param name="datFile">Current DatFile object to use for updating</param>
- /// <param name="inputs">List of inputs to write out from</param>
- public static DatFile DiffDuplicates(DatFile datFile, List<ParentablePath> inputs)
- {
- InternalStopwatch watch = new InternalStopwatch("Initializing duplicate DAT");
-
- // Fill in any information not in the base DAT
- if (string.IsNullOrWhiteSpace(datFile.Header.FileName))
- datFile.Header.FileName = "All DATs";
-
- if (string.IsNullOrWhiteSpace(datFile.Header.Name))
- datFile.Header.Name = "datFile.All DATs";
-
- if (string.IsNullOrWhiteSpace(datFile.Header.Description))
- datFile.Header.Description = "datFile.All DATs";
-
- string post = " (Duplicates)";
- DatFile dupeData = DatFile.Create(datFile.Header);
- dupeData.Header.FileName += post;
- dupeData.Header.Name += post;
- dupeData.Header.Description += post;
- dupeData.Items = new ItemDictionary();
-
- watch.Stop();
-
- // Now, loop through the dictionary and populate the correct DATs
- watch.Start("Populating duplicate DAT");
-
- Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
- {
- List<DatItem> items = DatItem.Merge(datFile.Items[key]);
-
- // If the rom list is empty or null, just skip it
- if (items == null || items.Count == 0)
- return;
-
- // Loop through and add the items correctly
- foreach (DatItem item in items)
- {
- if (item.DupeType.HasFlag(DupeType.External))
- {
- DatItem newrom = item.Clone() as DatItem;
- newrom.Machine.Name += $" ({Path.GetFileNameWithoutExtension(inputs[item.Source.Index].CurrentPath)})";
-
- dupeData.Items.Add(key, newrom);
- }
- }
- });
-
- watch.Stop();
-
- return dupeData;
- }
-
- /// <summary>
- /// Output non-cascading diffs
- /// </summary>
- /// <param name="datFile">Current DatFile object to use for updating</param>
- /// <param name="inputs">List of inputs to write out from</param>
- public static List<DatFile> DiffIndividuals(DatFile datFile, List<string> inputs)
- {
- List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
- return DiffIndividuals(datFile, paths);
- }
-
- /// <summary>
- /// Output non-cascading diffs
- /// </summary>
- /// <param name="datFile">Current DatFile object to use for updating</param>
- /// <param name="inputs">List of inputs to write out from</param>
- public static List<DatFile> DiffIndividuals(DatFile datFile, List<ParentablePath> inputs)
- {
- InternalStopwatch watch = new InternalStopwatch("Initializing all individual DATs");
-
- // Fill in any information not in the base DAT
- if (string.IsNullOrWhiteSpace(datFile.Header.FileName))
- datFile.Header.FileName = "All DATs";
-
- if (string.IsNullOrWhiteSpace(datFile.Header.Name))
- datFile.Header.Name = "All DATs";
-
- if (string.IsNullOrWhiteSpace(datFile.Header.Description))
- datFile.Header.Description = "All DATs";
-
- // Loop through each of the inputs and get or create a new DatData object
- DatFile[] outDatsArray = new DatFile[inputs.Count];
-
- Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
- {
- string innerpost = $" ({j} - {inputs[j].GetNormalizedFileName(true)} Only)";
- DatFile diffData = DatFile.Create(datFile.Header);
- diffData.Header.FileName += innerpost;
- diffData.Header.Name += innerpost;
- diffData.Header.Description += innerpost;
- diffData.Items = new ItemDictionary();
- outDatsArray[j] = diffData;
- });
-
- // Create a list of DatData objects representing individual output files
- List<DatFile> outDats = outDatsArray.ToList();
-
- watch.Stop();
-
- // Now, loop through the dictionary and populate the correct DATs
- watch.Start("Populating all individual DATs");
-
- Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
- {
- List<DatItem> items = DatItem.Merge(datFile.Items[key]);
-
- // If the rom list is empty or null, just skip it
- if (items == null || items.Count == 0)
- return;
-
- // Loop through and add the items correctly
- foreach (DatItem item in items)
- {
- if (item.DupeType.HasFlag(DupeType.Internal) || item.DupeType == 0x00)
- outDats[item.Source.Index].Items.Add(key, item);
- }
- });
-
- watch.Stop();
-
- return outDats.ToList();
- }
-
- /// <summary>
- /// Output non-duplicate item diff
- /// </summary>
- /// <param name="datFile">Current DatFile object to use for updating</param>
- /// <param name="inputs">List of inputs to write out from</param>
- public static DatFile DiffNoDuplicates(DatFile datFile, List<string> inputs)
- {
- List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
- return DiffNoDuplicates(datFile, paths);
- }
-
- /// <summary>
- /// Output non-duplicate item diff
- /// </summary>
- /// <param name="datFile">Current DatFile object to use for updating</param>
- /// <param name="inputs">List of inputs to write out from</param>
- public static DatFile DiffNoDuplicates(DatFile datFile, List<ParentablePath> inputs)
- {
- InternalStopwatch watch = new InternalStopwatch("Initializing no duplicate DAT");
-
- // Fill in any information not in the base DAT
- if (string.IsNullOrWhiteSpace(datFile.Header.FileName))
- datFile.Header.FileName = "All DATs";
-
- if (string.IsNullOrWhiteSpace(datFile.Header.Name))
- datFile.Header.Name = "All DATs";
-
- if (string.IsNullOrWhiteSpace(datFile.Header.Description))
- datFile.Header.Description = "All DATs";
-
- string post = " (No Duplicates)";
- DatFile outerDiffData = DatFile.Create(datFile.Header);
- outerDiffData.Header.FileName += post;
- outerDiffData.Header.Name += post;
- outerDiffData.Header.Description += post;
- outerDiffData.Items = new ItemDictionary();
-
- watch.Stop();
-
- // Now, loop through the dictionary and populate the correct DATs
- watch.Start("Populating no duplicate DAT");
-
- Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
- {
- List<DatItem> items = DatItem.Merge(datFile.Items[key]);
-
- // If the rom list is empty or null, just skip it
- if (items == null || items.Count == 0)
- return;
-
- // Loop through and add the items correctly
- foreach (DatItem item in items)
- {
- if (item.DupeType.HasFlag(DupeType.Internal) || item.DupeType == 0x00)
- {
- DatItem newrom = item.Clone() as DatItem;
- newrom.Machine.Name += $" ({Path.GetFileNameWithoutExtension(inputs[item.Source.Index].CurrentPath)})";
- outerDiffData.Items.Add(key, newrom);
- }
- }
- });
-
- watch.Stop();
-
- return outerDiffData;
- }
-
- /// <summary>
- /// Fill a DatFile with all items with a particular source index ID
- /// </summary>
- /// <param name="datFile">Current DatFile object to use for updating</param>
- /// <param name="indexDat">DatFile to add found items to</param>
- /// <param name="index">Source index ID to retrieve items for</param>
- /// <returns>DatFile containing all items with the source index ID</returns>
- public static void FillWithSourceIndex(DatFile datFile, DatFile indexDat, int index)
- {
- // Loop through and add the items for this index to the output
- Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
- {
- List<DatItem> items = DatItem.Merge(datFile.Items[key]);
-
- // If the rom list is empty or null, just skip it
- if (items == null || items.Count == 0)
- return;
-
- foreach (DatItem item in items)
- {
- if (item.Source.Index == index)
- indexDat.Items.Add(key, item);
- }
- });
- }
-
- /// <summary>
- /// Populate from multiple paths while returning the individual headers
- /// </summary>
- /// <param name="datFile">Current DatFile object to use for updating</param>
- /// <param name="inputs">Paths to DATs to parse</param>
- /// <returns>List of DatHeader objects representing headers</returns>
- public static List<DatHeader> PopulateUserData(DatFile datFile, List<string> inputs)
- {
- List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
- return PopulateUserData(datFile, paths);
- }
-
- /// <summary>
- /// Populate from multiple paths while returning the individual headers
- /// </summary>
- /// <param name="datFile">Current DatFile object to use for updating</param>
- /// <param name="inputs">Paths to DATs to parse</param>
- /// <returns>List of DatHeader objects representing headers</returns>
- public static List<DatHeader> PopulateUserData(DatFile datFile, List<ParentablePath> inputs)
- {
- DatFile[] datFiles = new DatFile[inputs.Count];
- InternalStopwatch watch = new InternalStopwatch("Processing individual DATs");
-
- // Parse all of the DATs into their own DatFiles in the array
- Parallel.For(0, inputs.Count, Globals.ParallelOptions, i =>
- {
- var input = inputs[i];
- logger.User($"Adding DAT: {input.CurrentPath}");
- datFiles[i] = DatFile.Create(datFile.Header.CloneFiltering());
- Parser.ParseInto(datFiles[i], input, i, keep: true);
- });
-
- watch.Stop();
-
- watch.Start("Populating internal DAT");
- for (int i = 0; i < inputs.Count; i++)
- {
- AddFromExisting(datFile, datFiles[i], true);
- }
-
- watch.Stop();
-
- return datFiles.Select(d => d.Header).ToList();
- }
-
- /// <summary>
- /// Add items from another DatFile to the existing DatFile
- /// </summary>
- /// <param name="addTo">DatFile to add to</param>
- /// <param name="addFrom">DatFile to add from</param>
- /// <param name="delete">If items should be deleted from the source DatFile</param>
- private static void AddFromExisting(DatFile addTo, DatFile addFrom, bool delete = false)
- {
- // Get the list of keys from the DAT
- var keys = addFrom.Items.Keys.ToList();
- foreach (string key in keys)
- {
- // Add everything from the key to the internal DAT
- addTo.Items.AddRange(key, addFrom.Items[key]);
-
- // Now remove the key from the source DAT
- if (delete)
- addFrom.Items.Remove(key);
- }
-
- // Now remove the file dictionary from the source DAT
- if (delete)
- addFrom.Items = null;
- }
- }
-}
\ No newline at end of file
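
A minimal caller sketch for the relocated `BaseReplace` follows. The file paths, the parameterless `DatFile.Create()` overload, and the particular `Field` values chosen are illustrative assumptions rather than anything this diff establishes; `Parser.ParseInto` is used with the same signature that appears in `PopulateUserData` below.

```csharp
using System.Collections.Generic;
using SabreTools.Core;      // Field
using SabreTools.DatFiles;  // DatFile, DatTool, Parser
using SabreTools.IO;        // ParentablePath

class BaseReplaceExample
{
    static void Main()
    {
        // Parse the base DAT and the DAT to update (paths are placeholders).
        DatFile baseDat = DatFile.Create();      // assumes a parameterless Create() overload
        Parser.ParseInto(baseDat, new ParentablePath("base.dat"), 0, keep: true);

        DatFile targetDat = DatFile.Create();
        Parser.ParseInto(targetDat, new ParentablePath("target.dat"), 1, keep: true);

        // Pull CRC and machine-name fields from the base set into the target;
        // onlySame: true restricts description replacement to matching game names.
        // Treating these two Field values as updatable is an assumption for illustration.
        var updateFields = new List<Field> { Field.DatItem_CRC, Field.Machine_Name };
        DatTool.BaseReplace(baseDat, targetDat, updateFields, onlySame: true);
    }
}
```
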
diff --git a/SabreTools.DatFiles/DatTool.cs b/SabreTools.DatFiles/DatTool.cs
index c3b15330..e4d0ac30 100644
--- a/SabreTools.DatFiles/DatTool.cs
+++ b/SabreTools.DatFiles/DatTool.cs
@@ -1,13 +1,18 @@
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Threading.Tasks;
+
+using SabreTools.Core;
+using SabreTools.DatItems;
+using SabreTools.IO;
using SabreTools.Logging;
// TODO: Should each of the individual pieces of partial classes be their own classes?
-// TODO: Is a single, consistent internal state needed for this class at all?
namespace SabreTools.DatFiles
{
- /// <summary>
- /// Represents a format-agnostic DAT
- /// </summary>
- public partial class DatTool
+ // This file represents all methods related to converting and updating DatFiles
+ public class DatTool
{
#region Logging
@@ -17,5 +22,483 @@ namespace SabreTools.DatFiles
private static readonly Logger logger = new Logger();
#endregion
+
+ /// <summary>
+ /// Replace item values from the base set represented by the current DAT
+ /// </summary>
+ /// <param name="datFile">Current DatFile object to use for updating</param>
+ /// <param name="intDat">DatFile to replace the values in</param>
+ /// <param name="updateFields">List of Fields representing what should be updated</param>
+ /// <param name="onlySame">True if descriptions should only be replaced if the game name is the same, false otherwise</param>
+ public static void BaseReplace(DatFile datFile, DatFile intDat, List<Field> updateFields, bool onlySame)
+ {
+ logger.User($"Replacing items in '{intDat.Header.FileName}' from the base DAT");
+
+ // If we are matching based on DatItem fields of any sort
+ if (updateFields.Intersect(DatItem.DatItemFields).Any())
+ {
+ // For comparison's sake, we want to use CRC as the base bucketing
+ datFile.Items.BucketBy(Field.DatItem_CRC, DedupeType.Full);
+ intDat.Items.BucketBy(Field.DatItem_CRC, DedupeType.None);
+
+ // Then we do a hashwise comparison against the base DAT
+ Parallel.ForEach(intDat.Items.Keys, Globals.ParallelOptions, key =>
+ {
+ List<DatItem> datItems = intDat.Items[key];
+ List<DatItem> newDatItems = new List<DatItem>();
+ foreach (DatItem datItem in datItems)
+ {
+ List<DatItem> dupes = datFile.Items.GetDuplicates(datItem, sorted: true);
+ DatItem newDatItem = datItem.Clone() as DatItem;
+
+ // Replace fields from the first duplicate, if we have one
+ if (dupes.Count > 0)
+ newDatItem.ReplaceFields(dupes.First(), updateFields);
+
+ newDatItems.Add(newDatItem);
+ }
+
+ // Now add the new list to the key
+ intDat.Items.Remove(key);
+ intDat.Items.AddRange(key, newDatItems);
+ });
+ }
+
+ // If we are matching based on Machine fields of any sort
+ if (updateFields.Intersect(DatItem.MachineFields).Any())
+ {
+ // For comparison's sake, we want to use Machine Name as the base bucketing
+ datFile.Items.BucketBy(Field.Machine_Name, DedupeType.Full);
+ intDat.Items.BucketBy(Field.Machine_Name, DedupeType.None);
+
+ // Then we do a namewise comparison against the base DAT
+ Parallel.ForEach(intDat.Items.Keys, Globals.ParallelOptions, key =>
+ {
+ List<DatItem> datItems = intDat.Items[key];
+ List<DatItem> newDatItems = new List<DatItem>();
+ foreach (DatItem datItem in datItems)
+ {
+ DatItem newDatItem = datItem.Clone() as DatItem;
+ if (datFile.Items.ContainsKey(key) && datFile.Items[key].Count() > 0)
+ newDatItem.Machine.ReplaceFields(datFile.Items[key][0].Machine, updateFields, onlySame);
+
+ newDatItems.Add(newDatItem);
+ }
+
+ // Now add the new list to the key
+ intDat.Items.Remove(key);
+ intDat.Items.AddRange(key, newDatItems);
+ });
+ }
+ }
+
+ /// <summary>
+ /// Output diffs against a base set represented by the current DAT
+ /// </summary>
+ /// <param name="datFile">Current DatFile object to use for updating</param>
+ /// <param name="intDat">DatFile to diff against the base DAT</param>
+ /// <param name="useGames">True to diff using games, false to use hashes</param>
+ public static void DiffAgainst(DatFile datFile, DatFile intDat, bool useGames)
+ {
+ // For comparison's sake, we want to use a base ordering
+ if (useGames)
+ datFile.Items.BucketBy(Field.Machine_Name, DedupeType.None);
+ else
+ datFile.Items.BucketBy(Field.DatItem_CRC, DedupeType.None);
+
+ logger.User($"Comparing '{intDat.Header.FileName}' to base DAT");
+
+ // For comparison's sake, we want to use the base bucketing
+ if (useGames)
+ intDat.Items.BucketBy(Field.Machine_Name, DedupeType.None);
+ else
+ intDat.Items.BucketBy(Field.DatItem_CRC, DedupeType.Full);
+
+ // Then we compare against the base DAT
+ List<string> keys = intDat.Items.Keys.ToList();
+ Parallel.ForEach(keys, Globals.ParallelOptions, key =>
+ {
+ // Game Against uses game names
+ if (useGames)
+ {
+ // If the base DAT doesn't contain the key, keep it
+ if (!datFile.Items.ContainsKey(key))
+ return;
+
+ // If the number of items is different, then keep it
+ if (datFile.Items[key].Count != intDat.Items[key].Count)
+ return;
+
+ // Otherwise, compare by name and hash the remaining files
+ bool exactMatch = true;
+ foreach (DatItem item in intDat.Items[key])
+ {
+ // TODO: Make this granular to name as well
+ if (!datFile.Items[key].Contains(item))
+ {
+ exactMatch = false;
+ break;
+ }
+ }
+
+ // If we have an exact match, remove the game
+ if (exactMatch)
+ intDat.Items.Remove(key);
+ }
+
+ // Standard Against uses hashes
+ else
+ {
+ List<DatItem> datItems = intDat.Items[key];
+ List<DatItem> keepDatItems = new List<DatItem>();
+ foreach (DatItem datItem in datItems)
+ {
+ if (!datFile.Items.HasDuplicates(datItem, true))
+ keepDatItems.Add(datItem);
+ }
+
+ // Now add the new list to the key
+ intDat.Items.Remove(key);
+ intDat.Items.AddRange(key, keepDatItems);
+ }
+ });
+ }
+
+ /// <summary>
+ /// Output cascading diffs
+ /// </summary>
+ /// <param name="datFile">Current DatFile object to use for updating</param>
+ /// <param name="datHeaders">Dat headers used optionally</param>
+ /// <returns>List of DatFiles representing the individually indexed items</returns>
+ public static List<DatFile> DiffCascade(DatFile datFile, List<DatHeader> datHeaders)
+ {
+ // Create a list of DatData objects representing output files
+ List<DatFile> outDats = new List<DatFile>();
+
+ // Ensure the current DatFile is sorted optimally
+ datFile.Items.BucketBy(Field.DatItem_CRC, DedupeType.None);
+
+ // Loop through each of the inputs and get or create a new DatData object
+ InternalStopwatch watch = new InternalStopwatch("Initializing and filling all output DATs");
+
+ // Create the DatFiles from the set of headers
+ DatFile[] outDatsArray = new DatFile[datHeaders.Count];
+ Parallel.For(0, datHeaders.Count, Globals.ParallelOptions, j =>
+ {
+ DatFile diffData = DatFile.Create(datHeaders[j]);
+ diffData.Items = new ItemDictionary();
+ FillWithSourceIndex(datFile, diffData, j);
+ outDatsArray[j] = diffData;
+ });
+
+ outDats = outDatsArray.ToList();
+ watch.Stop();
+
+ return outDats;
+ }
+
+ /// <summary>
+ /// Output duplicate item diff
+ /// </summary>
+ /// <param name="datFile">Current DatFile object to use for updating</param>
+ /// <param name="inputs">List of inputs to write out from</param>
+ public static DatFile DiffDuplicates(DatFile datFile, List<string> inputs)
+ {
+ List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
+ return DiffDuplicates(datFile, paths);
+ }
+
+ /// <summary>
+ /// Output duplicate item diff
+ /// </summary>
+ /// <param name="datFile">Current DatFile object to use for updating</param>
+ /// <param name="inputs">List of inputs to write out from</param>
+ public static DatFile DiffDuplicates(DatFile datFile, List<ParentablePath> inputs)
+ {
+ InternalStopwatch watch = new InternalStopwatch("Initializing duplicate DAT");
+
+ // Fill in any information not in the base DAT
+ if (string.IsNullOrWhiteSpace(datFile.Header.FileName))
+ datFile.Header.FileName = "All DATs";
+
+ if (string.IsNullOrWhiteSpace(datFile.Header.Name))
+ datFile.Header.Name = "datFile.All DATs";
+
+ if (string.IsNullOrWhiteSpace(datFile.Header.Description))
+ datFile.Header.Description = "datFile.All DATs";
+
+ string post = " (Duplicates)";
+ DatFile dupeData = DatFile.Create(datFile.Header);
+ dupeData.Header.FileName += post;
+ dupeData.Header.Name += post;
+ dupeData.Header.Description += post;
+ dupeData.Items = new ItemDictionary();
+
+ watch.Stop();
+
+ // Now, loop through the dictionary and populate the correct DATs
+ watch.Start("Populating duplicate DAT");
+
+ Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
+ {
+ List<DatItem> items = DatItem.Merge(datFile.Items[key]);
+
+ // If the rom list is empty or null, just skip it
+ if (items == null || items.Count == 0)
+ return;
+
+ // Loop through and add the items correctly
+ foreach (DatItem item in items)
+ {
+ if (item.DupeType.HasFlag(DupeType.External))
+ {
+ DatItem newrom = item.Clone() as DatItem;
+ newrom.Machine.Name += $" ({Path.GetFileNameWithoutExtension(inputs[item.Source.Index].CurrentPath)})";
+
+ dupeData.Items.Add(key, newrom);
+ }
+ }
+ });
+
+ watch.Stop();
+
+ return dupeData;
+ }
+
+ /// <summary>
+ /// Output non-cascading diffs
+ /// </summary>
+ /// <param name="datFile">Current DatFile object to use for updating</param>
+ /// <param name="inputs">List of inputs to write out from</param>
+ public static List<DatFile> DiffIndividuals(DatFile datFile, List<string> inputs)
+ {
+ List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
+ return DiffIndividuals(datFile, paths);
+ }
+
+ /// <summary>
+ /// Output non-cascading diffs
+ /// </summary>
+ /// <param name="datFile">Current DatFile object to use for updating</param>
+ /// <param name="inputs">List of inputs to write out from</param>
+ public static List<DatFile> DiffIndividuals(DatFile datFile, List<ParentablePath> inputs)
+ {
+ InternalStopwatch watch = new InternalStopwatch("Initializing all individual DATs");
+
+ // Fill in any information not in the base DAT
+ if (string.IsNullOrWhiteSpace(datFile.Header.FileName))
+ datFile.Header.FileName = "All DATs";
+
+ if (string.IsNullOrWhiteSpace(datFile.Header.Name))
+ datFile.Header.Name = "All DATs";
+
+ if (string.IsNullOrWhiteSpace(datFile.Header.Description))
+ datFile.Header.Description = "All DATs";
+
+ // Loop through each of the inputs and get or create a new DatData object
+ DatFile[] outDatsArray = new DatFile[inputs.Count];
+
+ Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
+ {
+ string innerpost = $" ({j} - {inputs[j].GetNormalizedFileName(true)} Only)";
+ DatFile diffData = DatFile.Create(datFile.Header);
+ diffData.Header.FileName += innerpost;
+ diffData.Header.Name += innerpost;
+ diffData.Header.Description += innerpost;
+ diffData.Items = new ItemDictionary();
+ outDatsArray[j] = diffData;
+ });
+
+ // Create a list of DatData objects representing individual output files
+ List<DatFile> outDats = outDatsArray.ToList();
+
+ watch.Stop();
+
+ // Now, loop through the dictionary and populate the correct DATs
+ watch.Start("Populating all individual DATs");
+
+ Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
+ {
+ List<DatItem> items = DatItem.Merge(datFile.Items[key]);
+
+ // If the rom list is empty or null, just skip it
+ if (items == null || items.Count == 0)
+ return;
+
+ // Loop through and add the items correctly
+ foreach (DatItem item in items)
+ {
+ if (item.DupeType.HasFlag(DupeType.Internal) || item.DupeType == 0x00)
+ outDats[item.Source.Index].Items.Add(key, item);
+ }
+ });
+
+ watch.Stop();
+
+ return outDats.ToList();
+ }
+
+ /// <summary>
+ /// Output non-duplicate item diff
+ /// </summary>
+ /// <param name="datFile">Current DatFile object to use for updating</param>
+ /// <param name="inputs">List of inputs to write out from</param>
+ public static DatFile DiffNoDuplicates(DatFile datFile, List<string> inputs)
+ {
+ List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
+ return DiffNoDuplicates(datFile, paths);
+ }
+
+ /// <summary>
+ /// Output non-duplicate item diff
+ /// </summary>
+ /// <param name="datFile">Current DatFile object to use for updating</param>
+ /// <param name="inputs">List of inputs to write out from</param>
+ public static DatFile DiffNoDuplicates(DatFile datFile, List<ParentablePath> inputs)
+ {
+ InternalStopwatch watch = new InternalStopwatch("Initializing no duplicate DAT");
+
+ // Fill in any information not in the base DAT
+ if (string.IsNullOrWhiteSpace(datFile.Header.FileName))
+ datFile.Header.FileName = "All DATs";
+
+ if (string.IsNullOrWhiteSpace(datFile.Header.Name))
+ datFile.Header.Name = "All DATs";
+
+ if (string.IsNullOrWhiteSpace(datFile.Header.Description))
+ datFile.Header.Description = "All DATs";
+
+ string post = " (No Duplicates)";
+ DatFile outerDiffData = DatFile.Create(datFile.Header);
+ outerDiffData.Header.FileName += post;
+ outerDiffData.Header.Name += post;
+ outerDiffData.Header.Description += post;
+ outerDiffData.Items = new ItemDictionary();
+
+ watch.Stop();
+
+ // Now, loop through the dictionary and populate the correct DATs
+ watch.Start("Populating no duplicate DAT");
+
+ Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
+ {
+ List<DatItem> items = DatItem.Merge(datFile.Items[key]);
+
+ // If the rom list is empty or null, just skip it
+ if (items == null || items.Count == 0)
+ return;
+
+ // Loop through and add the items correctly
+ foreach (DatItem item in items)
+ {
+ if (item.DupeType.HasFlag(DupeType.Internal) || item.DupeType == 0x00)
+ {
+ DatItem newrom = item.Clone() as DatItem;
+ newrom.Machine.Name += $" ({Path.GetFileNameWithoutExtension(inputs[item.Source.Index].CurrentPath)})";
+ outerDiffData.Items.Add(key, newrom);
+ }
+ }
+ });
+
+ watch.Stop();
+
+ return outerDiffData;
+ }
+
+ /// <summary>
+ /// Fill a DatFile with all items with a particular source index ID
+ /// </summary>
+ /// <param name="datFile">Current DatFile object to use for updating</param>
+ /// <param name="indexDat">DatFile to add found items to</param>
+ /// <param name="index">Source index ID to retrieve items for</param>
+ /// <returns>DatFile containing all items with the source index ID</returns>
+ public static void FillWithSourceIndex(DatFile datFile, DatFile indexDat, int index)
+ {
+ // Loop through and add the items for this index to the output
+ Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
+ {
+ List<DatItem> items = DatItem.Merge(datFile.Items[key]);
+
+ // If the rom list is empty or null, just skip it
+ if (items == null || items.Count == 0)
+ return;
+
+ foreach (DatItem item in items)
+ {
+ if (item.Source.Index == index)
+ indexDat.Items.Add(key, item);
+ }
+ });
+ }
+
+ /// <summary>
+ /// Populate from multiple paths while returning the individual headers
+ /// </summary>
+ /// <param name="datFile">Current DatFile object to use for updating</param>
+ /// <param name="inputs">Paths to DATs to parse</param>
+ /// <returns>List of DatHeader objects representing headers</returns>
+ public static List<DatHeader> PopulateUserData(DatFile datFile, List<string> inputs)
+ {
+ List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
+ return PopulateUserData(datFile, paths);
+ }
+
+ /// <summary>
+ /// Populate from multiple paths while returning the individual headers
+ /// </summary>
+ /// <param name="datFile">Current DatFile object to use for updating</param>
+ /// <param name="inputs">Paths to DATs to parse</param>
+ /// <returns>List of DatHeader objects representing headers</returns>
+ public static List<DatHeader> PopulateUserData(DatFile datFile, List<ParentablePath> inputs)
+ {
+ DatFile[] datFiles = new DatFile[inputs.Count];
+ InternalStopwatch watch = new InternalStopwatch("Processing individual DATs");
+
+ // Parse all of the DATs into their own DatFiles in the array
+ Parallel.For(0, inputs.Count, Globals.ParallelOptions, i =>
+ {
+ var input = inputs[i];
+ logger.User($"Adding DAT: {input.CurrentPath}");
+ datFiles[i] = DatFile.Create(datFile.Header.CloneFiltering());
+ Parser.ParseInto(datFiles[i], input, i, keep: true);
+ });
+
+ watch.Stop();
+
+ watch.Start("Populating internal DAT");
+ for (int i = 0; i < inputs.Count; i++)
+ {
+ AddFromExisting(datFile, datFiles[i], true);
+ }
+
+ watch.Stop();
+
+ return datFiles.Select(d => d.Header).ToList();
+ }
+
+ /// <summary>
+ /// Add items from another DatFile to the existing DatFile
+ /// </summary>
+ /// <param name="addTo">DatFile to add to</param>
+ /// <param name="addFrom">DatFile to add from</param>
+ /// <param name="delete">If items should be deleted from the source DatFile</param>
+ private static void AddFromExisting(DatFile addTo, DatFile addFrom, bool delete = false)
+ {
+ // Get the list of keys from the DAT
+ var keys = addFrom.Items.Keys.ToList();
+ foreach (string key in keys)
+ {
+ // Add everything from the key to the internal DAT
+ addTo.Items.AddRange(key, addFrom.Items[key]);
+
+ // Now remove the key from the source DAT
+ if (delete)
+ addFrom.Items.Remove(key);
+ }
+
+ // Now remove the file dictionary from the source DAT
+ if (delete)
+ addFrom.Items = null;
+ }
}
}
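
Taken together, these methods support a populate-then-diff flow: `PopulateUserData` merges every input into one `DatFile` while tagging each item with its `Source.Index`, and the `Diff*` methods then split or filter on that index. A hedged sketch under the same assumptions as above (placeholder paths, an assumed parameterless `DatFile.Create()` overload):

```csharp
using System.Collections.Generic;
using SabreTools.DatFiles;  // DatFile, DatHeader, DatTool

class DiffFlowExample
{
    static void Main()
    {
        var inputs = new List<string> { "first.dat", "second.dat" };  // placeholders

        // PopulateUserData parses every input into the merged DatFile and tags
        // each item with its Source.Index (0 for first.dat, 1 for second.dat),
        // returning the per-input headers.
        DatFile merged = DatFile.Create();  // assumed parameterless overload
        List<DatHeader> headers = DatTool.PopulateUserData(merged, inputs);

        // DiffCascade emits one DatFile per header, filled via FillWithSourceIndex
        // with only the items whose Source.Index matches that input.
        List<DatFile> perInput = DatTool.DiffCascade(merged, headers);

        // DiffDuplicates instead keeps only externally-duplicated items, renaming
        // each machine with the input file it came from.
        DatFile shared = DatTool.DiffDuplicates(merged, inputs);
    }
}
```
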